pax_global_header00006660000000000000000000000064147370137650014527gustar00rootroot0000000000000052 comment=2190cd9d1fc047af477d5e6897cc283799f54064 python-beanie-1.29.0/000077500000000000000000000000001473701376500143625ustar00rootroot00000000000000python-beanie-1.29.0/.github/000077500000000000000000000000001473701376500157225ustar00rootroot00000000000000python-beanie-1.29.0/.github/ISSUE_TEMPLATE/000077500000000000000000000000001473701376500201055ustar00rootroot00000000000000python-beanie-1.29.0/.github/ISSUE_TEMPLATE/bug_report.md000066400000000000000000000006721473701376500226040ustar00rootroot00000000000000--- name: Bug report about: Create a report to help us improve title: "[BUG]" labels: '' assignees: '' --- **Describe the bug** A clear and concise description of what the bug is. **To Reproduce** ```python Please add a code snippet here, that reproduces the problem completely ``` **Expected behavior** A clear and concise description of what you expected to happen. **Additional context** Add any other context about the problem here. python-beanie-1.29.0/.github/ISSUE_TEMPLATE/config.yml000066400000000000000000000007461473701376500221040ustar00rootroot00000000000000blank_issues_enabled: true contact_links: - name: Question url: 'https://github.com/roman-right/beanie/discussions/new?category=question' about: Ask a question about how to use Beanie using github discussions - name: Feature Request url: 'https://github.com/roman-right/beanie/discussions/new?category=feature-request' about: > If you think we should add a new feature to Beanie, please start a discussion, once it attracts wider support, it can be migrated to an issuepython-beanie-1.29.0/.github/scripts/000077500000000000000000000000001473701376500174115ustar00rootroot00000000000000python-beanie-1.29.0/.github/scripts/handlers/000077500000000000000000000000001473701376500212115ustar00rootroot00000000000000python-beanie-1.29.0/.github/scripts/handlers/__init__.py000066400000000000000000000000001473701376500233100ustar00rootroot00000000000000python-beanie-1.29.0/.github/scripts/handlers/gh.py000066400000000000000000000054731473701376500221720ustar00rootroot00000000000000import subprocess from dataclasses import dataclass from datetime import datetime from typing import List import requests # type: ignore @dataclass class PullRequest: number: int title: str user: str user_url: str url: str class GitHubHandler: def __init__( self, username: str, repository: str, current_version: str, new_version: str, ): self.username = username self.repository = repository self.base_url = f"https://api.github.com/repos/{username}/{repository}" self.current_version = current_version self.new_version = new_version self.commits = self.get_commits_after_tag(current_version) self.prs = [self.get_pr_for_commit(commit) for commit in self.commits] def get_commits_after_tag(self, tag: str) -> List[str]: result = subprocess.run( ["git", "log", f"{tag}..HEAD", "--pretty=format:%H"], stdout=subprocess.PIPE, text=True, ) return result.stdout.split() def get_pr_for_commit(self, commit_sha: str) -> PullRequest: url = f"{self.base_url}/commits/{commit_sha}/pulls" response = requests.get(url) response.raise_for_status() pr_data = response.json()[0] return PullRequest( number=pr_data["number"], title=pr_data["title"], user=pr_data["user"]["login"], user_url=pr_data["user"]["html_url"], url=pr_data["html_url"], ) def build_markdown_for_many_prs(self) -> str: markdown = f"\n## [{self.new_version}] - {datetime.now().strftime('%Y-%m-%d')}\n" for pr in self.prs: 
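            # One changelog entry per merged PR: title, author profile link, and PR URL.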
markdown += ( f"### {pr.title.capitalize()}\n" f"- Author - [{pr.user}]({pr.user_url})\n" f"- PR <{pr.url}>\n" ) markdown += f"\n[{self.new_version}]: https://pypi.org/project/{self.repository}/{self.new_version}\n" return markdown def commit_changes(self): self.run_git_command( ["git", "config", "--global", "user.name", "github-actions[bot]"] ) self.run_git_command( [ "git", "config", "--global", "user.email", "github-actions[bot]@users.noreply.github.com", ] ) self.run_git_command(["git", "add", "."]) self.run_git_command( ["git", "commit", "-m", f"Bump version to {self.new_version}"] ) self.run_git_command(["git", "tag", self.new_version]) self.git_push() def git_push(self): self.run_git_command(["git", "push", "origin", "main", "--tags"]) @staticmethod def run_git_command(command: List[str]): subprocess.run(command, check=True) python-beanie-1.29.0/.github/scripts/handlers/version.py000066400000000000000000000071611473701376500232550ustar00rootroot00000000000000import subprocess from pathlib import Path import requests # type: ignore import toml from gh import GitHubHandler class SemVer: def __init__(self, version: str): self.version = version self.major, self.minor, self.patch = map(int, self.version.split(".")) def increment_minor(self): return SemVer(f"{self.major}.{self.minor + 1}.0") def __str__(self): return self.version def __eq__(self, other): return self.version == other.version def __gt__(self, other): return ( (self.major > other.major) or (self.major == other.major and self.minor > other.minor) or ( self.major == other.major and self.minor == other.minor and self.patch > other.patch ) ) class VersionHandler: PACKAGE_NAME = "beanie" ROOT_PATH = Path(__file__).parent.parent.parent.parent def __init__(self): self.pyproject = self.ROOT_PATH / "pyproject.toml" self.init_py = self.ROOT_PATH / "beanie" / "__init__.py" self.changelog = self.ROOT_PATH / "docs" / "changelog.md" self.current_version = self.parse_version_from_pyproject( self.pyproject ) self.pypi_version = self.get_version_from_pypi() if self.current_version < self.pypi_version: raise ValueError("Current version is less than pypi version") if self.current_version == self.pypi_version: self.current_version = self.current_version.increment_minor() self.update_files() else: self.flit_publish() @staticmethod def parse_version_from_pyproject(pyproject: Path) -> SemVer: toml_data = toml.loads(pyproject.read_text()) return SemVer(toml_data["project"]["version"]) def get_version_from_pypi(self) -> SemVer: response = requests.get( f"https://pypi.org/pypi/{self.PACKAGE_NAME}/json" ) if response.status_code == 200: return SemVer(response.json()["info"]["version"]) raise ValueError("Can't get version from pypi") def update_files(self): self.update_pyproject_version() self.update_file_versions([self.init_py]) self.update_changelog() def update_pyproject_version(self): pyproject = toml.loads(self.pyproject.read_text()) pyproject["project"]["version"] = str(self.current_version) self.pyproject.write_text(toml.dumps(pyproject)) def update_file_versions(self, files_to_update): for file_path in files_to_update: content = file_path.read_text() content = content.replace( str(self.pypi_version), str(self.current_version) ) file_path.write_text(content) def update_changelog(self): handler = GitHubHandler( "BeanieODM", "beanie", str(self.pypi_version), str(self.current_version), ) changelog_content = handler.build_markdown_for_many_prs() changelog_lines = self.changelog.read_text().splitlines() new_changelog_lines = [] inserted = False for 
line in changelog_lines: new_changelog_lines.append(line) if line.strip() == "# Changelog" and not inserted: new_changelog_lines.append(changelog_content) inserted = True self.changelog.write_text("\n".join(new_changelog_lines)) handler.commit_changes() def flit_publish(self): subprocess.run(["flit", "publish"], check=True) if __name__ == "__main__": VersionHandler() python-beanie-1.29.0/.github/workflows/000077500000000000000000000000001473701376500177575ustar00rootroot00000000000000python-beanie-1.29.0/.github/workflows/close_inactive_issues.yml000066400000000000000000000016101473701376500250620ustar00rootroot00000000000000name: Close inactive issues on: schedule: - cron: "30 1 * * *" jobs: close-issues: runs-on: ubuntu-latest permissions: issues: write pull-requests: write steps: - uses: actions/stale@v5 with: stale-issue-message: 'This issue is stale because it has been open 30 days with no activity.' stale-pr-message: 'This PR is stale because it has been open 45 days with no activity.' close-issue-message: 'This issue was closed because it has been stalled for 14 days with no activity.' close-pr-message: 'This PR was closed because it has been stalled for 14 days with no activity.' exempt-issue-labels: 'bug,feature-request,typing bug,feature request,doc,documentation' days-before-issue-stale: 30 days-before-pr-stale: 45 days-before-issue-close: 14 days-before-pr-close: 14python-beanie-1.29.0/.github/workflows/github-actions-publish-docs.yml000066400000000000000000000005651473701376500260220ustar00rootroot00000000000000name: Publish docs on: push: branches: - main jobs: publish_docs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 - uses: actions/setup-python@v2 with: python-version: 3.10.9 - name: install dependencies run: pip3 install .[doc] - name: publish docs run: bash scripts/publish_docs.shpython-beanie-1.29.0/.github/workflows/github-actions-publish-project.yml000066400000000000000000000005651473701376500265400ustar00rootroot00000000000000name: Publish project on: push: branches: - main jobs: publish_project: runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 - name: install flit run: pip3 install flit - name: publish project env: FLIT_USERNAME: __token__ FLIT_PASSWORD: ${{ secrets.FLIT_PASSWORD }} run: flit publishpython-beanie-1.29.0/.github/workflows/github-actions-tests.yml000066400000000000000000000022571473701376500245700ustar00rootroot00000000000000name: Tests on: pull_request: jobs: pre-commit: runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 - uses: actions/setup-python@v5 with: python-version: 3.12 - uses: pre-commit/action@v3.0.1 run-tests: strategy: fail-fast: false matrix: python-version: [ "3.8", "3.9", "3.10", "3.11", "3.12", "3.13" ] mongodb-version: [ "4.4", "5.0", "6.0", "7.0", "8.0" ] pydantic-version: [ "1.10.18", "2.9.2" , "2.10.4"] runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} cache: pip cache-dependency-path: pyproject.toml - name: Start MongoDB uses: supercharge/mongodb-github-action@1.11.0 with: mongodb-version: ${{ matrix.mongodb-version }} mongodb-replica-set: test-rs - name: install dependencies run: pip install .[test,ci] - name: install pydantic run: pip install pydantic==${{ matrix.pydantic-version }} - name: run tests env: PYTHON_JIT: 1 run: pytest -v python-beanie-1.29.0/.gitignore000066400000000000000000000051141473701376500163530ustar00rootroot00000000000000config.cnf *.pyc *.iml */*.pytest* .rnd ### Python template # 
Byte-compiled / optimized / DLL files __pycache__/ *.py[cod] *$py.class # C extensions *.so # Distribution / packaging .Python build/ develop-eggs/ dist/ downloads/ eggs/ .eggs/ lib/ lib64/ parts/ sdist/ var/ wheels/ *.egg-info/ .installed.cfg *.egg MANIFEST # PyInstaller # Usually these files are written by a python script from a template # before PyInstaller builds the exe, so as to inject date/other infos into it. *.manifest *.spec # Installer logs pip-log.txt pip-delete-this-directory.txt # Unit test / coverage reports htmlcov/ .tox/ .coverage .coverage.* .cache nosetests.xml coverage.xml *.cover .hypothesis/ # Translations *.mo *.pot # Django stuff: *.log .static_storage/ .media/ local_settings.py # Flask stuff: instance/ .webassets-cache # Scrapy stuff: .scrapy # Sphinx documentation docs/_build/ # PyBuilder target/ # Jupyter Notebook .ipynb_checkpoints # pyenv .python-version # celery beat schedule file celerybeat-schedule # SageMath parsed files *.sage.py # Environments .env .venv env/ venv/ ENV/ env.bak/ venv.bak/ # Spyder project settings .spyderproject .spyproject # Rope project settings .ropeproject # mkdocs documentation /site # mypy .mypy_cache/ ### VirtualEnv template # Virtualenv # http://iamzed.com/2009/05/07/a-primer-on-virtualenv/ .Python [Bb]in [Ii]nclude [Ll]ib [Ll]ib64 [Ll]ocal pyvenv.cfg .venv pip-selfcheck.json ### JetBrains template # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and Webstorm # Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839 # User-specific stuff: .idea/**/workspace.xml .idea/**/tasks.xml .idea/dictionaries # Sensitive or high-churn files: .idea/**/dataSources/ .idea/**/dataSources.ids .idea/**/dataSources.xml .idea/**/dataSources.local.xml .idea/**/sqlDataSources.xml .idea/**/dynamic.xml .idea/**/uiDesigner.xml # Gradle: .idea/**/gradle.xml .idea/**/libraries # CMake cmake-build-debug/ cmake-build-release/ # Mongo Explorer plugin: .idea/**/mongoSettings.xml ## File-based project format: *.iws ## Plugin-specific files: # IntelliJ out/ # mpeltonen/sbt-idea plugin .idea_modules/ # JIRA plugin atlassian-ide-plugin.xml # Cursive Clojure plugin .idea/replstate.xml # Crashlytics plugin (for Android Studio and IntelliJ) com_crashlytics_export_strings.xml crashlytics.properties crashlytics-build.properties fabric.properties .idea .pytest_cache docs/api docs/_rst tags tests/assets/tmp src/api_files/storage_dir docker-compose-aws.yml tilt_modules # Poetry stuff poetry.lock .pdm-python python-beanie-1.29.0/.pre-commit-config.yaml000066400000000000000000000005511473701376500206440ustar00rootroot00000000000000repos: - repo: https://github.com/charliermarsh/ruff-pre-commit rev: v0.6.9 hooks: - id: ruff args: [ --fix ] - id: ruff-format - repo: https://github.com/pre-commit/mirrors-mypy rev: v1.11.2 hooks: - id: mypy additional_dependencies: - types-click - types-toml exclude: ^tests/ python-beanie-1.29.0/.pypirc000066400000000000000000000004021473701376500156650ustar00rootroot00000000000000[distutils] index-servers = pypi testpypi [pypi] repository = https://upload.pypi.org/legacy/ username = __token__ password = ${PYPI_TOKEN} [testpypi] repository = https://test.pypi.org/legacy/ username = roman-right password = =$C[wT}^]5EWvX(p#9Popython-beanie-1.29.0/CODE_OF_CONDUCT.md000066400000000000000000000002071473701376500171600ustar00rootroot00000000000000Code of Conduct --------------- Please check [this page in the documentation](https://roman-right.github.io/beanie/code-of-conduct/). 
python-beanie-1.29.0/CONTRIBUTING.md000066400000000000000000000001571473701376500166160ustar00rootroot00000000000000Contributing ------------ Please check [this page in the documentation](https://beanie-odm.dev/development/). python-beanie-1.29.0/LICENSE000066400000000000000000000261171473701376500153760ustar00rootroot00000000000000 Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. 
Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. 
Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. Copyright 2021 Roman Korolev Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
python-beanie-1.29.0/README.md000066400000000000000000000127411473701376500156460ustar00rootroot00000000000000[![Beanie](https://raw.githubusercontent.com/roman-right/beanie/main/assets/logo/white_bg.svg)](https://github.com/roman-right/beanie)

[![shields badge](https://shields.io/badge/-docs-blue)](https://beanie-odm.dev)
[![pypi](https://img.shields.io/pypi/v/beanie.svg)](https://pypi.python.org/pypi/beanie)

## 📢 Important Update 📢

We are excited to announce that Beanie is transitioning from solo development to a team-based approach! This move will help us enhance the project with new features and more collaborative development.

At this moment we are establishing a board of members that will decide all the future steps of the project. We are looking for contributors and maintainers to join the board.

### Join Us

If you are interested in contributing or want to stay updated, please join our Discord channel. We're looking forward to your ideas and contributions!

[Join our Discord](https://discord.gg/AwwTrbCASP)

Let’s make Beanie better, together!

## Overview

[Beanie](https://github.com/roman-right/beanie) is an asynchronous Python object-document mapper (ODM) for MongoDB. Data models are based on [Pydantic](https://pydantic-docs.helpmanual.io/).

When using Beanie, each database collection has a corresponding `Document` that is used to interact with that collection. In addition to retrieving data, Beanie allows you to add, update, or delete documents from the collection as well.

Beanie saves you time by removing boilerplate code, and it helps you focus on the parts of your app that actually matter.

Data and schema migrations are supported by Beanie out of the box.

There is also a synchronous version of Beanie ODM: [Bunnet](https://github.com/roman-right/bunnet)

## Installation

### PIP

```shell
pip install beanie
```

### Poetry

```shell
poetry add beanie
```

For more installation options (e.g. `aws`, `gcp`, `srv` ...)
you can look in the [getting started](./docs/getting-started.md#optional-dependencies) ## Example ```python import asyncio from typing import Optional from motor.motor_asyncio import AsyncIOMotorClient from pydantic import BaseModel from beanie import Document, Indexed, init_beanie class Category(BaseModel): name: str description: str class Product(Document): name: str # You can use normal types just like in pydantic description: Optional[str] = None price: Indexed(float) # You can also specify that a field should correspond to an index category: Category # You can include pydantic models as well # This is an asynchronous example, so we will access it from an async function async def example(): # Beanie uses Motor async client under the hood client = AsyncIOMotorClient("mongodb://user:pass@host:27017") # Initialize beanie with the Product document class await init_beanie(database=client.db_name, document_models=[Product]) chocolate = Category(name="Chocolate", description="A preparation of roasted and ground cacao seeds.") # Beanie documents work just like pydantic models tonybar = Product(name="Tony's", price=5.95, category=chocolate) # And can be inserted into the database await tonybar.insert() # You can find documents with pythonic syntax product = await Product.find_one(Product.price < 10) # And update them await product.set({Product.name:"Gold bar"}) if __name__ == "__main__": asyncio.run(example()) ``` ## Links ### Documentation - **[Doc](https://beanie-odm.dev/)** - Tutorial, API documentation, and development guidelines. ### Example Projects - **[fastapi-cosmos-beanie](https://github.com/tonybaloney/ants-azure-demos/tree/master/fastapi-cosmos-beanie)** - FastAPI + Beanie ODM + Azure Cosmos Demo Application by [Anthony Shaw](https://github.com/tonybaloney) - **[fastapi-beanie-jwt](https://github.com/flyinactor91/fastapi-beanie-jwt)** - Sample FastAPI server with JWT auth and Beanie ODM by [Michael duPont](https://github.com/flyinactor91) - **[Shortify](https://github.com/IHosseini083/Shortify)** - URL shortener RESTful API (FastAPI + Beanie ODM + JWT & OAuth2) by [ Iliya Hosseini](https://github.com/IHosseini083) - **[LCCN Predictor](https://github.com/baoliay2008/lccn_predictor)** - Leetcode contest rating predictor (FastAPI + Beanie ODM + React) by [L. Bao](https://github.com/baoliay2008) ### Articles - **[Announcing Beanie - MongoDB ODM](https://dev.to/romanright/announcing-beanie-mongodb-odm-56e)** - **[Build a Cocktail API with Beanie and MongoDB](https://developer.mongodb.com/article/beanie-odm-fastapi-cocktails/)** - **[MongoDB indexes with Beanie](https://dev.to/romanright/mongodb-indexes-with-beanie-43e8)** - **[Beanie Projections. 
Reducing network and database load.](https://dev.to/romanright/beanie-projections-reducing-network-and-database-load-3bih)** - **[Beanie 1.0 - Query Builder](https://dev.to/romanright/announcing-beanie-1-0-mongodb-odm-with-query-builder-4mbl)** - **[Beanie 1.8 - Relations, Cache, Actions and more!](https://dev.to/romanright/announcing-beanie-odm-18-relations-cache-actions-and-more-24ef)** ### Resources - **[GitHub](https://github.com/roman-right/beanie)** - GitHub page of the project - **[Changelog](https://beanie-odm.dev/changelog)** - list of all the valuable changes - **[Discord](https://discord.gg/AwwTrbCASP)** - ask your questions, share ideas or just say `Hello!!` ---- Supported by [JetBrains](https://jb.gg/OpenSource) [![JetBrains](https://raw.githubusercontent.com/roman-right/beanie/main/assets/logo/jetbrains.svg)](https://jb.gg/OpenSource) python-beanie-1.29.0/assets/000077500000000000000000000000001473701376500156645ustar00rootroot00000000000000python-beanie-1.29.0/assets/logo/000077500000000000000000000000001473701376500166245ustar00rootroot00000000000000python-beanie-1.29.0/assets/logo/jetbrains.svg000066400000000000000000000114161473701376500213310ustar00rootroot00000000000000 python-beanie-1.29.0/assets/logo/logo.svg000066400000000000000000000056431473701376500203150ustar00rootroot00000000000000 python-beanie-1.29.0/assets/logo/white_bg.svg000066400000000000000000000343561473701376500211500ustar00rootroot00000000000000 image/svg+xml python-beanie-1.29.0/assets/logo/with_text.svg000066400000000000000000000132321473701376500213650ustar00rootroot00000000000000 python-beanie-1.29.0/beanie/000077500000000000000000000000001473701376500156055ustar00rootroot00000000000000python-beanie-1.29.0/beanie/__init__.py000066400000000000000000000033751473701376500177260ustar00rootroot00000000000000from beanie.migrations.controllers.free_fall import free_fall_migration from beanie.migrations.controllers.iterative import iterative_migration from beanie.odm.actions import ( After, Before, Delete, Insert, Replace, Save, SaveChanges, Update, ValidateOnSave, after_event, before_event, ) from beanie.odm.bulk import BulkWriter from beanie.odm.custom_types import DecimalAnnotation from beanie.odm.custom_types.bson.binary import BsonBinary from beanie.odm.documents import ( Document, DocumentWithSoftDelete, MergeStrategy, ) from beanie.odm.enums import SortDirection from beanie.odm.fields import ( BackLink, BeanieObjectId, DeleteRules, Indexed, Link, PydanticObjectId, WriteRules, ) from beanie.odm.queries.update import UpdateResponse from beanie.odm.settings.timeseries import Granularity, TimeSeriesConfig from beanie.odm.union_doc import UnionDoc from beanie.odm.utils.init import init_beanie from beanie.odm.views import View __version__ = "1.29.0" __all__ = [ # ODM "Document", "DocumentWithSoftDelete", "View", "UnionDoc", "init_beanie", "PydanticObjectId", "BeanieObjectId", "Indexed", "TimeSeriesConfig", "Granularity", "SortDirection", "MergeStrategy", # Actions "before_event", "after_event", "Insert", "Replace", "Save", "SaveChanges", "ValidateOnSave", "Delete", "Before", "After", "Update", # Bulk Write "BulkWriter", # Migrations "iterative_migration", "free_fall_migration", # Relations "Link", "BackLink", "WriteRules", "DeleteRules", # Custom Types "DecimalAnnotation", "BsonBinary", # UpdateResponse "UpdateResponse", ] python-beanie-1.29.0/beanie/exceptions.py000066400000000000000000000016551473701376500203470ustar00rootroot00000000000000class WrongDocumentUpdateStrategy(Exception): pass class 
DocumentNotFound(Exception): pass class DocumentAlreadyCreated(Exception): pass class DocumentWasNotSaved(Exception): pass class CollectionWasNotInitialized(Exception): pass class MigrationException(Exception): pass class ReplaceError(Exception): pass class StateManagementIsTurnedOff(Exception): pass class StateNotSaved(Exception): pass class RevisionIdWasChanged(Exception): pass class NotSupported(Exception): pass class MongoDBVersionError(Exception): pass class ViewWasNotInitialized(Exception): pass class ViewHasNoSettings(Exception): pass class UnionHasNoRegisteredDocs(Exception): pass class UnionDocNotInited(Exception): pass class DocWasNotRegisteredInUnionClass(Exception): pass class Deprecation(Exception): pass class ApplyChangesException(Exception): pass python-beanie-1.29.0/beanie/executors/000077500000000000000000000000001473701376500176265ustar00rootroot00000000000000python-beanie-1.29.0/beanie/executors/__init__.py000066400000000000000000000000001473701376500217250ustar00rootroot00000000000000python-beanie-1.29.0/beanie/executors/migrate.py000066400000000000000000000145021473701376500216320ustar00rootroot00000000000000import asyncio import logging import os import shutil from datetime import datetime from pathlib import Path from typing import Any import click import toml from beanie.migrations import template from beanie.migrations.database import DBHandler from beanie.migrations.models import RunningDirections, RunningMode from beanie.migrations.runner import MigrationNode logging.basicConfig(format="%(message)s", level=logging.INFO) class MigrationSettings: def __init__(self, **kwargs: Any): self.direction = ( kwargs.get("direction") or self.get_env_value("direction") or self.get_from_toml("direction") or RunningDirections.FORWARD ) self.distance = int( kwargs.get("distance") or self.get_env_value("distance") or self.get_from_toml("distance") or 0 ) self.connection_uri = str( kwargs.get("connection_uri") or self.get_env_value("connection_uri") or self.get_from_toml("connection_uri") ) self.database_name = str( kwargs.get("database_name") or self.get_env_value("database_name") or self.get_from_toml("database_name") ) self.path = Path( kwargs.get("path") or self.get_env_value("path") or self.get_from_toml("path") ) self.allow_index_dropping = bool( kwargs.get("allow_index_dropping") or self.get_env_value("allow_index_dropping") or self.get_from_toml("allow_index_dropping") or False ) self.use_transaction = bool(kwargs.get("use_transaction")) @staticmethod def get_env_value(field_name) -> Any: if field_name == "connection_uri": value = ( os.environ.get("BEANIE_URI") or os.environ.get("BEANIE_CONNECTION_URI") or os.environ.get("BEANIE_CONNECTION_STRING") or os.environ.get("BEANIE_MONGODB_DSN") or os.environ.get("BEANIE_MONGODB_URI") or os.environ.get("beanie_uri") or os.environ.get("beanie_connection_uri") or os.environ.get("beanie_connection_string") or os.environ.get("beanie_mongodb_dsn") or os.environ.get("beanie_mongodb_uri") ) elif field_name == "database_name": value = ( os.environ.get("BEANIE_DB") or os.environ.get("BEANIE_DB_NAME") or os.environ.get("BEANIE_DATABASE_NAME") or os.environ.get("beanie_db") or os.environ.get("beanie_db_name") or os.environ.get("beanie_database_name") ) else: value = os.environ.get( f"BEANIE_{field_name.upper()}" ) or os.environ.get(f"beanie_{field_name.lower()}") return value @staticmethod def get_from_toml(field_name) -> Any: path = Path("pyproject.toml") if path.is_file(): val = ( toml.load(path) .get("tool", {}) .get("beanie", {}) 
.get("migrations", {}) ) else: val = {} return val.get(field_name) @click.group() def migrations(): pass async def run_migrate(settings: MigrationSettings): DBHandler.set_db(settings.connection_uri, settings.database_name) root = await MigrationNode.build(settings.path) mode = RunningMode( direction=settings.direction, distance=settings.distance ) await root.run( mode=mode, allow_index_dropping=settings.allow_index_dropping, use_transaction=settings.use_transaction, ) @migrations.command() @click.option( "--forward", "direction", required=False, flag_value="FORWARD", help="Roll the migrations forward. This is default", ) @click.option( "--backward", "direction", required=False, flag_value="BACKWARD", help="Roll the migrations backward", ) @click.option( "-d", "--distance", required=False, help="How many migrations should be done since the current? " "0 - all the migrations. Default is 0", ) @click.option( "-uri", "--connection-uri", required=False, type=str, help="MongoDB connection URI", ) @click.option( "-db", "--database_name", required=False, type=str, help="DataBase name" ) @click.option( "-p", "--path", required=False, type=str, help="Path to the migrations directory", ) @click.option( "--allow-index-dropping/--forbid-index-dropping", required=False, default=False, help="if allow-index-dropping is set, Beanie will drop indexes from your collection", ) @click.option( "--use-transaction/--no-use-transaction", required=False, default=True, help="Enable or disable the use of transactions during migration. " "When enabled (--use-transaction), Beanie uses transactions for migration, " "which necessitates a replica set. When disabled (--no-use-transaction), " "migrations occur without transactions.", ) def migrate( direction, distance, connection_uri, database_name, path, allow_index_dropping, use_transaction, ): settings_kwargs = {} if direction: settings_kwargs["direction"] = direction if distance: settings_kwargs["distance"] = distance if connection_uri: settings_kwargs["connection_uri"] = connection_uri if database_name: settings_kwargs["database_name"] = database_name if path: settings_kwargs["path"] = path if allow_index_dropping: settings_kwargs["allow_index_dropping"] = allow_index_dropping settings_kwargs["use_transaction"] = use_transaction settings = MigrationSettings(**settings_kwargs) asyncio.run(run_migrate(settings)) @migrations.command() @click.option("-n", "--name", required=True, type=str, help="Migration name") @click.option( "-p", "--path", required=True, type=str, help="Path to the migrations directory", ) def new_migration(name, path): path = Path(path) ts_string = datetime.now().strftime("%Y%m%d%H%M%S") file_name = f"{ts_string}_{name}.py" shutil.copy(template.__file__, path / file_name) if __name__ == "__main__": migrations() python-beanie-1.29.0/beanie/migrations/000077500000000000000000000000001473701376500177615ustar00rootroot00000000000000python-beanie-1.29.0/beanie/migrations/__init__.py000066400000000000000000000000001473701376500220600ustar00rootroot00000000000000python-beanie-1.29.0/beanie/migrations/controllers/000077500000000000000000000000001473701376500223275ustar00rootroot00000000000000python-beanie-1.29.0/beanie/migrations/controllers/__init__.py000066400000000000000000000000001473701376500244260ustar00rootroot00000000000000python-beanie-1.29.0/beanie/migrations/controllers/base.py000066400000000000000000000005701473701376500236150ustar00rootroot00000000000000from abc import ABC, abstractmethod from typing import List, Type from beanie.odm.documents 
import Document class BaseMigrationController(ABC): def __init__(self, function): self.function = function @abstractmethod async def run(self, session): pass @property @abstractmethod def models(self) -> List[Type[Document]]: pass python-beanie-1.29.0/beanie/migrations/controllers/free_fall.py000066400000000000000000000017041473701376500246220ustar00rootroot00000000000000from inspect import signature from typing import Any, List, Type from beanie.migrations.controllers.base import BaseMigrationController from beanie.odm.documents import Document def free_fall_migration(document_models: List[Type[Document]]): class FreeFallMigrationController(BaseMigrationController): def __init__(self, function): self.function = function self.function_signature = signature(function) self.document_models = document_models def __call__(self, *args: Any, **kwargs: Any): pass @property def models(self) -> List[Type[Document]]: return self.document_models async def run(self, session): function_kwargs = {"session": session} if "self" in self.function_signature.parameters: function_kwargs["self"] = None await self.function(**function_kwargs) return FreeFallMigrationController python-beanie-1.29.0/beanie/migrations/controllers/iterative.py000066400000000000000000000115131473701376500246760ustar00rootroot00000000000000import asyncio from inspect import isclass, signature from typing import Any, List, Optional, Type, Union from beanie.migrations.controllers.base import BaseMigrationController from beanie.migrations.utils import update_dict from beanie.odm.documents import Document from beanie.odm.utils.pydantic import IS_PYDANTIC_V2, parse_model class DummyOutput: def __init__(self): super(DummyOutput, self).__setattr__("_internal_structure_dict", {}) def __setattr__(self, key, value): self._internal_structure_dict[key] = value def __getattr__(self, item): try: return self._internal_structure_dict[item] except KeyError: self._internal_structure_dict[item] = DummyOutput() return self._internal_structure_dict[item] def dict(self, to_parse: Optional[Union[dict, "DummyOutput"]] = None): if to_parse is None: to_parse = self input_dict = ( to_parse._internal_structure_dict if isinstance(to_parse, DummyOutput) else to_parse ) result_dict = {} for key, value in input_dict.items(): if isinstance(value, (DummyOutput, dict)): result_dict[key] = self.dict(to_parse=value) else: result_dict[key] = value return result_dict def iterative_migration( document_models: Optional[List[Type[Document]]] = None, batch_size: int = 10000, ): class IterativeMigration(BaseMigrationController): def __init__(self, function): self.function = function self.function_signature = signature(function) input_signature = self.function_signature.parameters.get( "input_document" ) if input_signature is None: raise RuntimeError("input_signature must not be None") self.input_document_model: Type[Document] = ( input_signature.annotation ) output_signature = self.function_signature.parameters.get( "output_document" ) if output_signature is None: raise RuntimeError("output_signature must not be None") self.output_document_model: Type[Document] = ( output_signature.annotation ) if ( not isclass(self.input_document_model) or not issubclass(self.input_document_model, Document) or not isclass(self.output_document_model) or not issubclass(self.output_document_model, Document) ): raise TypeError( "input_document and output_document " "must have annotation of Document subclass" ) self.batch_size = batch_size def __call__(self, *args: Any, **kwargs: Any): pass @property 
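        # Every model returned here is initialized with init_beanie by the
        # migration runner (MigrationNode.run_migrations below) before run() is called.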
def models(self) -> List[Type[Document]]: preset_models = document_models if preset_models is None: preset_models = [] return preset_models + [ self.input_document_model, self.output_document_model, ] async def run(self, session): output_documents = [] all_migration_ops = [] async for input_document in self.input_document_model.find_all( session=session ): output = DummyOutput() function_kwargs = { "input_document": input_document, "output_document": output, } if "self" in self.function_signature.parameters: function_kwargs["self"] = None await self.function(**function_kwargs) output_dict = ( input_document.dict() if not IS_PYDANTIC_V2 else input_document.model_dump() ) update_dict(output_dict, output.dict()) output_document = parse_model( self.output_document_model, output_dict ) output_documents.append(output_document) if len(output_documents) == self.batch_size: all_migration_ops.append( self.output_document_model.replace_many( documents=output_documents, session=session ) ) output_documents = [] if output_documents: all_migration_ops.append( self.output_document_model.replace_many( documents=output_documents, session=session ) ) await asyncio.gather(*all_migration_ops) return IterativeMigration python-beanie-1.29.0/beanie/migrations/database.py000066400000000000000000000006351473701376500221030ustar00rootroot00000000000000import motor.motor_asyncio class DBHandler: @classmethod def set_db(cls, uri, db_name): cls.client = motor.motor_asyncio.AsyncIOMotorClient(uri) cls.database = cls.client[db_name] @classmethod def get_cli(cls): return cls.client if hasattr(cls, "client") else None @classmethod def get_db(cls): return cls.database if hasattr(cls, "database") else None python-beanie-1.29.0/beanie/migrations/models.py000066400000000000000000000012221473701376500216130ustar00rootroot00000000000000from datetime import datetime from enum import Enum from typing import List, Optional from pydantic import Field from pydantic.main import BaseModel from beanie.odm.documents import Document class MigrationLog(Document): ts: datetime = Field(default_factory=datetime.now) name: str is_current: bool class Settings: name = "migrations_log" class RunningDirections(str, Enum): FORWARD = "FORWARD" BACKWARD = "BACKWARD" class RunningMode(BaseModel): direction: RunningDirections distance: int = 0 class ParsedMigrations(BaseModel): path: str names: List[str] current: Optional[MigrationLog] = None python-beanie-1.29.0/beanie/migrations/runner.py000066400000000000000000000207121473701376500216460ustar00rootroot00000000000000import logging import types from importlib.machinery import SourceFileLoader from pathlib import Path from typing import List, Optional, Type from motor.motor_asyncio import AsyncIOMotorClientSession, AsyncIOMotorDatabase from beanie.migrations.controllers.iterative import BaseMigrationController from beanie.migrations.database import DBHandler from beanie.migrations.models import ( MigrationLog, RunningDirections, RunningMode, ) from beanie.odm.documents import Document from beanie.odm.utils.init import init_beanie logger = logging.getLogger(__name__) class MigrationNode: def __init__( self, name: str, forward_class: Optional[Type[Document]] = None, backward_class: Optional[Type[Document]] = None, next_migration: Optional["MigrationNode"] = None, prev_migration: Optional["MigrationNode"] = None, ): """ Node of the migration linked list :param name: name of the migration :param forward_class: Forward class of the migration :param backward_class: Backward class of the migration :param 
next_migration: link to the next migration
        :param prev_migration: link to the previous migration
        """
        self.name = name
        self.forward_class = forward_class
        self.backward_class = backward_class
        self.next_migration = next_migration
        self.prev_migration = prev_migration

    @staticmethod
    async def clean_current_migration():
        await MigrationLog.find(
            {"is_current": True},
        ).update({"$set": {"is_current": False}})

    async def update_current_migration(self):
        """
        Set self as the current migration

        :return: None
        """
        await self.clean_current_migration()
        await MigrationLog(is_current=True, name=self.name).insert()

    async def run(
        self,
        mode: RunningMode,
        allow_index_dropping: bool,
        use_transaction: bool,
    ):
        """
        Migrate

        :param mode: RunningMode
        :param allow_index_dropping: if index dropping is allowed
        :param use_transaction: if the migration should run inside a transaction
        :return: None
        """
        if mode.direction == RunningDirections.FORWARD:
            migration_node = self.next_migration
            if migration_node is None:
                return None
            if mode.distance == 0:
                logger.info("Running migrations forward without limit")
                while True:
                    await migration_node.run_forward(
                        allow_index_dropping=allow_index_dropping,
                        use_transaction=use_transaction,
                    )
                    migration_node = migration_node.next_migration
                    if migration_node is None:
                        break
            else:
                logger.info(f"Running {mode.distance} migrations forward")
                for i in range(mode.distance):
                    await migration_node.run_forward(
                        allow_index_dropping=allow_index_dropping,
                        use_transaction=use_transaction,
                    )
                    migration_node = migration_node.next_migration
                    if migration_node is None:
                        break
        elif mode.direction == RunningDirections.BACKWARD:
            migration_node = self
            if mode.distance == 0:
                logger.info("Running migrations backward without limit")
                while True:
                    await migration_node.run_backward(
                        allow_index_dropping=allow_index_dropping,
                        use_transaction=use_transaction,
                    )
                    migration_node = migration_node.prev_migration
                    if migration_node is None:
                        break
            else:
                logger.info(f"Running {mode.distance} migrations backward")
                for i in range(mode.distance):
                    await migration_node.run_backward(
                        allow_index_dropping=allow_index_dropping,
                        use_transaction=use_transaction,
                    )
                    migration_node = migration_node.prev_migration
                    if migration_node is None:
                        break

    async def run_forward(
        self, allow_index_dropping: bool, use_transaction: bool
    ):
        if self.forward_class is not None:
            await self.run_migration_class(
                self.forward_class,
                allow_index_dropping=allow_index_dropping,
                use_transaction=use_transaction,
            )
        await self.update_current_migration()

    async def run_backward(
        self, allow_index_dropping: bool, use_transaction: bool
    ):
        if self.backward_class is not None:
            await self.run_migration_class(
                self.backward_class,
                allow_index_dropping=allow_index_dropping,
                use_transaction=use_transaction,
            )
        if self.prev_migration is not None:
            await self.prev_migration.update_current_migration()
        else:
            await self.clean_current_migration()

    async def run_migration_class(
        self, cls: Type, allow_index_dropping: bool, use_transaction: bool
    ):
        """
        Run Backward or Forward migration class

        :param cls: the Forward or Backward migration class to run
        :param allow_index_dropping: if index dropping is allowed
        :param use_transaction: if the migration should run inside a transaction
        :return: None
        """
        migrations = [
            getattr(cls, migration)
            for migration in dir(cls)
            if isinstance(getattr(cls, migration), BaseMigrationController)
        ]

        client = DBHandler.get_cli()
        db = DBHandler.get_db()
        if client is None:
            raise RuntimeError("client must not be None")
        async with await client.start_session() as s:
            if use_transaction:
                async with s.start_transaction():
                    await self.run_migrations(
                        migrations, db, allow_index_dropping, s
                    )
            else:
                await self.run_migrations(
                    migrations, db, allow_index_dropping, s
                )

    async def
run_migrations( self, migrations: List[BaseMigrationController], db: AsyncIOMotorDatabase, allow_index_dropping: bool, session: AsyncIOMotorClientSession, ) -> None: for migration in migrations: for model in migration.models: await init_beanie( database=db, document_models=[model], # type: ignore allow_index_dropping=allow_index_dropping, ) # TODO this is slow logger.info( f"Running migration {migration.function.__name__} " f"from module {self.name}" ) await migration.run(session=session) @classmethod async def build(cls, path: Path): """ Build the migrations linked list :param path: Relative path to the migrations directory :return: """ logger.info("Building migration list") names = [] for modulepath in path.glob("*.py"): names.append(modulepath.name) names.sort() db = DBHandler.get_db() await init_beanie( database=db, document_models=[MigrationLog], # type: ignore ) current_migration = await MigrationLog.find_one({"is_current": True}) root_migration_node = cls("root") prev_migration_node = root_migration_node for name in names: loader = SourceFileLoader( (path / name).stem, str((path / name).absolute()) ) module = types.ModuleType(loader.name) loader.exec_module(module) forward_class = getattr(module, "Forward", None) backward_class = getattr(module, "Backward", None) migration_node = cls( name=name, prev_migration=prev_migration_node, forward_class=forward_class, backward_class=backward_class, ) prev_migration_node.next_migration = migration_node prev_migration_node = migration_node if ( current_migration is not None and current_migration.name == name ): root_migration_node = migration_node return root_migration_node python-beanie-1.29.0/beanie/migrations/template.py000066400000000000000000000000511473701376500221420ustar00rootroot00000000000000class Forward: ... class Backward: ... 
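# --- Illustrative example (not part of the package) ---------------------------
# A minimal sketch of a filled-in migration module, as MigrationNode.build()
# above would load it and as the `new_migration` command scaffolds it from this
# template. The file name, the `OldNote`/`NewNote` models, the "notes"
# collection, and the renamed field are all hypothetical; only
# `iterative_migration` and the Forward/Backward class names come from the
# code above.
#
# e.g. migrations/20240101000000_rename_name_to_title.py
from beanie import Document, iterative_migration


class OldNote(Document):
    name: str

    class Settings:
        name = "notes"


class NewNote(Document):
    title: str

    class Settings:
        name = "notes"


class Forward:
    @iterative_migration()
    async def name_to_title(
        self, input_document: OldNote, output_document: NewNote
    ):
        # Copy the old field onto the output; the controller merges this with
        # the rest of the input document and bulk-replaces documents in batches.
        output_document.title = input_document.name


class Backward:
    @iterative_migration()
    async def title_to_name(
        self, input_document: NewNote, output_document: OldNote
    ):
        output_document.name = input_document.title
# ------------------------------------------------------------------------------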
python-beanie-1.29.0/beanie/migrations/utils.py000066400000000000000000000002621473701376500214730ustar00rootroot00000000000000def update_dict(d, u): for k, v in u.items(): if isinstance(v, dict): d[k] = update_dict(d.get(k, {}), v) else: d[k] = v return d python-beanie-1.29.0/beanie/odm/000077500000000000000000000000001473701376500163645ustar00rootroot00000000000000python-beanie-1.29.0/beanie/odm/__init__.py000066400000000000000000000000001473701376500204630ustar00rootroot00000000000000python-beanie-1.29.0/beanie/odm/actions.py000066400000000000000000000155501473701376500204040ustar00rootroot00000000000000import asyncio import inspect from enum import Enum from functools import wraps from typing import ( TYPE_CHECKING, Any, Callable, Dict, List, Optional, Tuple, Type, TypeVar, Union, ) from typing_extensions import ParamSpec if TYPE_CHECKING: from beanie.odm.documents import AsyncDocMethod, DocType, Document P = ParamSpec("P") R = TypeVar("R") class EventTypes(str, Enum): INSERT = "INSERT" REPLACE = "REPLACE" SAVE = "SAVE" SAVE_CHANGES = "SAVE_CHANGES" VALIDATE_ON_SAVE = "VALIDATE_ON_SAVE" DELETE = "DELETE" UPDATE = "UPDATE" Insert = EventTypes.INSERT Replace = EventTypes.REPLACE Save = EventTypes.SAVE SaveChanges = EventTypes.SAVE_CHANGES ValidateOnSave = EventTypes.VALIDATE_ON_SAVE Delete = EventTypes.DELETE Update = EventTypes.UPDATE class ActionDirections(str, Enum): # TODO think about this name BEFORE = "BEFORE" AFTER = "AFTER" Before = ActionDirections.BEFORE After = ActionDirections.AFTER class ActionRegistry: _actions: Dict[ Type["Document"], Dict[EventTypes, Dict[ActionDirections, List[Callable[..., Any]]]], ] = {} @classmethod def clean_actions(cls, document_class: Type["Document"]): if cls._actions.get(document_class) is not None: del cls._actions[document_class] @classmethod def add_action( cls, document_class: Type["Document"], event_types: List[EventTypes], action_direction: ActionDirections, funct: Callable, ): """ Add action to the action registry :param document_class: document class :param event_types: List[EventTypes] :param action_direction: ActionDirections - before or after :param funct: Callable - function """ if cls._actions.get(document_class) is None: cls._actions[document_class] = { action_type: { action_direction: [] for action_direction in ActionDirections } for action_type in EventTypes } for event_type in event_types: cls._actions[document_class][event_type][action_direction].append( funct ) @classmethod def get_action_list( cls, document_class: Type["Document"], event_type: EventTypes, action_direction: ActionDirections, ) -> List[Callable]: """ Get stored action list :param document_class: Type - document class :param event_type: EventTypes - type of needed event :param action_direction: ActionDirections - before or after :return: List[Callable] - list of stored methods """ if document_class not in cls._actions: return [] return cls._actions[document_class][event_type][action_direction] @classmethod async def run_actions( cls, instance: "Document", event_type: EventTypes, action_direction: ActionDirections, exclude: List[Union[ActionDirections, str]], ): """ Run actions :param instance: Document - object of the Document subclass :param event_type: EventTypes - event types :param action_direction: ActionDirections - before or after """ if action_direction in exclude: return document_class = instance.__class__ actions_list = cls.get_action_list( document_class, event_type, action_direction ) coros = [] for action in actions_list: if action.__name__ in 
exclude: continue if inspect.iscoroutinefunction(action): coros.append(action(instance)) elif inspect.isfunction(action): action(instance) await asyncio.gather(*coros) # `Any` because there is arbitrary attribute assignment on this type F = TypeVar("F", bound=Any) def register_action( event_types: Tuple[Union[List[EventTypes], EventTypes], ...], action_direction: ActionDirections, ) -> Callable[[F], F]: """ Decorator. Base registration method. Used inside `before_event` and `after_event` :param event_types: Union[List[EventTypes], EventTypes] - event types :param action_direction: ActionDirections - before or after :return: """ final_event_types = [] for event_type in event_types: if isinstance(event_type, list): final_event_types.extend(event_type) else: final_event_types.append(event_type) def decorator(f: F) -> F: f.has_action = True f.event_types = final_event_types f.action_direction = action_direction return f return decorator def before_event( *args: Union[List[EventTypes], EventTypes], ) -> Callable[[F], F]: """ Decorator. It adds action, which should run before mentioned one or many events happen :param args: Union[List[EventTypes], EventTypes] - event types :return: None """ return register_action( action_direction=ActionDirections.BEFORE, event_types=args ) def after_event( *args: Union[List[EventTypes], EventTypes], ) -> Callable[[F], F]: """ Decorator. It adds action, which should run after mentioned one or many events happen :param args: Union[List[EventTypes], EventTypes] - event types :return: None """ return register_action( action_direction=ActionDirections.AFTER, event_types=args ) def wrap_with_actions( event_type: EventTypes, ) -> Callable[ ["AsyncDocMethod[DocType, P, R]"], "AsyncDocMethod[DocType, P, R]" ]: """ Helper function to wrap Document methods with before and after event listeners :param event_type: EventTypes - event types :return: None """ def decorator( f: "AsyncDocMethod[DocType, P, R]", ) -> "AsyncDocMethod[DocType, P, R]": @wraps(f) async def wrapper( self: "DocType", *args: P.args, skip_actions: Optional[List[Union[ActionDirections, str]]] = None, **kwargs: P.kwargs, ) -> R: if skip_actions is None: skip_actions = [] await ActionRegistry.run_actions( self, event_type=event_type, action_direction=ActionDirections.BEFORE, exclude=skip_actions, ) result = await f( self, *args, skip_actions=skip_actions, # type: ignore[arg-type] **kwargs, ) await ActionRegistry.run_actions( self, event_type=event_type, action_direction=ActionDirections.AFTER, exclude=skip_actions, ) return result return wrapper return decorator python-beanie-1.29.0/beanie/odm/bulk.py000066400000000000000000000142401473701376500176740ustar00rootroot00000000000000from __future__ import annotations from types import TracebackType from typing import TYPE_CHECKING, Any, List, Mapping, Optional, Type, Union from motor.motor_asyncio import AsyncIOMotorClientSession from pymongo import ( DeleteMany, DeleteOne, InsertOne, ReplaceOne, UpdateMany, UpdateOne, ) from pymongo.results import BulkWriteResult if TYPE_CHECKING: from beanie import Document from beanie.odm.union_doc import UnionDoc _WriteOp = Union[ InsertOne[Mapping[str, Any]], DeleteOne, DeleteMany, ReplaceOne[Mapping[str, Any]], UpdateOne, UpdateMany, ] class BulkWriter: """ A utility class for managing and executing bulk operations. This class facilitates the efficient execution of multiple database operations (e.g., inserts, updates, deletes, replacements) in a single batch. 
It supports asynchronous context management and ensures that all queued operations are committed upon exiting the context. Attributes: session Optional[AsyncIOMotorClientSession]: The motor session used for transactional operations. Defaults to None, meaning no session is used. ordered Optional[bool]: Specifies whether operations are executed sequentially (default) or in parallel. - If True, operations are performed serially, stopping at the first failure. - If False, operations may be executed in arbitrary order, and all operations are attempted regardless of individual failures. bypass_document_validation Optional[bool]: If True, document-level validation is bypassed for all operations in the bulk write. This applies to MongoDB's schema validation rules, allowing documents that do not meet validation criteria to be inserted or modified. Defaults to False. comment Optional[Any]: A user-provided comment attached to the bulk operation command, useful for auditing and debugging purposes. operations List[Union[DeleteMany, DeleteOne, InsertOne, ReplaceOne, UpdateMany, UpdateOne]]: A list of MongoDB operations queued for bulk execution. object_class Type[Union[Document, UnionDoc]]: The document model class associated with the operations. Parameters: session Optional[AsyncIOMotorClientSession]: The motor session for transaction support. Defaults to None (no session). ordered Optional[bool]: Specifies whether operations are executed in sequence (True) or in parallel (False). Defaults to True. bypass_document_validation Optional[bool]: Allows the bulk operation to bypass document-level validation. This is particularly useful when working with schemas that are being phased in or for bulk imports where strict validation may not be necessary. Defaults to False. comment Optional[Any]: A custom comment attached to the bulk operation. Defaults to None. object_class Type[Union[Document, UnionDoc]]: The document model class associated with the operations. """ def __init__( self, session: Optional[AsyncIOMotorClientSession] = None, ordered: bool = True, object_class: Optional[Type[Union[Document, UnionDoc]]] = None, bypass_document_validation: bool = False, comment: Optional[Any] = None, ) -> None: self.operations: List[_WriteOp] = [] self.session = session self.ordered = ordered self.object_class = object_class self.bypass_document_validation = bypass_document_validation self.comment = comment self._collection_name: str async def __aenter__(self) -> "BulkWriter": return self async def __aexit__( self, exc_type: Optional[Type[BaseException]], exc: Optional[BaseException], tb: Optional[TracebackType], ) -> None: if exc_type is None: await self.commit() async def commit(self) -> Optional[BulkWriteResult]: """ Commit all queued operations to the database. Executes all queued operations in a single bulk write request. If there are no operations to commit, it returns ``None``. :return: The result of the bulk write operation if operations are committed. Returns ``None`` if there are no operations to execute. :rtype: Optional[BulkWriteResult] :raises ValueError: If the object_class is not specified before committing. """ if not self.operations: return None if not self.object_class: raise ValueError( "The document model class must be specified before committing operations." 
) return await self.object_class.get_motor_collection().bulk_write( self.operations, ordered=self.ordered, bypass_document_validation=self.bypass_document_validation, session=self.session, comment=self.comment, ) def add_operation( self, object_class: Type[Union[Document, UnionDoc]], operation: _WriteOp, ): """ Add an operation to the queue. This method adds a MongoDB operation to the BulkWriter's operation queue. All operations in the queue must belong to the same collection. :param object_class: Type[Union[Document, UnionDoc]] The document model class associated with the operation. :param operation: Union[DeleteMany, DeleteOne, InsertOne, ReplaceOne, UpdateMany, UpdateOne] The MongoDB operation to add to the queue. :raises ValueError: If the collection differs from the one already associated with the BulkWriter. """ if self.object_class is None: self.object_class = object_class self._collection_name = object_class.get_collection_name() else: if object_class.get_collection_name() != self._collection_name: raise ValueError( "All the operations should be for a same collection name" ) self.operations.append(operation) python-beanie-1.29.0/beanie/odm/cache.py000066400000000000000000000024441473701376500200050ustar00rootroot00000000000000import collections import datetime from datetime import timedelta, timezone from typing import Any, Optional from pydantic import BaseModel, Field class CachedItem(BaseModel): timestamp: datetime.datetime = Field( default_factory=lambda: datetime.datetime.now(tz=timezone.utc) ) value: Any class LRUCache: def __init__(self, capacity: int, expiration_time: timedelta): self.capacity: int = capacity self.expiration_time: timedelta = expiration_time self.cache: collections.OrderedDict = collections.OrderedDict() def get(self, key) -> Optional[CachedItem]: try: item: CachedItem = self.cache.pop(key) if ( datetime.datetime.now(tz=timezone.utc) - item.timestamp > self.expiration_time ): return None self.cache[key] = item return item.value except KeyError: return None def set(self, key, value) -> None: try: self.cache.pop(key) except KeyError: if len(self.cache) >= self.capacity: self.cache.popitem(last=False) self.cache[key] = CachedItem(value=value) @staticmethod def create_key(*args): return str(args) # TODO think about this python-beanie-1.29.0/beanie/odm/custom_types/000077500000000000000000000000001473701376500211225ustar00rootroot00000000000000python-beanie-1.29.0/beanie/odm/custom_types/__init__.py000066400000000000000000000003561473701376500232370ustar00rootroot00000000000000from beanie.odm.utils.pydantic import IS_PYDANTIC_V2 if IS_PYDANTIC_V2: from beanie.odm.custom_types.decimal import DecimalAnnotation else: from decimal import Decimal as DecimalAnnotation __all__ = [ "DecimalAnnotation", ] python-beanie-1.29.0/beanie/odm/custom_types/bson/000077500000000000000000000000001473701376500220635ustar00rootroot00000000000000python-beanie-1.29.0/beanie/odm/custom_types/bson/__init__.py000066400000000000000000000000001473701376500241620ustar00rootroot00000000000000python-beanie-1.29.0/beanie/odm/custom_types/bson/binary.py000066400000000000000000000010441473701376500237200ustar00rootroot00000000000000from typing import Any import bson import pydantic from typing_extensions import Annotated from beanie.odm.utils.pydantic import IS_PYDANTIC_V2 def _to_bson_binary(value: Any) -> bson.Binary: return value if isinstance(value, bson.Binary) else bson.Binary(value) if IS_PYDANTIC_V2: BsonBinary = Annotated[ bson.Binary, pydantic.PlainValidator(_to_bson_binary) ] else: 
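# Pydantic v1 has no Annotated-style validators, so the fallback below
# subclasses bson.Binary and registers the coercion via __get_validators__.
# Either branch yields the same behavior: raw bytes assigned to a BsonBinary
# field are wrapped as bson.Binary during validation.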
class BsonBinary(bson.Binary): # type: ignore[no-redef] @classmethod def __get_validators__(cls): yield _to_bson_binary python-beanie-1.29.0/beanie/odm/custom_types/decimal.py000066400000000000000000000003721473701376500230740ustar00rootroot00000000000000import decimal import bson import pydantic from typing_extensions import Annotated DecimalAnnotation = Annotated[ decimal.Decimal, pydantic.BeforeValidator( lambda v: v.to_decimal() if isinstance(v, bson.Decimal128) else v ), ] python-beanie-1.29.0/beanie/odm/custom_types/re.py000066400000000000000000000010671473701376500221060ustar00rootroot00000000000000import re import bson import pydantic from typing_extensions import Annotated from beanie.odm.utils.pydantic import IS_PYDANTIC_V2 def _to_bson_regex(v): return v.try_compile() if isinstance(v, bson.Regex) else v if IS_PYDANTIC_V2: Pattern = Annotated[ re.Pattern, pydantic.BeforeValidator( lambda v: v.try_compile() if isinstance(v, bson.Regex) else v ), ] else: class Pattern(bson.Regex): # type: ignore[no-redef] @classmethod def __get_validators__(cls): yield _to_bson_regex python-beanie-1.29.0/beanie/odm/documents.py000066400000000000000000001363231473701376500207470ustar00rootroot00000000000000import asyncio import warnings from datetime import datetime, timezone from enum import Enum from typing import ( TYPE_CHECKING, Any, Callable, ClassVar, Coroutine, Dict, Iterable, List, Mapping, Optional, Tuple, Type, TypeVar, Union, ) from uuid import UUID, uuid4 from bson import DBRef, ObjectId from lazy_model import LazyModel from motor.motor_asyncio import AsyncIOMotorClientSession from pydantic import ( ConfigDict, Field, PrivateAttr, ValidationError, ) from pydantic.class_validators import root_validator from pydantic.main import BaseModel from pymongo import InsertOne from pymongo.errors import DuplicateKeyError from pymongo.results import ( DeleteResult, InsertManyResult, ) from typing_extensions import Concatenate, ParamSpec, Self, TypeAlias from beanie.exceptions import ( CollectionWasNotInitialized, DocumentNotFound, DocumentWasNotSaved, NotSupported, ReplaceError, RevisionIdWasChanged, ) from beanie.odm.actions import ( ActionDirections, EventTypes, wrap_with_actions, ) from beanie.odm.bulk import BulkWriter from beanie.odm.cache import LRUCache from beanie.odm.enums import SortDirection from beanie.odm.fields import ( BackLink, DeleteRules, ExpressionField, Link, LinkInfo, LinkTypes, PydanticObjectId, WriteRules, ) from beanie.odm.interfaces.aggregate import AggregateInterface from beanie.odm.interfaces.detector import ModelType from beanie.odm.interfaces.find import FindInterface from beanie.odm.interfaces.getters import OtherGettersInterface from beanie.odm.interfaces.inheritance import InheritanceInterface from beanie.odm.interfaces.setters import SettersInterface from beanie.odm.models import ( InspectionError, InspectionResult, InspectionStatuses, ) from beanie.odm.operators.find.comparison import In from beanie.odm.operators.update.general import ( CurrentDate, Inc, SetRevisionId, Unset, ) from beanie.odm.operators.update.general import ( Set as SetOperator, ) from beanie.odm.queries.find import FindMany, FindOne from beanie.odm.queries.update import UpdateMany, UpdateResponse from beanie.odm.settings.document import DocumentSettings from beanie.odm.utils.dump import get_dict, get_top_level_nones from beanie.odm.utils.parsing import apply_changes, merge_models from beanie.odm.utils.pydantic import ( IS_PYDANTIC_V2, get_extra_field_info, get_field_type, get_model_dump, 
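# helpers that paper over Pydantic v1/v2 API differences
# (e.g. parse_obj_as vs. TypeAdapter under the hood)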
get_model_fields, parse_model, parse_object_as, ) from beanie.odm.utils.self_validation import validate_self_before from beanie.odm.utils.state import ( previous_saved_state_needed, save_state_after, saved_state_needed, ) from beanie.odm.utils.typing import extract_id_class if IS_PYDANTIC_V2: from pydantic import model_validator if TYPE_CHECKING: from beanie.odm.views import View FindType = TypeVar("FindType", bound=Union["Document", "View"]) DocType = TypeVar("DocType", bound="Document") P = ParamSpec("P") R = TypeVar("R") # can describe both sync and async, where R itself is a coroutine AnyDocMethod: TypeAlias = Callable[Concatenate[DocType, P], R] # describes only async AsyncDocMethod: TypeAlias = Callable[ Concatenate[DocType, P], Coroutine[Any, Any, R] ] DocumentProjectionType = TypeVar("DocumentProjectionType", bound=BaseModel) def json_schema_extra(schema: Dict[str, Any], model: Type["Document"]) -> None: # remove excluded fields from the json schema properties = schema.get("properties") if not properties: return for k, field in get_model_fields(model).items(): k = field.alias or k if k not in properties: continue field_info = field if IS_PYDANTIC_V2 else field.field_info if field_info.exclude: del properties[k] def document_alias_generator(s: str) -> str: if s == "id": return "_id" return s class MergeStrategy(str, Enum): local = "local" remote = "remote" class Document( LazyModel, SettersInterface, InheritanceInterface, FindInterface, AggregateInterface, OtherGettersInterface, ): """ Document Mapping class. Fields: - `id` - MongoDB document ObjectID "_id" field. Mapped to the PydanticObjectId class """ if IS_PYDANTIC_V2: model_config = ConfigDict( json_schema_extra=json_schema_extra, populate_by_name=True, alias_generator=document_alias_generator, ) else: class Config: json_encoders = {ObjectId: str} allow_population_by_field_name = True fields = {"id": "_id"} schema_extra = staticmethod(json_schema_extra) id: Optional[PydanticObjectId] = Field( default=None, description="MongoDB document ObjectID" ) # State revision_id: Optional[UUID] = Field(default=None, exclude=True) _saved_state: Optional[Dict[str, Any]] = PrivateAttr(default=None) _previous_saved_state: Optional[Dict[str, Any]] = PrivateAttr(default=None) # Relations _link_fields: ClassVar[Optional[Dict[str, LinkInfo]]] = None # Cache _cache: ClassVar[Optional[LRUCache]] = None # Settings _document_settings: ClassVar[Optional[DocumentSettings]] = None # Database _database_major_version: ClassVar[int] = 4 def __init__(self, *args: Any, **kwargs: Any) -> None: super(Document, self).__init__(*args, **kwargs) self.get_motor_collection() @classmethod def _fill_back_refs(cls, values): if cls._link_fields: for field_name, link_info in cls._link_fields.items(): if ( link_info.link_type in [LinkTypes.BACK_DIRECT, LinkTypes.OPTIONAL_BACK_DIRECT] and field_name not in values ): values[field_name] = BackLink[link_info.document_class]( link_info.document_class ) if ( link_info.link_type in [LinkTypes.BACK_LIST, LinkTypes.OPTIONAL_BACK_LIST] and field_name not in values ): values[field_name] = [ BackLink[link_info.document_class]( link_info.document_class ) ] return values if IS_PYDANTIC_V2: @model_validator(mode="before") def fill_back_refs(cls, values): return cls._fill_back_refs(values) else: @root_validator(pre=True) def fill_back_refs(cls, values): return cls._fill_back_refs(values) @classmethod async def get( cls: Type["DocType"], document_id: Any, session: Optional[AsyncIOMotorClientSession] = None, ignore_cache: bool = False, 
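# fetch_links eagerly resolves Link/BackLink fields; nesting_depth and
# nesting_depths_per_field bound how deep linked documents are fetched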
fetch_links: bool = False, with_children: bool = False, nesting_depth: Optional[int] = None, nesting_depths_per_field: Optional[Dict[str, int]] = None, **pymongo_kwargs: Any, ) -> Optional["DocType"]: """ Get document by id, returns None if document does not exist :param document_id: PydanticObjectId - document id :param session: Optional[AsyncIOMotorClientSession] - motor session :param ignore_cache: bool - ignore cache (if it is turned on) :param **pymongo_kwargs: pymongo native parameters for find operation :return: Union["Document", None] """ if not isinstance( document_id, extract_id_class(get_field_type(get_model_fields(cls)["id"])), ): document_id = parse_object_as( get_field_type(get_model_fields(cls)["id"]), document_id ) return await cls.find_one( {"_id": document_id}, session=session, ignore_cache=ignore_cache, fetch_links=fetch_links, with_children=with_children, nesting_depth=nesting_depth, nesting_depths_per_field=nesting_depths_per_field, **pymongo_kwargs, ) async def sync(self, merge_strategy: MergeStrategy = MergeStrategy.remote): """ Sync the document with the database :param merge_strategy: MergeStrategy - how to merge the document :return: None """ if ( merge_strategy == MergeStrategy.local and self.get_settings().use_state_management is False ): raise ValueError( "State management must be turned on to use local merge strategy" ) if self.id is None: raise DocumentWasNotSaved document = await self.find_one({"_id": self.id}) if document is None: raise DocumentNotFound if merge_strategy == MergeStrategy.local: original_changes = self.get_changes() new_state = document.get_saved_state() if new_state is None: raise DocumentWasNotSaved changes_to_apply = self._collect_updates( new_state, original_changes ) merge_models(self, document) apply_changes(changes_to_apply, self) elif merge_strategy == MergeStrategy.remote: merge_models(self, document) else: raise ValueError("Invalid merge strategy") @wrap_with_actions(EventTypes.INSERT) @save_state_after @validate_self_before async def insert( self: Self, *, link_rule: WriteRules = WriteRules.DO_NOTHING, session: Optional[AsyncIOMotorClientSession] = None, skip_actions: Optional[List[Union[ActionDirections, str]]] = None, ) -> Self: """ Insert the document (self) to the collection :return: self """ if self.get_settings().use_revision: self.revision_id = uuid4() if link_rule == WriteRules.WRITE: link_fields = self.get_link_fields() if link_fields is not None: for field_info in link_fields.values(): value = getattr(self, field_info.field_name) if field_info.link_type in [ LinkTypes.DIRECT, LinkTypes.OPTIONAL_DIRECT, ]: if isinstance(value, Document): await value.save( link_rule=WriteRules.WRITE, session=session ) if field_info.link_type in [ LinkTypes.LIST, LinkTypes.OPTIONAL_LIST, ]: if isinstance(value, List): await asyncio.gather( *[ obj.save( link_rule=WriteRules.WRITE, session=session, ) for obj in value if isinstance(obj, Document) ] ) result = await self.get_motor_collection().insert_one( get_dict( self, to_db=True, keep_nulls=self.get_settings().keep_nulls ), session=session, ) new_id = result.inserted_id if not isinstance( new_id, extract_id_class(get_field_type(get_model_fields(self)["id"])), ): new_id = parse_object_as( get_field_type(get_model_fields(self)["id"]), new_id ) self.id = new_id return self async def create( self: Self, session: Optional[AsyncIOMotorClientSession] = None, ) -> Self: """ The same as self.insert() :return: self """ return await self.insert(session=session) @classmethod async def insert_one( cls: 
Type[DocType], document: DocType, session: Optional[AsyncIOMotorClientSession] = None, bulk_writer: Optional["BulkWriter"] = None, link_rule: WriteRules = WriteRules.DO_NOTHING, ) -> Optional[DocType]: """ Insert one document to the collection :param document: Document - document to insert :param session: AsyncIOMotorClientSession - motor session :param bulk_writer: "BulkWriter" - Beanie bulk writer :param link_rule: InsertRules - how to manage link fields :return: DocType """ if not isinstance(document, cls): raise TypeError( "Inserting document must be of the original document class" ) if bulk_writer is None: return await document.insert(link_rule=link_rule, session=session) else: if link_rule == WriteRules.WRITE: raise NotSupported( "Cascade insert with bulk writing not supported" ) bulk_writer.add_operation( type(document), InsertOne( get_dict( document, to_db=True, keep_nulls=document.get_settings().keep_nulls, ) ), ) return None @classmethod async def insert_many( cls: Type[DocType], documents: Iterable[DocType], session: Optional[AsyncIOMotorClientSession] = None, link_rule: WriteRules = WriteRules.DO_NOTHING, **pymongo_kwargs: Any, ) -> InsertManyResult: """ Insert many documents to the collection :param documents: List["Document"] - documents to insert :param session: AsyncIOMotorClientSession - motor session :param link_rule: InsertRules - how to manage link fields :return: InsertManyResult """ if link_rule == WriteRules.WRITE: raise NotSupported( "Cascade insert not supported for insert many method" ) documents_list = [ get_dict( document, to_db=True, keep_nulls=document.get_settings().keep_nulls, ) for document in documents ] return await cls.get_motor_collection().insert_many( documents_list, session=session, **pymongo_kwargs ) @wrap_with_actions(EventTypes.REPLACE) @save_state_after @validate_self_before async def replace( self: Self, ignore_revision: bool = False, session: Optional[AsyncIOMotorClientSession] = None, bulk_writer: Optional[BulkWriter] = None, link_rule: WriteRules = WriteRules.DO_NOTHING, skip_actions: Optional[List[Union[ActionDirections, str]]] = None, ) -> Self: """ Fully update the document in the database :param session: Optional[AsyncIOMotorClientSession] - motor session. :param ignore_revision: bool - do force replace. Used when revision based protection is turned on.
:param bulk_writer: "BulkWriter" - Beanie bulk writer :return: self """ if self.id is None: raise ValueError("Document must have an id") if bulk_writer is not None and link_rule != WriteRules.DO_NOTHING: raise NotSupported if link_rule == WriteRules.WRITE: link_fields = self.get_link_fields() if link_fields is not None: for field_info in link_fields.values(): value = getattr(self, field_info.field_name) if field_info.link_type in [ LinkTypes.DIRECT, LinkTypes.OPTIONAL_DIRECT, LinkTypes.BACK_DIRECT, LinkTypes.OPTIONAL_BACK_DIRECT, ]: if isinstance(value, Document): await value.replace( link_rule=link_rule, bulk_writer=bulk_writer, ignore_revision=ignore_revision, session=session, ) if field_info.link_type in [ LinkTypes.LIST, LinkTypes.OPTIONAL_LIST, LinkTypes.BACK_LIST, LinkTypes.OPTIONAL_BACK_LIST, ]: if isinstance(value, List): await asyncio.gather( *[ obj.replace( link_rule=link_rule, bulk_writer=bulk_writer, ignore_revision=ignore_revision, session=session, ) for obj in value if isinstance(obj, Document) ] ) use_revision_id = self.get_settings().use_revision find_query: Dict[str, Any] = {"_id": self.id} if use_revision_id and not ignore_revision: find_query["revision_id"] = self.revision_id self.revision_id = uuid4() try: await self.find_one(find_query).replace_one( self, session=session, bulk_writer=bulk_writer, ) except DocumentNotFound: if use_revision_id and not ignore_revision: raise RevisionIdWasChanged else: raise DocumentNotFound return self @wrap_with_actions(EventTypes.SAVE) @save_state_after @validate_self_before async def save( self: Self, session: Optional[AsyncIOMotorClientSession] = None, link_rule: WriteRules = WriteRules.DO_NOTHING, ignore_revision: bool = False, **kwargs: Any, ) -> Self: """ Update an existing model in the database or insert it if it does not yet exist. :param session: Optional[AsyncIOMotorClientSession] - motor session. :param link_rule: WriteRules - rules how to deal with links on writing :param ignore_revision: bool - do force save. :return: self """ if link_rule == WriteRules.WRITE: link_fields = self.get_link_fields() if link_fields is not None: for field_info in link_fields.values(): value = getattr(self, field_info.field_name) if field_info.link_type in [ LinkTypes.DIRECT, LinkTypes.OPTIONAL_DIRECT, LinkTypes.BACK_DIRECT, LinkTypes.OPTIONAL_BACK_DIRECT, ]: if isinstance(value, Document): await value.save( link_rule=link_rule, session=session ) if field_info.link_type in [ LinkTypes.LIST, LinkTypes.OPTIONAL_LIST, LinkTypes.BACK_LIST, LinkTypes.OPTIONAL_BACK_LIST, ]: if isinstance(value, List): await asyncio.gather( *[ obj.save( link_rule=link_rule, session=session ) for obj in value if isinstance(obj, Document) ] ) if self.get_settings().keep_nulls is False: return await self.update( SetOperator( get_dict( self, to_db=True, keep_nulls=self.get_settings().keep_nulls, ) ), Unset(get_top_level_nones(self)), session=session, ignore_revision=ignore_revision, upsert=True, **kwargs, ) else: return await self.update( SetOperator( get_dict( self, to_db=True, keep_nulls=self.get_settings().keep_nulls, ) ), session=session, ignore_revision=ignore_revision, upsert=True, **kwargs, ) @saved_state_needed @wrap_with_actions(EventTypes.SAVE_CHANGES) @validate_self_before async def save_changes( self: Self, ignore_revision: bool = False, session: Optional[AsyncIOMotorClientSession] = None, bulk_writer: Optional[BulkWriter] = None, skip_actions: Optional[List[Union[ActionDirections, str]]] = None, ) -> Optional[Self]: """ Save changes. 
State management usage must be turned on :param ignore_revision: bool - ignore revision id, if revision is turned on :param bulk_writer: "BulkWriter" - Beanie bulk writer :return: Optional[self] """ if not self.is_changed: return None changes = self.get_changes() if self.get_settings().keep_nulls is False: return await self.update( SetOperator(changes), Unset(get_top_level_nones(self)), ignore_revision=ignore_revision, session=session, bulk_writer=bulk_writer, ) else: return await self.set( changes, ignore_revision=ignore_revision, session=session, bulk_writer=bulk_writer, ) @classmethod async def replace_many( cls: Type[DocType], documents: List[DocType], session: Optional[AsyncIOMotorClientSession] = None, ) -> None: """ Replace list of documents :param documents: List["Document"] :param session: Optional[AsyncIOMotorClientSession] - motor session. :return: None """ ids_list = [document.id for document in documents] if await cls.find(In(cls.id, ids_list)).count() != len(ids_list): raise ReplaceError( "Some of the documents are not exist in the collection" ) async with BulkWriter(session=session) as bulk_writer: for document in documents: await document.replace( bulk_writer=bulk_writer, session=session ) @wrap_with_actions(EventTypes.UPDATE) @save_state_after async def update( self: Self, *args: Union[Dict[Any, Any], Mapping[Any, Any]], ignore_revision: bool = False, session: Optional[AsyncIOMotorClientSession] = None, bulk_writer: Optional[BulkWriter] = None, skip_actions: Optional[List[Union[ActionDirections, str]]] = None, skip_sync: Optional[bool] = None, **pymongo_kwargs: Any, ) -> Self: """ Partially update the document in the database :param args: *Union[dict, Mapping] - the modifications to apply. :param session: AsyncIOMotorClientSession - motor session. :param ignore_revision: bool - force update. Will update even if revision id is not the same, as stored :param bulk_writer: "BulkWriter" - Beanie bulk writer :param pymongo_kwargs: pymongo native parameters for update operation :return: self """ arguments: list[Any] = list(args) if skip_sync is not None: raise DeprecationWarning( "skip_sync parameter is not supported. The document get synced always using atomic operation." ) use_revision_id = self.get_settings().use_revision if self.id is not None: find_query: Dict[str, Any] = {"_id": self.id} else: find_query = {"_id": PydanticObjectId()} if use_revision_id and not ignore_revision: find_query["revision_id"] = self.revision_id if use_revision_id: new_revision_id = uuid4() arguments.append(SetRevisionId(new_revision_id)) try: result = await self.find_one(find_query).update( *arguments, session=session, response_type=UpdateResponse.NEW_DOCUMENT, bulk_writer=bulk_writer, **pymongo_kwargs, ) except DuplicateKeyError: raise RevisionIdWasChanged if bulk_writer is None: if use_revision_id and not ignore_revision and result is None: raise RevisionIdWasChanged merge_models(self, result) return self @classmethod def update_all( cls, *args: Union[dict, Mapping], session: Optional[AsyncIOMotorClientSession] = None, bulk_writer: Optional[BulkWriter] = None, **pymongo_kwargs: Any, ) -> UpdateMany: """ Partially update all the documents :param args: *Union[dict, Mapping] - the modifications to apply. :param session: AsyncIOMotorClientSession - motor session. 
:param bulk_writer: "BulkWriter" - Beanie bulk writer :param **pymongo_kwargs: pymongo native parameters for find operation :return: UpdateMany query """ return cls.find_all().update_many( *args, session=session, bulk_writer=bulk_writer, **pymongo_kwargs ) def set( self: Self, expression: Dict[Union[ExpressionField, str, Any], Any], session: Optional[AsyncIOMotorClientSession] = None, bulk_writer: Optional[BulkWriter] = None, skip_sync: Optional[bool] = None, **kwargs: Any, ) -> Coroutine[None, None, Self]: """ Set values Example: ```python class Sample(Document): one: int await Document.find(Sample.one == 1).set({Sample.one: 100}) ``` Uses [Set operator](operators/update.md#set) :param expression: Dict[Union[ExpressionField, str, Any], Any] - keys and values to set :param session: Optional[AsyncIOMotorClientSession] - motor session :param bulk_writer: Optional[BulkWriter] - bulk writer :param skip_sync: bool - skip doc syncing. Available for the direct instances only :return: self """ return self.update( SetOperator(expression), session=session, bulk_writer=bulk_writer, skip_sync=skip_sync, **kwargs, ) def current_date( self: Self, expression: Dict[Union[datetime, ExpressionField, str], Any], session: Optional[AsyncIOMotorClientSession] = None, bulk_writer: Optional[BulkWriter] = None, skip_sync: Optional[bool] = None, **kwargs: Any, ) -> Coroutine[None, None, Self]: """ Set current date Uses [CurrentDate operator](operators/update.md#currentdate) :param expression: Dict[Union[datetime, ExpressionField, str], Any] :param session: Optional[AsyncIOMotorClientSession] - motor session :param bulk_writer: Optional[BulkWriter] - bulk writer :param skip_sync: bool - skip doc syncing. Available for the direct instances only :return: self """ return self.update( CurrentDate(expression), session=session, bulk_writer=bulk_writer, skip_sync=skip_sync, **kwargs, ) def inc( self: Self, expression: Dict[Union[ExpressionField, float, int, str], Any], session: Optional[AsyncIOMotorClientSession] = None, bulk_writer: Optional[BulkWriter] = None, skip_sync: Optional[bool] = None, **kwargs: Any, ) -> Coroutine[None, None, Self]: """ Increment Example: ```python class Sample(Document): one: int await Document.find(Sample.one == 1).inc({Sample.one: 100}) ``` Uses [Inc operator](operators/update.md#inc) :param expression: Dict[Union[ExpressionField, float, int, str], Any] :param session: Optional[AsyncIOMotorClientSession] - motor session :param bulk_writer: Optional[BulkWriter] - bulk writer :param skip_sync: bool - skip doc syncing. Available for the direct instances only :return: self """ return self.update( Inc(expression), session=session, bulk_writer=bulk_writer, skip_sync=skip_sync, **kwargs, ) @wrap_with_actions(EventTypes.DELETE) async def delete( self, session: Optional[AsyncIOMotorClientSession] = None, bulk_writer: Optional[BulkWriter] = None, link_rule: DeleteRules = DeleteRules.DO_NOTHING, skip_actions: Optional[List[Union[ActionDirections, str]]] = None, **pymongo_kwargs: Any, ) -> Optional[DeleteResult]: """ Delete the document :param session: Optional[AsyncIOMotorClientSession] - motor session. :param bulk_writer: "BulkWriter" - Beanie bulk writer :param link_rule: DeleteRules - rules for link fields :param **pymongo_kwargs: pymongo native parameters for delete operation :return: Optional[DeleteResult] - pymongo DeleteResult instance. 
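Example (a minimal sketch; `Sample` is an illustrative model):

```python
class Sample(Document):
    one: int

sample = await Sample.find_one(Sample.one == 1)
if sample is not None:
    await sample.delete(link_rule=DeleteRules.DELETE_LINKS)
```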
""" if link_rule == DeleteRules.DELETE_LINKS: link_fields = self.get_link_fields() if link_fields is not None: for field_info in link_fields.values(): value = getattr(self, field_info.field_name) if field_info.link_type in [ LinkTypes.DIRECT, LinkTypes.OPTIONAL_DIRECT, LinkTypes.BACK_DIRECT, LinkTypes.OPTIONAL_BACK_DIRECT, ]: if isinstance(value, Document): await value.delete( link_rule=DeleteRules.DELETE_LINKS, **pymongo_kwargs, ) if field_info.link_type in [ LinkTypes.LIST, LinkTypes.OPTIONAL_LIST, LinkTypes.BACK_LIST, LinkTypes.OPTIONAL_BACK_LIST, ]: if isinstance(value, List): await asyncio.gather( *[ obj.delete( link_rule=DeleteRules.DELETE_LINKS, **pymongo_kwargs, ) for obj in value if isinstance(obj, Document) ] ) return await self.find_one({"_id": self.id}).delete( session=session, bulk_writer=bulk_writer, **pymongo_kwargs ) @classmethod async def delete_all( cls, session: Optional[AsyncIOMotorClientSession] = None, bulk_writer: Optional[BulkWriter] = None, **pymongo_kwargs: Any, ) -> Optional[DeleteResult]: """ Delete all the documents :param session: Optional[AsyncIOMotorClientSession] - motor session. :param bulk_writer: "BulkWriter" - Beanie bulk writer :param **pymongo_kwargs: pymongo native parameters for delete operation :return: Optional[DeleteResult] - pymongo DeleteResult instance. """ return await cls.find_all().delete( session=session, bulk_writer=bulk_writer, **pymongo_kwargs ) # State management @classmethod def use_state_management(cls) -> bool: """ Is state management turned on :return: bool """ return cls.get_settings().use_state_management @classmethod def state_management_save_previous(cls) -> bool: """ Should we save the previous state after a commit to database :return: bool """ return cls.get_settings().state_management_save_previous @classmethod def state_management_replace_objects(cls) -> bool: """ Should objects be replaced when using state management :return: bool """ return cls.get_settings().state_management_replace_objects def _save_state(self) -> None: """ Save current document state. Internal method :return: None """ if self.use_state_management() and self.id is not None: if self.state_management_save_previous(): self._previous_saved_state = self._saved_state self._saved_state = get_dict( self, to_db=True, keep_nulls=self.get_settings().keep_nulls, exclude={"revision_id"}, ) def get_saved_state(self) -> Optional[Dict[str, Any]]: """ Saved state getter. It is protected property. :return: Optional[Dict[str, Any]] - saved state """ return self._saved_state def get_previous_saved_state(self) -> Optional[Dict[str, Any]]: """ Previous state getter. It is a protected property. 
:return: Optional[Dict[str, Any]] - previous state """ return self._previous_saved_state @property @saved_state_needed def is_changed(self) -> bool: if self._saved_state == get_dict( self, to_db=True, keep_nulls=self.get_settings().keep_nulls, exclude={"revision_id"}, ): return False return True @property @saved_state_needed @previous_saved_state_needed def has_changed(self) -> bool: if ( self._previous_saved_state is None or self._previous_saved_state == self._saved_state ): return False return True def _collect_updates( self, old_dict: Dict[str, Any], new_dict: Dict[str, Any] ) -> Dict[str, Any]: """ Compares old_dict with new_dict and returns field paths that have been updated Args: old_dict: dict1 new_dict: dict2 Returns: dictionary with updates """ updates = {} if old_dict.keys() - new_dict.keys(): updates = new_dict else: for field_name, field_value in new_dict.items(): if field_value != old_dict.get(field_name): if not self.state_management_replace_objects() and ( isinstance(field_value, dict) and isinstance(old_dict.get(field_name), dict) ): if old_dict.get(field_name) is None: updates[field_name] = field_value elif isinstance(field_value, dict) and isinstance( old_dict.get(field_name), dict ): field_data = self._collect_updates( old_dict.get(field_name), # type: ignore field_value, ) for k, v in field_data.items(): updates[f"{field_name}.{k}"] = v else: updates[field_name] = field_value return updates @saved_state_needed def get_changes(self) -> Dict[str, Any]: return self._collect_updates( self._saved_state, # type: ignore get_dict( self, to_db=True, keep_nulls=self.get_settings().keep_nulls, exclude={"revision_id"}, ), ) @saved_state_needed @previous_saved_state_needed def get_previous_changes(self) -> Dict[str, Any]: if self._previous_saved_state is None: return {} return self._collect_updates( self._previous_saved_state, self._saved_state, # type: ignore ) @saved_state_needed def rollback(self) -> None: if self.is_changed: for key, value in self._saved_state.items(): # type: ignore if key == "_id": setattr(self, "id", value) else: setattr(self, key, value) # Other @classmethod def get_settings(cls) -> DocumentSettings: """ Get document settings, which was created on the initialization step :return: DocumentSettings class """ if cls._document_settings is None: raise CollectionWasNotInitialized return cls._document_settings @classmethod async def inspect_collection( cls, session: Optional[AsyncIOMotorClientSession] = None ) -> InspectionResult: """ Check, if documents, stored in the MongoDB collection are compatible with the Document schema :return: InspectionResult """ inspection_result = InspectionResult() async for json_document in cls.get_motor_collection().find( {}, session=session ): try: parse_model(cls, json_document) except ValidationError as e: if inspection_result.status == InspectionStatuses.OK: inspection_result.status = InspectionStatuses.FAIL inspection_result.errors.append( InspectionError( document_id=json_document["_id"], error=str(e) ) ) return inspection_result @classmethod def _check_hidden_fields(cls): hidden_fields = [ (name, field) for name, field in get_model_fields(cls).items() if get_extra_field_info(field, "hidden") is True ] if not hidden_fields: return warnings.warn( f"{cls.__name__}: 'hidden=True' is deprecated, please use 'exclude=True'", DeprecationWarning, stacklevel=2, ) if IS_PYDANTIC_V2: for name, field in hidden_fields: field.exclude = True del field.json_schema_extra["hidden"] cls.model_rebuild(force=True) else: for name, field in 
hidden_fields: field.field_info.exclude = True del field.field_info.extra["hidden"] cls.__exclude_fields__[name] = True @wrap_with_actions(event_type=EventTypes.VALIDATE_ON_SAVE) async def validate_self(self, *args: Any, **kwargs: Any): # TODO: it can be sync, but needs some actions controller improvements if self.get_settings().validate_on_save: new_model = parse_model(self.__class__, get_model_dump(self)) merge_models(self, new_model) def to_ref(self): if self.id is None: raise DocumentWasNotSaved("Can not create dbref without id") return DBRef(self.get_motor_collection().name, self.id) async def fetch_link(self, field: Union[str, Any]): ref_obj = getattr(self, field, None) if isinstance(ref_obj, Link): value = await ref_obj.fetch(fetch_links=True) setattr(self, field, value) if isinstance(ref_obj, list) and ref_obj: values = await Link.fetch_list(ref_obj, fetch_links=True) setattr(self, field, values) async def fetch_all_links(self): coros = [] link_fields = self.get_link_fields() if link_fields is not None: for ref in link_fields.values(): coros.append(self.fetch_link(ref.field_name)) # TODO lists await asyncio.gather(*coros) @classmethod def get_link_fields(cls) -> Optional[Dict[str, LinkInfo]]: return cls._link_fields @classmethod def get_model_type(cls) -> ModelType: return ModelType.Document @classmethod async def distinct( cls, key: str, filter: Optional[Mapping[str, Any]] = None, session: Optional[AsyncIOMotorClientSession] = None, **kwargs: Any, ) -> list: return await cls.get_motor_collection().distinct( key, filter, session, **kwargs ) @classmethod def link_from_id(cls, id: Any): ref = DBRef(id=id, collection=cls.get_collection_name()) return Link(ref, document_class=cls) @classmethod def bulk_writer( cls, session: Optional[AsyncIOMotorClientSession] = None, ordered: bool = True, bypass_document_validation: bool = False, comment: Optional[Any] = None, ) -> BulkWriter: """ Returns a BulkWriter instance for handling bulk write operations. :param session: ClientSession The session instance used for transactional operations. :param ordered: bool If ``True`` (the default), requests will be performed on the server serially, in the order provided. If an error occurs, all remaining operations are aborted. If ``False``, requests will be performed on the server in arbitrary order, possibly in parallel, and all operations will be attempted. :param bypass_document_validation: bool, optional If ``True``, allows the write to opt-out of document-level validation. Default is ``False``. :param comment: str, optional A user-provided comment to attach to the BulkWriter. :returns: BulkWriter An instance of BulkWriter configured with the provided settings. Example Usage: -------------- This method is typically used within an asynchronous context manager. .. 
code-block:: python async with Document.bulk_writer(ordered=True) as bulk: await Document.insert_one(Document(field="value"), bulk_writer=bulk) """ return BulkWriter( session, ordered, cls, bypass_document_validation, comment ) class DocumentWithSoftDelete(Document): deleted_at: Optional[datetime] = None def is_deleted(self) -> bool: return self.deleted_at is not None async def hard_delete( self, session: Optional[AsyncIOMotorClientSession] = None, bulk_writer: Optional[BulkWriter] = None, link_rule: DeleteRules = DeleteRules.DO_NOTHING, skip_actions: Optional[List[Union[ActionDirections, str]]] = None, **pymongo_kwargs: Any, ) -> Optional[DeleteResult]: return await super().delete( session=session, bulk_writer=bulk_writer, link_rule=link_rule, skip_actions=skip_actions, **pymongo_kwargs, ) async def delete( self, session: Optional[AsyncIOMotorClientSession] = None, bulk_writer: Optional[BulkWriter] = None, link_rule: DeleteRules = DeleteRules.DO_NOTHING, skip_actions: Optional[List[Union[ActionDirections, str]]] = None, **pymongo_kwargs, ) -> Optional[DeleteResult]: self.deleted_at = datetime.now(tz=timezone.utc) await self.save() return None @classmethod def find_many_in_all( # type: ignore cls: Type[FindType], *args: Union[Mapping[str, Any], bool], projection_model: Optional[Type["DocumentProjectionType"]] = None, skip: Optional[int] = None, limit: Optional[int] = None, sort: Union[None, str, List[Tuple[str, SortDirection]]] = None, session: Optional[AsyncIOMotorClientSession] = None, ignore_cache: bool = False, fetch_links: bool = False, with_children: bool = False, lazy_parse: bool = False, nesting_depth: Optional[int] = None, nesting_depths_per_field: Optional[Dict[str, int]] = None, **pymongo_kwargs: Any, ) -> Union[FindMany[FindType], FindMany["DocumentProjectionType"]]: return cls._find_many_query_class(document_model=cls).find_many( *args, sort=sort, skip=skip, limit=limit, projection_model=projection_model, session=session, ignore_cache=ignore_cache, fetch_links=fetch_links, lazy_parse=lazy_parse, nesting_depth=nesting_depth, nesting_depths_per_field=nesting_depths_per_field, **pymongo_kwargs, ) @classmethod def find_many( # type: ignore cls: Type[FindType], *args: Union[Mapping[str, Any], bool], projection_model: Optional[Type["DocumentProjectionType"]] = None, skip: Optional[int] = None, limit: Optional[int] = None, sort: Union[None, str, List[Tuple[str, SortDirection]]] = None, session: Optional[AsyncIOMotorClientSession] = None, ignore_cache: bool = False, fetch_links: bool = False, with_children: bool = False, lazy_parse: bool = False, nesting_depth: Optional[int] = None, nesting_depths_per_field: Optional[Dict[str, int]] = None, **pymongo_kwargs: Any, ) -> Union[FindMany[FindType], FindMany["DocumentProjectionType"]]: args = cls._add_class_id_filter(args, with_children) + ( {"deleted_at": None}, ) return cls._find_many_query_class(document_model=cls).find_many( *args, sort=sort, skip=skip, limit=limit, projection_model=projection_model, session=session, ignore_cache=ignore_cache, fetch_links=fetch_links, lazy_parse=lazy_parse, nesting_depth=nesting_depth, nesting_depths_per_field=nesting_depths_per_field, **pymongo_kwargs, ) @classmethod def find_one( # type: ignore cls: Type[FindType], *args: Union[Mapping[str, Any], bool], projection_model: Optional[Type["DocumentProjectionType"]] = None, session: Optional[AsyncIOMotorClientSession] = None, ignore_cache: bool = False, fetch_links: bool = False, with_children: bool = False, nesting_depth: Optional[int] = None, 
nesting_depths_per_field: Optional[Dict[str, int]] = None, **pymongo_kwargs: Any, ) -> Union[FindOne[FindType], FindOne["DocumentProjectionType"]]: args = cls._add_class_id_filter(args, with_children) + ( {"deleted_at": None}, ) return cls._find_one_query_class(document_model=cls).find_one( *args, projection_model=projection_model, session=session, ignore_cache=ignore_cache, fetch_links=fetch_links, nesting_depth=nesting_depth, nesting_depths_per_field=nesting_depths_per_field, **pymongo_kwargs, ) python-beanie-1.29.0/beanie/odm/enums.py000066400000000000000000000004701473701376500200660ustar00rootroot00000000000000from enum import Enum import pymongo class SortDirection(int, Enum): """ Sorting directions """ ASCENDING = pymongo.ASCENDING DESCENDING = pymongo.DESCENDING class InspectionStatuses(str, Enum): """ Statuses of the collection inspection """ FAIL = "FAIL" OK = "OK" python-beanie-1.29.0/beanie/odm/fields.py000066400000000000000000000516021473701376500202100ustar00rootroot00000000000000from __future__ import annotations import asyncio from collections import OrderedDict from dataclasses import dataclass from enum import Enum from typing import ( TYPE_CHECKING, Any, Dict, Generic, List, Optional, Tuple, Type, TypeVar, Union, ) from typing import OrderedDict as OrderedDictType from bson import DBRef, ObjectId from bson.errors import InvalidId from pydantic import BaseModel from pymongo import ASCENDING, IndexModel from typing_extensions import get_args from beanie.odm.enums import SortDirection from beanie.odm.operators.find.comparison import ( GT, GTE, LT, LTE, NE, Eq, In, ) from beanie.odm.registry import DocsRegistry from beanie.odm.utils.parsing import parse_obj from beanie.odm.utils.pydantic import ( IS_PYDANTIC_V2, IS_PYDANTIC_V2_10, get_field_type, get_model_fields, parse_object_as, ) if IS_PYDANTIC_V2: from pydantic import ( GetCoreSchemaHandler, GetJsonSchemaHandler, TypeAdapter, ) from pydantic.json_schema import JsonSchemaValue from pydantic_core.core_schema import ( CoreSchema, ValidationInfo, any_schema, dict_schema, json_or_python_schema, no_info_after_validator_function, no_info_plain_validator_function, plain_serializer_function_ser_schema, simple_ser_schema, str_schema, typed_dict_field, typed_dict_schema, union_schema, with_info_plain_validator_function, ) else: from pydantic.fields import ModelField from pydantic.json import ENCODERS_BY_TYPE if TYPE_CHECKING: from beanie.odm.documents import DocType @dataclass(frozen=True) class IndexedAnnotation: _indexed: Tuple[int, Dict[str, Any]] def Indexed(typ=None, index_type=ASCENDING, **kwargs: Any): """ If `typ` is defined, returns a subclass of `typ` with an extra attribute `_indexed` as a tuple: - Index 0: `index_type` such as `pymongo.ASCENDING` - Index 1: `kwargs` passed to `IndexModel` When instantiated the type of the result will actually be `typ`. When `typ` is not defined, returns an `IndexedAnnotation` instance, to be used as metadata in `Annotated` fields. 
Example: ```py # Both fields would have the same behavior class MyModel(BaseModel): field1: Indexed(str, unique=True) field2: Annotated[str, Indexed(unique=True)] ``` """ if typ is None: return IndexedAnnotation(_indexed=(index_type, kwargs)) class NewType(typ): _indexed = (index_type, kwargs) def __new__(cls, *args: Any, **kwargs: Any): return typ.__new__(typ, *args, **kwargs) if IS_PYDANTIC_V2: @classmethod def __get_pydantic_core_schema__( cls, _source_type: Type[Any], _handler: GetCoreSchemaHandler ) -> CoreSchema: custom_type = getattr( typ, "__get_pydantic_core_schema__", None ) if custom_type is not None: return custom_type(_source_type, _handler) return no_info_after_validator_function( lambda v: v, simple_ser_schema(typ.__name__) ) NewType.__name__ = f"Indexed {typ.__name__}" return NewType class PydanticObjectId(ObjectId): """ Object Id field. Compatible with Pydantic. """ @classmethod def _validate(cls, v): if isinstance(v, bytes): v = v.decode("utf-8") try: return PydanticObjectId(v) except (InvalidId, TypeError): raise ValueError("Id must be of type PydanticObjectId") if IS_PYDANTIC_V2: @classmethod def __get_pydantic_core_schema__( cls, source_type: Type[Any], handler: GetCoreSchemaHandler ) -> CoreSchema: if not IS_PYDANTIC_V2_10: return no_info_plain_validator_function( cls._validate, metadata={ "pydantic_js_input_core_schema": str_schema( pattern="^[0-9a-f]{24}$", min_length=24, max_length=24, ) }, serialization=plain_serializer_function_ser_schema( lambda instance: str(instance), return_schema=str_schema(), when_used="json", ), ) return no_info_plain_validator_function( cls._validate, json_schema_input_schema=str_schema( pattern="^[0-9a-f]{24}$", min_length=24, max_length=24, ), serialization=plain_serializer_function_ser_schema( lambda instance: str(instance), return_schema=str_schema(), when_used="json", ), ) @classmethod def __get_pydantic_json_schema__( cls, schema: CoreSchema, handler: GetJsonSchemaHandler ) -> JsonSchemaValue: json_schema = handler(schema) json_schema.update( type="string", example="5eb7cf5a86d9755df3a6c593", ) return json_schema else: @classmethod def __get_validators__(cls): yield cls._validate @classmethod def __modify_schema__(cls, field_schema: Dict[str, Any]): field_schema.update( type="string", example="5eb7cf5a86d9755df3a6c593", ) if not IS_PYDANTIC_V2: ENCODERS_BY_TYPE[PydanticObjectId] = ( str # it is a workaround to force pydantic make json schema for this field ) BeanieObjectId = PydanticObjectId class ExpressionField(str): def __getitem__(self, item): """ Get sub field :param item: name of the subfield :return: ExpressionField """ return ExpressionField(f"{self}.{item}") def __getattr__(self, item): """ Get sub field :param item: name of the subfield :return: ExpressionField """ return ExpressionField(f"{self}.{item}") def __hash__(self): return hash(str(self)) def __eq__(self, other): if isinstance(other, ExpressionField): return super(ExpressionField, self).__eq__(other) return Eq(field=self, other=other) def __gt__(self, other): return GT(field=self, other=other) def __ge__(self, other): return GTE(field=self, other=other) def __lt__(self, other): return LT(field=self, other=other) def __le__(self, other): return LTE(field=self, other=other) def __ne__(self, other): return NE(field=self, other=other) def __pos__(self): return self, SortDirection.ASCENDING def __neg__(self): return self, SortDirection.DESCENDING def __copy__(self): return self def __deepcopy__(self, memo): return self class DeleteRules(str, Enum): DO_NOTHING = 
"DO_NOTHING" DELETE_LINKS = "DELETE_LINKS" class WriteRules(str, Enum): DO_NOTHING = "DO_NOTHING" WRITE = "WRITE" class LinkTypes(str, Enum): DIRECT = "DIRECT" OPTIONAL_DIRECT = "OPTIONAL_DIRECT" LIST = "LIST" OPTIONAL_LIST = "OPTIONAL_LIST" BACK_DIRECT = "BACK_DIRECT" BACK_LIST = "BACK_LIST" OPTIONAL_BACK_DIRECT = "OPTIONAL_BACK_DIRECT" OPTIONAL_BACK_LIST = "OPTIONAL_BACK_LIST" class LinkInfo(BaseModel): field_name: str lookup_field_name: str document_class: Type[BaseModel] # Document class link_type: LinkTypes nested_links: Optional[Dict] = None is_fetchable: bool = True T = TypeVar("T") class Link(Generic[T]): def __init__(self, ref: DBRef, document_class: Type[T]): self.ref = ref self.document_class = document_class async def fetch(self, fetch_links: bool = False) -> Union[T, Link[T]]: result = await self.document_class.get( # type: ignore self.ref.id, with_children=True, fetch_links=fetch_links ) return result or self @classmethod async def fetch_one(cls, link: Link[T]): return await link.fetch() @classmethod async def fetch_list( cls, links: List[Union[Link[T], DocType]], fetch_links: bool = False, ): """ Fetch list that contains links and documents :param links: :param fetch_links: :return: """ data = Link.repack_links(links) # type: ignore ids_to_fetch = [] document_class = None for doc_id, link in data.items(): if isinstance(link, Link): if document_class is None: document_class = link.document_class else: if document_class != link.document_class: raise ValueError( "All the links must have the same model class" ) ids_to_fetch.append(link.ref.id) if ids_to_fetch: fetched_models = await document_class.find( # type: ignore In("_id", ids_to_fetch), with_children=True, fetch_links=fetch_links, ).to_list() for model in fetched_models: data[model.id] = model return list(data.values()) @staticmethod def repack_links( links: List[Union[Link[T], DocType]], ) -> OrderedDictType[Any, Any]: result = OrderedDict() for link in links: if isinstance(link, Link): result[link.ref.id] = link else: result[link.id] = link return result @classmethod async def fetch_many(cls, links: List[Link[T]]) -> List[Union[T, Link[T]]]: coros = [] for link in links: coros.append(link.fetch()) return await asyncio.gather(*coros) if IS_PYDANTIC_V2: @staticmethod def serialize(value: Union[Link[T], BaseModel]): if isinstance(value, Link): return value.to_dict() return value.model_dump(mode="json") @classmethod def wrapped_validate( cls, source_type: Type[Any], handler: GetCoreSchemaHandler ): def validate( v: Union[Link[T], T, DBRef, dict[str, Any]], validation_info: ValidationInfo, ) -> Link[T] | T: document_class = DocsRegistry.evaluate_fr( # type: ignore get_args(source_type)[0] ) if isinstance(v, DBRef): return cls(ref=v, document_class=document_class) if isinstance(v, Link): return v if isinstance(v, dict) and v.keys() == {"id", "collection"}: return cls( ref=DBRef( collection=v["collection"], id=TypeAdapter( document_class.model_fields["id"].annotation ).validate_python(v["id"]), ), document_class=document_class, ) if isinstance(v, dict) or isinstance(v, BaseModel): return parse_obj(document_class, v) # Default fallback case for unknown type new_id = TypeAdapter( document_class.model_fields["id"].annotation ).validate_python(v) ref = DBRef( collection=document_class.get_collection_name(), id=new_id ) return cls(ref=ref, document_class=document_class) return validate @classmethod def __get_pydantic_core_schema__( cls, source_type: Type[Any], handler: GetCoreSchemaHandler ) -> CoreSchema: return 
json_or_python_schema( python_schema=with_info_plain_validator_function( cls.wrapped_validate(source_type, handler) ), json_schema=union_schema( [ typed_dict_schema( { "id": typed_dict_field(str_schema()), "collection": typed_dict_field(str_schema()), } ), dict_schema( keys_schema=str_schema(), values_schema=any_schema(), ), ] ), serialization=plain_serializer_function_ser_schema( function=lambda instance: cls.serialize(instance), when_used="json-unless-none", ), ) else: @classmethod def __get_validators__(cls): yield cls._validate @classmethod def _validate( cls, v: Union[Link[T], T, DBRef, dict[str, Any]], field: ModelField, ) -> Link[T] | T: document_class = DocsRegistry.evaluate_fr( # type: ignore field.sub_fields[0].type_ ) if isinstance(v, DBRef): return cls(ref=v, document_class=document_class) if isinstance(v, Link): return v if isinstance(v, dict) or isinstance(v, BaseModel): return parse_obj(document_class, v) # Default fallback case for unknown type new_id = parse_object_as( get_field_type(get_model_fields(document_class)["id"]), v ) ref = DBRef( collection=document_class.get_collection_name(), id=new_id ) return cls(ref=ref, document_class=document_class) @classmethod def __modify_schema__(cls, field_schema: Dict[str, Any]): field_schema.clear() field_schema.update( { "anyOf": [ { "properties": { "id": {"type": "string", "title": "Id"}, "collection": { "type": "string", "title": "Collection", }, }, "type": "object", "required": ["id", "collection"], }, {"type": "object"}, ], } ) def to_ref(self): return self.ref def to_dict(self): return {"id": str(self.ref.id), "collection": self.ref.collection} if not IS_PYDANTIC_V2: ENCODERS_BY_TYPE[Link] = lambda o: o.to_dict() class BackLink(Generic[T]): """Back reference to a document""" def __init__(self, document_class: Type[T]): self.document_class = document_class if IS_PYDANTIC_V2: @classmethod def wrapped_validate( cls, source_type: Type[Any], handler: GetCoreSchemaHandler ): def validate( v: Union[T, dict[str, Any]], validation_info: ValidationInfo ) -> BackLink[T] | T: document_class = DocsRegistry.evaluate_fr( # type: ignore get_args(source_type)[0] ) if isinstance(v, dict) or isinstance(v, BaseModel): return parse_obj(document_class, v) return cls(document_class=document_class) return validate @classmethod def __get_pydantic_core_schema__( cls, source_type: Type[Any], handler: GetCoreSchemaHandler ) -> CoreSchema: # NOTE: BackLinks are only virtual fields, they shouldn't be serialized nor appear in the schema. 
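# The JSON-side schema below is therefore just a permissive str -> Any dict,
# and serialization only emits the target collection name via to_dict().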
return json_or_python_schema( python_schema=with_info_plain_validator_function( cls.wrapped_validate(source_type, handler) ), json_schema=dict_schema( keys_schema=str_schema(), values_schema=any_schema(), ), serialization=plain_serializer_function_ser_schema( lambda instance: cls.to_dict(instance), return_schema=dict_schema(), when_used="json-unless-none", ), ) else: @classmethod def __get_validators__(cls): yield cls._validate @classmethod def _validate( cls, v: Union[T, dict[str, Any]], field: ModelField ) -> BackLink[T] | T: document_class = DocsRegistry.evaluate_fr( # type: ignore field.sub_fields[0].type_ ) if isinstance(v, dict) or isinstance(v, BaseModel): return parse_obj(document_class, v) return cls(document_class=document_class) @classmethod def __modify_schema__(cls, field_schema: Dict[str, Any]): field_schema.clear() field_schema.update( { "anyOf": [ { "properties": { "id": {"type": "string", "title": "Id"}, "collection": { "type": "string", "title": "Collection", }, }, "type": "object", "required": ["id", "collection"], }, {"type": "object"}, ], } ) def to_dict(self) -> dict[str, str]: document_class = DocsRegistry.evaluate_fr(self.document_class) # type: ignore return {"collection": document_class.get_collection_name()} if not IS_PYDANTIC_V2: ENCODERS_BY_TYPE[BackLink] = lambda o: o.to_dict() class IndexModelField: def __init__(self, index: IndexModel): self.index = index self.name = index.document["name"] self.fields = tuple(sorted(self.index.document["key"])) self.options = tuple( sorted( (k, v) for k, v in self.index.document.items() if k not in ["key", "v"] ) ) def __eq__(self, other): return self.fields == other.fields and self.options == other.options def __repr__(self): return f"IndexModelField({self.name}, {self.fields}, {self.options})" @staticmethod def list_difference( left: List[IndexModelField], right: List[IndexModelField] ): result = [] for index in left: if index not in right: result.append(index) return result @staticmethod def list_to_index_model(left: List[IndexModelField]): return [index.index for index in left] @classmethod def from_motor_index_information(cls, index_info: dict): result = [] for name, details in index_info.items(): fields = details["key"] if ("_id", 1) in fields: continue options = {k: v for k, v in details.items() if k != "key"} index_model = IndexModelField( IndexModel(fields, name=name, **options) ) result.append(index_model) return result def same_fields(self, other: IndexModelField): return self.fields == other.fields @staticmethod def find_index_with_the_same_fields( indexes: List[IndexModelField], index: IndexModelField ): for i in indexes: if i.same_fields(index): return i return None @staticmethod def merge_indexes( left: List[IndexModelField], right: List[IndexModelField] ): left_dict = {index.fields: index for index in left} right_dict = {index.fields: index for index in right} left_dict.update(right_dict) return list(left_dict.values()) @classmethod def _validate(cls, v: Any) -> "IndexModelField": if isinstance(v, IndexModel): return IndexModelField(v) else: return IndexModelField(IndexModel(v)) if IS_PYDANTIC_V2: @classmethod def __get_pydantic_core_schema__( cls, source_type: Type[Any], handler: GetCoreSchemaHandler ) -> CoreSchema: return no_info_plain_validator_function(cls._validate) else: @classmethod def __get_validators__(cls): yield cls._validate 
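# A minimal usage sketch for the field types above (illustrative only;
# `Owner` and `Car` are hypothetical models, and Beanie must be initialized
# with init_beanie() before queries run):
#
#     from beanie import Document, Indexed, Link
#
#     class Owner(Document):
#         name: Indexed(str, unique=True)
#
#     class Car(Document):
#         plate: str
#         owner: Link[Owner]
#
#     async def demo() -> None:
#         car = await Car.find_one(Car.plate == "X-123", fetch_links=True)
#         if car is None:
#             return
#         # without fetch_links=True, car.owner stays a Link and can be
#         # resolved lazily:
#         await car.fetch_link("owner")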
python-beanie-1.29.0/beanie/odm/interfaces/000077500000000000000000000000001473701376500205075ustar00rootroot00000000000000python-beanie-1.29.0/beanie/odm/interfaces/__init__.py000066400000000000000000000000001473701376500226060ustar00rootroot00000000000000python-beanie-1.29.0/beanie/odm/interfaces/aggregate.py000066400000000000000000000045201473701376500230100ustar00rootroot00000000000000from abc import abstractmethod from typing import Any, Dict, Optional, Type, TypeVar, Union, overload from motor.motor_asyncio import AsyncIOMotorClientSession from pydantic import BaseModel from beanie.odm.queries.aggregation import AggregationQuery from beanie.odm.queries.find import FindMany DocType = TypeVar("DocType", bound="AggregateInterface") DocumentProjectionType = TypeVar("DocumentProjectionType", bound=BaseModel) class AggregateInterface: @classmethod @abstractmethod def find_all(cls) -> FindMany: pass @overload @classmethod def aggregate( cls: Type[DocType], aggregation_pipeline: list, projection_model: None = None, session: Optional[AsyncIOMotorClientSession] = None, ignore_cache: bool = False, **pymongo_kwargs: Any, ) -> AggregationQuery[Dict[str, Any]]: ... @overload @classmethod def aggregate( cls: Type[DocType], aggregation_pipeline: list, projection_model: Type[DocumentProjectionType], session: Optional[AsyncIOMotorClientSession] = None, ignore_cache: bool = False, **pymongo_kwargs: Any, ) -> AggregationQuery[DocumentProjectionType]: ... @classmethod def aggregate( cls: Type[DocType], aggregation_pipeline: list, projection_model: Optional[Type[DocumentProjectionType]] = None, session: Optional[AsyncIOMotorClientSession] = None, ignore_cache: bool = False, **pymongo_kwargs: Any, ) -> Union[ AggregationQuery[Dict[str, Any]], AggregationQuery[DocumentProjectionType], ]: """ Aggregate over collection. Returns [AggregationQuery](query.md#aggregationquery) query object :param aggregation_pipeline: list - aggregation pipeline :param projection_model: Type[BaseModel] :param session: Optional[AsyncIOMotorClientSession] :param ignore_cache: bool :param **pymongo_kwargs: pymongo native parameters for aggregate operation :return: [AggregationQuery](query.md#aggregationquery) """ return cls.find_all().aggregate( aggregation_pipeline=aggregation_pipeline, projection_model=projection_model, session=session, ignore_cache=ignore_cache, **pymongo_kwargs, ) python-beanie-1.29.0/beanie/odm/interfaces/aggregation_methods.py000066400000000000000000000125241473701376500250770ustar00rootroot00000000000000from abc import abstractmethod from typing import Any, Dict, List, Optional, Union, cast from motor.motor_asyncio import AsyncIOMotorClientSession from beanie.odm.fields import ExpressionField class AggregateMethods: """ Aggregate methods """ @abstractmethod def aggregate( self, aggregation_pipeline, projection_model=None, session: Optional[AsyncIOMotorClientSession] = None, ignore_cache: bool = False, ): ... async def sum( self, field: Union[str, ExpressionField], session: Optional[AsyncIOMotorClientSession] = None, ignore_cache: bool = False, ) -> Optional[float]: """ Sum of values of the given field Example: ```python class Sample(Document): price: int count: int sum_count = await Document.find(Sample.price <= 100).sum(Sample.count) ``` :param field: Union[str, ExpressionField] :param session: Optional[AsyncIOMotorClientSession] - motor session :param ignore_cache: bool :return: float - sum. None if there are no items. 
""" pipeline = [ {"$group": {"_id": None, "sum": {"$sum": f"${field}"}}}, {"$project": {"_id": 0, "sum": 1}}, ] # As we did not supply a projection we can safely cast the type (hinting to mypy that we know the type) result: List[Dict[str, Any]] = cast( List[Dict[str, Any]], await self.aggregate( aggregation_pipeline=pipeline, session=session, ignore_cache=ignore_cache, ).to_list(), # type: ignore # TODO: pyright issue, fix ) if not result: return None return result[0]["sum"] async def avg( self, field, session: Optional[AsyncIOMotorClientSession] = None, ignore_cache: bool = False, ) -> Optional[float]: """ Average of values of the given field Example: ```python class Sample(Document): price: int count: int avg_count = await Document.find(Sample.price <= 100).avg(Sample.count) ``` :param field: Union[str, ExpressionField] :param session: Optional[AsyncIOMotorClientSession] - motor session :param ignore_cache: bool :return: Optional[float] - avg. None if there are no items. """ pipeline = [ {"$group": {"_id": None, "avg": {"$avg": f"${field}"}}}, {"$project": {"_id": 0, "avg": 1}}, ] result: List[Dict[str, Any]] = cast( List[Dict[str, Any]], await self.aggregate( aggregation_pipeline=pipeline, session=session, ignore_cache=ignore_cache, ).to_list(), # type: ignore # TODO: pyright issue, fix ) if not result: return None return result[0]["avg"] async def max( self, field: Union[str, ExpressionField], session: Optional[AsyncIOMotorClientSession] = None, ignore_cache: bool = False, ) -> Optional[float]: """ Max of the values of the given field Example: ```python class Sample(Document): price: int count: int max_count = await Document.find(Sample.price <= 100).max(Sample.count) ``` :param field: Union[str, ExpressionField] :param session: Optional[AsyncIOMotorClientSession] - motor session :return: float - max. None if there are no items. """ pipeline = [ {"$group": {"_id": None, "max": {"$max": f"${field}"}}}, {"$project": {"_id": 0, "max": 1}}, ] result: List[Dict[str, Any]] = cast( List[Dict[str, Any]], await self.aggregate( aggregation_pipeline=pipeline, session=session, ignore_cache=ignore_cache, ).to_list(), # type: ignore # TODO: pyright issue, fix ) if not result: return None return result[0]["max"] async def min( self, field: Union[str, ExpressionField], session: Optional[AsyncIOMotorClientSession] = None, ignore_cache: bool = False, ) -> Optional[float]: """ Min of the values of the given field Example: ```python class Sample(Document): price: int count: int min_count = await Document.find(Sample.price <= 100).min(Sample.count) ``` :param field: Union[str, ExpressionField] :param session: Optional[AsyncIOMotorClientSession] - motor session :return: float - min. None if there are no items. 
""" pipeline = [ {"$group": {"_id": None, "min": {"$min": f"${field}"}}}, {"$project": {"_id": 0, "min": 1}}, ] result: List[Dict[str, Any]] = cast( List[Dict[str, Any]], await self.aggregate( aggregation_pipeline=pipeline, session=session, ignore_cache=ignore_cache, ).to_list(), # type: ignore # TODO: pyright issue, fix ) if not result: return None return result[0]["min"] python-beanie-1.29.0/beanie/odm/interfaces/clone.py000066400000000000000000000001451473701376500221610ustar00rootroot00000000000000from copy import deepcopy class CloneInterface: def clone(self): return deepcopy(self) python-beanie-1.29.0/beanie/odm/interfaces/detector.py000066400000000000000000000003631473701376500226740ustar00rootroot00000000000000from enum import Enum class ModelType(str, Enum): Document = "Document" View = "View" UnionDoc = "UnionDoc" class DetectionInterface: @classmethod def get_model_type(cls) -> ModelType: return ModelType.Document python-beanie-1.29.0/beanie/odm/interfaces/find.py000066400000000000000000000424771473701376500220170ustar00rootroot00000000000000from abc import abstractmethod from collections.abc import Iterable from typing import ( TYPE_CHECKING, Any, ClassVar, Dict, List, Mapping, Optional, Tuple, Type, TypeVar, Union, overload, ) from motor.motor_asyncio import AsyncIOMotorClientSession from pydantic import ( BaseModel, ) from beanie.odm.enums import SortDirection from beanie.odm.interfaces.detector import ModelType from beanie.odm.queries.find import FindMany, FindOne from beanie.odm.settings.base import ItemSettings if TYPE_CHECKING: from beanie.odm.documents import Document from beanie.odm.union_doc import UnionDoc from beanie.odm.views import View DocumentProjectionType = TypeVar("DocumentProjectionType", bound=BaseModel) FindType = TypeVar("FindType", bound=Union["Document", "UnionDoc", "View"]) class FindInterface: # Customization # Query builders could be replaced in the inherited classes _find_one_query_class: ClassVar[Type] = FindOne _find_many_query_class: ClassVar[Type] = FindMany _inheritance_inited: bool = False _class_id: ClassVar[Optional[str]] _children: ClassVar[Dict[str, Type]] @classmethod @abstractmethod def get_model_type(cls) -> ModelType: pass @classmethod @abstractmethod def get_settings(cls) -> ItemSettings: pass @overload @classmethod def find_one( # type: ignore cls: Type[FindType], *args: Union[Mapping[str, Any], bool], projection_model: None = None, session: Optional[AsyncIOMotorClientSession] = None, ignore_cache: bool = False, fetch_links: bool = False, with_children: bool = False, nesting_depth: Optional[int] = None, nesting_depths_per_field: Optional[Dict[str, int]] = None, **pymongo_kwargs: Any, ) -> FindOne[FindType]: ... @overload @classmethod def find_one( # type: ignore cls: Type[FindType], *args: Union[Mapping[str, Any], bool], projection_model: Type["DocumentProjectionType"], session: Optional[AsyncIOMotorClientSession] = None, ignore_cache: bool = False, fetch_links: bool = False, with_children: bool = False, nesting_depth: Optional[int] = None, nesting_depths_per_field: Optional[Dict[str, int]] = None, **pymongo_kwargs: Any, ) -> FindOne["DocumentProjectionType"]: ... 
@classmethod def find_one( # type: ignore cls: Type[FindType], *args: Union[Mapping[str, Any], bool], projection_model: Optional[Type["DocumentProjectionType"]] = None, session: Optional[AsyncIOMotorClientSession] = None, ignore_cache: bool = False, fetch_links: bool = False, with_children: bool = False, nesting_depth: Optional[int] = None, nesting_depths_per_field: Optional[Dict[str, int]] = None, **pymongo_kwargs: Any, ) -> Union[FindOne[FindType], FindOne["DocumentProjectionType"]]: """ Find one document by criteria. Returns [FindOne](query.md#findone) query object. When awaited this will either return a document or None if no document exists for the search criteria. :param args: *Mapping[str, Any] - search criteria :param projection_model: Optional[Type[BaseModel]] - projection model :param session: Optional[AsyncIOMotorClientSession] - motor session instance :param ignore_cache: bool :param **pymongo_kwargs: pymongo native parameters for find operation (if Document class contains links, this parameter must fit the respective parameter of the aggregate MongoDB function) :return: [FindOne](query.md#findone) - find query instance """ args = cls._add_class_id_filter(args, with_children) return cls._find_one_query_class(document_model=cls).find_one( *args, projection_model=projection_model, session=session, ignore_cache=ignore_cache, fetch_links=fetch_links, nesting_depth=nesting_depth, nesting_depths_per_field=nesting_depths_per_field, **pymongo_kwargs, ) @overload @classmethod def find_many( # type: ignore cls: Type[FindType], *args: Union[Mapping[str, Any], bool], projection_model: None = None, skip: Optional[int] = None, limit: Optional[int] = None, sort: Union[None, str, List[Tuple[str, SortDirection]]] = None, session: Optional[AsyncIOMotorClientSession] = None, ignore_cache: bool = False, fetch_links: bool = False, with_children: bool = False, lazy_parse: bool = False, nesting_depth: Optional[int] = None, nesting_depths_per_field: Optional[Dict[str, int]] = None, **pymongo_kwargs: Any, ) -> FindMany[FindType]: ... @overload @classmethod def find_many( # type: ignore cls: Type[FindType], *args: Union[Mapping[str, Any], bool], projection_model: Optional[Type["DocumentProjectionType"]] = None, skip: Optional[int] = None, limit: Optional[int] = None, sort: Union[None, str, List[Tuple[str, SortDirection]]] = None, session: Optional[AsyncIOMotorClientSession] = None, ignore_cache: bool = False, fetch_links: bool = False, with_children: bool = False, lazy_parse: bool = False, nesting_depth: Optional[int] = None, nesting_depths_per_field: Optional[Dict[str, int]] = None, **pymongo_kwargs: Any, ) -> FindMany["DocumentProjectionType"]: ... @classmethod def find_many( # type: ignore cls: Type[FindType], *args: Union[Mapping[str, Any], bool], projection_model: Optional[Type["DocumentProjectionType"]] = None, skip: Optional[int] = None, limit: Optional[int] = None, sort: Union[None, str, List[Tuple[str, SortDirection]]] = None, session: Optional[AsyncIOMotorClientSession] = None, ignore_cache: bool = False, fetch_links: bool = False, with_children: bool = False, lazy_parse: bool = False, nesting_depth: Optional[int] = None, nesting_depths_per_field: Optional[Dict[str, int]] = None, **pymongo_kwargs: Any, ) -> Union[FindMany[FindType], FindMany["DocumentProjectionType"]]: """ Find many documents by criteria. Returns [FindMany](query.md#findmany) query object :param args: *Mapping[str, Any] - search criteria :param skip: Optional[int] - The number of documents to omit. 
:param limit: Optional[int] - The maximum number of results to return. :param sort: Union[None, str, List[Tuple[str, SortDirection]]] - A key or a list of (key, direction) pairs specifying the sort order for this query. :param projection_model: Optional[Type[BaseModel]] - projection model :param session: Optional[AsyncIOMotorClientSession] - motor session :param ignore_cache: bool :param lazy_parse: bool :param **pymongo_kwargs: pymongo native parameters for find operation (if Document class contains links, this parameter must fit the respective parameter of the aggregate MongoDB function) :return: [FindMany](query.md#findmany) - query instance """ args = cls._add_class_id_filter(args, with_children) return cls._find_many_query_class(document_model=cls).find_many( *args, sort=sort, skip=skip, limit=limit, projection_model=projection_model, session=session, ignore_cache=ignore_cache, fetch_links=fetch_links, lazy_parse=lazy_parse, nesting_depth=nesting_depth, nesting_depths_per_field=nesting_depths_per_field, **pymongo_kwargs, ) @overload @classmethod def find( # type: ignore cls: Type[FindType], *args: Union[Mapping[str, Any], bool], projection_model: None = None, skip: Optional[int] = None, limit: Optional[int] = None, sort: Union[None, str, List[Tuple[str, SortDirection]]] = None, session: Optional[AsyncIOMotorClientSession] = None, ignore_cache: bool = False, fetch_links: bool = False, with_children: bool = False, lazy_parse: bool = False, nesting_depth: Optional[int] = None, nesting_depths_per_field: Optional[Dict[str, int]] = None, **pymongo_kwargs: Any, ) -> FindMany[FindType]: ... @overload @classmethod def find( # type: ignore cls: Type[FindType], *args: Union[Mapping[str, Any], bool], projection_model: Type["DocumentProjectionType"], skip: Optional[int] = None, limit: Optional[int] = None, sort: Union[None, str, List[Tuple[str, SortDirection]]] = None, session: Optional[AsyncIOMotorClientSession] = None, ignore_cache: bool = False, fetch_links: bool = False, with_children: bool = False, lazy_parse: bool = False, nesting_depth: Optional[int] = None, nesting_depths_per_field: Optional[Dict[str, int]] = None, **pymongo_kwargs: Any, ) -> FindMany["DocumentProjectionType"]: ... 
@classmethod def find( # type: ignore cls: Type[FindType], *args: Union[Mapping[str, Any], bool], projection_model: Optional[Type["DocumentProjectionType"]] = None, skip: Optional[int] = None, limit: Optional[int] = None, sort: Union[None, str, List[Tuple[str, SortDirection]]] = None, session: Optional[AsyncIOMotorClientSession] = None, ignore_cache: bool = False, fetch_links: bool = False, with_children: bool = False, lazy_parse: bool = False, nesting_depth: Optional[int] = None, nesting_depths_per_field: Optional[Dict[str, int]] = None, **pymongo_kwargs: Any, ) -> Union[FindMany[FindType], FindMany["DocumentProjectionType"]]: """ The same as find_many """ return cls.find_many( *args, skip=skip, limit=limit, sort=sort, projection_model=projection_model, session=session, ignore_cache=ignore_cache, fetch_links=fetch_links, with_children=with_children, lazy_parse=lazy_parse, nesting_depth=nesting_depth, nesting_depths_per_field=nesting_depths_per_field, **pymongo_kwargs, ) @overload @classmethod def find_all( # type: ignore cls: Type[FindType], skip: Optional[int] = None, limit: Optional[int] = None, sort: Union[None, str, List[Tuple[str, SortDirection]]] = None, projection_model: None = None, session: Optional[AsyncIOMotorClientSession] = None, ignore_cache: bool = False, with_children: bool = False, lazy_parse: bool = False, nesting_depth: Optional[int] = None, nesting_depths_per_field: Optional[Dict[str, int]] = None, **pymongo_kwargs: Any, ) -> FindMany[FindType]: ... @overload @classmethod def find_all( # type: ignore cls: Type[FindType], skip: Optional[int] = None, limit: Optional[int] = None, sort: Union[None, str, List[Tuple[str, SortDirection]]] = None, projection_model: Optional[Type["DocumentProjectionType"]] = None, session: Optional[AsyncIOMotorClientSession] = None, ignore_cache: bool = False, with_children: bool = False, lazy_parse: bool = False, nesting_depth: Optional[int] = None, nesting_depths_per_field: Optional[Dict[str, int]] = None, **pymongo_kwargs: Any, ) -> FindMany["DocumentProjectionType"]: ... @classmethod def find_all( # type: ignore cls: Type[FindType], skip: Optional[int] = None, limit: Optional[int] = None, sort: Union[None, str, List[Tuple[str, SortDirection]]] = None, projection_model: Optional[Type["DocumentProjectionType"]] = None, session: Optional[AsyncIOMotorClientSession] = None, ignore_cache: bool = False, with_children: bool = False, lazy_parse: bool = False, nesting_depth: Optional[int] = None, nesting_depths_per_field: Optional[Dict[str, int]] = None, **pymongo_kwargs: Any, ) -> Union[FindMany[FindType], FindMany["DocumentProjectionType"]]: """ Get all the documents :param skip: Optional[int] - The number of documents to omit. :param limit: Optional[int] - The maximum number of results to return. :param sort: Union[None, str, List[Tuple[str, SortDirection]]] - A key or a list of (key, direction) pairs specifying the sort order for this query. 
:param projection_model: Optional[Type[BaseModel]] - projection model :param session: Optional[AsyncIOMotorClientSession] - motor session :param **pymongo_kwargs: pymongo native parameters for find operation (if Document class contains links, this parameter must fit the respective parameter of the aggregate MongoDB function) :return: [FindMany](query.md#findmany) - query instance """ return cls.find_many( {}, skip=skip, limit=limit, sort=sort, projection_model=projection_model, session=session, ignore_cache=ignore_cache, with_children=with_children, lazy_parse=lazy_parse, nesting_depth=nesting_depth, nesting_depths_per_field=nesting_depths_per_field, **pymongo_kwargs, ) @overload @classmethod def all( # type: ignore cls: Type[FindType], projection_model: None = None, skip: Optional[int] = None, limit: Optional[int] = None, sort: Union[None, str, List[Tuple[str, SortDirection]]] = None, session: Optional[AsyncIOMotorClientSession] = None, ignore_cache: bool = False, with_children: bool = False, lazy_parse: bool = False, nesting_depth: Optional[int] = None, nesting_depths_per_field: Optional[Dict[str, int]] = None, **pymongo_kwargs: Any, ) -> FindMany[FindType]: ... @overload @classmethod def all( # type: ignore cls: Type[FindType], projection_model: Type["DocumentProjectionType"], skip: Optional[int] = None, limit: Optional[int] = None, sort: Union[None, str, List[Tuple[str, SortDirection]]] = None, session: Optional[AsyncIOMotorClientSession] = None, ignore_cache: bool = False, with_children: bool = False, lazy_parse: bool = False, nesting_depth: Optional[int] = None, nesting_depths_per_field: Optional[Dict[str, int]] = None, **pymongo_kwargs: Any, ) -> FindMany["DocumentProjectionType"]: ... @classmethod def all( # type: ignore cls: Type[FindType], projection_model: Optional[Type["DocumentProjectionType"]] = None, skip: Optional[int] = None, limit: Optional[int] = None, sort: Union[None, str, List[Tuple[str, SortDirection]]] = None, session: Optional[AsyncIOMotorClientSession] = None, ignore_cache: bool = False, with_children: bool = False, lazy_parse: bool = False, nesting_depth: Optional[int] = None, nesting_depths_per_field: Optional[Dict[str, int]] = None, **pymongo_kwargs: Any, ) -> Union[FindMany[FindType], FindMany["DocumentProjectionType"]]: """ the same as find_all """ return cls.find_all( skip=skip, limit=limit, sort=sort, projection_model=projection_model, session=session, ignore_cache=ignore_cache, with_children=with_children, lazy_parse=lazy_parse, nesting_depth=nesting_depth, nesting_depths_per_field=nesting_depths_per_field, **pymongo_kwargs, ) @classmethod async def count(cls) -> int: """ Number of documents in the collections The same as find_all().count() :return: int """ return await cls.find_all().count() # type: ignore @classmethod def _add_class_id_filter(cls, args: Tuple, with_children: bool = False): # skip if _class_id is already added if any( ( True for a in args if isinstance(a, Iterable) and cls.get_settings().class_id in a ) ): return args if ( cls.get_model_type() == ModelType.Document and cls._inheritance_inited ): if not with_children: args += ({cls.get_settings().class_id: cls._class_id},) else: args += ( { cls.get_settings().class_id: { "$in": [cls._class_id] + [cname for cname in cls._children.keys()] } }, ) if cls.get_settings().union_doc: args += ( { cls.get_settings().class_id: cls.get_settings().union_doc_alias }, ) return args 
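

# --- Illustrative usage sketch (editor's addition, not part of the library) ---
# A hedged example of the query-building methods above. Product and
# ShortProduct are hypothetical models, and init_beanie(...) is assumed.
from beanie import Document
from pydantic import BaseModel


class Product(Document):
    name: str
    price: float


class ShortProduct(BaseModel):
    name: str


async def find_examples() -> None:
    # find() builds a FindMany query; awaiting to_list() executes it
    cheap = await Product.find(Product.price < 10).to_list()

    # projection_model reshapes every result into the given Pydantic model
    names = await Product.find_all(projection_model=ShortProduct).to_list()

    # find_one() is awaitable directly and resolves to a document or None
    first = await Product.find_one(Product.name == "Tea")
    print(len(cheap), len(names), first)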
python-beanie-1.29.0/beanie/odm/interfaces/getters.py000066400000000000000000000012401473701376500225330ustar00rootroot00000000000000from abc import abstractmethod from motor.motor_asyncio import AsyncIOMotorCollection from beanie.odm.settings.base import ItemSettings class OtherGettersInterface: @classmethod @abstractmethod def get_settings(cls) -> ItemSettings: pass @classmethod def get_motor_collection(cls) -> AsyncIOMotorCollection: return cls.get_settings().motor_collection @classmethod def get_collection_name(cls) -> str: return cls.get_settings().name # type: ignore @classmethod def get_bson_encoders(cls): return cls.get_settings().bson_encoders @classmethod def get_link_fields(cls): return None python-beanie-1.29.0/beanie/odm/interfaces/inheritance.py000066400000000000000000000007001473701376500233470ustar00rootroot00000000000000from typing import ( ClassVar, Dict, Optional, Type, ) class InheritanceInterface: _children: ClassVar[Dict[str, Type]] _parent: ClassVar[Optional[Type]] _inheritance_inited: ClassVar[bool] _class_id: ClassVar[Optional[str]] = None @classmethod def add_child(cls, name: str, clas: Type): cls._children[name] = clas if cls._parent is not None: cls._parent.add_child(name, clas) python-beanie-1.29.0/beanie/odm/interfaces/session.py000066400000000000000000000007571473701376500225550ustar00rootroot00000000000000from typing import Optional from motor.motor_asyncio import AsyncIOMotorClientSession class SessionMethods: """ Session methods """ def set_session(self, session: Optional[AsyncIOMotorClientSession] = None): """ Set motor session :param session: Optional[AsyncIOMotorClientSession] - motor session :return: """ if session is not None: self.session: Optional[AsyncIOMotorClientSession] = session return self python-beanie-1.29.0/beanie/odm/interfaces/setters.py000066400000000000000000000012551473701376500225550ustar00rootroot00000000000000from typing import ClassVar, Optional from beanie.odm.settings.document import DocumentSettings class SettersInterface: _document_settings: ClassVar[Optional[DocumentSettings]] @classmethod def set_collection(cls, collection): """ Collection setter """ cls._document_settings.motor_collection = collection @classmethod def set_database(cls, database): """ Database setter """ cls._document_settings.motor_db = database @classmethod def set_collection_name(cls, name: str): """ Collection name setter """ cls._document_settings.name = name # type: ignore python-beanie-1.29.0/beanie/odm/interfaces/update.py000066400000000000000000000061501473701376500223450ustar00rootroot00000000000000from abc import abstractmethod from datetime import datetime from typing import Any, Dict, Mapping, Optional, Union from motor.motor_asyncio import AsyncIOMotorClientSession from beanie.odm.bulk import BulkWriter from beanie.odm.fields import ExpressionField from beanie.odm.operators.update.general import ( CurrentDate, Inc, Set, ) class UpdateMethods: """ Update methods """ @abstractmethod def update( self, *args: Mapping[str, Any], session: Optional[AsyncIOMotorClientSession] = None, bulk_writer: Optional[BulkWriter] = None, **kwargs: Any, ): return self def set( self, expression: Dict[Union[ExpressionField, str, Any], Any], session: Optional[AsyncIOMotorClientSession] = None, bulk_writer: Optional[BulkWriter] = None, **kwargs: Any, ): """ Set values Example: ```python class Sample(Document): one: int await Document.find(Sample.one == 1).set({Sample.one: 100}) ``` Uses [Set operator](operators/update.md#set) :param expression: 
Dict[Union[ExpressionField, str, Any], Any] - keys and values to set :param session: Optional[AsyncIOMotorClientSession] - motor session :param bulk_writer: Optional[BulkWriter] - bulk writer :return: self """ return self.update( Set(expression), session=session, bulk_writer=bulk_writer, **kwargs ) def current_date( self, expression: Dict[Union[datetime, ExpressionField, str], Any], session: Optional[AsyncIOMotorClientSession] = None, bulk_writer: Optional[BulkWriter] = None, **kwargs: Any, ): """ Set current date Uses [CurrentDate operator](operators/update.md#currentdate) :param expression: Dict[Union[datetime, ExpressionField, str], Any] :param session: Optional[AsyncIOMotorClientSession] - motor session :param bulk_writer: Optional[BulkWriter] - bulk writer :return: self """ return self.update( CurrentDate(expression), session=session, bulk_writer=bulk_writer, **kwargs, ) def inc( self, expression: Dict[Union[ExpressionField, float, int, str], Any], session: Optional[AsyncIOMotorClientSession] = None, bulk_writer: Optional[BulkWriter] = None, **kwargs: Any, ): """ Increment Example: ```python class Sample(Document): one: int await Document.find(Sample.one == 1).inc({Sample.one: 100}) ``` Uses [Inc operator](operators/update.md#inc) :param expression: Dict[Union[ExpressionField, float, int, str], Any] :param session: Optional[AsyncIOMotorClientSession] - motor session :param bulk_writer: Optional[BulkWriter] - bulk writer :return: self """ return self.update( Inc(expression), session=session, bulk_writer=bulk_writer, **kwargs ) python-beanie-1.29.0/beanie/odm/models.py000066400000000000000000000007201473701376500202200ustar00rootroot00000000000000from typing import List from pydantic import BaseModel from beanie.odm.enums import InspectionStatuses from beanie.odm.fields import PydanticObjectId class InspectionError(BaseModel): """ Inspection error details """ document_id: PydanticObjectId error: str class InspectionResult(BaseModel): """ Collection inspection result """ status: InspectionStatuses = InspectionStatuses.OK errors: List[InspectionError] = [] python-beanie-1.29.0/beanie/odm/operators/000077500000000000000000000000001473701376500204025ustar00rootroot00000000000000python-beanie-1.29.0/beanie/odm/operators/__init__.py000066400000000000000000000014601473701376500225140ustar00rootroot00000000000000from abc import abstractmethod from collections.abc import Mapping from copy import copy, deepcopy from typing import Any, Dict from typing import Mapping as MappingType class BaseOperator(Mapping): """ Base operator. """ @property @abstractmethod def query(self) -> MappingType[str, Any]: ... def __getitem__(self, item: str): return self.query[item] def __iter__(self): return iter(self.query) def __len__(self): return len(self.query) def __repr__(self): return repr(self.query) def __str__(self): return str(self.query) def __copy__(self): return copy(self.query) def __deepcopy__(self, memodict: Dict[str, Any] = {}): return deepcopy(self.query) def copy(self): return copy(self) python-beanie-1.29.0/beanie/odm/operators/find/000077500000000000000000000000001473701376500213225ustar00rootroot00000000000000python-beanie-1.29.0/beanie/odm/operators/find/__init__.py000066400000000000000000000001641473701376500234340ustar00rootroot00000000000000from abc import ABC from beanie.odm.operators import BaseOperator class BaseFindOperator(BaseOperator, ABC): ... 
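

# --- Illustrative sketch (editor's addition, not part of the library) ---
# Because BaseFindOperator keeps BaseOperator's Mapping interface, any find
# operator instance can be passed wherever a plain filter dict is expected.
# A small, hedged demonstration with a literal field name:
from beanie.odm.operators.find.comparison import Eq

expression = Eq("price", 2)
assert dict(expression) == {"price": 2}  # Mapping protocol: __iter__/__getitem__
assert expression["price"] == 2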
python-beanie-1.29.0/beanie/odm/operators/find/array.py000066400000000000000000000042111473701376500230100ustar00rootroot00000000000000from abc import ABC from typing import Any, Optional from beanie.odm.operators.find import BaseFindOperator class BaseFindArrayOperator(BaseFindOperator, ABC): ... class All(BaseFindArrayOperator): """ `$all` array query operator Example: ```python class Sample(Document): results: List[int] All(Sample.results, [80, 85]) ``` Will return query object like ```python {"results": {"$all": [80, 85]}} ``` MongoDB doc: """ def __init__( self, field, values: list, ): self.field = field self.values_list = values @property def query(self): return {self.field: {"$all": self.values_list}} class ElemMatch(BaseFindArrayOperator): """ `$elemMatch` array query operator Example: ```python class Sample(Document): results: List[int] ElemMatch(Sample.results, {"$in": [80, 85]}) ``` Will return query object like ```python {"results": {"$elemMatch": {"$in": [80, 85]}}} ``` MongoDB doc: """ def __init__( self, field, expression: Optional[dict] = None, **kwargs: Any, ): self.field = field if expression is None: self.expression = kwargs else: self.expression = expression @property def query(self): return {self.field: {"$elemMatch": self.expression}} class Size(BaseFindArrayOperator): """ `$size` array query operator Example: ```python class Sample(Document): results: List[int] Size(Sample.results, 2) ``` Will return query object like ```python {"results": {"$size": 2}} ``` MongoDB doc: """ def __init__( self, field, num: int, ): self.field = field self.num = num @property def query(self): return {self.field: {"$size": self.num}} python-beanie-1.29.0/beanie/odm/operators/find/bitwise.py000066400000000000000000000024211473701376500233410ustar00rootroot00000000000000from typing import Union from beanie.odm.fields import ExpressionField from beanie.odm.operators.find import BaseFindOperator class BaseFindBitwiseOperator(BaseFindOperator): operator = "" def __init__(self, field: Union[str, ExpressionField], bitmask): self.field = field self.bitmask = bitmask @property def query(self): return {self.field: {self.operator: self.bitmask}} class BitsAllClear(BaseFindBitwiseOperator): """ `$bitsAllClear` query operator MongoDB doc: """ operator = "$bitsAllClear" class BitsAllSet(BaseFindBitwiseOperator): """ `$bitsAllSet` query operator MongoDB doc: https://docs.mongodb.com/manual/reference/operator/query/bitsAllSet/ """ operator = "$bitsAllSet" class BitsAnyClear(BaseFindBitwiseOperator): """ `$bitsAnyClear` query operator MongoDB doc: https://docs.mongodb.com/manual/reference/operator/query/bitsAnyClear/ """ operator = "$bitsAnyClear" class BitsAnySet(BaseFindBitwiseOperator): """ `$bitsAnySet` query operator MongoDB doc: https://docs.mongodb.com/manual/reference/operator/query/bitsAnySet/ """ operator = "$bitsAnySet" python-beanie-1.29.0/beanie/odm/operators/find/comparison.py000066400000000000000000000067151473701376500240570ustar00rootroot00000000000000from beanie.odm.operators.find import BaseFindOperator class BaseFindComparisonOperator(BaseFindOperator): operator = "" def __init__( self, field, other, ) -> None: self.field = field self.other = other @property def query(self): return {self.field: {self.operator: self.other}} class Eq(BaseFindComparisonOperator): """ `equal` query operator Example: ```python class Product(Document): price: float Eq(Product.price, 2) ``` Will return query object like ```python {"price": 2} ``` MongoDB doc: """ @property def query(self): return 
{self.field: self.other} class GT(BaseFindComparisonOperator): """ `$gt` query operator Example: ```python class Product(Document): price: float GT(Product.price, 2) ``` Will return query object like ```python {"price": {"$gt": 2}} ``` MongoDB doc: """ operator = "$gt" class GTE(BaseFindComparisonOperator): """ `$gte` query operator Example: ```python class Product(Document): price: float GTE(Product.price, 2) ``` Will return query object like ```python {"price": {"$gte": 2}} ``` MongoDB doc: """ operator = "$gte" class In(BaseFindComparisonOperator): """ `$in` query operator Example: ```python class Product(Document): price: float In(Product.price, [2, 3, 4]) ``` Will return query object like ```python {"price": {"$in": [2, 3, 4]}} ``` MongoDB doc: """ operator = "$in" class NotIn(BaseFindComparisonOperator): """ `$nin` query operator Example: ```python class Product(Document): price: float NotIn(Product.price, [2, 3, 4]) ``` Will return query object like ```python {"price": {"$nin": [2, 3, 4]}} ``` MongoDB doc: """ operator = "$nin" class LT(BaseFindComparisonOperator): """ `$lt` query operator Example: ```python class Product(Document): price: float LT(Product.price, 2) ``` Will return query object like ```python {"price": {"$lt": 2}} ``` MongoDB doc: """ operator = "$lt" class LTE(BaseFindComparisonOperator): """ `$lte` query operator Example: ```python class Product(Document): price: float LTE(Product.price, 2) ``` Will return query object like ```python {"price": {"$lte": 2}} ``` MongoDB doc: """ operator = "$lte" class NE(BaseFindComparisonOperator): """ `$ne` query operator Example: ```python class Product(Document): price: float NE(Product.price, 2) ``` Will return query object like ```python {"price": {"$ne": 2}} ``` MongoDB doc: """ operator = "$ne" python-beanie-1.29.0/beanie/odm/operators/find/element.py000066400000000000000000000025301473701376500233250ustar00rootroot00000000000000from abc import ABC from typing import List, Union from beanie.odm.operators.find import BaseFindOperator class BaseFindElementOperator(BaseFindOperator, ABC): ... class Exists(BaseFindElementOperator): """ `$exists` query operator Example: ```python class Product(Document): price: float Exists(Product.price, True) ``` Will return query object like ```python {"price": {"$exists": True}} ``` MongoDB doc: """ def __init__( self, field, value: bool = True, ): self.field = field self.value = value @property def query(self): return {self.field: {"$exists": self.value}} class Type(BaseFindElementOperator): """ `$type` query operator Example: ```python class Product(Document): price: float Type(Product.price, "decimal") ``` Will return query object like ```python {"price": {"$type": "decimal"}} ``` MongoDB doc: """ def __init__(self, field, types: Union[List[str], str]): self.field = field self.types = types @property def query(self): return {self.field: {"$type": self.types}} python-beanie-1.29.0/beanie/odm/operators/find/evaluation.py000066400000000000000000000104571473701376500240520ustar00rootroot00000000000000from abc import ABC from typing import Optional from beanie.odm.operators.find import BaseFindOperator class BaseFindEvaluationOperator(BaseFindOperator, ABC): ... 
class Expr(BaseFindEvaluationOperator):
    """
    `$expr` query operator

    Example:

    ```python
    class Sample(Document):
        one: int
        two: int

    Expr({"$gt": [ "$one" , "$two" ]})
    ```

    Will return query object like

    ```python
    {"$expr": {"$gt": [ "$one" , "$two" ]}}
    ```

    MongoDB doc:
    https://docs.mongodb.com/manual/reference/operator/query/expr/
    """

    def __init__(self, expression: dict):
        self.expression = expression

    @property
    def query(self):
        return {"$expr": self.expression}


class JsonSchema(BaseFindEvaluationOperator):
    """
    `$jsonSchema` query operator

    MongoDB doc:
    https://docs.mongodb.com/manual/reference/operator/query/jsonSchema/
    """

    def __init__(self, expression: dict):
        self.expression = expression

    @property
    def query(self):
        return {"$jsonSchema": self.expression}


class Mod(BaseFindEvaluationOperator):
    """
    `$mod` query operator

    Example:

    ```python
    class Sample(Document):
        one: int

    Mod(Sample.one, 4, 0)
    ```

    Will return query object like

    ```python
    { "one": { "$mod": [ 4, 0 ] } }
    ```

    MongoDB doc:
    https://docs.mongodb.com/manual/reference/operator/query/mod/
    """

    def __init__(self, field, divisor: int, remainder: int):
        self.field = field
        self.divisor = divisor
        self.remainder = remainder

    @property
    def query(self):
        return {self.field: {"$mod": [self.divisor, self.remainder]}}


class RegEx(BaseFindEvaluationOperator):
    """
    `$regex` query operator

    MongoDB doc:
    https://docs.mongodb.com/manual/reference/operator/query/regex/
    """

    def __init__(
        self,
        field,
        pattern: str,
        options: Optional[str] = None,
    ):
        self.field = field
        self.pattern = pattern
        self.options = options

    @property
    def query(self):
        expression = {"$regex": self.pattern}
        if self.options:
            expression["$options"] = self.options
        return {self.field: expression}


class Text(BaseFindEvaluationOperator):
    """
    `$text` query operator

    Example:

    ```python
    class Sample(Document):
        description: Indexed(str, pymongo.TEXT)

    Text("coffee")
    ```

    Will return query object like

    ```python
    {
        "$text": {
            "$search": "coffee" ,
            "$caseSensitive": False,
            "$diacriticSensitive": False
        }
    }
    ```

    MongoDB doc:
    https://docs.mongodb.com/manual/reference/operator/query/text/

    Note: if you need to run a query against Azure Cosmos DB for MongoDB,
    which does not support diacritic sensitivity yet, you can set
    `diacritic_sensitive` argument to `None` to exclude it from the query.
    """

    def __init__(
        self,
        search: str,
        language: Optional[str] = None,
        case_sensitive: bool = False,
        diacritic_sensitive: Optional[bool] = False,
    ):
        """

        :param search: str
        :param language: Optional[str] = None
        :param case_sensitive: bool = False
        :param diacritic_sensitive: Optional[bool] = False
        """
        self.search = search
        self.language = language
        self.case_sensitive = case_sensitive
        self.diacritic_sensitive = diacritic_sensitive

    @property
    def query(self):
        expression = {
            "$text": {
                "$search": self.search,
                "$caseSensitive": self.case_sensitive,
            }
        }
        if self.language:
            expression["$text"]["$language"] = self.language
        if self.diacritic_sensitive is not None:
            expression["$text"]["$diacriticSensitive"] = (
                self.diacritic_sensitive
            )
        return expression


class Where(BaseFindEvaluationOperator):
    """
    `$where` query operator

    MongoDB doc:
    https://docs.mongodb.com/manual/reference/operator/query/where/
    """

    def __init__(self, expression: str):
        self.expression = expression

    @property
    def query(self):
        return {"$where": self.expression}
python-beanie-1.29.0/beanie/odm/operators/find/geospatial.py000066400000000000000000000155131473701376500240310ustar00rootroot00000000000000from abc import ABC
from enum import Enum
from typing import List, Optional

from beanie.odm.operators.find import BaseFindOperator


class BaseFindGeospatialOperator(BaseFindOperator, ABC): ...
class GeoIntersects(BaseFindGeospatialOperator): """ `$geoIntersects` query operator Example: ```python class GeoObject(BaseModel): type: str = "Point" coordinates: Tuple[float, float] class Place(Document): geo: GeoObject class Collection: name = "places" indexes = [ [("geo", pymongo.GEOSPHERE)], # GEO index ] GeoIntersects(Place.geo, "Polygon", [[0,0], [1,1], [3,3]]) ``` Will return query object like ```python { "geo": { "$geoIntersects": { "$geometry": { "type": "Polygon", "coordinates": [[0,0], [1,1], [3,3]], } } } } ``` MongoDB doc: """ def __init__(self, field, geo_type: str, coordinates: List[List[float]]): self.field = field self.geo_type = geo_type self.coordinates = coordinates @property def query(self): return { self.field: { "$geoIntersects": { "$geometry": { "type": self.geo_type, "coordinates": self.coordinates, } } } } class GeoWithinTypes(str, Enum): Polygon = "Polygon" MultiPolygon = "MultiPolygon" class GeoWithin(BaseFindGeospatialOperator): """ `$geoWithin` query operator Example: ```python class GeoObject(BaseModel): type: str = "Point" coordinates: Tuple[float, float] class Place(Document): geo: GeoObject class Collection: name = "places" indexes = [ [("geo", pymongo.GEOSPHERE)], # GEO index ] GeoWithin(Place.geo, "Polygon", [[0,0], [1,1], [3,3]]) ``` Will return query object like ```python { "geo": { "$geoWithin": { "$geometry": { "type": "Polygon", "coordinates": [[0,0], [1,1], [3,3]], } } } } ``` MongoDB doc: """ def __init__( self, field, geo_type: GeoWithinTypes, coordinates: List[List[float]] ): self.field = field self.geo_type = geo_type self.coordinates = coordinates @property def query(self): return { self.field: { "$geoWithin": { "$geometry": { "type": self.geo_type, "coordinates": self.coordinates, } } } } class Box(BaseFindGeospatialOperator): """ `$box` query operator Example: ```python class GeoObject(BaseModel): type: str = "Point" coordinates: Tuple[float, float] class Place(Document): geo: GeoObject class Collection: name = "places" indexes = [ [("geo", pymongo.GEOSPHERE)], # GEO index ] Box(Place.geo, lower_left=[10,12], upper_right=[15,20]) ``` Will return query object like ```python { "geo": { "$geoWithin": { "$box": [[10, 12], [15, 20]] } } } ``` MongoDB doc: """ def __init__( self, field, lower_left: List[float], upper_right: List[float] ): self.field = field self.coordinates = [lower_left, upper_right] @property def query(self): return {self.field: {"$geoWithin": {"$box": self.coordinates}}} class Near(BaseFindGeospatialOperator): """ `$near` query operator Example: ```python class GeoObject(BaseModel): type: str = "Point" coordinates: Tuple[float, float] class Place(Document): geo: GeoObject class Collection: name = "places" indexes = [ [("geo", pymongo.GEOSPHERE)], # GEO index ] Near(Place.geo, 1.2345, 2.3456, min_distance=500) ``` Will return query object like ```python { "geo": { "$near": { "$geometry": { "type": "Point", "coordinates": [1.2345, 2.3456], }, "$maxDistance": 500, } } } ``` MongoDB doc: """ operator = "$near" def __init__( self, field, longitude: float, latitude: float, max_distance: Optional[float] = None, min_distance: Optional[float] = None, ): self.field = field self.longitude = longitude self.latitude = latitude self.max_distance = max_distance self.min_distance = min_distance @property def query(self): expression = { self.field: { self.operator: { "$geometry": { "type": "Point", "coordinates": [self.longitude, self.latitude], }, } } } if self.max_distance: expression[self.field][self.operator]["$maxDistance"] = ( 
self.max_distance ) # type: ignore if self.min_distance: expression[self.field][self.operator]["$minDistance"] = ( self.min_distance ) # type: ignore return expression class NearSphere(Near): """ `$nearSphere` query operator Example: ```python class GeoObject(BaseModel): type: str = "Point" coordinates: Tuple[float, float] class Place(Document): geo: GeoObject class Collection: name = "places" indexes = [ [("geo", pymongo.GEOSPHERE)], # GEO index ] NearSphere(Place.geo, 1.2345, 2.3456, min_distance=500) ``` Will return query object like ```python { "geo": { "$nearSphere": { "$geometry": { "type": "Point", "coordinates": [1.2345, 2.3456], }, "$maxDistance": 500, } } } ``` MongoDB doc: """ operator = "$nearSphere" python-beanie-1.29.0/beanie/odm/operators/find/logical.py000066400000000000000000000071471473701376500233170ustar00rootroot00000000000000from abc import ABC from typing import Any, Dict, Mapping, Union from beanie.odm.operators.find import BaseFindOperator class BaseFindLogicalOperator(BaseFindOperator, ABC): ... class LogicalOperatorForListOfExpressions(BaseFindLogicalOperator): # todo: handle query return typing operator: str = "" def __init__( self, *expressions: Union[ BaseFindOperator, Dict[str, Any], Mapping[str, Any], bool ], ): self.expressions = list(expressions) @property def query(self): if not self.expressions: raise AttributeError("At least one expression must be provided") if len(self.expressions) == 1: return self.expressions[0] return {self.operator: self.expressions} class Or(LogicalOperatorForListOfExpressions): """ `$or` query operator Example: ```python class Product(Document): price: float category: str Or(Product.price<10, Product.category=="Sweets") ``` Will return query object like ```python {"$or": [{"price": {"$lt": 10}}, {"category": "Sweets"}]} ``` MongoDB doc: """ operator = "$or" class And(LogicalOperatorForListOfExpressions): """ `$and` query operator Example: ```python class Product(Document): price: float category: str And(Product.price<10, Product.category=="Sweets") ``` Will return query object like ```python {"$and": [{"price": {"$lt": 10}}, {"category": "Sweets"}]} ``` MongoDB doc: """ operator = "$and" class Nor(BaseFindLogicalOperator): """ `$nor` query operator Example: ```python class Product(Document): price: float category: str Nor(Product.price<10, Product.category=="Sweets") ``` Will return query object like ```python {"$nor": [{"price": {"$lt": 10}}, {"category": "Sweets"}]} ``` MongoDB doc: """ def __init__( self, *expressions: Union[ BaseFindOperator, Dict[str, Any], Mapping[str, Any], bool ], ): self.expressions = list(expressions) @property def query(self): return {"$nor": self.expressions} class Not(BaseFindLogicalOperator): """ `$not` query operator Example: ```python class Product(Document): price: float category: str Not(Product.price<10) ``` Will return query object like ```python {"$not": {"price": {"$lt": 10}}} ``` MongoDB doc: """ def __init__(self, expression: Mapping[str, Any]): self.expression = expression @property def query(self): if len(self.expression) == 1: expression_key = list(self.expression.keys())[0] if expression_key.startswith("$"): raise AttributeError( "Not operator can not be used with operators" ) value = self.expression[expression_key] if isinstance(value, dict): internal_key = list(value.keys())[0] if internal_key.startswith("$"): return {expression_key: {"$not": value}} return {expression_key: {"$not": {"$eq": value}}} raise AttributeError( "Not operator can only be used with one expression" ) 
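

# --- Illustrative sketch (editor's addition, not part of the library) ---
# A hedged demonstration of how the logical operators above compose with the
# comparison operators; the field name is a literal and purely illustrative.
from beanie.odm.operators.find.comparison import GT, LT

assert dict(GT("price", 2)) == {"price": {"$gt": 2}}

# Two or more expressions are wrapped under the logical operator key
price_filter = And(GT("price", 2), LT("price", 10))
assert list(price_filter) == ["$and"]

# A single expression is returned unchanged rather than wrapped
assert dict(And(GT("price", 2))) == {"price": {"$gt": 2}}

# Not rewrites a bare equality into an explicit {"$not": {"$eq": ...}}
assert dict(Not({"price": 10})) == {"price": {"$not": {"$eq": 10}}}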
python-beanie-1.29.0/beanie/odm/operators/update/000077500000000000000000000000001473701376500216645ustar00rootroot00000000000000python-beanie-1.29.0/beanie/odm/operators/update/__init__.py000066400000000000000000000003501473701376500237730ustar00rootroot00000000000000from abc import abstractmethod
from typing import Any, Mapping

from beanie.odm.operators import BaseOperator


class BaseUpdateOperator(BaseOperator):
    @property
    @abstractmethod
    def query(self) -> Mapping[str, Any]: ...
python-beanie-1.29.0/beanie/odm/operators/update/array.py000066400000000000000000000046451473701376500233650ustar00rootroot00000000000000from beanie.odm.operators.update import BaseUpdateOperator


class BaseUpdateArrayOperator(BaseUpdateOperator):
    operator = ""

    def __init__(self, expression):
        self.expression = expression

    @property
    def query(self):
        return {self.operator: self.expression}


class AddToSet(BaseUpdateArrayOperator):
    """
    `$addToSet` update array query operator

    Example:

    ```python
    class Sample(Document):
        results: List[int]

    AddToSet({Sample.results: 2})
    ```

    Will return query object like

    ```python
    {"$addToSet": {"results": 2}}
    ```

    MongoDB docs:
    https://docs.mongodb.com/manual/reference/operator/update/addToSet/
    """

    operator = "$addToSet"


class Pop(BaseUpdateArrayOperator):
    """
    `$pop` update array query operator

    Example:

    ```python
    class Sample(Document):
        results: List[int]

    Pop({Sample.results: -1})
    ```

    Will return query object like

    ```python
    {"$pop": {"results": -1}}
    ```

    MongoDB docs:
    https://docs.mongodb.com/manual/reference/operator/update/pop/
    """

    operator = "$pop"


class Pull(BaseUpdateArrayOperator):
    """
    `$pull` update array query operator

    Example:

    ```python
    class Sample(Document):
        results: List[int]

    Pull(In(Sample.results, [1, 2, 3, 4, 5]))
    ```

    Will return query object like

    ```python
    {"$pull": {"results": {"$in": [1, 2, 3, 4, 5]}}}
    ```

    MongoDB docs:
    https://docs.mongodb.com/manual/reference/operator/update/pull/
    """

    operator = "$pull"


class Push(BaseUpdateArrayOperator):
    """
    `$push` update array query operator

    Example:

    ```python
    class Sample(Document):
        results: List[int]

    Push({Sample.results: 1})
    ```

    Will return query object like

    ```python
    {"$push": { "results": 1}}
    ```

    MongoDB docs:
    https://docs.mongodb.com/manual/reference/operator/update/push/
    """

    operator = "$push"


class PullAll(BaseUpdateArrayOperator):
    """
    `$pullAll` update array query operator

    Example:

    ```python
    class Sample(Document):
        results: List[int]

    PullAll({ Sample.results: [ 0, 5 ] })
    ```

    Will return query object like

    ```python
    {"$pullAll": { "results": [ 0, 5 ] }}
    ```

    MongoDB docs:
    https://docs.mongodb.com/manual/reference/operator/update/pullAll/
    """

    operator = "$pullAll"
python-beanie-1.29.0/beanie/odm/operators/update/bitwise.py000066400000000000000000000007351473701376500237110ustar00rootroot00000000000000from abc import ABC

from beanie.odm.operators.update import BaseUpdateOperator


class BaseUpdateBitwiseOperator(BaseUpdateOperator, ABC): ...
class Bit(BaseUpdateBitwiseOperator):
    """
    `$bit` update query operator

    MongoDB doc:
    https://docs.mongodb.com/manual/reference/operator/update/bit/
    """

    def __init__(self, expression: dict):
        self.expression = expression

    @property
    def query(self):
        return {"$bit": self.expression}
python-beanie-1.29.0/beanie/odm/operators/update/general.py000066400000000000000000000075411473701376500236600ustar00rootroot00000000000000from beanie.odm.operators.update import BaseUpdateOperator


class BaseUpdateGeneralOperator(BaseUpdateOperator):
    operator = ""

    def __init__(self, expression):
        self.expression = expression

    @property
    def query(self):
        return {self.operator: self.expression}


class Set(BaseUpdateGeneralOperator):
    """
    `$set` update query operator

    Example:

    ```python
    class Sample(Document):
        one: int

    Set({Sample.one: 2})
    ```

    Will return query object like

    ```python
    {"$set": {"one": 2}}
    ```

    MongoDB doc:
    https://docs.mongodb.com/manual/reference/operator/update/set/
    """

    operator = "$set"


class SetRevisionId:
    """
    `$set` update helper for Beanie's internal `revision_id` field

    Unlike the other operators in this module, it takes the revision id value
    directly and always targets the `revision_id` field.

    Will return query object like

    ```python
    {"$set": {"revision_id": revision_id}}
    ```

    MongoDB doc:
    https://docs.mongodb.com/manual/reference/operator/update/set/
    """

    def __init__(self, revision_id):
        self.revision_id = revision_id
        self.operator = "$set"
        self.expression = {"revision_id": self.revision_id}

    @property
    def query(self):
        return {self.operator: self.expression}


class CurrentDate(BaseUpdateGeneralOperator):
    """
    `$currentDate` update query operator

    Example:

    ```python
    class Sample(Document):
        ts: datetime

    CurrentDate({Sample.ts: True})
    ```

    Will return query object like

    ```python
    {"$currentDate": {"ts": True}}
    ```

    MongoDB doc:
    https://docs.mongodb.com/manual/reference/operator/update/currentDate/
    """

    operator = "$currentDate"


class Inc(BaseUpdateGeneralOperator):
    """
    `$inc` update query operator

    Example:

    ```python
    class Sample(Document):
        one: int

    Inc({Sample.one: 2})
    ```

    Will return query object like

    ```python
    {"$inc": {"one": 2}}
    ```

    MongoDB doc:
    https://docs.mongodb.com/manual/reference/operator/update/inc/
    """

    operator = "$inc"


class Min(BaseUpdateGeneralOperator):
    """
    `$min` update query operator

    Example:

    ```python
    class Sample(Document):
        one: int

    Min({Sample.one: 2})
    ```

    Will return query object like

    ```python
    {"$min": {"one": 2}}
    ```

    MongoDB doc:
    https://docs.mongodb.com/manual/reference/operator/update/min/
    """

    operator = "$min"


class Max(BaseUpdateGeneralOperator):
    """
    `$max` update query operator

    Example:

    ```python
    class Sample(Document):
        one: int

    Max({Sample.one: 2})
    ```

    Will return query object like

    ```python
    {"$max": {"one": 2}}
    ```

    MongoDB doc:
    https://docs.mongodb.com/manual/reference/operator/update/max/
    """

    operator = "$max"


class Mul(BaseUpdateGeneralOperator):
    """
    `$mul` update query operator

    Example:

    ```python
    class Sample(Document):
        one: int

    Mul({Sample.one: 2})
    ```

    Will return query object like

    ```python
    {"$mul": {"one": 2}}
    ```

    MongoDB doc:
    https://docs.mongodb.com/manual/reference/operator/update/mul/
    """

    operator = "$mul"


class Rename(BaseUpdateGeneralOperator):
    """
    `$rename` update query operator

    MongoDB doc:
    https://docs.mongodb.com/manual/reference/operator/update/rename/
    """

    operator = "$rename"


class SetOnInsert(BaseUpdateGeneralOperator):
    """
    `$setOnInsert` update query operator

    MongoDB doc:
    https://docs.mongodb.com/manual/reference/operator/update/setOnInsert/
    """

    operator = "$setOnInsert"


class Unset(BaseUpdateGeneralOperator):
    """
    `$unset` update query operator

    MongoDB doc:
    https://docs.mongodb.com/manual/reference/operator/update/unset/
    """

    operator = "$unset"
python-beanie-1.29.0/beanie/odm/queries/000077500000000000000000000000001473701376500200415ustar00rootroot00000000000000python-beanie-1.29.0/beanie/odm/queries/__init__.py000066400000000000000000000000001473701376500221400ustar00rootroot00000000000000python-beanie-1.29.0/beanie/odm/queries/aggregation.py000066400000000000000000000063401473701376500227050ustar00rootroot00000000000000from typing import (
    TYPE_CHECKING,
    Any,
    Generic,
    List,
    Mapping,
    Optional,
    Type,
    TypeVar,
)

from motor.core import AgnosticCommandCursor
from pydantic import BaseModel

from beanie.odm.cache import LRUCache
from beanie.odm.interfaces.clone import
CloneInterface from beanie.odm.interfaces.session import SessionMethods from beanie.odm.queries.cursor import BaseCursorQuery from beanie.odm.utils.projection import get_projection if TYPE_CHECKING: from beanie.odm.documents import DocType AggregationProjectionType = TypeVar("AggregationProjectionType") class AggregationQuery( Generic[AggregationProjectionType], BaseCursorQuery[AggregationProjectionType], SessionMethods, CloneInterface, ): """ Aggregation Query """ def __init__( self, document_model: Type["DocType"], aggregation_pipeline: List[Mapping[str, Any]], find_query: Mapping[str, Any], projection_model: Optional[Type[BaseModel]] = None, ignore_cache: bool = False, **pymongo_kwargs: Any, ): self.aggregation_pipeline: List[Mapping[str, Any]] = ( aggregation_pipeline ) self.document_model = document_model self.projection_model = projection_model self.find_query = find_query self.session = None self.ignore_cache = ignore_cache self.pymongo_kwargs = pymongo_kwargs @property def _cache_key(self) -> str: return LRUCache.create_key( { "type": "Aggregation", "filter": self.find_query, "pipeline": self.aggregation_pipeline, "projection": get_projection(self.projection_model) if self.projection_model else None, } ) def _get_cache(self): if ( self.document_model.get_settings().use_cache and self.ignore_cache is False ): return self.document_model._cache.get(self._cache_key) # type: ignore else: return None def _set_cache(self, data): if ( self.document_model.get_settings().use_cache and self.ignore_cache is False ): return self.document_model._cache.set(self._cache_key, data) # type: ignore def get_aggregation_pipeline( self, ) -> List[Mapping[str, Any]]: match_pipeline: List[Mapping[str, Any]] = ( [{"$match": self.find_query}] if self.find_query else [] ) projection_pipeline: List[Mapping[str, Any]] = [] if self.projection_model: projection = get_projection(self.projection_model) if projection is not None: projection_pipeline = [{"$project": projection}] return match_pipeline + self.aggregation_pipeline + projection_pipeline @property def motor_cursor(self) -> AgnosticCommandCursor: aggregation_pipeline = self.get_aggregation_pipeline() return self.document_model.get_motor_collection().aggregate( aggregation_pipeline, session=self.session, **self.pymongo_kwargs ) def get_projection_model(self) -> Optional[Type[BaseModel]]: return self.projection_model python-beanie-1.29.0/beanie/odm/queries/cursor.py000066400000000000000000000043471473701376500217400ustar00rootroot00000000000000from abc import abstractmethod from typing import ( Any, Dict, Generic, List, Optional, Type, TypeVar, cast, ) from pydantic.main import BaseModel from beanie.odm.utils.parsing import parse_obj CursorResultType = TypeVar("CursorResultType") class BaseCursorQuery(Generic[CursorResultType]): """ BaseCursorQuery class. Wrapper over AsyncIOMotorCursor, which parse result with model """ cursor = None lazy_parse = False @abstractmethod def get_projection_model(self) -> Optional[Type[BaseModel]]: ... @property @abstractmethod def motor_cursor(self): ... def _cursor_params(self): ... 
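
    # --- Illustrative note (editor's addition, not part of the library) ---
    # Subclasses supply motor_cursor; this base class then supports both lazy
    # and eager consumption. Hedged sketch with a hypothetical Product model:
    #
    #     async for product in Product.find(Product.price < 10):
    #         ...  # items are parsed one by one via __anext__
    #
    #     products = await Product.find(Product.price < 10).to_list()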
    def __aiter__(self):
        if self.cursor is None:
            self.cursor = self.motor_cursor
        return self

    async def __anext__(self) -> CursorResultType:
        if self.cursor is None:
            raise RuntimeError("cursor was not set")
        next_item = await self.cursor.__anext__()
        projection = self.get_projection_model()
        if projection is None:
            return next_item
        return parse_obj(projection, next_item, lazy_parse=self.lazy_parse)  # type: ignore

    @abstractmethod
    def _get_cache(self) -> List[Dict[str, Any]]: ...

    @abstractmethod
    def _set_cache(self, data): ...

    async def to_list(
        self, length: Optional[int] = None
    ) -> List[CursorResultType]:  # noqa
        """
        Get list of documents

        :param length: Optional[int] - length of the list
        :return: Union[List[BaseModel], List[Dict[str, Any]]]
        """
        cursor = self.motor_cursor
        if cursor is None:
            raise RuntimeError("self.motor_cursor was not set")
        motor_list: List[Dict[str, Any]] = self._get_cache()
        if motor_list is None:
            motor_list = await cursor.to_list(length)
            self._set_cache(motor_list)
        projection = self.get_projection_model()
        if projection is not None:
            return cast(
                List[CursorResultType],
                [
                    parse_obj(projection, i, lazy_parse=self.lazy_parse)
                    for i in motor_list
                ],
            )
        return cast(List[CursorResultType], motor_list)
python-beanie-1.29.0/beanie/odm/queries/delete.py000066400000000000000000000047431473701376500216650ustar00rootroot00000000000000from typing import TYPE_CHECKING, Any, Dict, Generator, Mapping, Optional, Type

from motor.motor_asyncio import AsyncIOMotorClientSession
from pymongo import DeleteMany as DeleteManyPyMongo
from pymongo import DeleteOne as DeleteOnePyMongo
from pymongo.results import DeleteResult

from beanie.odm.bulk import BulkWriter
from beanie.odm.interfaces.clone import CloneInterface
from beanie.odm.interfaces.session import SessionMethods

if TYPE_CHECKING:
    from beanie.odm.documents import DocType


class DeleteQuery(SessionMethods, CloneInterface):
    """
    Deletion Query
    """

    def __init__(
        self,
        document_model: Type["DocType"],
        find_query: Mapping[str, Any],
        bulk_writer: Optional[BulkWriter] = None,
        **pymongo_kwargs: Any,
    ):
        self.document_model = document_model
        self.find_query = find_query
        self.session: Optional[AsyncIOMotorClientSession] = None
        self.bulk_writer = bulk_writer
        self.pymongo_kwargs: Dict[str, Any] = pymongo_kwargs


class DeleteMany(DeleteQuery):
    def __await__(
        self,
    ) -> Generator[DeleteResult, None, Optional[DeleteResult]]:
        """
        Run the query
        :return:
        """
        if self.bulk_writer is None:
            return (
                yield from self.document_model.get_motor_collection()
                .delete_many(
                    self.find_query,
                    session=self.session,
                    **self.pymongo_kwargs,
                )
                .__await__()
            )
        else:
            self.bulk_writer.add_operation(
                self.document_model,
                DeleteManyPyMongo(self.find_query, **self.pymongo_kwargs),
            )
            return None


class DeleteOne(DeleteQuery):
    def __await__(
        self,
    ) -> Generator[DeleteResult, None, Optional[DeleteResult]]:
        """
        Run the query
        :return:
        """
        if self.bulk_writer is None:
            return (
                yield from self.document_model.get_motor_collection()
                .delete_one(
                    self.find_query,
                    session=self.session,
                    **self.pymongo_kwargs,
                )
                .__await__()
            )
        else:
            # Pass pymongo kwargs into the pymongo DeleteOne operation itself,
            # mirroring DeleteMany above (they previously leaked into the
            # add_operation call).
            self.bulk_writer.add_operation(
                self.document_model,
                DeleteOnePyMongo(self.find_query, **self.pymongo_kwargs),
            )
            return None
python-beanie-1.29.0/beanie/odm/queries/find.py000066400000000000000000001073031473701376500213370ustar00rootroot00000000000000from typing import (
    TYPE_CHECKING,
    Any,
    Callable,
    Coroutine,
    Dict,
    Generator,
    Generic,
    List,
    Mapping,
    Optional,
    Tuple,
    Type,
    TypeVar,
    Union,
    cast,
    overload,
)

from motor.motor_asyncio import AsyncIOMotorClientSession
from pydantic
import BaseModel from pymongo import ReplaceOne from pymongo.results import UpdateResult from beanie.exceptions import DocumentNotFound from beanie.odm.bulk import BulkWriter from beanie.odm.cache import LRUCache from beanie.odm.enums import SortDirection from beanie.odm.interfaces.aggregation_methods import AggregateMethods from beanie.odm.interfaces.clone import CloneInterface from beanie.odm.interfaces.session import SessionMethods from beanie.odm.interfaces.update import UpdateMethods from beanie.odm.operators.find.logical import And from beanie.odm.queries.aggregation import AggregationQuery from beanie.odm.queries.cursor import BaseCursorQuery from beanie.odm.queries.delete import ( DeleteMany, DeleteOne, ) from beanie.odm.queries.update import ( UpdateMany, UpdateOne, UpdateQuery, UpdateResponse, ) from beanie.odm.utils.dump import get_dict from beanie.odm.utils.encoder import Encoder from beanie.odm.utils.find import construct_lookup_queries, split_text_query from beanie.odm.utils.parsing import parse_obj from beanie.odm.utils.projection import get_projection from beanie.odm.utils.relations import convert_ids if TYPE_CHECKING: from beanie.odm.documents import DocType FindQueryProjectionType = TypeVar("FindQueryProjectionType", bound=BaseModel) FindQueryResultType = TypeVar("FindQueryResultType", bound=BaseModel) class FindQuery( Generic[FindQueryResultType], UpdateMethods, SessionMethods, CloneInterface ): """ Find Query base class """ UpdateQueryType: Union[ Type[UpdateQuery], Type[UpdateMany], Type[UpdateOne] ] = UpdateQuery DeleteQueryType: Union[Type[DeleteOne], Type[DeleteMany]] = DeleteMany AggregationQueryType = AggregationQuery def __init__(self, document_model: Type["DocType"]): self.document_model = document_model self.find_expressions: List[Mapping[str, Any]] = [] self.projection_model: Type[FindQueryResultType] = cast( Type[FindQueryResultType], self.document_model ) self.session = None self.encoders: Dict[Any, Callable[[Any], Any]] = {} self.ignore_cache: bool = False self.encoders = self.document_model.get_bson_encoders() self.fetch_links: bool = False self.pymongo_kwargs: Dict[str, Any] = {} self.lazy_parse = False self.nesting_depth: Optional[int] = None self.nesting_depths_per_field: Optional[Dict[str, int]] = None def prepare_find_expressions(self): if self.document_model.get_link_fields() is not None: for i, query in enumerate(self.find_expressions): self.find_expressions[i] = convert_ids( query, doc=self.document_model, # type: ignore fetch_links=self.fetch_links, ) def get_filter_query(self) -> Mapping[str, Any]: """ Returns: MongoDB filter query """ self.prepare_find_expressions() if self.find_expressions: return Encoder(custom_encoders=self.encoders).encode( And(*self.find_expressions).query ) else: return {} def delete( self, session: Optional[AsyncIOMotorClientSession] = None, bulk_writer: Optional[BulkWriter] = None, **pymongo_kwargs: Any, ) -> Union[DeleteOne, DeleteMany]: """ Provide search criteria to the Delete query :param session: Optional[AsyncIOMotorClientSession] :return: Union[DeleteOne, DeleteMany] """ self.set_session(session=session) return self.DeleteQueryType( document_model=self.document_model, find_query=self.get_filter_query(), bulk_writer=bulk_writer, **pymongo_kwargs, ).set_session(session=session) def project(self, projection_model): """ Apply projection parameter :param projection_model: Optional[Type[BaseModel]] - projection model :return: self """ if projection_model is not None: self.projection_model = projection_model return self 
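    # The methods above compose: expressions collected in `find_expressions`
    # are merged with `And` inside `get_filter_query()` before being sent to
    # the driver. A minimal sketch, assuming a `Product` Document subclass
    # (the resulting filter shown in the comment is approximate):
    #
    #     query = Product.find(Product.category == "chocolate")
    #     query.get_filter_query()  # -> {"category": "chocolate"}
    #     await query.delete()      # DeleteMany built from the same filter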
def get_projection_model(self) -> Type[FindQueryResultType]: return self.projection_model async def count(self) -> int: """ Number of found documents :return: int """ kwargs = {} if isinstance(self, FindMany): if self.limit_number: kwargs["limit"] = self.limit_number if self.skip_number: kwargs["skip"] = self.skip_number return ( await self.document_model.get_motor_collection().count_documents( self.get_filter_query(), session=self.session, **kwargs ) ) async def exists(self) -> bool: """ If find query will return anything :return: bool """ return await self.count() > 0 class FindMany( FindQuery[FindQueryResultType], BaseCursorQuery[FindQueryResultType], AggregateMethods, ): """ Find Many query class """ UpdateQueryType = UpdateMany DeleteQueryType = DeleteMany def __init__(self, document_model: Type["DocType"]): super(FindMany, self).__init__(document_model=document_model) self.sort_expressions: List[Tuple[str, SortDirection]] = [] self.skip_number: int = 0 self.limit_number: int = 0 @overload def find_many( self: "FindMany[FindQueryResultType]", *args: Union[Mapping[str, Any], bool], projection_model: None = None, skip: Optional[int] = None, limit: Optional[int] = None, sort: Union[None, str, List[Tuple[str, SortDirection]]] = None, session: Optional[AsyncIOMotorClientSession] = None, ignore_cache: bool = False, fetch_links: bool = False, lazy_parse: bool = False, nesting_depth: Optional[int] = None, nesting_depths_per_field: Optional[Dict[str, int]] = None, **pymongo_kwargs: Any, ) -> "FindMany[FindQueryResultType]": ... @overload def find_many( self: "FindMany[FindQueryResultType]", *args: Union[Mapping[str, Any], bool], projection_model: Optional[Type[FindQueryProjectionType]] = None, skip: Optional[int] = None, limit: Optional[int] = None, sort: Union[None, str, List[Tuple[str, SortDirection]]] = None, session: Optional[AsyncIOMotorClientSession] = None, ignore_cache: bool = False, fetch_links: bool = False, lazy_parse: bool = False, nesting_depth: Optional[int] = None, nesting_depths_per_field: Optional[Dict[str, int]] = None, **pymongo_kwargs: Any, ) -> "FindMany[FindQueryProjectionType]": ... def find_many( self: "FindMany[FindQueryResultType]", *args: Union[Mapping[str, Any], bool], projection_model: Optional[Type[FindQueryProjectionType]] = None, skip: Optional[int] = None, limit: Optional[int] = None, sort: Union[None, str, List[Tuple[str, SortDirection]]] = None, session: Optional[AsyncIOMotorClientSession] = None, ignore_cache: bool = False, fetch_links: bool = False, lazy_parse: bool = False, nesting_depth: Optional[int] = None, nesting_depths_per_field: Optional[Dict[str, int]] = None, **pymongo_kwargs: Any, ) -> Union[ "FindMany[FindQueryResultType]", "FindMany[FindQueryProjectionType]" ]: """ Find many documents by criteria :param args: *Mapping[str, Any] - search criteria :param skip: Optional[int] - The number of documents to omit. :param limit: Optional[int] - The maximum number of results to return. :param sort: Union[None, str, List[Tuple[str, SortDirection]]] - A key or a list of (key, direction) pairs specifying the sort order for this query. 
:param projection_model: Optional[Type[BaseModel]] - projection model :param session: Optional[AsyncIOMotorClientSession] - motor session :param ignore_cache: bool :param **pymongo_kwargs: pymongo native parameters for find operation (if Document class contains links, this parameter must fit the respective parameter of the aggregate MongoDB function) :return: FindMany - query instance """ self.find_expressions += args # type: ignore # bool workaround self.skip(skip) self.limit(limit) self.sort(sort) self.project(projection_model) self.set_session(session=session) self.ignore_cache = ignore_cache self.fetch_links = fetch_links self.pymongo_kwargs.update(pymongo_kwargs) self.nesting_depth = nesting_depth self.nesting_depths_per_field = nesting_depths_per_field if lazy_parse is True: self.lazy_parse = lazy_parse return self # TODO probably merge FindOne and FindMany to one class to avoid this # code duplication @overload def project( self: "FindMany", projection_model: None, ) -> "FindMany[FindQueryResultType]": ... @overload def project( self: "FindMany", projection_model: Type[FindQueryProjectionType], ) -> "FindMany[FindQueryProjectionType]": ... def project( self: "FindMany", projection_model: Optional[Type[FindQueryProjectionType]], ) -> Union[ "FindMany[FindQueryResultType]", "FindMany[FindQueryProjectionType]" ]: """ Apply projection parameter :param projection_model: Optional[Type[BaseModel]] - projection model :return: self """ super().project(projection_model) return self @overload def find( self: "FindMany[FindQueryResultType]", *args: Union[Mapping[str, Any], bool], projection_model: None = None, skip: Optional[int] = None, limit: Optional[int] = None, sort: Union[None, str, List[Tuple[str, SortDirection]]] = None, session: Optional[AsyncIOMotorClientSession] = None, ignore_cache: bool = False, fetch_links: bool = False, lazy_parse: bool = False, nesting_depth: Optional[int] = None, nesting_depths_per_field: Optional[Dict[str, int]] = None, **pymongo_kwargs: Any, ) -> "FindMany[FindQueryResultType]": ... @overload def find( self: "FindMany[FindQueryResultType]", *args: Union[Mapping[str, Any], bool], projection_model: Optional[Type[FindQueryProjectionType]] = None, skip: Optional[int] = None, limit: Optional[int] = None, sort: Union[None, str, List[Tuple[str, SortDirection]]] = None, session: Optional[AsyncIOMotorClientSession] = None, ignore_cache: bool = False, fetch_links: bool = False, lazy_parse: bool = False, nesting_depth: Optional[int] = None, nesting_depths_per_field: Optional[Dict[str, int]] = None, **pymongo_kwargs: Any, ) -> "FindMany[FindQueryProjectionType]": ... 
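    # The overloads above only narrow the return type: passing a
    # `projection_model` switches the query's item type to that model. A
    # minimal sketch, assuming a `Product` Document subclass:
    #
    #     class ProductShortView(BaseModel):
    #         name: str
    #         price: float
    #
    #     views = await Product.find(
    #         Product.price < 10, projection_model=ProductShortView
    #     ).to_list()  # -> List[ProductShortView]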
def find( self: "FindMany[FindQueryResultType]", *args: Union[Mapping[str, Any], bool], projection_model: Optional[Type[FindQueryProjectionType]] = None, skip: Optional[int] = None, limit: Optional[int] = None, sort: Union[None, str, List[Tuple[str, SortDirection]]] = None, session: Optional[AsyncIOMotorClientSession] = None, ignore_cache: bool = False, fetch_links: bool = False, lazy_parse: bool = False, nesting_depth: Optional[int] = None, nesting_depths_per_field: Optional[Dict[str, int]] = None, **pymongo_kwargs: Any, ) -> Union[ "FindMany[FindQueryResultType]", "FindMany[FindQueryProjectionType]" ]: """ The same as `find_many(...)` """ return self.find_many( *args, skip=skip, limit=limit, sort=sort, projection_model=projection_model, session=session, ignore_cache=ignore_cache, fetch_links=fetch_links, lazy_parse=lazy_parse, nesting_depth=nesting_depth, nesting_depths_per_field=nesting_depths_per_field, **pymongo_kwargs, ) def sort( self, *args: Optional[ Union[ str, Tuple[str, SortDirection], List[Tuple[str, SortDirection]] ] ], ) -> "FindMany[FindQueryResultType]": """ Add sort parameters :param args: Union[str, Tuple[str, SortDirection], List[Tuple[str, SortDirection]]] - A key or a tuple (key, direction) or a list of (key, direction) pairs specifying the sort order for this query. :return: self """ for arg in args: if arg is None: pass elif isinstance(arg, list): self.sort(*arg) elif isinstance(arg, tuple): self.sort_expressions.append(arg) elif isinstance(arg, str): if arg.startswith("+"): self.sort_expressions.append( (arg[1:], SortDirection.ASCENDING) ) elif arg.startswith("-"): self.sort_expressions.append( (arg[1:], SortDirection.DESCENDING) ) else: self.sort_expressions.append( (arg, SortDirection.ASCENDING) ) else: raise TypeError("Wrong argument type") return self def skip(self, n: Optional[int]) -> "FindMany[FindQueryResultType]": """ Set skip parameter :param n: int :return: self """ if n is not None: self.skip_number = n return self def limit(self, n: Optional[int]) -> "FindMany[FindQueryResultType]": """ Set limit parameter :param n: int :return: """ if n is not None: self.limit_number = n return self def update( self, *args: Mapping[str, Any], session: Optional[AsyncIOMotorClientSession] = None, bulk_writer: Optional[BulkWriter] = None, **pymongo_kwargs: Any, ): """ Create Update with modifications query and provide search criteria there :param args: *Mapping[str,Any] - the modifications to apply. :param session: Optional[AsyncIOMotorClientSession] :param bulk_writer: Optional[BulkWriter] :return: UpdateMany query """ self.set_session(session) return ( self.UpdateQueryType( document_model=self.document_model, find_query=self.get_filter_query(), ) .update(*args, bulk_writer=bulk_writer, **pymongo_kwargs) .set_session(session=self.session) ) def upsert( self, *args: Mapping[str, Any], on_insert: "DocType", session: Optional[AsyncIOMotorClientSession] = None, **pymongo_kwargs: Any, ): """ Create Update with modifications query and provide search criteria there :param args: *Mapping[str,Any] - the modifications to apply. 
:param on_insert: DocType - document to insert if there is no matched document in the collection :param session: Optional[AsyncIOMotorClientSession] :return: UpdateMany query """ self.set_session(session) return ( self.UpdateQueryType( document_model=self.document_model, find_query=self.get_filter_query(), ) .upsert( *args, on_insert=on_insert, **pymongo_kwargs, ) .set_session(session=self.session) ) def update_many( self, *args: Mapping[str, Any], session: Optional[AsyncIOMotorClientSession] = None, bulk_writer: Optional[BulkWriter] = None, **pymongo_kwargs: Any, ) -> UpdateMany: """ Provide search criteria to the [UpdateMany](query.md#updatemany) query :param args: *Mapping[str,Any] - the modifications to apply. :param session: Optional[AsyncIOMotorClientSession] :return: [UpdateMany](query.md#updatemany) query """ return cast( UpdateMany, self.update( *args, session=session, bulk_writer=bulk_writer, **pymongo_kwargs, ), ) def delete_many( self, session: Optional[AsyncIOMotorClientSession] = None, bulk_writer: Optional[BulkWriter] = None, **pymongo_kwargs: Any, ) -> DeleteMany: """ Provide search criteria to the [DeleteMany](query.md#deletemany) query :param session: :return: [DeleteMany](query.md#deletemany) query """ # We need to cast here to tell mypy that we are sure about the type. # This is because delete may also return a DeleteOne type in general, and mypy can not be sure in this case # See https://mypy.readthedocs.io/en/stable/common_issues.html#narrowing-and-inner-functions return cast( DeleteMany, self.delete( session=session, bulk_writer=bulk_writer, **pymongo_kwargs ), ) @overload def aggregate( self, aggregation_pipeline: List[Any], projection_model: None = None, session: Optional[AsyncIOMotorClientSession] = None, ignore_cache: bool = False, **pymongo_kwargs: Any, ) -> AggregationQuery[Dict[str, Any]]: ... @overload def aggregate( self, aggregation_pipeline: List[Any], projection_model: Type[FindQueryProjectionType], session: Optional[AsyncIOMotorClientSession] = None, ignore_cache: bool = False, **pymongo_kwargs: Any, ) -> AggregationQuery[FindQueryProjectionType]: ... def aggregate( self, aggregation_pipeline: List[Any], projection_model: Optional[Type[FindQueryProjectionType]] = None, session: Optional[AsyncIOMotorClientSession] = None, ignore_cache: bool = False, **pymongo_kwargs: Any, ) -> Union[ AggregationQuery[Dict[str, Any]], AggregationQuery[FindQueryProjectionType], ]: """ Provide search criteria to the [AggregationQuery](query.md#aggregationquery) :param aggregation_pipeline: list - aggregation pipeline. 
MongoDB doc: :param projection_model: Type[BaseModel] - Projection Model :param session: Optional[AsyncIOMotorClientSession] - PyMongo session :param ignore_cache: bool :return:[AggregationQuery](query.md#aggregationquery) """ self.set_session(session=session) return self.AggregationQueryType( self.document_model, self.build_aggregation_pipeline(*aggregation_pipeline), find_query={}, projection_model=projection_model, ignore_cache=ignore_cache, **pymongo_kwargs, ).set_session(session=self.session) @property def _cache_key(self) -> str: return LRUCache.create_key( { "type": "FindMany", "filter": self.get_filter_query(), "sort": self.sort_expressions, "projection": get_projection(self.projection_model), "skip": self.skip_number, "limit": self.limit_number, } ) def _get_cache(self): if ( self.document_model.get_settings().use_cache and self.ignore_cache is False ): return self.document_model._cache.get(self._cache_key) # type: ignore else: return None def _set_cache(self, data): if ( self.document_model.get_settings().use_cache and self.ignore_cache is False ): return self.document_model._cache.set(self._cache_key, data) # type: ignore def build_aggregation_pipeline(self, *extra_stages): if self.fetch_links: aggregation_pipeline: List[Dict[str, Any]] = ( construct_lookup_queries( self.document_model, nesting_depth=self.nesting_depth, nesting_depths_per_field=self.nesting_depths_per_field, ) ) else: aggregation_pipeline = [] filter_query = self.get_filter_query() if filter_query: text_queries, non_text_queries = split_text_query(filter_query) if text_queries: aggregation_pipeline.insert( 0, { "$match": ( {"$and": text_queries} if len(text_queries) > 1 else text_queries[0] ) }, ) if non_text_queries: aggregation_pipeline.append( { "$match": ( {"$and": non_text_queries} if len(non_text_queries) > 1 else non_text_queries[0] ) } ) if extra_stages: aggregation_pipeline.extend(extra_stages) sort_pipeline = {"$sort": {i[0]: i[1] for i in self.sort_expressions}} if sort_pipeline["$sort"]: aggregation_pipeline.append(sort_pipeline) if self.skip_number != 0: aggregation_pipeline.append({"$skip": self.skip_number}) if self.limit_number != 0: aggregation_pipeline.append({"$limit": self.limit_number}) return aggregation_pipeline @property def motor_cursor(self): if self.fetch_links: aggregation_pipeline: List[Dict[str, Any]] = ( self.build_aggregation_pipeline() ) projection = get_projection(self.projection_model) if projection is not None: aggregation_pipeline.append({"$project": projection}) return self.document_model.get_motor_collection().aggregate( aggregation_pipeline, session=self.session, **self.pymongo_kwargs, ) return self.document_model.get_motor_collection().find( filter=self.get_filter_query(), sort=self.sort_expressions, projection=get_projection(self.projection_model), skip=self.skip_number, limit=self.limit_number, session=self.session, **self.pymongo_kwargs, ) async def first_or_none(self) -> Optional[FindQueryResultType]: """ Returns the first found element or None if no elements were found """ res = await self.limit(1).to_list() if not res: return None return res[0] async def count(self) -> int: """ Number of found documents :return: int """ if self.fetch_links: aggregation_pipeline: List[Dict[str, Any]] = ( self.build_aggregation_pipeline() ) aggregation_pipeline.append({"$count": "count"}) result = ( await self.document_model.get_motor_collection() .aggregate( aggregation_pipeline, session=self.session, **self.pymongo_kwargs, ) .to_list(length=1) ) return result[0]["count"] if result 
else 0 return await super(FindMany, self).count() class FindOne(FindQuery[FindQueryResultType]): """ Find One query class """ UpdateQueryType = UpdateOne DeleteQueryType = DeleteOne @overload def project( self: "FindOne[FindQueryResultType]", projection_model: None = None, ) -> "FindOne[FindQueryResultType]": ... @overload def project( self: "FindOne[FindQueryResultType]", projection_model: Type[FindQueryProjectionType], ) -> "FindOne[FindQueryProjectionType]": ... # TODO probably merge FindOne and FindMany to one class to avoid this # code duplication def project( self: "FindOne[FindQueryResultType]", projection_model: Optional[Type[FindQueryProjectionType]] = None, ) -> Union[ "FindOne[FindQueryResultType]", "FindOne[FindQueryProjectionType]" ]: """ Apply projection parameter :param projection_model: Optional[Type[BaseModel]] - projection model :return: self """ super().project(projection_model) return self @overload def find_one( self: "FindOne[FindQueryResultType]", *args: Union[Mapping[str, Any], bool], projection_model: None = None, session: Optional[AsyncIOMotorClientSession] = None, ignore_cache: bool = False, fetch_links: bool = False, nesting_depth: Optional[int] = None, nesting_depths_per_field: Optional[Dict[str, int]] = None, **pymongo_kwargs: Any, ) -> "FindOne[FindQueryResultType]": ... @overload def find_one( self: "FindOne[FindQueryResultType]", *args: Union[Mapping[str, Any], bool], projection_model: Type[FindQueryProjectionType], session: Optional[AsyncIOMotorClientSession] = None, ignore_cache: bool = False, fetch_links: bool = False, nesting_depth: Optional[int] = None, nesting_depths_per_field: Optional[Dict[str, int]] = None, **pymongo_kwargs: Any, ) -> "FindOne[FindQueryProjectionType]": ... def find_one( self: "FindOne[FindQueryResultType]", *args: Union[Mapping[str, Any], bool], projection_model: Optional[Type[FindQueryProjectionType]] = None, session: Optional[AsyncIOMotorClientSession] = None, ignore_cache: bool = False, fetch_links: bool = False, nesting_depth: Optional[int] = None, nesting_depths_per_field: Optional[Dict[str, int]] = None, **pymongo_kwargs: Any, ) -> Union[ "FindOne[FindQueryResultType]", "FindOne[FindQueryProjectionType]" ]: """ Find one document by criteria :param args: *Mapping[str, Any] - search criteria :param projection_model: Optional[Type[BaseModel]] - projection model :param session: Optional[AsyncIOMotorClientSession] - motor session :param ignore_cache: bool :param **pymongo_kwargs: pymongo native parameters for find operation (if Document class contains links, this parameter must fit the respective parameter of the aggregate MongoDB function) :return: FindOne - query instance """ self.find_expressions += args # type: ignore # bool workaround self.project(projection_model) self.set_session(session=session) self.ignore_cache = ignore_cache self.fetch_links = fetch_links self.pymongo_kwargs.update(pymongo_kwargs) self.nesting_depth = nesting_depth self.nesting_depths_per_field = nesting_depths_per_field return self def update( self, *args: Mapping[str, Any], session: Optional[AsyncIOMotorClientSession] = None, bulk_writer: Optional[BulkWriter] = None, response_type: Optional[UpdateResponse] = None, **pymongo_kwargs: Any, ): """ Create Update with modifications query and provide search criteria there :param args: *Mapping[str,Any] - the modifications to apply. 
:param session: Optional[AsyncIOMotorClientSession] :param bulk_writer: Optional[BulkWriter] :param response_type: Optional[UpdateResponse] :return: UpdateMany query """ self.set_session(session) return ( self.UpdateQueryType( document_model=self.document_model, find_query=self.get_filter_query(), ) .update( *args, bulk_writer=bulk_writer, response_type=response_type, **pymongo_kwargs, ) .set_session(session=self.session) ) def upsert( self, *args: Mapping[str, Any], on_insert: "DocType", session: Optional[AsyncIOMotorClientSession] = None, response_type: Optional[UpdateResponse] = None, **pymongo_kwargs: Any, ): """ Create Update with modifications query and provide search criteria there :param args: *Mapping[str,Any] - the modifications to apply. :param on_insert: DocType - document to insert if there is no matched document in the collection :param session: Optional[AsyncIOMotorClientSession] :param response_type: Optional[UpdateResponse] :return: UpdateMany query """ self.set_session(session) return ( self.UpdateQueryType( document_model=self.document_model, find_query=self.get_filter_query(), ) .upsert( *args, on_insert=on_insert, response_type=response_type, **pymongo_kwargs, ) .set_session(session=self.session) ) def update_one( self, *args: Mapping[str, Any], session: Optional[AsyncIOMotorClientSession] = None, bulk_writer: Optional[BulkWriter] = None, response_type: Optional[UpdateResponse] = None, **pymongo_kwargs: Any, ) -> UpdateOne: """ Create [UpdateOne](query.md#updateone) query using modifications and provide search criteria there :param args: *Mapping[str,Any] - the modifications to apply :param session: Optional[AsyncIOMotorClientSession] - PyMongo sessions :param response_type: Optional[UpdateResponse] :return: [UpdateOne](query.md#updateone) query """ return cast( UpdateOne, self.update( *args, session=session, bulk_writer=bulk_writer, response_type=response_type, **pymongo_kwargs, ), ) def delete_one( self, session: Optional[AsyncIOMotorClientSession] = None, bulk_writer: Optional[BulkWriter] = None, **pymongo_kwargs: Any, ) -> DeleteOne: """ Provide search criteria to the [DeleteOne](query.md#deleteone) query :param session: Optional[AsyncIOMotorClientSession] - PyMongo sessions :return: [DeleteOne](query.md#deleteone) query """ # We need to cast here to tell mypy that we are sure about the type. 
# This is because delete may also return a DeleteOne type in general, and mypy can not be sure in this case # See https://mypy.readthedocs.io/en/stable/common_issues.html#narrowing-and-inner-functions return cast( DeleteOne, self.delete( session=session, bulk_writer=bulk_writer, **pymongo_kwargs ), ) async def replace_one( self, document: "DocType", session: Optional[AsyncIOMotorClientSession] = None, bulk_writer: Optional[BulkWriter] = None, ) -> Optional[UpdateResult]: """ Replace found document by provided :param document: Document - document, which will replace the found one :param session: Optional[AsyncIOMotorClientSession] - PyMongo session :param bulk_writer: Optional[BulkWriter] - Beanie bulk writer :return: UpdateResult """ self.set_session(session=session) if bulk_writer is None: result: UpdateResult = ( await self.document_model.get_motor_collection().replace_one( self.get_filter_query(), get_dict( document, to_db=True, exclude={"_id"}, keep_nulls=document.get_settings().keep_nulls, ), session=self.session, ) ) if not result.raw_result["updatedExisting"]: raise DocumentNotFound return result else: bulk_writer.add_operation( self.document_model, ReplaceOne( self.get_filter_query(), get_dict( document, to_db=True, exclude={"_id"}, keep_nulls=document.get_settings().keep_nulls, ), **self.pymongo_kwargs, ), ) return None async def _find_one(self): if self.fetch_links: return await self.document_model.find_many( *self.find_expressions, session=self.session, fetch_links=self.fetch_links, projection_model=self.projection_model, nesting_depth=self.nesting_depth, nesting_depths_per_field=self.nesting_depths_per_field, **self.pymongo_kwargs, ).first_or_none() return await self.document_model.get_motor_collection().find_one( filter=self.get_filter_query(), projection=get_projection(self.projection_model), session=self.session, **self.pymongo_kwargs, ) def __await__( self, ) -> Generator[Coroutine, Any, Optional[FindQueryResultType]]: """ Run the query :return: BaseModel """ # projection = get_projection(self.projection_model) if ( self.document_model.get_settings().use_cache and self.ignore_cache is False ): cache_key = LRUCache.create_key( "FindOne", self.get_filter_query(), self.projection_model, self.session, self.fetch_links, ) document: Dict[str, Any] = self.document_model._cache.get( # type: ignore cache_key ) if document is None: document = yield from self._find_one().__await__() # type: ignore self.document_model._cache.set(cache_key, document) # type: ignore else: document = yield from self._find_one().__await__() # type: ignore if document is None: return None if type(document) is self.projection_model: return cast(FindQueryResultType, document) return cast( FindQueryResultType, parse_obj(self.projection_model, document) ) async def count(self) -> int: """ Count the number of documents matching the query :return: int """ if self.fetch_links: return await self.document_model.find_many( *self.find_expressions, session=self.session, fetch_links=self.fetch_links, **self.pymongo_kwargs, ).count() return await super(FindOne, self).count() python-beanie-1.29.0/beanie/odm/queries/update.py000066400000000000000000000304271473701376500217030ustar00rootroot00000000000000from abc import abstractmethod from enum import Enum from typing import ( TYPE_CHECKING, Any, Callable, Dict, Generator, List, Mapping, Optional, Type, Union, ) from motor.motor_asyncio import AsyncIOMotorClientSession from pymongo import ReturnDocument from pymongo import UpdateMany as UpdateManyPyMongo from pymongo import 
UpdateOne as UpdateOnePyMongo from pymongo.results import InsertOneResult, UpdateResult from beanie.odm.bulk import BulkWriter from beanie.odm.interfaces.clone import CloneInterface from beanie.odm.interfaces.session import SessionMethods from beanie.odm.interfaces.update import ( UpdateMethods, ) from beanie.odm.operators.update import BaseUpdateOperator from beanie.odm.operators.update.general import SetRevisionId from beanie.odm.utils.encoder import Encoder from beanie.odm.utils.parsing import parse_obj if TYPE_CHECKING: from beanie.odm.documents import DocType class UpdateResponse(str, Enum): UPDATE_RESULT = "UPDATE_RESULT" # PyMongo update result OLD_DOCUMENT = "OLD_DOCUMENT" # Original document NEW_DOCUMENT = "NEW_DOCUMENT" # Updated document class UpdateQuery(UpdateMethods, SessionMethods, CloneInterface): """ Update Query base class """ def __init__( self, document_model: Type["DocType"], find_query: Mapping[str, Any], ): self.document_model = document_model self.find_query = find_query self.update_expressions: List[Mapping[str, Any]] = [] self.session = None self.is_upsert = False self.upsert_insert_doc: Optional["DocType"] = None self.encoders: Dict[Any, Callable[[Any], Any]] = {} self.bulk_writer: Optional[BulkWriter] = None self.encoders = self.document_model.get_settings().bson_encoders self.pymongo_kwargs: Dict[str, Any] = {} @property def update_query(self) -> Dict[str, Any]: query: Union[Dict[str, Any], List[Dict[str, Any]], None] = None for expression in self.update_expressions: if isinstance(expression, BaseUpdateOperator): if query is None: query = {} if isinstance(query, list): raise TypeError("Wrong expression type") query.update(expression.query) elif isinstance(expression, dict): if query is None: query = {} if isinstance(query, list): raise TypeError("Wrong expression type") query.update(expression) elif isinstance(expression, SetRevisionId): if query is None: query = {} if isinstance(query, list): raise TypeError("Wrong expression type") set_query = query.get("$set", {}) set_query.update(expression.query.get("$set", {})) query["$set"] = set_query elif isinstance(expression, list): if query is None: query = [] if isinstance(query, dict): raise TypeError("Wrong expression type") query.extend(expression) else: raise TypeError("Wrong expression type") return Encoder(custom_encoders=self.encoders).encode(query) @abstractmethod async def _update(self) -> UpdateResult: ... class UpdateMany(UpdateQuery): """ Update Many query class """ def update( self, *args: Mapping[str, Any], session: Optional[AsyncIOMotorClientSession] = None, bulk_writer: Optional[BulkWriter] = None, **pymongo_kwargs: Any, ) -> "UpdateQuery": """ Provide modifications to the update query. :param args: *Union[dict, Mapping] - the modifications to apply. :param session: Optional[AsyncIOMotorClientSession] :param bulk_writer: Optional[BulkWriter] :param pymongo_kwargs: pymongo native parameters for update operation :return: UpdateMany query """ self.set_session(session=session) self.update_expressions += args if bulk_writer: self.bulk_writer = bulk_writer self.pymongo_kwargs.update(pymongo_kwargs) return self def upsert( self, *args: Mapping[str, Any], on_insert: "DocType", session: Optional[AsyncIOMotorClientSession] = None, **pymongo_kwargs: Any, ) -> "UpdateQuery": """ Provide modifications to the upsert query. :param args: *Union[dict, Mapping] - the modifications to apply. 
:param on_insert: DocType - document to insert if there is no matched document in the collection :param session: Optional[AsyncIOMotorClientSession] :param **pymongo_kwargs: pymongo native parameters for update operation :return: UpdateMany query """ self.upsert_insert_doc = on_insert # type: ignore self.update(*args, session=session, **pymongo_kwargs) return self def update_many( self, *args: Mapping[str, Any], session: Optional[AsyncIOMotorClientSession] = None, bulk_writer: Optional[BulkWriter] = None, **pymongo_kwargs: Any, ): """ Provide modifications to the update query :param args: *Union[dict, Mapping] - the modifications to apply. :param session: Optional[AsyncIOMotorClientSession] :param bulk_writer: "BulkWriter" - Beanie bulk writer :param pymongo_kwargs: pymongo native parameters for update operation :return: UpdateMany query """ return self.update( *args, session=session, bulk_writer=bulk_writer, **pymongo_kwargs ) async def _update(self): if self.bulk_writer is None: return ( await self.document_model.get_motor_collection().update_many( self.find_query, self.update_query, session=self.session, **self.pymongo_kwargs, ) ) else: self.bulk_writer.add_operation( self.document_model, UpdateManyPyMongo( self.find_query, self.update_query, **self.pymongo_kwargs ), ) def __await__( self, ) -> Generator[ Any, None, Union[UpdateResult, InsertOneResult, Optional["DocType"]] ]: """ Run the query :return: """ update_result = yield from self._update().__await__() if self.upsert_insert_doc is None: return update_result if update_result is not None and update_result.matched_count == 0: return ( yield from self.document_model.insert_one( document=self.upsert_insert_doc, session=self.session, bulk_writer=self.bulk_writer, ).__await__() ) return update_result class UpdateOne(UpdateQuery): """ Update One query class """ def __init__(self, *args: Any, **kwargs: Any): super(UpdateOne, self).__init__(*args, **kwargs) self.response_type = UpdateResponse.UPDATE_RESULT def update( self, *args: Mapping[str, Any], session: Optional[AsyncIOMotorClientSession] = None, bulk_writer: Optional[BulkWriter] = None, response_type: Optional[UpdateResponse] = None, **pymongo_kwargs: Any, ) -> "UpdateQuery": """ Provide modifications to the update query. :param args: *Union[dict, Mapping] - the modifications to apply. :param session: Optional[AsyncIOMotorClientSession] :param bulk_writer: Optional[BulkWriter] :param response_type: UpdateResponse :param pymongo_kwargs: pymongo native parameters for update operation :return: UpdateMany query """ self.set_session(session=session) self.update_expressions += args if response_type is not None: self.response_type = response_type if bulk_writer: self.bulk_writer = bulk_writer self.pymongo_kwargs.update(pymongo_kwargs) return self def upsert( self, *args: Mapping[str, Any], on_insert: "DocType", session: Optional[AsyncIOMotorClientSession] = None, response_type: Optional[UpdateResponse] = None, **pymongo_kwargs: Any, ) -> "UpdateQuery": """ Provide modifications to the upsert query. :param args: *Union[dict, Mapping] - the modifications to apply. 
:param on_insert: DocType - document to insert if there is no matched document in the collection :param session: Optional[AsyncIOMotorClientSession] :param response_type: Optional[UpdateResponse] :param pymongo_kwargs: pymongo native parameters for update operation :return: UpdateMany query """ self.upsert_insert_doc = on_insert # type: ignore self.update( *args, response_type=response_type, session=session, **pymongo_kwargs, ) return self def update_one( self, *args: Mapping[str, Any], session: Optional[AsyncIOMotorClientSession] = None, bulk_writer: Optional[BulkWriter] = None, response_type: Optional[UpdateResponse] = None, **pymongo_kwargs: Any, ): """ Provide modifications to the update query. The same as `update()` :param args: *Union[dict, Mapping] - the modifications to apply. :param session: Optional[AsyncIOMotorClientSession] :param bulk_writer: "BulkWriter" - Beanie bulk writer :param response_type: Optional[UpdateResponse] :param pymongo_kwargs: pymongo native parameters for update operation :return: UpdateMany query """ return self.update( *args, session=session, bulk_writer=bulk_writer, response_type=response_type, **pymongo_kwargs, ) async def _update(self): if not self.bulk_writer: if self.response_type == UpdateResponse.UPDATE_RESULT: return await self.document_model.get_motor_collection().update_one( self.find_query, self.update_query, session=self.session, **self.pymongo_kwargs, ) else: result = await self.document_model.get_motor_collection().find_one_and_update( self.find_query, self.update_query, session=self.session, return_document=( ReturnDocument.BEFORE if self.response_type == UpdateResponse.OLD_DOCUMENT else ReturnDocument.AFTER ), **self.pymongo_kwargs, ) if result is not None: result = parse_obj(self.document_model, result) return result else: self.bulk_writer.add_operation( self.document_model, UpdateOnePyMongo( self.find_query, self.update_query, **self.pymongo_kwargs ), ) def __await__( self, ) -> Generator[ Any, None, Union[UpdateResult, InsertOneResult, Optional["DocType"]] ]: """ Run the query :return: """ update_result = yield from self._update().__await__() if self.upsert_insert_doc is None: return update_result if ( self.response_type == UpdateResponse.UPDATE_RESULT and update_result is not None and update_result.matched_count == 0 ) or ( self.response_type != UpdateResponse.UPDATE_RESULT and update_result is None ): return ( yield from self.document_model.insert_one( document=self.upsert_insert_doc, session=self.session, bulk_writer=self.bulk_writer, ).__await__() ) return update_result python-beanie-1.29.0/beanie/odm/registry.py000066400000000000000000000015461473701376500206140ustar00rootroot00000000000000from typing import Dict, ForwardRef, Type, Union from pydantic import BaseModel class DocsRegistry: _registry: Dict[str, Type[BaseModel]] = {} @classmethod def register(cls, name: str, doc_type: Type[BaseModel]): cls._registry[name] = doc_type @classmethod def get(cls, name: str) -> Type[BaseModel]: return cls._registry[name] @classmethod def evaluate_fr(cls, forward_ref: Union[ForwardRef, Type]): """ Evaluate forward ref :param forward_ref: ForwardRef - forward ref to evaluate :return: Type[BaseModel] - class of the forward ref """ if ( isinstance(forward_ref, ForwardRef) and forward_ref.__forward_arg__ in cls._registry ): return cls._registry[forward_ref.__forward_arg__] else: return forward_ref 
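
# A minimal sketch of how the registry resolves string annotations, assuming
# a `Door` document class that was registered during init:
#
#     from typing import ForwardRef
#
#     DocsRegistry.register("Door", Door)
#     DocsRegistry.evaluate_fr(ForwardRef("Door"))  # -> Door
#     DocsRegistry.evaluate_fr(int)                 # not a ForwardRef -> int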
python-beanie-1.29.0/beanie/odm/settings/000077500000000000000000000000001473701376500202245ustar00rootroot00000000000000python-beanie-1.29.0/beanie/odm/settings/__init__.py000066400000000000000000000000001473701376500223230ustar00rootroot00000000000000python-beanie-1.29.0/beanie/odm/settings/base.py000066400000000000000000000020161473701376500215070ustar00rootroot00000000000000
from datetime import timedelta
from typing import Any, Dict, Optional, Type

from motor.motor_asyncio import AsyncIOMotorCollection, AsyncIOMotorDatabase
from pydantic import BaseModel, Field

from beanie.odm.utils.pydantic import IS_PYDANTIC_V2

if IS_PYDANTIC_V2:
    from pydantic import ConfigDict


class ItemSettings(BaseModel):
    name: Optional[str] = None

    use_cache: bool = False
    cache_capacity: int = 32
    cache_expiration_time: timedelta = timedelta(minutes=10)
    bson_encoders: Dict[Any, Any] = Field(default_factory=dict)
    projection: Optional[Dict[str, Any]] = None

    motor_db: Optional[AsyncIOMotorDatabase] = None
    motor_collection: Optional[AsyncIOMotorCollection] = None

    union_doc: Optional[Type] = None
    union_doc_alias: Optional[str] = None

    class_id: str = "_class_id"

    is_root: bool = False

    if IS_PYDANTIC_V2:
        model_config = ConfigDict(
            arbitrary_types_allowed=True,
        )
    else:

        class Config:
            arbitrary_types_allowed = True
python-beanie-1.29.0/beanie/odm/settings/document.py000066400000000000000000000021301473701376500224100ustar00rootroot00000000000000
from typing import List, Optional

from pydantic import Field

from beanie.odm.fields import IndexModelField
from beanie.odm.settings.base import ItemSettings
from beanie.odm.settings.timeseries import TimeSeriesConfig
from beanie.odm.utils.pydantic import IS_PYDANTIC_V2

if IS_PYDANTIC_V2:
    from pydantic import ConfigDict


class DocumentSettings(ItemSettings):
    use_state_management: bool = False
    state_management_replace_objects: bool = False
    state_management_save_previous: bool = False
    validate_on_save: bool = False
    use_revision: bool = False
    single_root_inheritance: bool = False

    indexes: List[IndexModelField] = Field(default_factory=list)
    merge_indexes: bool = False
    timeseries: Optional[TimeSeriesConfig] = None

    lazy_parsing: bool = False

    keep_nulls: bool = True

    max_nesting_depths_per_field: dict = Field(default_factory=dict)
    max_nesting_depth: int = 3

    if IS_PYDANTIC_V2:
        model_config = ConfigDict(
            arbitrary_types_allowed=True,
        )
    else:

        class Config:
            arbitrary_types_allowed = True
python-beanie-1.29.0/beanie/odm/settings/timeseries.py000066400000000000000000000025611473701376500227530ustar00rootroot00000000000000
from enum import Enum
from typing import Any, Dict, Optional

from pydantic import BaseModel


class Granularity(str, Enum):
    """
    Time Series Granularity
    """

    seconds = "seconds"
    minutes = "minutes"
    hours = "hours"


class TimeSeriesConfig(BaseModel):
    """
    Time Series Collection config
    """

    time_field: str
    meta_field: Optional[str] = None
    granularity: Optional[Granularity] = None
    bucket_max_span_seconds: Optional[int] = None
    bucket_rounding_second: Optional[int] = None
    expire_after_seconds: Optional[int] = None

    def build_query(self, collection_name: str) -> Dict[str, Any]:
        res: Dict[str, Any] = {"name": collection_name}
        timeseries: Dict[str, Any] = {"timeField": self.time_field}
        if self.meta_field is not None:
            timeseries["metaField"] = self.meta_field
        if self.granularity is not None:
            timeseries["granularity"] = self.granularity
        if self.bucket_max_span_seconds is not None:
            timeseries["bucketMaxSpanSeconds"] = self.bucket_max_span_seconds
        if self.bucket_rounding_second is not None:
timeseries["bucketRoundingSeconds"] = self.bucket_rounding_second res["timeseries"] = timeseries if self.expire_after_seconds is not None: res["expireAfterSeconds"] = self.expire_after_seconds return res python-beanie-1.29.0/beanie/odm/settings/union_doc.py000066400000000000000000000001361473701376500225530ustar00rootroot00000000000000from beanie.odm.settings.base import ItemSettings class UnionDocSettings(ItemSettings): ... python-beanie-1.29.0/beanie/odm/settings/view.py000066400000000000000000000005101473701376500215440ustar00rootroot00000000000000from typing import Any, Dict, List, Type, Union from pydantic import Field from beanie.odm.settings.base import ItemSettings class ViewSettings(ItemSettings): source: Union[str, Type] pipeline: List[Dict[str, Any]] max_nesting_depths_per_field: dict = Field(default_factory=dict) max_nesting_depth: int = 3 python-beanie-1.29.0/beanie/odm/union_doc.py000066400000000000000000000056061473701376500207220ustar00rootroot00000000000000from typing import Any, ClassVar, Dict, Optional, Type, TypeVar from motor.motor_asyncio import AsyncIOMotorClientSession from beanie.exceptions import UnionDocNotInited from beanie.odm.bulk import BulkWriter from beanie.odm.interfaces.aggregate import AggregateInterface from beanie.odm.interfaces.detector import DetectionInterface, ModelType from beanie.odm.interfaces.find import FindInterface from beanie.odm.interfaces.getters import OtherGettersInterface from beanie.odm.settings.union_doc import UnionDocSettings UnionDocType = TypeVar("UnionDocType", bound="UnionDoc") class UnionDoc( FindInterface, AggregateInterface, OtherGettersInterface, DetectionInterface, ): _document_models: ClassVar[Optional[Dict[str, Type]]] = None _is_inited: ClassVar[bool] = False _settings: ClassVar[UnionDocSettings] @classmethod def get_settings(cls) -> UnionDocSettings: return cls._settings @classmethod def register_doc(cls, name: str, doc_model: Type): if cls._document_models is None: cls._document_models = {} if cls._is_inited is False: raise UnionDocNotInited cls._document_models[name] = doc_model return cls.get_settings().name @classmethod def get_model_type(cls) -> ModelType: return ModelType.UnionDoc @classmethod def bulk_writer( cls, session: Optional[AsyncIOMotorClientSession] = None, ordered: bool = True, bypass_document_validation: bool = False, comment: Optional[Any] = None, ) -> BulkWriter: """ Returns a BulkWriter instance for handling bulk write operations. :param session: ClientSession The session instance used for transactional operations. :param ordered: bool If ``True`` (the default), requests will be performed on the server serially, in the order provided. If an error occurs, all remaining operations are aborted. If ``False``, requests will be performed on the server in arbitrary order, possibly in parallel, and all operations will be attempted. :param bypass_document_validation: bool, optional If ``True``, allows the write to opt-out of document-level validation. Default is ``False``. :param comment: str, optional A user-provided comment to attach to the BulkWriter. :returns: BulkWriter An instance of BulkWriter configured with the provided settings. Example Usage: -------------- This method is typically used within an asynchronous context manager. .. 
code-block:: python async with Document.bulk_writer(ordered=True) as bulk: await Document.insert_one(Document(field="value"), bulk_writer=bulk) """ return BulkWriter( session, ordered, cls, bypass_document_validation, comment ) python-beanie-1.29.0/beanie/odm/utils/000077500000000000000000000000001473701376500175245ustar00rootroot00000000000000python-beanie-1.29.0/beanie/odm/utils/__init__.py000066400000000000000000000000001473701376500216230ustar00rootroot00000000000000python-beanie-1.29.0/beanie/odm/utils/dump.py000066400000000000000000000024001473701376500210370ustar00rootroot00000000000000from typing import TYPE_CHECKING, Optional, Set from beanie.odm.utils.encoder import Encoder if TYPE_CHECKING: from beanie.odm.documents import Document def get_dict( document: "Document", to_db: bool = False, exclude: Optional[Set[str]] = None, keep_nulls: bool = True, ): if exclude is None: exclude = set() if document.id is None: exclude.add("_id") if not document.get_settings().use_revision: exclude.add("revision_id") encoder = Encoder(exclude=exclude, to_db=to_db, keep_nulls=keep_nulls) return encoder.encode(document) def get_nulls( document: "Document", exclude: Optional[Set[str]] = None, ): dictionary = get_dict(document, exclude=exclude, keep_nulls=True) return filter_none(dictionary) def get_top_level_nones( document: "Document", exclude: Optional[Set[str]] = None, ): dictionary = get_dict(document, exclude=exclude, keep_nulls=True) return {k: v for k, v in dictionary.items() if v is None} def filter_none(d): result = {} for k, v in d.items(): if isinstance(v, dict): filtered = filter_none(v) if filtered: result[k] = filtered elif v is None: result[k] = v return result python-beanie-1.29.0/beanie/odm/utils/encoder.py000066400000000000000000000122151473701376500215160ustar00rootroot00000000000000import dataclasses as dc import datetime import decimal import enum import ipaddress import operator import pathlib import re import uuid from enum import Enum from typing import ( Any, Callable, Container, Iterable, Mapping, MutableMapping, Optional, Tuple, ) import bson import pydantic import beanie from beanie.odm.fields import Link, LinkTypes from beanie.odm.utils.pydantic import ( IS_PYDANTIC_V2, IS_PYDANTIC_V2_10, get_model_fields, ) SingleArgCallable = Callable[[Any], Any] DEFAULT_CUSTOM_ENCODERS: MutableMapping[type, SingleArgCallable] = { ipaddress.IPv4Address: str, ipaddress.IPv4Interface: str, ipaddress.IPv4Network: str, ipaddress.IPv6Address: str, ipaddress.IPv6Interface: str, ipaddress.IPv6Network: str, pathlib.PurePath: str, pydantic.SecretBytes: pydantic.SecretBytes.get_secret_value, pydantic.SecretStr: pydantic.SecretStr.get_secret_value, datetime.date: lambda d: datetime.datetime.combine(d, datetime.time.min), datetime.timedelta: operator.methodcaller("total_seconds"), enum.Enum: operator.attrgetter("value"), Link: operator.attrgetter("ref"), bytes: bson.Binary, decimal.Decimal: bson.Decimal128, uuid.UUID: bson.Binary.from_uuid, re.Pattern: bson.Regex.from_native, } if IS_PYDANTIC_V2: from pydantic_core import Url DEFAULT_CUSTOM_ENCODERS[Url] = str if IS_PYDANTIC_V2_10: from pydantic import AnyUrl DEFAULT_CUSTOM_ENCODERS[AnyUrl] = str BSON_SCALAR_TYPES = ( type(None), str, int, float, datetime.datetime, bson.Binary, bson.DBRef, bson.Decimal128, bson.MaxKey, bson.MinKey, bson.ObjectId, bson.Regex, ) @dc.dataclass class Encoder: """ BSON encoding class """ exclude: Container[str] = frozenset() custom_encoders: Mapping[type, SingleArgCallable] = dc.field( default_factory=dict ) to_db: bool 
= False keep_nulls: bool = True def _encode_document(self, obj: "beanie.Document") -> Mapping[str, Any]: obj.parse_store() settings = obj.get_settings() obj_dict = {} if settings.union_doc is not None: obj_dict[settings.class_id] = ( settings.union_doc_alias or obj.__class__.__name__ ) if obj._class_id: obj_dict[settings.class_id] = obj._class_id link_fields = obj.get_link_fields() or {} sub_encoder = Encoder( # don't propagate self.exclude to subdocuments custom_encoders=settings.bson_encoders, to_db=self.to_db, keep_nulls=self.keep_nulls, ) for key, value in self._iter_model_items(obj): if key in link_fields: link_type = link_fields[key].link_type if link_type in (LinkTypes.DIRECT, LinkTypes.OPTIONAL_DIRECT): if value is not None: value = value.to_ref() elif link_type in (LinkTypes.LIST, LinkTypes.OPTIONAL_LIST): if value is not None: value = [link.to_ref() for link in value] elif self.to_db: continue obj_dict[key] = sub_encoder.encode(value) return obj_dict def encode(self, obj: Any) -> Any: if self.custom_encoders: encoder = _get_encoder(obj, self.custom_encoders) if encoder is not None: return encoder(obj) if isinstance(obj, BSON_SCALAR_TYPES): return obj encoder = _get_encoder(obj, DEFAULT_CUSTOM_ENCODERS) if encoder is not None: return encoder(obj) if isinstance(obj, beanie.Document): return self._encode_document(obj) if IS_PYDANTIC_V2 and isinstance(obj, pydantic.RootModel): return self.encode(obj.root) if isinstance(obj, pydantic.BaseModel): items = self._iter_model_items(obj) return {key: self.encode(value) for key, value in items} if isinstance(obj, Mapping): return { key if isinstance(key, Enum) else str(key): self.encode(value) for key, value in obj.items() } if isinstance(obj, Iterable): return [self.encode(value) for value in obj] raise ValueError(f"Cannot encode {obj!r}") def _iter_model_items( self, obj: pydantic.BaseModel ) -> Iterable[Tuple[str, Any]]: exclude, keep_nulls = self.exclude, self.keep_nulls get_model_field = get_model_fields(obj).get for key, value in obj.__iter__(): field_info = get_model_field(key) if field_info is not None: key = field_info.alias or key if key not in exclude and (value is not None or keep_nulls): yield key, value def _get_encoder( obj: Any, custom_encoders: Mapping[type, SingleArgCallable] ) -> Optional[SingleArgCallable]: encoder = custom_encoders.get(type(obj)) if encoder is not None: return encoder for cls, encoder in custom_encoders.items(): if isinstance(obj, cls): return encoder return None python-beanie-1.29.0/beanie/odm/utils/find.py000066400000000000000000000372741473701376500210330ustar00rootroot00000000000000from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Type from beanie.odm.fields import LinkInfo, LinkTypes if TYPE_CHECKING: from beanie import Document # TODO: check if this is the most efficient way for # appending subqueries to the queries var def construct_lookup_queries( cls: Type["Document"], nesting_depth: Optional[int] = None, nesting_depths_per_field: Optional[Dict[str, int]] = None, ) -> List[Dict[str, Any]]: queries: List = [] link_fields = cls.get_link_fields() if link_fields is not None: for link_info in link_fields.values(): final_nesting_depth = ( nesting_depths_per_field.get(link_info.field_name, None) if nesting_depths_per_field is not None else None ) if final_nesting_depth is None: final_nesting_depth = nesting_depth construct_query( link_info=link_info, queries=queries, database_major_version=cls._database_major_version, current_depth=final_nesting_depth, ) return queries def 
construct_query( link_info: LinkInfo, queries: List, database_major_version: int, current_depth: Optional[int] = None, ): if link_info.is_fetchable is False or ( current_depth is not None and current_depth <= 0 ): return if link_info.link_type in [ LinkTypes.DIRECT, LinkTypes.OPTIONAL_DIRECT, ]: if database_major_version >= 5 or link_info.nested_links is None: lookup_steps = [ { "$lookup": { "from": link_info.document_class.get_motor_collection().name, # type: ignore "localField": f"{link_info.lookup_field_name}.$id", "foreignField": "_id", "as": f"_link_{link_info.field_name}", } }, { "$unwind": { "path": f"$_link_{link_info.field_name}", "preserveNullAndEmptyArrays": True, } }, { "$addFields": { link_info.field_name: { "$cond": { "if": { "$ifNull": [ f"$_link_{link_info.field_name}", False, ] }, "then": f"$_link_{link_info.field_name}", "else": f"${link_info.field_name}", } } } }, {"$project": {f"_link_{link_info.field_name}": 0}}, ] # type: ignore new_depth = ( current_depth - 1 if current_depth is not None else None ) if link_info.nested_links is not None: lookup_steps[0]["$lookup"]["pipeline"] = [] # type: ignore for nested_link in link_info.nested_links: construct_query( link_info=link_info.nested_links[nested_link], queries=lookup_steps[0]["$lookup"]["pipeline"], # type: ignore database_major_version=database_major_version, current_depth=new_depth, ) queries += lookup_steps else: lookup_steps = [ { "$lookup": { "from": link_info.document_class.get_motor_collection().name, # type: ignore "let": { "link_id": f"${link_info.lookup_field_name}.$id" }, "as": f"_link_{link_info.field_name}", "pipeline": [ { "$match": { "$expr": {"$eq": ["$_id", "$$link_id"]} } }, ], } }, { "$unwind": { "path": f"$_link_{link_info.field_name}", "preserveNullAndEmptyArrays": True, } }, { "$addFields": { link_info.field_name: { "$cond": { "if": { "$ifNull": [ f"$_link_{link_info.field_name}", False, ] }, "then": f"$_link_{link_info.field_name}", "else": f"${link_info.field_name}", } } } }, {"$project": {f"_link_{link_info.field_name}": 0}}, ] new_depth = ( current_depth - 1 if current_depth is not None else None ) for nested_link in link_info.nested_links: construct_query( link_info=link_info.nested_links[nested_link], queries=lookup_steps[0]["$lookup"]["pipeline"], # type: ignore database_major_version=database_major_version, current_depth=new_depth, ) queries += lookup_steps elif link_info.link_type in [ LinkTypes.BACK_DIRECT, LinkTypes.OPTIONAL_BACK_DIRECT, ]: if database_major_version >= 5 or link_info.nested_links is None: lookup_steps = [ { "$lookup": { "from": link_info.document_class.get_motor_collection().name, # type: ignore "localField": "_id", "foreignField": f"{link_info.lookup_field_name}.$id", "as": f"_link_{link_info.field_name}", } }, { "$unwind": { "path": f"$_link_{link_info.field_name}", "preserveNullAndEmptyArrays": True, } }, { "$addFields": { link_info.field_name: { "$cond": { "if": { "$ifNull": [ f"$_link_{link_info.field_name}", False, ] }, "then": f"$_link_{link_info.field_name}", "else": f"${link_info.field_name}", } } } }, {"$project": {f"_link_{link_info.field_name}": 0}}, ] # type: ignore new_depth = ( current_depth - 1 if current_depth is not None else None ) if link_info.nested_links is not None: lookup_steps[0]["$lookup"]["pipeline"] = [] # type: ignore for nested_link in link_info.nested_links: construct_query( link_info=link_info.nested_links[nested_link], queries=lookup_steps[0]["$lookup"]["pipeline"], # type: ignore database_major_version=database_major_version, 
current_depth=new_depth, ) queries += lookup_steps else: lookup_steps = [ { "$lookup": { "from": link_info.document_class.get_motor_collection().name, # type: ignore "let": {"link_id": "$_id"}, "as": f"_link_{link_info.field_name}", "pipeline": [ { "$match": { "$expr": { "$eq": [ f"${link_info.lookup_field_name}.$id", "$$link_id", ] } } }, ], } }, { "$unwind": { "path": f"$_link_{link_info.field_name}", "preserveNullAndEmptyArrays": True, } }, { "$addFields": { link_info.field_name: { "$cond": { "if": { "$ifNull": [ f"$_link_{link_info.field_name}", False, ] }, "then": f"$_link_{link_info.field_name}", "else": f"${link_info.field_name}", } } } }, {"$project": {f"_link_{link_info.field_name}": 0}}, ] new_depth = ( current_depth - 1 if current_depth is not None else None ) for nested_link in link_info.nested_links: construct_query( link_info=link_info.nested_links[nested_link], queries=lookup_steps[0]["$lookup"]["pipeline"], # type: ignore database_major_version=database_major_version, current_depth=new_depth, ) queries += lookup_steps elif link_info.link_type in [ LinkTypes.LIST, LinkTypes.OPTIONAL_LIST, ]: if database_major_version >= 5 or link_info.nested_links is None: queries.append( { "$lookup": { "from": link_info.document_class.get_motor_collection().name, # type: ignore "localField": f"{link_info.lookup_field_name}.$id", "foreignField": "_id", "as": link_info.field_name, } } ) new_depth = ( current_depth - 1 if current_depth is not None else None ) if link_info.nested_links is not None: queries[-1]["$lookup"]["pipeline"] = [] for nested_link in link_info.nested_links: construct_query( link_info=link_info.nested_links[nested_link], queries=queries[-1]["$lookup"]["pipeline"], database_major_version=database_major_version, current_depth=new_depth, ) else: lookup_step = { "$lookup": { "from": link_info.document_class.get_motor_collection().name, # type: ignore "let": {"link_id": f"${link_info.lookup_field_name}.$id"}, "as": link_info.field_name, "pipeline": [ {"$match": {"$expr": {"$in": ["$_id", "$$link_id"]}}}, ], } } new_depth = ( current_depth - 1 if current_depth is not None else None ) for nested_link in link_info.nested_links: construct_query( link_info=link_info.nested_links[nested_link], queries=lookup_step["$lookup"]["pipeline"], database_major_version=database_major_version, current_depth=new_depth, ) queries.append(lookup_step) elif link_info.link_type in [ LinkTypes.BACK_LIST, LinkTypes.OPTIONAL_BACK_LIST, ]: if database_major_version >= 5 or link_info.nested_links is None: queries.append( { "$lookup": { "from": link_info.document_class.get_motor_collection().name, # type: ignore "localField": "_id", "foreignField": f"{link_info.lookup_field_name}.$id", "as": link_info.field_name, } } ) new_depth = ( current_depth - 1 if current_depth is not None else None ) if link_info.nested_links is not None: queries[-1]["$lookup"]["pipeline"] = [] for nested_link in link_info.nested_links: construct_query( link_info=link_info.nested_links[nested_link], queries=queries[-1]["$lookup"]["pipeline"], database_major_version=database_major_version, current_depth=new_depth, ) else: lookup_step = { "$lookup": { "from": link_info.document_class.get_motor_collection().name, # type: ignore "let": {"link_id": "$_id"}, "as": link_info.field_name, "pipeline": [ { "$match": { "$expr": { "$in": [ "$$link_id", f"${link_info.lookup_field_name}.$id", ] } } } ], } } new_depth = ( current_depth - 1 if current_depth is not None else None ) for nested_link in link_info.nested_links: construct_query( 
link_info=link_info.nested_links[nested_link], queries=lookup_step["$lookup"]["pipeline"], database_major_version=database_major_version, current_depth=new_depth, ) queries.append(lookup_step) return queries def split_text_query( query: Dict[str, Any], ) -> Tuple[List[Dict[str, Any]], List[Dict[str, Any]]]: """Divide query into text and non-text matches :param query: Dict[str, Any] - query dict :return: Tuple[Dict[str, Any], Dict[str, Any]] - text and non-text queries, respectively """ root_text_query_args: Dict[str, Any] = query.get("$text", None) root_non_text_queries: Dict[str, Any] = { k: v for k, v in query.items() if k not in {"$text", "$and"} } text_queries: List[Dict[str, Any]] = ( [{"$text": root_text_query_args}] if root_text_query_args else [] ) non_text_queries: List[Dict[str, Any]] = ( [root_non_text_queries] if root_non_text_queries else [] ) for match_case in query.get("$and", []): if "$text" in match_case: text_queries.append(match_case) else: non_text_queries.append(match_case) return text_queries, non_text_queries python-beanie-1.29.0/beanie/odm/utils/general.py000066400000000000000000000000001473701376500215010ustar00rootroot00000000000000python-beanie-1.29.0/beanie/odm/utils/init.py000066400000000000000000000653541473701376500210560ustar00rootroot00000000000000import asyncio import sys from typing_extensions import Sequence, get_args, get_origin from beanie.odm.utils.pydantic import ( IS_PYDANTIC_V2, get_extra_field_info, get_model_fields, parse_model, ) from beanie.odm.utils.typing import get_index_attributes if sys.version_info >= (3, 10): from types import UnionType as TypesUnionType else: TypesUnionType = () import importlib import inspect from typing import ( # type: ignore List, Optional, Type, Union, _GenericAlias, ) from motor.motor_asyncio import AsyncIOMotorClient, AsyncIOMotorDatabase from pydantic import BaseModel from pydantic.fields import FieldInfo from pymongo import IndexModel from beanie.exceptions import Deprecation, MongoDBVersionError from beanie.odm.actions import ActionRegistry from beanie.odm.cache import LRUCache from beanie.odm.documents import DocType, Document from beanie.odm.fields import ( BackLink, ExpressionField, Link, LinkInfo, LinkTypes, ) from beanie.odm.interfaces.detector import ModelType from beanie.odm.registry import DocsRegistry from beanie.odm.settings.document import DocumentSettings, IndexModelField from beanie.odm.settings.union_doc import UnionDocSettings from beanie.odm.settings.view import ViewSettings from beanie.odm.union_doc import UnionDoc, UnionDocType from beanie.odm.views import View class Output(BaseModel): class_name: str collection_name: str class Initializer: def __init__( self, database: AsyncIOMotorDatabase = None, connection_string: Optional[str] = None, document_models: Optional[ Sequence[ Union[Type["DocType"], Type["UnionDocType"], Type["View"], str] ] ] = None, allow_index_dropping: bool = False, recreate_views: bool = False, multiprocessing_mode: bool = False, skip_indexes: bool = False, ): """ Beanie initializer :param database: AsyncIOMotorDatabase - motor database instance :param connection_string: str - MongoDB connection string :param document_models: List[Union[Type[DocType], Type[UnionDocType], str]] - model classes or strings with dot separated paths :param allow_index_dropping: bool - if index dropping is allowed. Default False :param recreate_views: bool - if views should be recreated. 
Default False :param multiprocessing_mode: bool - if multiprocessing mode is on it will patch the motor client to use process's event loop. :param skip_indexes: bool - if you want to skip working with indexes. Default False :return: None """ self.inited_classes: List[Type] = [] self.allow_index_dropping = allow_index_dropping self.skip_indexes = skip_indexes self.recreate_views = recreate_views self.models_with_updated_forward_refs: List[Type[BaseModel]] = [] if (connection_string is None and database is None) or ( connection_string is not None and database is not None ): raise ValueError( "connection_string parameter or database parameter must be set" ) if document_models is None: raise ValueError("document_models parameter must be set") if connection_string is not None: database = AsyncIOMotorClient( connection_string ).get_default_database() self.database: AsyncIOMotorDatabase = database if multiprocessing_mode: self.database.client.get_io_loop = asyncio.get_running_loop sort_order = { ModelType.UnionDoc: 0, ModelType.Document: 1, ModelType.View: 2, } self.document_models: List[ Union[Type[DocType], Type[UnionDocType], Type[View]] ] = [ self.get_model(model) if isinstance(model, str) else model for model in document_models ] self.fill_docs_registry() self.document_models.sort( key=lambda val: sort_order[val.get_model_type()] ) def __await__(self): for model in self.document_models: yield from self.init_class(model).__await__() # General def fill_docs_registry(self): for model in self.document_models: module = inspect.getmodule(model) members = inspect.getmembers(module) for name, obj in members: if inspect.isclass(obj) and issubclass(obj, BaseModel): DocsRegistry.register(name, obj) @staticmethod def get_model(dot_path: str) -> Type["DocType"]: """ Get the model by the path in format bar.foo.Model :param dot_path: str - dot seprated path to the model :return: Type[DocType] - class of the model """ module_name, class_name = None, None try: module_name, class_name = dot_path.rsplit(".", 1) return getattr(importlib.import_module(module_name), class_name) except ValueError: raise ValueError( f"'{dot_path}' doesn't have '.' path, eg. path.to.your.model.class" ) except AttributeError: raise AttributeError( f"module '{module_name}' has no class called '{class_name}'" ) def init_settings( self, cls: Union[Type[Document], Type[View], Type[UnionDoc]] ): """ Init Settings :param cls: Union[Type[Document], Type[View], Type[UnionDoc]] - Class to init settings :return: None """ settings_class = getattr(cls, "Settings", None) settings_vars = {} if settings_class is not None: # get all attributes of the Settings subclass (including inherited ones) # without magic dunder methods settings_vars = { attr: getattr(settings_class, attr) for attr in dir(settings_class) if not attr.startswith("__") } if issubclass(cls, Document): cls._document_settings = parse_model( DocumentSettings, settings_vars ) if issubclass(cls, View): cls._settings = parse_model(ViewSettings, settings_vars) if issubclass(cls, UnionDoc): cls._settings = parse_model(UnionDocSettings, settings_vars) if not IS_PYDANTIC_V2: def update_forward_refs(self, cls: Type[BaseModel]): """ Update forward refs :param cls: Type[BaseModel] - class to update forward refs :return: None """ if cls not in self.models_with_updated_forward_refs: cls.update_forward_refs() self.models_with_updated_forward_refs.append(cls) # General. 
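    # An illustrative mapping of field annotations to the LinkTypes produced by
    # detect_link below (the models are hypothetical):
    #   door: Link[Door]                -> LinkTypes.DIRECT
    #   windows: List[Link[Window]]     -> LinkTypes.LIST
    #   owner: Optional[Link[Person]]   -> LinkTypes.OPTIONAL_DIRECT
    #   house: BackLink["House"]        -> LinkTypes.BACK_DIRECT
    # BackLink variants resolve their lookup field through the "original_field"
    # entry read by get_extra_field_info.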
Relations def detect_link( self, field: FieldInfo, field_name: str ) -> Optional[LinkInfo]: """ It detects link and returns LinkInfo if any found. :param field: ModelField :return: Optional[LinkInfo] """ origin = get_origin(field.annotation) args = get_args(field.annotation) classes = [ Link, BackLink, ] for cls in classes: # Check if annotation is one of the custom classes if ( isinstance(field.annotation, _GenericAlias) and field.annotation.__origin__ is cls ): if cls is Link: return LinkInfo( field_name=field_name, lookup_field_name=field_name, document_class=DocsRegistry.evaluate_fr(args[0]), # type: ignore link_type=LinkTypes.DIRECT, ) if cls is BackLink: return LinkInfo( field_name=field_name, lookup_field_name=get_extra_field_info( field, "original_field" ), # type: ignore document_class=DocsRegistry.evaluate_fr(args[0]), # type: ignore link_type=LinkTypes.BACK_DIRECT, ) # Check if annotation is List[custom class] elif ( (origin is List or origin is list) and len(args) == 1 and isinstance(args[0], _GenericAlias) and args[0].__origin__ is cls ): if cls is Link: return LinkInfo( field_name=field_name, lookup_field_name=field_name, document_class=DocsRegistry.evaluate_fr( get_args(args[0])[0] ), # type: ignore link_type=LinkTypes.LIST, ) if cls is BackLink: return LinkInfo( field_name=field_name, lookup_field_name=get_extra_field_info( # type: ignore field, "original_field" ), document_class=DocsRegistry.evaluate_fr( get_args(args[0])[0] ), # type: ignore link_type=LinkTypes.BACK_LIST, ) # Check if annotation is Optional[custom class] or Optional[List[custom class]] elif ( (origin is Union or origin is TypesUnionType) and len(args) == 2 and type(None) in args ): if args[1] is type(None): optional = args[0] else: optional = args[1] optional_origin = get_origin(optional) optional_args = get_args(optional) if ( isinstance(optional, _GenericAlias) and optional.__origin__ is cls ): if cls is Link: return LinkInfo( field_name=field_name, lookup_field_name=field_name, document_class=DocsRegistry.evaluate_fr( optional_args[0] ), # type: ignore link_type=LinkTypes.OPTIONAL_DIRECT, ) if cls is BackLink: return LinkInfo( field_name=field_name, lookup_field_name=get_extra_field_info( field, "original_field" ), document_class=DocsRegistry.evaluate_fr( optional_args[0] ), # type: ignore link_type=LinkTypes.OPTIONAL_BACK_DIRECT, ) elif ( (optional_origin is List or optional_origin is list) and len(optional_args) == 1 and isinstance(optional_args[0], _GenericAlias) and optional_args[0].__origin__ is cls ): if cls is Link: return LinkInfo( field_name=field_name, lookup_field_name=field_name, document_class=DocsRegistry.evaluate_fr( get_args(optional_args[0])[0] ), # type: ignore link_type=LinkTypes.OPTIONAL_LIST, ) if cls is BackLink: return LinkInfo( field_name=field_name, lookup_field_name=get_extra_field_info( field, "original_field" ), document_class=DocsRegistry.evaluate_fr( get_args(optional_args[0])[0] ), # type: ignore link_type=LinkTypes.OPTIONAL_BACK_LIST, ) return None def check_nested_links(self, link_info: LinkInfo, current_depth: int): if current_depth == 1: return for k, v in get_model_fields(link_info.document_class).items(): nested_link_info = self.detect_link(v, k) if nested_link_info is None: continue if link_info.nested_links is None: link_info.nested_links = {} link_info.nested_links[k] = nested_link_info new_depth = ( current_depth - 1 if current_depth is not None else None ) self.check_nested_links(nested_link_info, current_depth=new_depth) # Document @staticmethod def 
set_default_class_vars(cls: Type[Document]): """ Set default class variables. :param cls: Union[Type[Document], Type[View], Type[UnionDoc]] - Class to init settings :return: """ cls._children = dict() cls._parent = None cls._inheritance_inited = False cls._class_id = None cls._link_fields = None @staticmethod def init_cache(cls) -> None: """ Init model's cache :return: None """ if cls.get_settings().use_cache: cls._cache = LRUCache( capacity=cls.get_settings().cache_capacity, expiration_time=cls.get_settings().cache_expiration_time, ) def init_document_fields(self, cls) -> None: """ Init class fields :return: None """ if not IS_PYDANTIC_V2: self.update_forward_refs(cls) if cls._link_fields is None: cls._link_fields = {} for k, v in get_model_fields(cls).items(): path = v.alias or k setattr(cls, k, ExpressionField(path)) link_info = self.detect_link(v, k) depth_level = cls.get_settings().max_nesting_depths_per_field.get( k, None ) if depth_level is None: depth_level = cls.get_settings().max_nesting_depth if link_info is not None: if depth_level > 0 or depth_level is None: cls._link_fields[k] = link_info self.check_nested_links( link_info, current_depth=depth_level ) elif depth_level <= 0: link_info.is_fetchable = False cls._link_fields[k] = link_info cls._check_hidden_fields() @staticmethod def init_actions(cls): """ Init event-based actions """ ActionRegistry.clean_actions(cls) for attr in dir(cls): f = getattr(cls, attr) if inspect.isfunction(f): if hasattr(f, "has_action"): ActionRegistry.add_action( document_class=cls, event_types=f.event_types, # type: ignore action_direction=f.action_direction, # type: ignore funct=f, ) async def init_document_collection(self, cls): """ Init collection for the Document-based class :param cls: :return: """ cls.set_database(self.database) document_settings = cls.get_settings() # register in the Union Doc if document_settings.union_doc is not None: name = cls.get_settings().name or cls.__name__ document_settings.name = document_settings.union_doc.register_doc( name, cls ) document_settings.union_doc_alias = name # set a name if not document_settings.name: document_settings.name = cls.__name__ # check mongodb version fits if ( document_settings.timeseries is not None and cls._database_major_version < 5 ): raise MongoDBVersionError( "Timeseries are supported by MongoDB version 5 and higher" ) # create motor collection if ( document_settings.timeseries is not None and document_settings.name not in await self.database.list_collection_names( authorizedCollections=True, nameOnly=True ) ): collection = await self.database.create_collection( **document_settings.timeseries.build_query( document_settings.name ) ) else: collection = self.database[document_settings.name] cls.set_collection(collection) async def init_indexes(self, cls, allow_index_dropping: bool = False): """ Async indexes initializer """ collection = cls.get_motor_collection() document_settings = cls.get_settings() index_information = await collection.index_information() old_indexes = IndexModelField.from_motor_index_information( index_information ) new_indexes = [] # Indexed field wrapped with Indexed() indexed_fields = ( (k, fvalue, get_index_attributes(fvalue)) for k, fvalue in get_model_fields(cls).items() ) found_indexes = [ IndexModelField( IndexModel( [ ( fvalue.alias or k, indexed_attrs[0], ) ], **indexed_attrs[1], ) ) for k, fvalue, indexed_attrs in indexed_fields if indexed_attrs is not None ] if document_settings.merge_indexes: result: List[IndexModelField] = [] for subclass in 
reversed(cls.mro()): if issubclass(subclass, Document) and not subclass == Document: if ( subclass not in self.inited_classes and not subclass == cls ): await self.init_class(subclass) if subclass.get_settings().indexes: result = IndexModelField.merge_indexes( result, subclass.get_settings().indexes ) found_indexes = IndexModelField.merge_indexes( found_indexes, result ) else: if document_settings.indexes: found_indexes = IndexModelField.merge_indexes( found_indexes, document_settings.indexes ) new_indexes += found_indexes # delete indexes # Only drop indexes if the user specifically allows for it if allow_index_dropping: for index in IndexModelField.list_difference( old_indexes, new_indexes ): await collection.drop_index(index.name) # create indices if found_indexes: new_indexes += await collection.create_indexes( IndexModelField.list_to_index_model(new_indexes) ) async def init_document(self, cls: Type[Document]) -> Optional[Output]: """ Init Document-based class :param cls: :return: """ if cls is Document: return None # get db version build_info = await self.database.command({"buildInfo": 1}) mongo_version = build_info["version"] cls._database_major_version = int(mongo_version.split(".")[0]) if cls not in self.inited_classes: self.set_default_class_vars(cls) self.init_settings(cls) bases = [b for b in cls.__bases__ if issubclass(b, Document)] if len(bases) > 1: return None parent = bases[0] output = await self.init_document(parent) if cls.get_settings().is_root and ( parent is Document or not parent.get_settings().is_root ): if cls.get_collection_name() is None: cls.set_collection_name(cls.__name__) output = Output( class_name=cls.__name__, collection_name=cls.get_collection_name(), ) cls._class_id = cls.__name__ cls._inheritance_inited = True elif output is not None: output.class_name = f"{output.class_name}.{cls.__name__}" cls._class_id = output.class_name cls.set_collection_name(output.collection_name) parent.add_child(cls._class_id, cls) cls._parent = parent cls._inheritance_inited = True await self.init_document_collection(cls) if not self.skip_indexes: await self.init_indexes(cls, self.allow_index_dropping) self.init_document_fields(cls) self.init_cache(cls) self.init_actions(cls) self.inited_classes.append(cls) return output else: if cls._inheritance_inited is True: return Output( class_name=cls._class_id, collection_name=cls.get_collection_name(), ) else: return None # Views def init_view_fields(self, cls) -> None: """ Init class fields :return: None """ if cls._link_fields is None: cls._link_fields = {} for k, v in get_model_fields(cls).items(): path = v.alias or k setattr(cls, k, ExpressionField(path)) link_info = self.detect_link(v, k) depth_level = cls.get_settings().max_nesting_depths_per_field.get( k, None ) if depth_level is None: depth_level = cls.get_settings().max_nesting_depth if link_info is not None: if depth_level > 0: cls._link_fields[k] = link_info self.check_nested_links( link_info, current_depth=depth_level ) elif depth_level <= 0: link_info.is_fetchable = False cls._link_fields[k] = link_info def init_view_collection(self, cls): """ Init collection for View :param cls: :return: """ view_settings = cls.get_settings() if view_settings.name is None: view_settings.name = cls.__name__ if inspect.isclass(view_settings.source): view_settings.source = view_settings.source.get_collection_name() view_settings.motor_db = self.database view_settings.motor_collection = self.database[view_settings.name] async def init_view(self, cls: Type[View]): """ Init View-based class 
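        Example (an illustrative sketch -- the models are hypothetical):

            class Bike(Document):
                type: str

            class Metrics(View):
                type: str
                number: int

                class Settings:
                    source = Bike
                    pipeline = [
                        {"$group": {"_id": "$type", "number": {"$sum": 1}}},
                        {"$project": {"type": "$_id", "number": 1, "_id": 0}},
                    ]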
:param cls: :return: """ self.init_settings(cls) self.init_view_collection(cls) self.init_view_fields(cls) self.init_cache(cls) collection_names = await self.database.list_collection_names( authorizedCollections=True, nameOnly=True ) if self.recreate_views or cls._settings.name not in collection_names: if cls._settings.name in collection_names: await cls.get_motor_collection().drop() await self.database.command( { "create": cls.get_settings().name, "viewOn": cls.get_settings().source, "pipeline": cls.get_settings().pipeline, } ) # Union Doc async def init_union_doc(self, cls: Type[UnionDoc]): """ Init Union Doc based class :param cls: :return: """ self.init_settings(cls) if cls._settings.name is None: cls._settings.name = cls.__name__ cls._settings.motor_db = self.database cls._settings.motor_collection = self.database[cls._settings.name] cls._is_inited = True # Deprecations @staticmethod def check_deprecations( cls: Union[Type[Document], Type[View], Type[UnionDoc]], ): if hasattr(cls, "Collection"): raise Deprecation( "Collection inner class is not supported more. " "Please use Settings instead. " "https://beanie-odm.dev/tutorial/defining-a-document/#settings" ) # Final async def init_class( self, cls: Union[Type[Document], Type[View], Type[UnionDoc]] ): """ Init Document, View or UnionDoc based class. :param cls: :return: """ self.check_deprecations(cls) if issubclass(cls, Document): await self.init_document(cls) if issubclass(cls, View): await self.init_view(cls) if issubclass(cls, UnionDoc): await self.init_union_doc(cls) if hasattr(cls, "custom_init"): await cls.custom_init() # type: ignore async def init_beanie( database: AsyncIOMotorDatabase = None, connection_string: Optional[str] = None, document_models: Optional[ Sequence[Union[Type[Document], Type[UnionDoc], Type["View"], str]] ] = None, allow_index_dropping: bool = False, recreate_views: bool = False, multiprocessing_mode: bool = False, skip_indexes: bool = False, ): """ Beanie initialization :param database: AsyncIOMotorDatabase - motor database instance :param connection_string: str - MongoDB connection string :param document_models: List[Union[Type[DocType], Type[UnionDocType], str]] - model classes or strings with dot separated paths :param allow_index_dropping: bool - if index dropping is allowed. Default False :param recreate_views: bool - if views should be recreated. Default False :param multiprocessing_mode: bool - if multiprocessing mode is on it will patch the motor client to use process's event loop. Default False :param skip_indexes: bool - if you want to skip working with the indexes. 
Default False :return: None """ await Initializer( database=database, connection_string=connection_string, document_models=document_models, allow_index_dropping=allow_index_dropping, recreate_views=recreate_views, multiprocessing_mode=multiprocessing_mode, skip_indexes=skip_indexes, ) python-beanie-1.29.0/beanie/odm/utils/parsing.py000066400000000000000000000117571473701376500215540ustar00rootroot00000000000000from typing import TYPE_CHECKING, Any, Dict, Type, Union from pydantic import BaseModel from beanie.exceptions import ( ApplyChangesException, DocWasNotRegisteredInUnionClass, UnionHasNoRegisteredDocs, ) from beanie.odm.interfaces.detector import ModelType from beanie.odm.utils.pydantic import get_config_value, parse_model if TYPE_CHECKING: from beanie.odm.documents import Document def merge_models(left: BaseModel, right: BaseModel) -> None: """ Merge two models :param left: left model :param right: right model :return: None """ from beanie.odm.fields import Link for k, right_value in right.__iter__(): left_value = getattr(left, k) if isinstance(right_value, BaseModel) and isinstance( left_value, BaseModel ): if get_config_value(left_value, "frozen"): left.__setattr__(k, right_value) else: merge_models(left_value, right_value) continue if isinstance(right_value, list): links_found = False for i in right_value: if isinstance(i, Link): links_found = True break if links_found: continue left.__setattr__(k, right_value) elif not isinstance(right_value, Link): left.__setattr__(k, right_value) def apply_changes( changes: Dict[str, Any], target: Union[BaseModel, Dict[str, Any]] ): for key, value in changes.items(): if "." in key: key_parts = key.split(".") current_target = target try: for part in key_parts[:-1]: if isinstance(current_target, dict): current_target = current_target[part] elif isinstance(current_target, BaseModel): current_target = getattr(current_target, part) else: raise ApplyChangesException( f"Unexpected type of target: {type(target)}" ) final_key = key_parts[-1] if isinstance(current_target, dict): current_target[final_key] = value elif isinstance(current_target, BaseModel): setattr(current_target, final_key, value) else: raise ApplyChangesException( f"Unexpected type of target: {type(target)}" ) except (KeyError, AttributeError) as e: raise ApplyChangesException( f"Failed to apply change for key '{key}': {e}" ) else: if isinstance(target, dict): target[key] = value elif isinstance(target, BaseModel): setattr(target, key, value) else: raise ApplyChangesException( f"Unexpected type of target: {type(target)}" ) def save_state(item: BaseModel): if hasattr(item, "_save_state"): item._save_state() # type: ignore def parse_obj( model: Union[Type[BaseModel], Type["Document"]], data: Any, lazy_parse: bool = False, ) -> BaseModel: if ( hasattr(model, "get_model_type") and model.get_model_type() == ModelType.UnionDoc # type: ignore ): if model._document_models is None: # type: ignore raise UnionHasNoRegisteredDocs if isinstance(data, dict): class_name = data[model.get_settings().class_id] # type: ignore else: class_name = data._class_id if class_name not in model._document_models: # type: ignore raise DocWasNotRegisteredInUnionClass return parse_obj( model=model._document_models[class_name], # type: ignore data=data, lazy_parse=lazy_parse, ) # type: ignore if ( hasattr(model, "get_model_type") and model.get_model_type() == ModelType.Document # type: ignore and model._inheritance_inited # type: ignore ): if isinstance(data, dict): class_name = data.get(model.get_settings().class_id) 
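                # Illustrative flow (class names are hypothetical): with inheritance
                # enabled, a raw document such as {"_class_id": "Vehicle.Car", ...}
                # is re-dispatched below through model._children["Vehicle.Car"], so
                # querying the parent class yields instances of the concrete child.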
# type: ignore elif hasattr(data, model.get_settings().class_id): # type: ignore class_name = data._class_id else: class_name = None if model._children and class_name in model._children: # type: ignore return parse_obj( model=model._children[class_name], # type: ignore data=data, lazy_parse=lazy_parse, ) # type: ignore if ( lazy_parse and hasattr(model, "get_model_type") and model.get_model_type() == ModelType.Document # type: ignore ): o = model.lazy_parse(data, {"_id"}) # type: ignore o._saved_state = {"_id": o.id} return o result = parse_model(model, data) save_state(result) return result python-beanie-1.29.0/beanie/odm/utils/projection.py000066400000000000000000000021771473701376500222610ustar00rootroot00000000000000from typing import Dict, Optional, Type, TypeVar from pydantic import BaseModel from beanie.odm.interfaces.detector import ModelType from beanie.odm.utils.pydantic import get_config_value, get_model_fields ProjectionModelType = TypeVar("ProjectionModelType", bound=BaseModel) def get_projection( model: Type[ProjectionModelType], ) -> Optional[Dict[str, int]]: if hasattr(model, "get_model_type") and ( model.get_model_type() == ModelType.UnionDoc # type: ignore or ( # type: ignore model.get_model_type() == ModelType.Document # type: ignore and model._inheritance_inited # type: ignore ) ): # type: ignore return None if hasattr(model, "Settings"): # MyPy checks settings = getattr(model, "Settings") if hasattr(settings, "projection"): return getattr(settings, "projection") if get_config_value(model, "extra") == "allow": return None document_projection: Dict[str, int] = {} for name, field in get_model_fields(model).items(): document_projection[field.alias or name] = 1 return document_projection python-beanie-1.29.0/beanie/odm/utils/pydantic.py000066400000000000000000000030701473701376500217110ustar00rootroot00000000000000from typing import Any, Type import pydantic from pydantic import BaseModel IS_PYDANTIC_V2 = int(pydantic.VERSION.split(".")[0]) >= 2 IS_PYDANTIC_V2_10 = ( IS_PYDANTIC_V2 and int(pydantic.VERSION.split(".")[1]) >= 10 ) if IS_PYDANTIC_V2: from pydantic import TypeAdapter else: from pydantic import parse_obj_as def parse_object_as(object_type: Type, data: Any): if IS_PYDANTIC_V2: return TypeAdapter(object_type).validate_python(data) else: return parse_obj_as(object_type, data) def get_field_type(field): if IS_PYDANTIC_V2: return field.annotation else: return field.outer_type_ def get_model_fields(model): if IS_PYDANTIC_V2: return model.model_fields else: return model.__fields__ def parse_model(model_type: Type[BaseModel], data: Any): if IS_PYDANTIC_V2: return model_type.model_validate(data) else: return model_type.parse_obj(data) def get_extra_field_info(field, parameter: str): if IS_PYDANTIC_V2: if isinstance(field.json_schema_extra, dict): return field.json_schema_extra.get(parameter) return None else: return field.field_info.extra.get(parameter) def get_config_value(model, parameter: str): if IS_PYDANTIC_V2: return model.model_config.get(parameter) else: return getattr(model.Config, parameter, None) def get_model_dump(model, *args: Any, **kwargs: Any): if IS_PYDANTIC_V2: return model.model_dump(*args, **kwargs) else: return model.dict(*args, **kwargs) python-beanie-1.29.0/beanie/odm/utils/relations.py000066400000000000000000000025651473701376500221060ustar00rootroot00000000000000from collections.abc import Mapping from typing import TYPE_CHECKING, Any, Dict from typing import Mapping as MappingType from beanie.odm.fields import ( ExpressionField, ) # from 
pydantic.fields import ModelField # from pydantic.typing import get_origin if TYPE_CHECKING: from beanie import Document def convert_ids( query: MappingType[str, Any], doc: "Document", fetch_links: bool ) -> Dict[str, Any]: # TODO add all the cases new_query = {} for k, v in query.items(): k_splitted = k.split(".") if ( isinstance(k, ExpressionField) and doc.get_link_fields() is not None and len(k_splitted) == 2 and k_splitted[0] in doc.get_link_fields().keys() # type: ignore and k_splitted[1] == "id" ): if fetch_links: new_k = f"{k_splitted[0]}._id" else: new_k = f"{k_splitted[0]}.$id" else: new_k = k new_v: Any if isinstance(v, Mapping): new_v = convert_ids(v, doc, fetch_links) elif isinstance(v, list): new_v = [ convert_ids(ele, doc, fetch_links) if isinstance(ele, Mapping) else ele for ele in v ] else: new_v = v new_query[new_k] = new_v return new_query python-beanie-1.29.0/beanie/odm/utils/self_validation.py000066400000000000000000000010401473701376500232340ustar00rootroot00000000000000from functools import wraps from typing import TYPE_CHECKING, TypeVar from typing_extensions import ParamSpec if TYPE_CHECKING: from beanie.odm.documents import AsyncDocMethod, DocType P = ParamSpec("P") R = TypeVar("R") def validate_self_before( f: "AsyncDocMethod[DocType, P, R]", ) -> "AsyncDocMethod[DocType, P, R]": @wraps(f) async def wrapper(self: "DocType", *args: P.args, **kwargs: P.kwargs) -> R: await self.validate_self(*args, **kwargs) return await f(self, *args, **kwargs) return wrapper python-beanie-1.29.0/beanie/odm/utils/state.py000066400000000000000000000062121473701376500212170ustar00rootroot00000000000000import inspect from functools import wraps from typing import TYPE_CHECKING, TypeVar from typing_extensions import ParamSpec from beanie.exceptions import StateManagementIsTurnedOff, StateNotSaved if TYPE_CHECKING: from beanie.odm.documents import AnyDocMethod, AsyncDocMethod, DocType P = ParamSpec("P") R = TypeVar("R") def check_if_state_saved(self: "DocType"): if not self.use_state_management(): raise StateManagementIsTurnedOff( "State management is turned off for this document" ) if self._saved_state is None: raise StateNotSaved("No state was saved") def saved_state_needed( f: "AnyDocMethod[DocType, P, R]", ) -> "AnyDocMethod[DocType, P, R]": @wraps(f) def sync_wrapper(self: "DocType", *args: P.args, **kwargs: P.kwargs) -> R: check_if_state_saved(self) return f(self, *args, **kwargs) @wraps(f) async def async_wrapper( self: "DocType", *args: P.args, **kwargs: P.kwargs ) -> R: check_if_state_saved(self) # type ignore because there is no nice/proper way to annotate both sync # and async case without parametrized TypeVar, which is not supported return await f(self, *args, **kwargs) # type: ignore[misc] if inspect.iscoroutinefunction(f): # type ignore because there is no nice/proper way to annotate both sync # and async case without parametrized TypeVar, which is not supported return async_wrapper # type: ignore[return-value] return sync_wrapper def check_if_previous_state_saved(self: "DocType"): if not self.use_state_management(): raise StateManagementIsTurnedOff( "State management is turned off for this document" ) if not self.state_management_save_previous(): raise StateManagementIsTurnedOff( "State management's option to save previous state is turned off for this document" ) def previous_saved_state_needed( f: "AnyDocMethod[DocType, P, R]", ) -> "AnyDocMethod[DocType, P, R]": @wraps(f) def sync_wrapper(self: "DocType", *args: P.args, **kwargs: P.kwargs) -> R: 
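        # Illustrative usage (the method name is hypothetical): decorate a
        # Document method that must only run when the previous state snapshot
        # is available, e.g.
        #   @previous_saved_state_needed
        #   def rollback(self) -> None: ...
        # The sync and async wrappers below mirror saved_state_needed above.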
check_if_previous_state_saved(self) return f(self, *args, **kwargs) @wraps(f) async def async_wrapper( self: "DocType", *args: P.args, **kwargs: P.kwargs ) -> R: check_if_previous_state_saved(self) # type ignore because there is no nice/proper way to annotate both sync # and async case without parametrized TypeVar, which is not supported return await f(self, *args, **kwargs) # type: ignore[misc] if inspect.iscoroutinefunction(f): # type ignore because there is no nice/proper way to annotate both sync # and async case without parametrized TypeVar, which is not supported return async_wrapper # type: ignore[return-value] return sync_wrapper def save_state_after( f: "AsyncDocMethod[DocType, P, R]", ) -> "AsyncDocMethod[DocType, P, R]": @wraps(f) async def wrapper(self: "DocType", *args: P.args, **kwargs: P.kwargs) -> R: result = await f(self, *args, **kwargs) self._save_state() return result return wrapper python-beanie-1.29.0/beanie/odm/utils/typing.py000066400000000000000000000044451473701376500214170ustar00rootroot00000000000000import inspect import sys from typing import Any, Dict, Optional, Tuple, Type from beanie.odm.fields import IndexedAnnotation from .pydantic import IS_PYDANTIC_V2, get_field_type if sys.version_info >= (3, 8): from typing import get_args, get_origin else: from typing_extensions import get_args, get_origin def extract_id_class(annotation) -> Type[Any]: if get_origin(annotation) is not None: try: annotation = next( arg for arg in get_args(annotation) if arg is not type(None) ) except StopIteration: annotation = None if inspect.isclass(annotation): return annotation raise ValueError("Unknown annotation: {}".format(annotation)) def get_index_attributes(field) -> Optional[Tuple[int, Dict[str, Any]]]: """Gets the index attributes from the field, if it is indexed. :param field: The field to get the index attributes from. :return: The index attributes, if the field is indexed. Otherwise, None. """ # For fields that are directly typed with `Indexed()`, the type will have # an `_indexed` attribute. field_type = get_field_type(field) if hasattr(field_type, "_indexed"): return getattr(field_type, "_indexed", None) # For fields that are use `Indexed` within `Annotated`, the field will have # metadata that might contain an `IndexedAnnotation` instance. if IS_PYDANTIC_V2: # In Pydantic 2, the field has a `metadata` attribute with # the annotations. metadata = getattr(field, "metadata", None) elif hasattr(field, "annotation") and hasattr( field.annotation, "__metadata__" ): # In Pydantic 1, the field has an `annotation` attribute with the # type assigned to the field. If the type is annotated, it will # have a `__metadata__` attribute with the annotations. 
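    # Two illustrative spellings that end up here (an assumed sketch; the
    # model is hypothetical):
    #   class Sample(Document):
    #       direct: Indexed(str, pymongo.TEXT)                  # type carries _indexed
    #       wrapped: Annotated[str, Indexed(str, unique=True)]  # IndexedAnnotation in metadata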
metadata = field.annotation.__metadata__ else: return None if metadata is None: return None try: iter(metadata) except TypeError: return None indexed_annotation = next( ( annotation for annotation in metadata if isinstance(annotation, IndexedAnnotation) ), None, ) return getattr(indexed_annotation, "_indexed", None) python-beanie-1.29.0/beanie/odm/views.py000066400000000000000000000040021473701376500200670ustar00rootroot00000000000000import asyncio from typing import Any, ClassVar, Dict, Optional, Union from pydantic import BaseModel from beanie.exceptions import ViewWasNotInitialized from beanie.odm.fields import Link, LinkInfo from beanie.odm.interfaces.aggregate import AggregateInterface from beanie.odm.interfaces.detector import DetectionInterface, ModelType from beanie.odm.interfaces.find import FindInterface from beanie.odm.interfaces.getters import OtherGettersInterface from beanie.odm.settings.view import ViewSettings class View( BaseModel, FindInterface, AggregateInterface, OtherGettersInterface, DetectionInterface, ): """ What is needed: Source collection or view pipeline """ # Relations _link_fields: ClassVar[Optional[Dict[str, LinkInfo]]] = None # Settings _settings: ClassVar[ViewSettings] @classmethod def get_settings(cls) -> ViewSettings: """ Get view settings, which was created on the initialization step :return: ViewSettings class """ if cls._settings is None: raise ViewWasNotInitialized return cls._settings async def fetch_link(self, field: Union[str, Any]): ref_obj = getattr(self, field, None) if isinstance(ref_obj, Link): value = await ref_obj.fetch(fetch_links=True) setattr(self, field, value) if isinstance(ref_obj, list) and ref_obj: values = await Link.fetch_list(ref_obj, fetch_links=True) setattr(self, field, values) async def fetch_all_links(self): coros = [] link_fields = self.get_link_fields() if link_fields is not None: for ref in link_fields.values(): coros.append(self.fetch_link(ref.field_name)) # TODO lists await asyncio.gather(*coros) @classmethod def get_link_fields(cls) -> Optional[Dict[str, LinkInfo]]: return cls._link_fields @classmethod def get_model_type(cls) -> ModelType: return ModelType.View python-beanie-1.29.0/beanie/operators.py000066400000000000000000000034651473701376500202050ustar00rootroot00000000000000from beanie.odm.operators.find.array import All, ElemMatch, Size from beanie.odm.operators.find.bitwise import ( BitsAllClear, BitsAllSet, BitsAnyClear, BitsAnySet, ) from beanie.odm.operators.find.comparison import ( GT, GTE, LT, LTE, NE, Eq, In, NotIn, ) from beanie.odm.operators.find.element import Exists, Type from beanie.odm.operators.find.evaluation import ( Expr, JsonSchema, Mod, RegEx, Text, Where, ) from beanie.odm.operators.find.geospatial import ( Box, GeoIntersects, GeoWithin, GeoWithinTypes, Near, NearSphere, ) from beanie.odm.operators.find.logical import And, Nor, Not, Or from beanie.odm.operators.update.array import ( AddToSet, Pop, Pull, PullAll, Push, ) from beanie.odm.operators.update.bitwise import Bit from beanie.odm.operators.update.general import ( CurrentDate, Inc, Max, Min, Mul, Rename, Set, SetOnInsert, Unset, ) __all__ = [ # Find # Array "All", "ElemMatch", "Size", # Bitwise "BitsAllClear", "BitsAllSet", "BitsAnyClear", "BitsAnySet", # Comparison "Eq", "GT", "GTE", "In", "NotIn", "LT", "LTE", "NE", # Element "Exists", "Type", "Type", # Evaluation "Expr", "JsonSchema", "Mod", "RegEx", "Text", "Where", # Geospatial "GeoIntersects", "GeoWithinTypes", "GeoWithin", "Box", "Near", "NearSphere", # Logical "Or", "And", "Nor", 
"Not", # Update # Array "AddToSet", "Pop", "Pull", "Push", "PullAll", # Bitwise "Bit", # General "Set", "CurrentDate", "Inc", "Min", "Max", "Mul", "Rename", "SetOnInsert", "Unset", ] python-beanie-1.29.0/beanie/py.typed000066400000000000000000000000001473701376500172720ustar00rootroot00000000000000python-beanie-1.29.0/docs/000077500000000000000000000000001473701376500153125ustar00rootroot00000000000000python-beanie-1.29.0/docs/CNAME000066400000000000000000000000161473701376500160550ustar00rootroot00000000000000beanie-odm.devpython-beanie-1.29.0/docs/api/000077500000000000000000000000001473701376500160635ustar00rootroot00000000000000python-beanie-1.29.0/docs/api/document.md000066400000000000000000000231451473701376500202300ustar00rootroot00000000000000 # beanie.odm.documents ## Document Objects ```python class Document(BaseModel, UpdateMethods) ``` Document Mapping class. Fields: - `id` - MongoDB document ObjectID "_id" field. Mapped to the PydanticObjectId class Inherited from: - Pydantic BaseModel - [UpdateMethods](https://roman-right.github.io/beanie/api/interfaces/`aggregatemethods`) #### insert ```python | async insert(session: Optional[ClientSession] = None) -> DocType ``` Insert the document (self) to the collection **Returns**: Document #### create ```python | async create(session: Optional[ClientSession] = None) -> DocType ``` The same as self.insert() **Returns**: Document #### insert\_one ```python | @classmethod | async insert_one(cls: Type[DocType], document: DocType, session: Optional[ClientSession] = None) -> InsertOneResult ``` Insert one document to the collection **Arguments**: - `document`: Document - document to insert - `session`: ClientSession - pymongo session **Returns**: InsertOneResult #### insert\_many ```python | @classmethod | async insert_many(cls: Type[DocType], documents: List[DocType], session: Optional[ClientSession] = None) -> InsertManyResult ``` Insert many documents to the collection **Arguments**: - `documents`: List["Document"] - documents to insert - `session`: ClientSession - pymongo session **Returns**: InsertManyResult #### get ```python | @classmethod | async get(cls: Type[DocType], document_id: PydanticObjectId, session: Optional[ClientSession] = None) -> Optional[DocType] ``` Get document by id **Arguments**: - `document_id`: PydanticObjectId - document id - `session`: Optional[ClientSession] - pymongo session **Returns**: Union["Document", None] #### find\_one ```python | @classmethod | find_one(cls, *args: Union[Dict[str, Any], Mapping[str, Any], bool], *, projection_model: Optional[Type[BaseModel]] = None, session: Optional[ClientSession] = None) -> FindOne ``` Find one document by criteria. Returns [FindOne](https://roman-right.github.io/beanie/api/queries/`findone`) query object **Arguments**: - `args`: *Union[Dict[str, Any], Mapping[str, Any], bool] - search criteria - `projection_model`: Optional[Type[BaseModel]] - projection model - `session`: Optional[ClientSession] - pymongo session instance **Returns**: [FindOne](https://roman-right.github.io/beanie/api/queries/`findone`) - find query instance #### find\_many ```python | @classmethod | find_many(cls, *args: Union[Dict[str, Any], Mapping[str, Any], bool], *, skip: Optional[int] = None, limit: Optional[int] = None, sort: Union[None, str, List[Tuple[str, SortDirection]]] = None, projection_model: Optional[Type[BaseModel]] = None, session: Optional[ClientSession] = None) -> FindMany ``` Find many documents by criteria. 
Returns [FindMany](https://roman-right.github.io/beanie/api/queries/`findmany`) query object **Arguments**: - `args`: *Union[Dict[str, Any], Mapping[str, Any], bool] - search criteria - `skip`: Optional[int] - The number of documents to omit. - `limit`: Optional[int] - The maximum number of results to return. - `sort`: Union[None, str, List[Tuple[str, SortDirection]]] - A key or a list of (key, direction) pairs specifying the sort order for this query. - `projection_model`: Optional[Type[BaseModel]] - projection model - `session`: Optional[ClientSession] - pymongo session **Returns**: [FindMany](https://roman-right.github.io/beanie/api/queries/`findmany`) - query instance #### find ```python | @classmethod | find(cls, *args: Union[Dict[str, Any], Mapping[str, Any], bool], *, skip: Optional[int] = None, limit: Optional[int] = None, sort: Union[None, str, List[Tuple[str, SortDirection]]] = None, projection_model: Optional[Type[BaseModel]] = None, session: Optional[ClientSession] = None) -> FindMany ``` The same as find_many #### find\_all ```python | @classmethod | find_all(cls, skip: Optional[int] = None, limit: Optional[int] = None, sort: Union[None, str, List[Tuple[str, SortDirection]]] = None, projection_model: Optional[Type[BaseModel]] = None, session: Optional[ClientSession] = None) -> FindMany ``` Get all the documents **Arguments**: - `skip`: Optional[int] - The number of documents to omit. - `limit`: Optional[int] - The maximum number of results to return. - `sort`: Union[None, str, List[Tuple[str, SortDirection]]] - A key or a list of (key, direction) pairs specifying the sort order for this query. - `projection_model`: Optional[Type[BaseModel]] - projection model - `session`: Optional[ClientSession] - pymongo session **Returns**: [FindMany](https://roman-right.github.io/beanie/api/queries/`findmany`) - query instance #### all ```python | @classmethod | all(cls, skip: Optional[int] = None, limit: Optional[int] = None, sort: Union[None, str, List[Tuple[str, SortDirection]]] = None, projection_model: Optional[Type[BaseModel]] = None, session: Optional[ClientSession] = None) -> FindMany ``` the same as find_all #### replace ```python | async replace(session: Optional[ClientSession] = None) -> DocType ``` Fully update the document in the database **Arguments**: - `session`: Optional[ClientSession] - pymongo session. **Returns**: None #### save ```python | async save(session: Optional[ClientSession] = None) -> DocType ``` Update an existing model in the database or insert it if it does not yet exist. **Arguments**: - `session`: Optional[ClientSession] - pymongo session. **Returns**: None #### replace\_many ```python | @classmethod | async replace_many(cls: Type[DocType], documents: List[DocType], session: Optional[ClientSession] = None) -> None ``` Replace list of documents **Arguments**: - `documents`: List["Document"] - `session`: Optional[ClientSession] - pymongo session. **Returns**: None #### update ```python | async update(*args, *, session: Optional[ClientSession] = None) -> None ``` Partially update the document in the database **Arguments**: - `args`: *Union[dict, Mapping] - the modifications to apply. - `session`: ClientSession - pymongo session. **Returns**: None #### update\_all ```python | @classmethod | update_all(cls, *args: Union[dict, Mapping], *, session: Optional[ClientSession] = None) -> UpdateMany ``` Partially update all the documents **Arguments**: - `args`: *Union[dict, Mapping] - the modifications to apply. - `session`: ClientSession - pymongo session. 
**Returns**: UpdateMany query #### delete ```python | async delete(session: Optional[ClientSession] = None) -> DeleteResult ``` Delete the document **Arguments**: - `session`: Optional[ClientSession] - pymongo session. **Returns**: DeleteResult - pymongo DeleteResult instance. #### delete\_all ```python | @classmethod | async delete_all(cls, session: Optional[ClientSession] = None) -> DeleteResult ``` Delete all the documents **Arguments**: - `session`: Optional[ClientSession] - pymongo session. **Returns**: DeleteResult - pymongo DeleteResult instance. #### aggregate ```python | @classmethod | aggregate(cls, aggregation_pipeline: list, projection_model: Type[BaseModel] = None, session: Optional[ClientSession] = None) -> AggregationQuery ``` Aggregate over collection. Returns [AggregationQuery](https://roman-right.github.io/beanie/api/queries/`aggregationquery`) query object **Arguments**: - `aggregation_pipeline`: list - aggregation pipeline - `projection_model`: Type[BaseModel] - `session`: Optional[ClientSession] **Returns**: [AggregationQuery](https://roman-right.github.io/beanie/api/queries/`aggregationquery`) #### count ```python | @classmethod | async count(cls) -> int ``` Number of documents in the collections The same as find_all().count() **Returns**: int #### init\_collection ```python | @classmethod | async init_collection(cls, database: AsyncIOMotorDatabase, allow_index_dropping: bool) -> None ``` Internal CollectionMeta class creator **Arguments**: - `database`: AsyncIOMotorDatabase - motor database instance - `allow_index_dropping`: bool - if index dropping is allowed **Returns**: None #### get\_motor\_collection ```python | @classmethod | get_motor_collection(cls) -> AsyncIOMotorCollection ``` Get Motor Collection to access low level control **Returns**: AsyncIOMotorCollection #### inspect\_collection ```python | @classmethod | async inspect_collection(cls, session: Optional[ClientSession] = None) -> InspectionResult ``` Check, if documents, stored in the MongoDB collection are compatible with the Document schema **Returns**: InspectionResult python-beanie-1.29.0/docs/api/fields.md000066400000000000000000000016141473701376500176550ustar00rootroot00000000000000 # beanie.odm.fields #### Indexed ```python Indexed(typ, index_type=ASCENDING, **kwargs) ``` Returns a subclass of `typ` with an extra attribute `_indexed` as a tuple: - Index 0: `index_type` such as `pymongo.ASCENDING` - Index 1: `kwargs` passed to `IndexModel` When instantiated the type of the result will actually be `typ`. ## PydanticObjectId Objects ```python class PydanticObjectId(ObjectId) ``` Object Id field. Compatible with Pydantic. 
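A minimal illustrative sketch (the model and the hex string are hypothetical):

```python
from pydantic import BaseModel

from beanie import PydanticObjectId


class ItemRef(BaseModel):
    item_id: PydanticObjectId


# A 24-character hex string is validated and coerced into an ObjectId
ref = ItemRef(item_id="5f85f36d6dfecacc68428a46")
```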
## ExpressionField Objects ```python class ExpressionField(str) ``` #### \_\_getattr\_\_ ```python | __getattr__(item) ``` Get sub field **Arguments**: - `item`: name of the subfield **Returns**: ExpressionField python-beanie-1.29.0/docs/api/interfaces.md000066400000000000000000000107721473701376500205370ustar00rootroot00000000000000 # beanie.odm.interfaces.update ## UpdateMethods Objects ```python class UpdateMethods() ``` Update methods #### set ```python | set(expression: Dict[Union[ExpressionField, str], Any], session: Optional[ClientSession] = None) ``` Set values Example: ```python class Sample(Document): one: int await Document.find(Sample.one == 1).set({Sample.one: 100}) ``` Uses [Set operator](https://roman-right.github.io/beanie/api/operators/update/`set`) **Arguments**: - `expression`: Dict[Union[ExpressionField, str], Any] - keys and values to set - `session`: Optional[ClientSession] - pymongo session **Returns**: self #### current\_date ```python | current_date(expression: Dict[Union[ExpressionField, str], Any], session: Optional[ClientSession] = None) ``` Set current date Uses [CurrentDate operator](https://roman-right.github.io/beanie/api/operators/update/`currentdate`) **Arguments**: - `expression`: Dict[Union[ExpressionField, str], Any] - `session`: Optional[ClientSession] - pymongo session **Returns**: self #### inc ```python | inc(expression: Dict[Union[ExpressionField, str], Any], session: Optional[ClientSession] = None) ``` Increment Example: ```python class Sample(Document): one: int await Document.find(Sample.one == 1).inc({Sample.one: 100}) ``` Uses [Inc operator](https://roman-right.github.io/beanie/api/operators/update/`inc`) **Arguments**: - `expression`: Dict[Union[ExpressionField, str], Any] - `session`: Optional[ClientSession] - pymongo session **Returns**: self # beanie.odm.interfaces.aggregate ## AggregateMethods Objects ```python class AggregateMethods() ``` Aggregate methods #### sum ```python | async sum(field: Union[str, ExpressionField], session: Optional[ClientSession] = None) -> float ``` Sum of values of the given field Example: ```python class Sample(Document): price: int count: int sum_count = await Document.find(Sample.price <= 100).sum(Sample.count) ``` **Arguments**: - `field`: Union[str, ExpressionField] - `session`: Optional[ClientSession] - pymongo session **Returns**: float - sum #### avg ```python | async avg(field, session: Optional[ClientSession] = None) -> float ``` Average of values of the given field Example: ```python class Sample(Document): price: int count: int avg_count = await Document.find(Sample.price <= 100).avg(Sample.count) ``` **Arguments**: - `field`: Union[str, ExpressionField] - `session`: Optional[ClientSession] - pymongo session **Returns**: float - avg #### max ```python | async max(field: Union[str, ExpressionField], session: Optional[ClientSession] = None) -> Any ``` Max of the values of the given field Example: ```python class Sample(Document): price: int count: int max_count = await Document.find(Sample.price <= 100).max(Sample.count) ``` **Arguments**: - `field`: Union[str, ExpressionField] - `session`: Optional[ClientSession] - pymongo session **Returns**: float - max #### min ```python | async min(field: Union[str, ExpressionField], session: Optional[ClientSession] = None) -> Any ``` Min of the values of the given field Example: ```python class Sample(Document): price: int count: int min_count = await Document.find(Sample.price <= 100).min(Sample.count) ``` **Arguments**: - `field`: Union[str, ExpressionField] - 
`session`: Optional[ClientSession] - pymongo session **Returns**: float - max # beanie.odm.interfaces.session ## SessionMethods Objects ```python class SessionMethods() ``` Session methods #### set\_session ```python | set_session(session: Optional[ClientSession] = None) ``` Set pymongo session **Arguments**: - `session`: Optional[ClientSession] - pymongo session **Returns**: python-beanie-1.29.0/docs/api/operators/000077500000000000000000000000001473701376500201015ustar00rootroot00000000000000python-beanie-1.29.0/docs/api/operators/find.md000066400000000000000000000347661473701376500213630ustar00rootroot00000000000000 # beanie.odm.operators.find.comparison ## Eq Objects ```python class Eq(BaseFindComparisonOperator) ``` `equal` query operator **Example**: ```python class Product(Document): price: float Eq(Product.price, 2) ``` Will return query object like ```python {"price": 2} ``` MongoDB doc: ## GT Objects ```python class GT(BaseFindComparisonOperator) ``` `$gt` query operator **Example**: ```python class Product(Document): price: float GT(Product.price, 2) ``` Will return query object like ```python {"price": {"$gt": 2}} ``` MongoDB doc: ## GTE Objects ```python class GTE(BaseFindComparisonOperator) ``` `$gte` query operator **Example**: ```python class Product(Document): price: float GTE(Product.price, 2) ``` Will return query object like ```python {"price": {"$gte": 2}} ``` MongoDB doc: ## In Objects ```python class In(BaseFindComparisonOperator) ``` `$in` query operator **Example**: ```python class Product(Document): price: float In(Product.price, [2, 3, 4]) ``` Will return query object like ```python {"price": {"$in": [2, 3, 4]}} ``` MongoDB doc: ## NotIn Objects ```python class NotIn(BaseFindComparisonOperator) ``` `$nin` query operator **Example**: ```python class Product(Document): price: float NotIn(Product.price, [2, 3, 4]) ``` Will return query object like ```python {"price": {"$nin": [2, 3, 4]}} ``` MongoDB doc: ## LT Objects ```python class LT(BaseFindComparisonOperator) ``` `$lt` query operator **Example**: ```python class Product(Document): price: float LT(Product.price, 2) ``` Will return query object like ```python {"price": {"$lt": 2}} ``` MongoDB doc: ## LTE Objects ```python class LTE(BaseFindComparisonOperator) ``` `$lte` query operator **Example**: ```python class Product(Document): price: float LTE(Product.price, 2) ``` Will return query object like ```python {"price": {"$lte": 2}} ``` MongoDB doc: ## NE Objects ```python class NE(BaseFindComparisonOperator) ``` `$ne` query operator **Example**: ```python class Product(Document): price: float NE(Product.price, 2) ``` Will return query object like ```python {"price": {"$ne": 2}} ``` MongoDB doc: # beanie.odm.operators.find.logical ## Or Objects ```python class Or(LogicalOperatorForListOfExpressions) ``` `$or` query operator **Example**: ```python class Product(Document): price: float category: str Or({Product.price<10}, {Product.category=="Sweets"}) ``` Will return query object like ```python {"$or": [{"price": {"$lt": 10}}, {"category": "Sweets"}]} ``` MongoDB doc: ## And Objects ```python class And(LogicalOperatorForListOfExpressions) ``` `$and` query operator **Example**: ```python class Product(Document): price: float category: str And({Product.price<10}, {Product.category=="Sweets"}) ``` Will return query object like ```python {"$and": [{"price": {"$lt": 10}}, {"category": "Sweets"}]} ``` MongoDB doc: ## Nor Objects ```python class Nor(BaseFindLogicalOperator) ``` `$nor` query operator **Example**: ```python 
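# Nor selects documents that fail every one of the listed predicates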
class Product(Document): price: float category: str Nor({Product.price<10}, {Product.category=="Sweets"}) ``` Will return query object like ```python {"$nor": [{"price": {"$lt": 10}}, {"category": "Sweets"}]} ``` MongoDB doc: ## Not Objects ```python class Not(BaseFindLogicalOperator) ``` `$not` query operator **Example**: ```python class Product(Document): price: float category: str Not({Product.price<10}) ``` Will return query object like ```python {"$not": {"price": {"$lt": 10}}} ``` MongoDB doc: # beanie.odm.operators.find.element ## Exists Objects ```python class Exists(BaseFindElementOperator) ``` `$exists` query operator **Example**: ```python class Product(Document): price: float Exists(Product.price, True) ``` Will return query object like ```python {"price": {"$exists": True}} ``` MongoDB doc: ## Type Objects ```python class Type(BaseFindElementOperator) ``` `$type` query operator **Example**: ```python class Product(Document): price: float Type(Product.price, "decimal") ``` Will return query object like ```python {"price": {"$type": "decimal"}} ``` MongoDB doc: # beanie.odm.operators.find.evaluation ## Expr Objects ```python class Expr(BaseFindEvaluationOperator) ``` `$type` query operator **Example**: ```python class Sample(Document): one: int two: int Expr({"$gt": [ "$one" , "$two" ]}) ``` Will return query object like ```python {"$expr": {"$gt": [ "$one" , "$two" ]}} ``` MongoDB doc: ## JsonSchema Objects ```python class JsonSchema(BaseFindEvaluationOperator) ``` `$jsonSchema` query operator MongoDB doc: ## Mod Objects ```python class Mod(BaseFindEvaluationOperator) ``` `$mod` query operator **Example**: ```python class Sample(Document): one: int Mod(Sample.one, 4, 0) ``` Will return query object like ```python { "one": { "$mod": [ 4, 0 ] } } ``` MongoDB doc: ## RegEx Objects ```python class RegEx(BaseFindEvaluationOperator) ``` `$regex` query operator MongoDB doc: ## Text Objects ```python class Text(BaseFindEvaluationOperator) ``` `$text` query operator **Example**: ```python class Sample(Document): description: Indexed(str, pymongo.TEXT) Text("coffee") ``` Will return query object like ```python { "$text": { "$search": "coffee" , "$caseSensitive": False, "$diacriticSensitive": False } } ``` MongoDB doc: #### \_\_init\_\_ ```python | __init__(search: str, language: Optional[str] = None, case_sensitive: bool = False, diacritic_sensitive: bool = False) ``` **Arguments**: - `search`: str - `language`: Optional[str] = None - `case_sensitive`: bool = False - `diacritic_sensitive`: bool = False ## Where Objects ```python class Where(BaseFindEvaluationOperator) ``` `$where` query operator MongoDB doc: # beanie.odm.operators.find.geospatial ## GeoIntersects Objects ```python class GeoIntersects(BaseFindGeospatialOperator) ``` `$geoIntersects` query operator **Example**: ```python class GeoObject(BaseModel): type: str = "Point" coordinates: Tuple[float, float] class Place(Document): geo: GeoObject class Settings: name = "places" indexes = [ [("geo", pymongo.GEOSPHERE)], # GEO index ] GeoIntersects(Place.geo, "Polygon", [[0,0], [1,1], [3,3]]) ``` Will return query object like ```python { "geo": { "$geoIntersects": { "$geometry": { "type": "Polygon", "coordinates": [[0,0], [1,1], [3,3]], } } } } ``` MongoDB doc: ## GeoWithin Objects ```python class GeoWithin(BaseFindGeospatialOperator) ``` `$geoWithin` query operator **Example**: ```python class GeoObject(BaseModel): type: str = "Point" coordinates: Tuple[float, float] class Place(Document): geo: GeoObject class Settings: name = 
"places" indexes = [ [("geo", pymongo.GEOSPHERE)], # GEO index ] GeoWithin(Place.geo, "Polygon", [[0,0], [1,1], [3,3]]) ``` Will return query object like ```python { "geo": { "$geoWithin": { "$geometry": { "type": "Polygon", "coordinates": [[0,0], [1,1], [3,3]], } } } } ``` MongoDB doc: ## Near Objects ```python class Near(BaseFindGeospatialOperator) ``` `$near` query operator **Example**: ```python class GeoObject(BaseModel): type: str = "Point" coordinates: Tuple[float, float] class Place(Document): geo: GeoObject class Settings: name = "places" indexes = [ [("geo", pymongo.GEOSPHERE)], # GEO index ] Near(Place.geo, 1.2345, 2.3456, min_distance=500) ``` Will return query object like ```python { "geo": { "$near": { "$geometry": { "type": "Point", "coordinates": [1.2345, 2.3456], }, "$maxDistance": 500, } } } ``` MongoDB doc: ## NearSphere Objects ```python class NearSphere(Near) ``` `$nearSphere` query operator **Example**: ```python class GeoObject(BaseModel): type: str = "Point" coordinates: Tuple[float, float] class Place(Document): geo: GeoObject class Settings: name = "places" indexes = [ [("geo", pymongo.GEOSPHERE)], # GEO index ] NearSphere(Place.geo, 1.2345, 2.3456, min_distance=500) ``` Will return query object like ```python { "geo": { "$nearSphere": { "$geometry": { "type": "Point", "coordinates": [1.2345, 2.3456], }, "$maxDistance": 500, } } } ``` MongoDB doc: # beanie.odm.operators.find.array ## All Objects ```python class All(BaseFindArrayOperator) ``` `$all` array query operator **Example**: ```python class Sample(Document): results: List[int] All(Sample.results, [80, 85]) ``` Will return query object like ```python {"results": {"$all": [80, 85]}} ``` MongoDB doc: ## ElemMatch Objects ```python class ElemMatch(BaseFindArrayOperator) ``` `$elemMatch` array query operator **Example**: ```python class Sample(Document): results: List[int] ElemMatch(Sample.results, [80, 85]) ``` Will return query object like ```python {"results": {"$elemMatch": [80, 85]}} ``` MongoDB doc: ## Size Objects ```python class Size(BaseFindArrayOperator) ``` `$size` array query operator **Example**: ```python class Sample(Document): results: List[int] Size(Sample.results, 2) ``` Will return query object like ```python {"results": {"$size": 2}} ``` MongoDB doc: # beanie.odm.operators.find.bitwise ## BitsAllClear Objects ```python class BitsAllClear(BaseFindBitwiseOperator) ``` `$bitsAllClear` query operator MongoDB doc: ## BitsAllSet Objects ```python class BitsAllSet(BaseFindBitwiseOperator) ``` `$bitsAllSet` query operator MongoDB doc: https://docs.mongodb.com/manual/reference/operator/query/bitsAllSet/ ## BitsAnyClear Objects ```python class BitsAnyClear(BaseFindBitwiseOperator) ``` `$bitsAnyClear` query operator MongoDB doc: https://docs.mongodb.com/manual/reference/operator/query/bitsAnyClear/ ## BitsAnySet Objects ```python class BitsAnySet(BaseFindBitwiseOperator) ``` `$bitsAnySet` query operator MongoDB doc: https://docs.mongodb.com/manual/reference/operator/query/bitsAnySet/ python-beanie-1.29.0/docs/api/operators/update.md000066400000000000000000000135311473701376500217100ustar00rootroot00000000000000 # beanie.odm.operators.update.general ## Set Objects ```python class Set(BaseUpdateGeneralOperator) ``` `$set` update query operator **Example**: ```python class Sample(Document): one: int Set({Sample.one, 2}) ``` Will return query object like ```python {"$set": {"one": 2}} ``` MongoDB doc: ## CurrentDate Objects ```python class CurrentDate(BaseUpdateGeneralOperator) ``` `$currentDate` update 
## CurrentDate Objects

```python
class CurrentDate(BaseUpdateGeneralOperator)
```

`$currentDate` update query operator

**Example**:

```python
class Sample(Document):
    ts: datetime

CurrentDate({Sample.ts: True})
```

Will return query object like

```python
{"$currentDate": {"ts": True}}
```

MongoDB doc: https://docs.mongodb.com/manual/reference/operator/update/currentDate/

## Inc Objects

```python
class Inc(BaseUpdateGeneralOperator)
```

`$inc` update query operator

**Example**:

```python
class Sample(Document):
    one: int

Inc({Sample.one: 2})
```

Will return query object like

```python
{"$inc": {"one": 2}}
```

MongoDB doc: https://docs.mongodb.com/manual/reference/operator/update/inc/

## Min Objects

```python
class Min(BaseUpdateGeneralOperator)
```

`$min` update query operator

**Example**:

```python
class Sample(Document):
    one: int

Min({Sample.one: 2})
```

Will return query object like

```python
{"$min": {"one": 2}}
```

MongoDB doc: https://docs.mongodb.com/manual/reference/operator/update/min/

## Max Objects

```python
class Max(BaseUpdateGeneralOperator)
```

`$max` update query operator

**Example**:

```python
class Sample(Document):
    one: int

Max({Sample.one: 2})
```

Will return query object like

```python
{"$max": {"one": 2}}
```

MongoDB doc: https://docs.mongodb.com/manual/reference/operator/update/max/

## Mul Objects

```python
class Mul(BaseUpdateGeneralOperator)
```

`$mul` update query operator

**Example**:

```python
class Sample(Document):
    one: int

Mul({Sample.one: 2})
```

Will return query object like

```python
{"$mul": {"one": 2}}
```

MongoDB doc: https://docs.mongodb.com/manual/reference/operator/update/mul/

## Rename Objects

```python
class Rename(BaseUpdateGeneralOperator)
```

`$rename` update query operator

MongoDB doc: https://docs.mongodb.com/manual/reference/operator/update/rename/

## SetOnInsert Objects

```python
class SetOnInsert(BaseUpdateGeneralOperator)
```

`$setOnInsert` update query operator

MongoDB doc: https://docs.mongodb.com/manual/reference/operator/update/setOnInsert/

## Unset Objects

```python
class Unset(BaseUpdateGeneralOperator)
```

`$unset` update query operator

MongoDB doc: https://docs.mongodb.com/manual/reference/operator/update/unset/

# beanie.odm.operators.update.array

## AddToSet Objects

```python
class AddToSet(BaseUpdateArrayOperator)
```

`$addToSet` update array query operator

**Example**:

```python
class Sample(Document):
    results: List[int]

AddToSet({Sample.results: 2})
```

Will return query object like

```python
{"$addToSet": {"results": 2}}
```

MongoDB docs: https://docs.mongodb.com/manual/reference/operator/update/addToSet/

## Pop Objects

```python
class Pop(BaseUpdateArrayOperator)
```

`$pop` update array query operator

**Example**:

```python
class Sample(Document):
    results: List[int]

Pop({Sample.results: -1})
```

Will return query object like

```python
{"$pop": {"results": -1}}
```

MongoDB docs: https://docs.mongodb.com/manual/reference/operator/update/pop/

## Pull Objects

```python
class Pull(BaseUpdateArrayOperator)
```

`$pull` update array query operator

**Example**:

```python
class Sample(Document):
    results: List[int]

Pull(In(Sample.results, [1, 2, 3, 4, 5]))
```

Will return query object like

```python
{"$pull": {"results": {"$in": [1, 2, 3, 4, 5]}}}
```

MongoDB docs: https://docs.mongodb.com/manual/reference/operator/update/pull/

## Push Objects

```python
class Push(BaseUpdateArrayOperator)
```

`$push` update array query operator

**Example**:

```python
class Sample(Document):
    results: List[int]

Push({Sample.results: 1})
```

Will return query object like

```python
{"$push": {"results": 1}}
```

MongoDB docs: https://docs.mongodb.com/manual/reference/operator/update/push/

## PullAll Objects

```python
class PullAll(BaseUpdateArrayOperator)
```

`$pullAll` update array query operator

**Example**:

```python
class Sample(Document):
    results: List[int]

PullAll({Sample.results: [0, 5]})
```

Will return query object like

```python
{"$pullAll": {"results": [0, 5]}}
```

MongoDB docs: https://docs.mongodb.com/manual/reference/operator/update/pullAll/

# beanie.odm.operators.update.bitwise

## Bit Objects

```python
class Bit(BaseUpdateBitwiseOperator)
```

`$bit` update query operator

MongoDB doc: https://docs.mongodb.com/manual/reference/operator/update/bit/
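Like the general operators, array update operators plug into `update()` calls. A minimal usage sketch, assuming the `Sample` model with its `results: List[int]` field and an initialized Beanie setup:

```python
# Add 90 to the `results` array of all documents, skipping duplicates
await Sample.find_all().update(AddToSet({Sample.results: 90}))

# Remove the listed values from `results` everywhere
await Sample.find_all().update(Pull(In(Sample.results, [0, 5])))
```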
python-beanie-1.29.0/docs/api/queries.md000066400000000000000000000305411473701376500200650ustar00rootroot00000000000000

# beanie.odm.queries.find

## FindQuery Objects

```python
class FindQuery(UpdateMethods, SessionMethods)
```

Find Query base class

Inherited from:

- [SessionMethods](https://roman-right.github.io/beanie/api/interfaces/#sessionmethods)
- [UpdateMethods](https://roman-right.github.io/beanie/api/interfaces/#updatemethods)

#### update

```python
 | update(*args: Union[Dict[str, Any], Mapping[str, Any]], *, session: Optional[ClientSession] = None)
```

Create an Update query with the given modifications and provide the search criteria there

**Arguments**:

- `args`: *Union[dict, Mapping] - the modifications to apply.
- `session`: Optional[ClientSession]

**Returns**: UpdateMany query

#### delete

```python
 | delete(session: Optional[ClientSession] = None) -> Union[DeleteOne, DeleteMany]
```

Provide search criteria to the Delete query

**Arguments**:

- `session`: Optional[ClientSession]

**Returns**: Union[DeleteOne, DeleteMany]

#### project

```python
 | project(projection_model: Optional[Type[BaseModel]]) -> FindQueryType
```

Apply projection parameter

**Arguments**:

- `projection_model`: Optional[Type[BaseModel]] - projection model

**Returns**: self

## FindMany Objects

```python
class FindMany(FindQuery, BaseCursorQuery, AggregateMethods)
```

Find Many query class

Inherited from:

- [FindQuery](https://roman-right.github.io/beanie/api/queries/#findquery)
- [BaseCursorQuery](https://roman-right.github.io/beanie/api/queries/#basecursorquery) - async generator
- [AggregateMethods](https://roman-right.github.io/beanie/api/interfaces/#aggregatemethods)

#### find\_many

```python
 | find_many(*args: Union[Dict[str, Any], Mapping[str, Any], bool], *, skip: Optional[int] = None, limit: Optional[int] = None, sort: Union[None, str, List[Tuple[str, SortDirection]]] = None, projection_model: Optional[Type[BaseModel]] = None, session: Optional[ClientSession] = None) -> "FindMany"
```

Find many documents by criteria

**Arguments**:

- `args`: *Union[Dict[str, Any], Mapping[str, Any], bool] - search criteria
- `skip`: Optional[int] - The number of documents to omit.
- `limit`: Optional[int] - The maximum number of results to return.
- `sort`: Union[None, str, List[Tuple[str, SortDirection]]] - A key or a list of (key, direction) pairs specifying the sort order for this query.
- `projection_model`: Optional[Type[BaseModel]] - projection model
- `session`: Optional[ClientSession] - pymongo session

**Returns**: FindMany - query instance

#### find

```python
 | find(*args: Union[Dict[str, Any], Mapping[str, Any], bool], *, skip: Optional[int] = None, limit: Optional[int] = None, sort: Union[None, str, List[Tuple[str, SortDirection]]] = None, projection_model: Optional[Type[BaseModel]] = None, session: Optional[ClientSession] = None) -> "FindMany"
```

The same as `find_many(...)`
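These query methods chain, so criteria, sorting, and pagination are usually combined in a single expression. A minimal sketch, assuming a `Product` document with `price` and `category` fields:

```python
products = await Product.find(
    Product.category == "Sweets"
).sort(-Product.price).skip(5).limit(10).to_list()
```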
#### sort

```python
 | sort(*args: Optional[
 |     Union[
 |         str, Tuple[str, SortDirection], List[Tuple[str, SortDirection]]
 |     ]
 | ]) -> "FindMany"
```

Add sort parameters

**Arguments**:

- `args`: Union[str, Tuple[str, SortDirection], List[Tuple[str, SortDirection]]] - A key or a tuple (key, direction) or a list of (key, direction) pairs specifying the sort order for this query.

**Returns**: self

#### skip

```python
 | skip(n: Optional[int]) -> "FindMany"
```

Set skip parameter

**Arguments**:

- `n`: int

**Returns**: self

#### limit

```python
 | limit(n: Optional[int]) -> "FindMany"
```

Set limit parameter

**Arguments**:

- `n`: int

**Returns**: self

#### update\_many

```python
 | update_many(*args: Union[Dict[str, Any], Mapping[str, Any]], *, session: Optional[ClientSession] = None) -> UpdateMany
```

Provide search criteria to the [UpdateMany](https://roman-right.github.io/beanie/api/queries/#updatemany) query

**Arguments**:

- `args`: *Union[dict, Mapping] - the modifications to apply.
- `session`: Optional[ClientSession]

**Returns**: [UpdateMany](https://roman-right.github.io/beanie/api/queries/#updatemany) query

#### delete\_many

```python
 | delete_many(session: Optional[ClientSession] = None) -> DeleteMany
```

Provide search criteria to the [DeleteMany](https://roman-right.github.io/beanie/api/queries/#deletemany) query

**Arguments**:

- `session`: Optional[ClientSession]

**Returns**: [DeleteMany](https://roman-right.github.io/beanie/api/queries/#deletemany) query

#### count

```python
 | async count() -> int
```

Number of found documents

**Returns**: int

#### aggregate

```python
 | aggregate(aggregation_pipeline: List[Any], projection_model: Optional[Type[BaseModel]] = None, session: Optional[ClientSession] = None) -> AggregationQuery
```

Provide search criteria to the [AggregationQuery](https://roman-right.github.io/beanie/api/queries/#aggregationquery)

**Arguments**:

- `aggregation_pipeline`: list - aggregation pipeline. MongoDB doc: https://docs.mongodb.com/manual/core/aggregation-pipeline/
- `projection_model`: Type[BaseModel] - Projection Model
- `session`: Optional[ClientSession] - PyMongo session

**Returns**: [AggregationQuery](https://roman-right.github.io/beanie/api/queries/#aggregationquery)
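A minimal aggregation sketch, assuming a `Product` document with `category` and `price` fields and a hypothetical `TotalByCategory` projection model:

```python
from pydantic import BaseModel, Field

class TotalByCategory(BaseModel):
    id: str = Field(None, alias="_id")
    total: float

totals = await Product.find(Product.price < 10).aggregate(
    [{"$group": {"_id": "$category", "total": {"$avg": "$price"}}}],
    projection_model=TotalByCategory,
).to_list()
```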
## FindOne Objects

```python
class FindOne(FindQuery)
```

Find One query class

Inherited from:

- [FindQuery](https://roman-right.github.io/beanie/api/queries/#findquery)

#### find\_one

```python
 | find_one(*args: Union[Dict[str, Any], Mapping[str, Any], bool], *, projection_model: Optional[Type[BaseModel]] = None, session: Optional[ClientSession] = None) -> "FindOne"
```

Find one document by criteria

**Arguments**:

- `args`: *Union[Dict[str, Any], Mapping[str, Any], bool] - search criteria
- `projection_model`: Optional[Type[BaseModel]] - projection model
- `session`: Optional[ClientSession] - pymongo session

**Returns**: FindOne - query instance

#### update\_one

```python
 | update_one(*args: Union[Dict[str, Any], Mapping[str, Any]], *, session: Optional[ClientSession] = None) -> UpdateOne
```

Create an [UpdateOne](https://roman-right.github.io/beanie/api/queries/#updateone) query using the modifications and provide the search criteria there

**Arguments**:

- `args`: *Union[dict, Mapping] - the modifications to apply
- `session`: Optional[ClientSession] - PyMongo session

**Returns**: [UpdateOne](https://roman-right.github.io/beanie/api/queries/#updateone) query

#### delete\_one

```python
 | delete_one(session: Optional[ClientSession] = None) -> DeleteOne
```

Provide search criteria to the [DeleteOne](https://roman-right.github.io/beanie/api/queries/#deleteone) query

**Arguments**:

- `session`: Optional[ClientSession] - PyMongo session

**Returns**: [DeleteOne](https://roman-right.github.io/beanie/api/queries/#deleteone) query

#### replace\_one

```python
 | async replace_one(document: "DocType", session: Optional[ClientSession] = None) -> UpdateResult
```

Replace the found document with the provided one

**Arguments**:

- `document`: Document - document, which will replace the found one
- `session`: Optional[ClientSession] - PyMongo session

**Returns**: UpdateResult

#### \_\_await\_\_

```python
 | __await__()
```

Run the query

**Returns**: BaseModel

# beanie.odm.queries.update

## UpdateQuery Objects

```python
class UpdateQuery(UpdateMethods, SessionMethods)
```

Update Query base class

Inherited from:

- [SessionMethods](https://roman-right.github.io/beanie/api/interfaces/#sessionmethods)
- [UpdateMethods](https://roman-right.github.io/beanie/api/interfaces/#updatemethods)

#### update

```python
 | update(*args: Union[Dict[str, Any], Mapping[str, Any]], *, session: Optional[ClientSession] = None) -> "UpdateQuery"
```

Provide modifications to the update query.

**Arguments**:

- `args`: *Union[dict, Mapping] - the modifications to apply.
- `session`: Optional[ClientSession]

**Returns**: UpdateMany query

## UpdateMany Objects

```python
class UpdateMany(UpdateQuery)
```

Update Many query class

Inherited from:

- [UpdateQuery](https://roman-right.github.io/beanie/api/queries/#updatequery)

#### update\_many

```python
 | update_many(*args: Union[Dict[str, Any], Mapping[str, Any]], *, session: Optional[ClientSession] = None)
```

Provide modifications to the update query

**Arguments**:

- `args`: *Union[dict, Mapping] - the modifications to apply.
- `session`: Optional[ClientSession]

**Returns**: UpdateMany query

#### \_\_await\_\_

```python
 | __await__() -> UpdateResult
```

Run the query

**Returns**: UpdateResult

## UpdateOne Objects

```python
class UpdateOne(UpdateQuery)
```

Update One query class

Inherited from:

- [UpdateQuery](https://roman-right.github.io/beanie/api/queries/#updatequery)

#### update\_one

```python
 | update_one(*args: Union[Dict[str, Any], Mapping[str, Any]], *, session: Optional[ClientSession] = None)
```

Provide modifications to the update query. The same as `update()`

**Arguments**:

- `args`: *Union[dict, Mapping] - the modifications to apply.
- `session`: Optional[ClientSession]

**Returns**: UpdateMany query

#### \_\_await\_\_

```python
 | __await__() -> UpdateResult
```

Run the query

**Returns**: UpdateResult

# beanie.odm.queries.delete

## DeleteQuery Objects

```python
class DeleteQuery(SessionMethods)
```

Deletion Query

## DeleteMany Objects

```python
class DeleteMany(DeleteQuery)
```

#### \_\_await\_\_

```python
 | __await__() -> DeleteResult
```

Run the query

**Returns**: DeleteResult

## DeleteOne Objects

```python
class DeleteOne(DeleteQuery)
```

#### \_\_await\_\_

```python
 | __await__() -> DeleteResult
```

Run the query

**Returns**: DeleteResult

# beanie.odm.queries.aggregation

## AggregationQuery Objects

```python
class AggregationQuery(BaseCursorQuery, SessionMethods)
```

Aggregation Query

Inherited from:

- [SessionMethods](https://roman-right.github.io/beanie/api/interfaces/#sessionmethods) - session methods
- [BaseCursorQuery](https://roman-right.github.io/beanie/api/queries/#basecursorquery) - async generator
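Because `FindMany` and `AggregationQuery` behave as async generators (via `BaseCursorQuery`, covered next), results can also be consumed by async iteration instead of `to_list()`. A minimal sketch, assuming a `Product` document:

```python
async for product in Product.find(Product.price < 10):
    print(product.price)
```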
# beanie.odm.queries.cursor

## BaseCursorQuery Objects

```python
class BaseCursorQuery()
```

BaseCursorQuery class. Wrapper over AsyncIOMotorCursor, which parses results with the model

#### to\_list

```python
 | async to_list(length: Optional[int] = None) -> Union[List[BaseModel], List[Dict[str, Any]]]
```

Get list of documents

**Arguments**:

- `length`: Optional[int] - length of the list

**Returns**: Union[List[BaseModel], List[Dict[str, Any]]]

python-beanie-1.29.0/docs/articles/000077500000000000000000000000001473701376500171205ustar00rootroot00000000000000python-beanie-1.29.0/docs/articles/1.8.0.md000066400000000000000000000205051473701376500201100ustar00rootroot00000000000000

I'm happy to introduce to you the new version of Beanie and the many new features that come with it.

Here is the feature list:

- Relations
- Event-based actions
- Cache
- Revision

## Relations

This feature is perhaps the most anticipated of all. It took some time, but finally, it is here. Relations.

Documents can contain links to other documents in their fields.

*Only top-level fields are fully supported for now.*

Direct link to the document:

```python
from beanie import Document, Link

class Door(Document):
    height: int = 2
    width: int = 1

class House(Document):
    name: str
    door: Link[Door]  # This is the link
```

List of the links:

```python
from typing import List

from beanie import Document, Link

class Window(Document):
    x: int = 10
    y: int = 10

class House(Document):
    name: str
    door: Link[Door]
    windows: List[Link[Window]]  # This is the list of the links
```

Other link patterns are not supported for now. If you need something more specific for your use-case, please leave an issue on the GitHub page -

### Write

The next write methods support relations:

- `insert(...)`
- `replace(...)`
- `save(...)`

To apply the writing method to the linked documents, you should set the respective `link_rule` parameter

```python
house.windows = [Window(x=100, y=100)]
house.name = "NEW NAME"

# The next call will insert a new window object
# and replace the house instance with updated data
await house.save(link_rule=WriteRules.WRITE)

# `insert` and `replace` methods will work the same way
```

Or Beanie can ignore internal links with the `link_rule` parameter `WriteRules.DO_NOTHING`

```python
house.door.height = 3
house.name = "NEW NAME"

# The next call will just replace the house instance
# with new data, but the linked door object will not be synced
await house.replace(link_rule=WriteRules.DO_NOTHING)

# `insert` and `save` methods will work the same way
```

### Fetch

#### Prefetch

You can fetch linked documents on the find query step, using the parameter `fetch_links`

```python
houses = await House.find(
    House.name == "test",
    fetch_links=True
).to_list()
```

All the find methods are supported:

- find
- find_one
- get

Beanie uses a single aggregation query under the hood to fetch all the linked documents. This operation is very efficient.

#### On-demand fetch

If you don't use prefetching, linked documents will be presented as objects of the `Link` class. You can then fetch them manually.

To fetch all the linked documents you can use the `fetch_all_links` method

```python
await house.fetch_all_links()
```

It will fetch all the linked documents and replace `Link` objects with them.

Or you can fetch a single field:

```python
await house.fetch_link(House.door)
```

This will fetch the Door object and put it in the `door` field of the `house` object.
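To tie the write and fetch parts together, here is a minimal end-to-end sketch. The models are the ones defined above; the connection string, database name, and the `init_beanie` call follow the standard Beanie initialization and are assumptions of this sketch:

```python
import asyncio

from motor.motor_asyncio import AsyncIOMotorClient
from beanie import init_beanie, WriteRules

async def main():
    client = AsyncIOMotorClient("mongodb://localhost:27017")
    await init_beanie(
        database=client.test_db,
        document_models=[Door, Window, House],
    )

    house = House(name="test", door=Door(), windows=[Window()])
    # Insert the house together with its linked documents
    await house.insert(link_rule=WriteRules.WRITE)

    # Fetch it back with all the links resolved
    found = await House.find_one(House.name == "test", fetch_links=True)
    print(found.door.height)

asyncio.run(main())
```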
### Delete

The delete method works the same way as the write operations, but it uses different rules:

To delete all the links on document deletion, you should use the `DeleteRules.DELETE_LINKS` value for the `link_rule` parameter

```python
await house.delete(link_rule=DeleteRules.DELETE_LINKS)
```

To keep linked documents you can use the `DO_NOTHING` rule

```python
await house.delete(link_rule=DeleteRules.DO_NOTHING)
```

## Event-based actions

You can register methods as pre- or post- actions for document events like `insert`, `replace`, etc.

Currently supported events:

- Insert
- Replace
- SaveChanges
- ValidateOnSave

To register an action you can use the `@before_event` and `@after_event` decorators respectively.

```python
from beanie import Insert, Replace

class Sample(Document):
    num: int
    name: str

    @before_event(Insert)
    def capitalize_name(self):
        self.name = self.name.capitalize()

    @after_event(Replace)
    def num_change(self):
        self.num -= 1
```

It is possible to register an action for a list of events:

```python
from beanie import Insert, Replace

class Sample(Document):
    num: int
    name: str

    @before_event([Insert, Replace])
    def capitalize_name(self):
        self.name = self.name.capitalize()
```

This will capitalize the `name` field value before each document insert and replace.

Both sync and async methods can work as actions.

```python
from beanie import Insert, Replace

class Sample(Document):
    num: int
    name: str

    @after_event([Insert, Replace])
    async def send_callback(self):
        await client.send(self.id)
```

## Cache

All query results can be cached locally. This feature must be turned on in the `Settings` inner class explicitly.

```python
class Sample(Document):
    num: int
    name: str

    class Settings:
        use_cache = True
```

Beanie uses an LRU cache with an expiration time. You can set the `capacity` (the maximum number of cached queries) and the expiration time in the `Settings` inner class.

```python
class Sample(Document):
    num: int
    name: str

    class Settings:
        use_cache = True
        cache_expiration_time = datetime.timedelta(seconds=10)
        cache_capacity = 5
```

Any query will be cached for this document class.

```python
# on the first call it will go to the database
samples = await Sample.find(Sample.num > 10).to_list()

# on the second - it will use cache instead
samples = await Sample.find(Sample.num > 10).to_list()

await asyncio.sleep(15)

# if the expiration time was reached
# it will go to the database again
samples = await Sample.find(Sample.num > 10).to_list()
```

## Revision

This feature helps with concurrent operations. It stores `revision_id` together with the document and changes it on each document update. If an application tries to change a document whose local copy is outdated, an exception is raised. Only when the local copy is synced with the database is the application allowed to change the data. This helps to avoid data loss.

This feature must be turned on in the `Settings` inner class explicitly too.
```python
class Sample(Document):
    num: int
    name: str

    class Settings:
        use_revision = True
```

Any changing operation will check if the local copy of the document has the actual `revision_id` value:

```python
s = await Sample.find_one(Sample.name == "TestName")
s.num = 10

# If a concurrent process already changed the doc,
# the next operation will raise an error
await s.replace()
```

If you want to ignore revision and apply all the changes even if the local copy is outdated, you can use the parameter `ignore_revision`

```python
await s.replace(ignore_revision=True)
```

## Other

There is a bunch of smaller features presented in this release. I would like to mention a couple of them here.

### Save changes

Beanie can keep the document state that is synced with the database, to find local changes and save only them.

This feature must be turned on in the `Settings` inner class explicitly.

```python
class Sample(Document):
    num: int
    name: str

    class Settings:
        use_state_management = True
```

To save only the changed values, the `save_changes()` method should be used.

```python
s = await Sample.find_one(Sample.name == "Test")
s.num = 100
await s.save_changes()
```

The `save_changes()` method can be used only with already inserted documents.

### On save validation

Pydantic has a very useful config option to validate values on assignment - `validate_assignment = True`. Unfortunately, this is a heavy operation and doesn't fit some use cases.

You can validate all the values before saving the document (insert, replace, save, save_changes) with the Beanie config `validate_on_save` instead.

This feature must be turned on in the `Settings` inner class explicitly.

```python
class Sample(Document):
    num: int
    name: str

    class Settings:
        validate_on_save = True
```

If any field has a wrong value, it will raise an error on write operations (insert, replace, save, save_changes):

```python
sample = await Sample.find_one(Sample.name == "Test")
sample.num = "wrong value type"

# Next call will raise an error
await sample.replace()
```

## Conclusion

Thank you for reading. I hope you'll find these features useful.
If you would like to help with development - there are some issues at the GitHub page of the project -

## Links

- [Beanie Project](https://github.com/roman-right/beanie)
- [Documentation](https://roman-right.github.io/beanie/)
- [Discord Server](https://discord.gg/ZTTnM7rMaz)

python-beanie-1.29.0/docs/assets/000077500000000000000000000000001473701376500166145ustar00rootroot00000000000000python-beanie-1.29.0/docs/assets/color_scheme.css000066400000000000000000000003331473701376500217670ustar00rootroot00000000000000

```css
[data-md-color-scheme="slate"] {
  --md-primary-fg-color: #2e303e; /* Panel */
  --md-default-bg-color: #2e303e; /* BG */
  --md-typeset-a-color: #8e91aa;
  --md-accent-fg-color: #658eda
}
```

python-beanie-1.29.0/docs/assets/favicon.png000066400000000000000000000021051473701376500207470ustar00rootroot00000000000000python-beanie-1.29.0/docs/assets/logo.svg000066400000000000000000000176031473701376500203020ustar00rootroot00000000000000python-beanie-1.29.0/docs/batteries/000077500000000000000000000000001473701376500172745ustar00rootroot00000000000000python-beanie-1.29.0/docs/batteries/queue.md000066400000000000000000000222261473701376500207460ustar00rootroot00000000000000

# Task Queue

Task Queue is an advanced queue system for Beanie (MongoDB), designed to efficiently manage and process tasks. It features task priorities, states, dependencies, and automatic expiration. Different task queues can be processed together using the Worker class. Multiple workers can be run in separate processes using the Runner class.

## Example

```python
from beanie_batteries_queue import Task, Runner

class ExampleTask(Task):
    data: str

    async def run(self):
        self.data = self.data.upper()
        await self.save()

runner = Runner(task_classes=[ExampleTask])
runner.start()
```

## Installation

```shell
pip install beanie[queue]
```

## Task

### Declare a task class

```python
from beanie_batteries_queue import Task

class SimpleTask(Task):
    s: str
```

### Process a task

```python
from beanie_batteries_queue import State

# Producer
task = SimpleTask(s="test")
await task.push()

# Consumer
async for task in SimpleTask.queue():
    assert task.s == "test"
    # Do some work
    await task.finish()
    break

# Check that the task is finished
task = await SimpleTask.find_one({"s": "test"})
assert task.state == State.FINISHED
```

The async generator `SimpleTask.queue()` returns all unfinished tasks in the order they were created, or based on the priority if it was specified. It is an infinite loop, so you can use `break` to stop it.

You can also use `SimpleTask.pop()` to get the next task from the queue.
```python
from beanie_batteries_queue import State

# Producer
task = SimpleTask(s="test")
await task.push()

# Consumer
task = await SimpleTask.pop()
assert task.s == "test"
# Do some work
await task.finish()
```

### Task priority

There are three priority levels: `LOW`, `MEDIUM`, and `HIGH`. The default priority is `MEDIUM`. Tasks are popped from the queue in the following order: `HIGH`, `MEDIUM`, `LOW`.

```python
from beanie_batteries_queue import Priority

task1 = SimpleTask(s="test1", priority=Priority.LOW)
await task1.push()
task2 = SimpleTask(s="test2", priority=Priority.HIGH)
await task2.push()

async for task in SimpleTask.queue():
    assert task.s == "test2"
    await task.finish()
    break
```

### Task state

There are four states: `CREATED`, `RUNNING`, `FINISHED`, and `FAILED`. The default state is `CREATED`. When a task is pushed, it is in the `CREATED` state. When it gets popped from the queue, it is in the `RUNNING` state. The `FINISHED` and `FAILED` states should be set manually.

Finished:

```python
from beanie_batteries_queue import State

task = SimpleTask(s="test")
await task.push()

async for task in SimpleTask.queue():
    assert task.state == State.RUNNING
    await task.finish()
    break

task = await SimpleTask.find_one({"s": "test"})
assert task.state == State.FINISHED
```

Failed:

```python
from beanie_batteries_queue import State

task = SimpleTask(s="test")
await task.push()

async for task in SimpleTask.queue():
    assert task.state == State.RUNNING
    await task.fail()
    break

task = await SimpleTask.find_one({"s": "test"})
assert task.state == State.FAILED
```

### Task dependencies

You can specify that a task depends on another task. In this case, the task will be popped from the queue only when all its dependencies have finished.

```python
from beanie_batteries_queue import Task, DependencyType
from beanie_batteries_queue import Link
from pydantic import Field

class SimpleTask(Task):
    s: str

class TaskWithDirectDependency(Task):
    s: str
    direct_dependency: Link[SimpleTask] = Field(
        dependency_type=DependencyType.DIRECT
    )
```

```python
from beanie_batteries_queue import State

task1 = SimpleTask(s="test1")
await task1.push()
task2 = TaskWithDirectDependency(s="test2", direct_dependency=task1)
await task2.push()

task_from_queue = await TaskWithDirectDependency.pop()
# task2 is not popped from the queue because task1 is not finished yet
assert task_from_queue is None

await task1.finish()
task_from_queue = await TaskWithDirectDependency.pop()
# task2 is popped from the queue because task1 is finished
assert task_from_queue is not None
```

### Task dependencies with multiple links

You can specify that a task depends on multiple tasks. In this case, the task will be popped from the queue when all or any of its dependencies are finished. It is controlled by the `dependency_type` parameter.

All

```python
class TaskWithMultipleDependencies(Task):
    s: str
    list_of_dependencies: Link[SimpleTask] = Field(
        dependency_type=DependencyType.ALL_OF
    )
```

Any

```python
class TaskWithMultipleDependencies(Task):
    s: str
    list_of_dependencies: Link[SimpleTask] = Field(
        dependency_type=DependencyType.ANY_OF
    )
```
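A short flow sketch for the `ANY_OF` case. It assumes a hypothetical variant of the model above whose `list_of_dependencies` field is declared as `List[Link[SimpleTask]]`, and follows the semantics described in this section:

```python
parent1 = SimpleTask(s="parent1")
await parent1.push()
parent2 = SimpleTask(s="parent2")
await parent2.push()

child = TaskWithMultipleDependencies(
    s="child", list_of_dependencies=[parent1, parent2]
)
await child.push()

# Neither parent is finished yet, so the child is not available
assert await TaskWithMultipleDependencies.pop() is None

await parent1.finish()
# With ANY_OF, one finished dependency is enough
assert await TaskWithMultipleDependencies.pop() is not None
```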
Tasks can have multiple links with different dependency types.

```python
class TaskWithMultipleDependencies(Task):
    s: str
    list_of_dependencies_all: Link[SimpleTask] = Field(
        dependency_type=DependencyType.ALL_OF
    )
    list_of_dependencies_any: Link[SimpleTask] = Field(
        dependency_type=DependencyType.ANY_OF
    )
    direct_dependency: Link[SimpleTask] = Field(
        dependency_type=DependencyType.DIRECT
    )
```

### Expire time

You can specify the time after which the task will be removed from the queue, even if it is not finished or has failed. This is controlled by the `expireAfterSeconds` index, which is set to 24 hours by default.

```python
from pymongo import ASCENDING
from beanie_batteries_queue import Task

class TaskWithExpireTime(Task):
    s: str

    class Settings:
        indexes = [
            # Other indexes,
            # Expire after 5 minutes
            [("created_at", ASCENDING), ("expireAfterSeconds", 300)],
        ]
```

Finished or failed tasks are not immediately removed from the queue. They are removed after the expiration time. You can manually delete them using the `delete()` method.

## Queue

Queues are designed to manage tasks: they handle all the logic of creating, updating, and deleting tasks. Task logic should be defined in the `run` method of the task.

```python
from beanie_batteries_queue import Task

class ProcessTask(Task):
    data: str

    async def run(self):
        # Implement the logic for processing the task
        print(f"Processing task with data: {self.data}")
        self.data = self.data.upper()
        await self.save()
```

Now we can start the queue, and it will process all the tasks. Be aware that this runs an infinite loop. If you want to run other logic after starting the queue, you should schedule it with `asyncio.create_task()`.

```python
queue = ProcessTask.queue()
await queue.start()
```

### Stop the queue

You can stop the queue by calling the `stop()` method.

```python
await queue.stop()
```

### Queue settings

You can specify how frequently the queue will check for new tasks. The default value is 1 second.

```python
queue = ProcessTask.queue(sleep_time=60)  # 60 seconds
await queue.start()
```

## Worker

A queue can handle only one task model. To process multiple task models, you should use a worker, which runs multiple queues:

```python
from beanie_batteries_queue import Task, Worker

class ProcessTask(Task):
    data: str

    async def run(self):
        self.data = self.data.upper()
        await self.save()

class AnotherTask(Task):
    data: str

    async def run(self):
        self.data = self.data.upper()
        await self.save()

worker = Worker(task_classes=[ProcessTask, AnotherTask])
await worker.start()
```

Be aware that this runs an infinite loop. If you want to run other logic after starting the worker, you should schedule it with `asyncio.create_task()`, as in the sketch at the end of this section.

### Stop the worker

You can stop the worker by calling the `stop()` method.

```python
await worker.stop()
```

### Worker settings

You can specify how frequently the worker will check for new tasks. The default value is 1 second.

```python
worker = Worker(task_classes=[ProcessTask, AnotherTask], sleep_time=60)  # 60 seconds
await worker.start()
```
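Since `worker.start()` blocks on its polling loop, running it next to other application logic means scheduling it on the event loop, as mentioned above. A minimal sketch, assuming that `stop()` makes the running `start()` loop return, as described in this section:

```python
import asyncio

async def main():
    worker = Worker(task_classes=[ProcessTask, AnotherTask])
    worker_task = asyncio.create_task(worker.start())  # runs in the background

    # ... other application logic here ...
    await asyncio.sleep(10)

    await worker.stop()
    await worker_task  # wait for the worker loop to finish

asyncio.run(main())
```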
```python
from beanie_batteries_queue import Task, Runner

class ProcessTask(Task):
    data: str

    async def run(self):
        self.data = self.data.upper()
        await self.save()

class AnotherTask(Task):
    data: str

    async def run(self):
        self.data = self.data.upper()
        await self.save()

runner = Runner(task_classes=[ProcessTask, AnotherTask])
runner.start()
```

### Stop the runner

You can stop the runner by calling the `stop()` method.

```python
runner.stop()
```

### Runner settings

You can specify how many workers will be run. The default value is 1.

```python
runner = Runner(task_classes=[ProcessTask, AnotherTask], workers_count=4)
runner.start()
```

You can specify how frequently the workers will check for new tasks. The default value is 1 second.

```python
runner = Runner(task_classes=[ProcessTask, AnotherTask], sleep_time=60)  # 60 seconds
runner.start()
```

You can specify whether the `start` method should keep running while the workers are alive or return immediately. The default value is `True`.

```python
runner = Runner(task_classes=[ProcessTask, AnotherTask], run_indefinitely=False)
runner.start()
```

python-beanie-1.29.0/docs/changelog.md000066400000000000000000001365741473701376500175770ustar00rootroot00000000000000

# Changelog

Beanie project

## [1.29.0] - 2025-01-06

### Fix serialization of link/backlink and openapi schema generation

- Author - [staticxterm](https://github.com/staticxterm)
- PR

### Fix: `owner` model missing in `init_beanie` in inheritance documentation's inserts example

- Author - [ksayer](https://github.com/ksayer)
- PR

### Make `diacritic_sensitive` parameter optional to support $text operator on cosmos db

- Author - [mykolaskrynnyk](https://github.com/mykolaskrynnyk)
- PR

### Add tests with case of {id} in fastapi path

- Author - [dantetemplar](https://github.com/dantetemplar)
- PR

### Use strings to specify mongodb versions in ci

- Author - [Viicos](https://github.com/Viicos)
- PR

### fix: pydantic 2.10.x breaking change

- Author - [mdaffad](https://github.com/mdaffad)
- PR

### Bulk writer improving & bulk_writer method for document and possibility to bypass mongo document validation + comment parameter

- Author - [CAPITAINMARVEL](https://github.com/CAPITAINMARVEL)
- PR

### Add coverage configuration to pyproject.toml

- Author - [staticxterm](https://github.com/staticxterm)
- PR

[1.29.0]: https://pypi.org/project/beanie/1.29.0

## [1.28.0] - 2024-12-05

### Fix kwargs/args untyped

- Author - [CAPITAINMARVEL](https://github.com/CAPITAINMARVEL)
- PR

### Update pre-commit

- Author - [07pepa](https://github.com/07pepa)
- PR

### Drop support for python 3.7

- Author - [07pepa](https://github.com/07pepa)
- PR

### Add missing type hint to `find_many_in_all` method

- Author - [vasuman](https://github.com/vasuman)
- PR

### Add documentdb compatibility to fetch_links

- Author - [whitfin](https://github.com/whitfin)
- PR

### Fix issues caused by #1044

- Author - [07pepa](https://github.com/07pepa)
- PR

### Feat(skip_index): possibility added to skip index actions

- Author - [jorma16](https://github.com/jorma16)
- PR

### Fix pydanticobjectid fields being parsed into str

- Author - [07pepa](https://github.com/07pepa)
- PR

### Modify tests to not raise deprecation warnings

- Author - [07pepa](https://github.com/07pepa)
- PR

### Add python 3.13 and jit into testing

- Author - [07pepa](https://github.com/07pepa)
- PR

### Handle limit and session in .count() method

- Author - [CAPITAINMARVEL](https://github.com/CAPITAINMARVEL)
- PR

[1.28.0]: https://pypi.org/project/beanie/1.28.0

## [1.27.0] - 2024-10-06
### Add tests on all major mongo version - Author - [07pepa](https://github.com/07pepa) - PR ### Fix return type from document update - Author - [CAPITAINMARVEL](https://github.com/CAPITAINMARVEL) - PR ### Fix expression type hint not allowing some type https://github.com/beanieodm/beanie/issues/1020 - Author - [CAPITAINMARVEL](https://github.com/CAPITAINMARVEL) - PR ### Fix type hint using pymongo client session instead of motor client session - Author - [CAPITAINMARVEL](https://github.com/CAPITAINMARVEL) - PR ### Fix logical operator typing #1000 - Author - [janas-adam](https://github.com/janas-adam) - PR ### Use session in document insert - Author - [andraghetti](https://github.com/andraghetti) - PR ### Use sequence instead of list in init_beanie - Author - [07pepa](https://github.com/07pepa) - PR ### Replace deprecated datetime.utcnow with datetime.now - Author - [adeelsohailahmed](https://github.com/adeelsohailahmed) - PR ### Fix uniondoc type hint missing in init_beanie and on findinterface - Author - [CAPITAINMARVEL](https://github.com/CAPITAINMARVEL) - PR ### Add test to ensure dict with enum keys are encoded properly - Author - [adeelsohailahmed](https://github.com/adeelsohailahmed) - PR ### Project publishing instruction + changelog generation script - Author - [roman-right](https://github.com/roman-right) - PR ### Revert project publishing gh action - Author - [roman-right](https://github.com/roman-right) - PR ### Extend motor option to beanie - Author - [Dudesons](https://github.com/Dudesons) - PR ### Fix regex storing - Author - [07pepa](https://github.com/07pepa) - PR ### Remove links to ko-fi from the project - Author - [roman-right](https://github.com/roman-right) - PR ### Fix typo in source code comment in inheritance.md - Author - [fnogatz](https://github.com/fnogatz) - PR ### Fix gh action to grant permissions and use tags - Author - [roman-right](https://github.com/roman-right) - PR ### Fix: example of find by id and link to finding-documents - Author - [fredowashere](https://github.com/fredowashere) - PR ### Fix incorrect type serialization when dumping to python - Author - [07pepa](https://github.com/07pepa) - PR ### Use ruff format instead of black - Author - [roman-right](https://github.com/roman-right) - PR ### Gh action: set new version and publish on push - Author - [roman-right](https://github.com/roman-right) - PR ### Feature / fix: allow settings to be inherited and extended (fixes #644) - Author - [dotKokott](https://github.com/dotKokott) - PR ### Fix: issue #951 - Author - [IterableTrucks](https://github.com/IterableTrucks) - PR ### Allow unordered parameter on bulkwriter - Author - [thiagosalvatore](https://github.com/thiagosalvatore) - PR ### Fix: set default value in findinterface._inheritance_inited to avoid … - Author - [Robert-Nogueira](https://github.com/Robert-Nogueira) - PR ### Fix example in multi-model.md - Author - [gianpaj](https://github.com/gianpaj) - PR ### Add missing type signature to `basefindcomparisonoperator` constructor - Author - [aaronted009](https://github.com/aaronted009) - PR ### Removed calls to function causing deprecation warning where possible - Author - [07pepa](https://github.com/07pepa) - PR ### Update migrations.md - Author - [marwan-alloreview](https://github.com/marwan-alloreview) - PR [1.27.0]: https://pypi.org/project/beanie/1.27.0 ## [1.26.0] - 2024-05-01 ### Feature: soft delete - Author - [Ali Moradi](https://github.com/alm0ra) - PR ### Update deprecated call of general_plain_validator_function (#676) - Author - 
[dslemusp](https://github.com/dslemusp) - PR ### Annotate decorators that wrap `document` methods (#679) - Author - [Maxim](https://github.com/bedlamzd) - PR ### Update relations docs to indicate that backlinks are virtual. - Author - [Josh Borrow](https://github.com/JBorrow) - PR ### Docs: fix typo (#869) - Author - [Valentin Oliver Loftsson](https://github.com/valentinoli) - PR ### Add possibility of leveraging enum in find query - Author - [Danil](https://github.com/damikhai) - PR ### Handle typeerror in validator of pydanticobjectid - Author - [Christian Grotheer](https://github.com/grthr) - PR [1.26.0]: https://pypi.org/project/beanie/1.26.0 ## [1.25.0] - 2024-01-24 ### Encode Date Objects - Author - [George Sakkis](https://github.com/gsakkis) - PR ### Fix: Findinterface Type-Hints Break On View Models - Author - [Guy Tsitsiashvili](https://github.com/GuyGooL5) - PR ### Fix: Count With Text Queries And Links - Author - [Benjamin Earle](https://github.com/MrEarle) - PR ### Update Migration Command To Enable/Disable Transactions - Author - [Mahmoud Mabrouk](https://github.com/mmabrouk) - PR ### Sync Method - Author - [Roman Right](https://github.com/roman-right) - PR ### Limit Nesting Level Of Linked Documents *WARNING: This is a breaking change. Please, read [the docs](https://beanie-odm.dev/tutorial/defining-a-document/#nested-documents-depth) before updating.* - Author - [Roman Right](https://github.com/roman-right) - PR [1.25.0]: https://pypi.org/project/beanie/1.25.0 ## [1.24.0] - 2023-12-24 ### Exclude revision_id From The get_changes Method - Author - [Roman Right](https://github.com/roman-right) - PR ### Add Support For Custom Bucket Fields In Time Series - Author - [Lucas Hardt](https://github.com/Luc1412) - PR ### Add Bson Maxkey And Minkey - Author - [Noah Witt](https://github.com/noah-witt) - PR ### Update Model During Save Validation - Author - [Roman Right](https://github.com/roman-right) - PR ### Fix init_beanie document_models Type Hint - Author - [Capi Etheriel](https://github.com/barraponto) - PR ### Fix Encoding Keys In `Mapping` Branch Of `Encoder` - Author - [Rubikoid](https://github.com/Rubikoid) - PR ### Improve Write Performances - Author - [Thibault Djaballah](https://github.com/tdjaballah) - PR ### Doc Update: Queue - Author - [Roman Right](https://github.com/roman-right) - PR ### Tests For Indexed Fields - Author - [Roman Right](https://github.com/roman-right) - PR ### Rework Revision - Author - [Roman Right](https://github.com/roman-right) - PR ### Add Missing Type Signature To `Document` Constructor - Author - [None](https://github.com/johnthagen) - PR [1.24.0]: https://pypi.org/project/beanie/1.24.0 ## [1.23.6] - 2023-11-12 ### Fix Multiprocessing Mode - Author - [Roman Right](https://github.com/roman-right) - PR [1.23.6]: https://pypi.org/project/beanie/1.23.6 ## [1.23.5] - 2023-11-12 ### Multiprocessing Mode For Init - Author - [Roman Right](https://github.com/roman-right) - PR [1.23.5]: https://pypi.org/project/beanie/1.23.5 ## [1.23.4] - 2023-11-12 ### Args For `get_model_dump` - Author - [Roman Right](https://github.com/roman-right) - PR [1.23.4]: https://pypi.org/project/beanie/1.23.4 ## [1.23.3] - 2023-11-08 ### Fix Id Notation - Author - [Roman Right](https://github.com/roman-right) - PR [1.23.3]: https://pypi.org/project/beanie/1.23.3 ## [1.23.2] - 2023-11-08 ### Fix Aggregations With Text Queries - Author - [Benjamin Earle](https://github.com/MrEarle) - PR ### Handle Annotated Indexes - Author - [Benjamin Earle](https://github.com/MrEarle) - PR 
### Fix Docstring - Author - [Andrew Grinevich](https://github.com/Derfirm) - PR ### Build Aggregation Pipeline From Find Query Without Fetch - Author - [Roman Right](https://github.com/roman-right) - PR [1.23.2]: https://pypi.org/project/beanie/1.23.2 ## [1.23.1] - 2023-10-23 ### Fix: Issue #631 - Author - [IterableTrucks](https://github.com/IterableTrucks) - PR ### Replace Custom 'Hidden=True' Field Attribute With Builtin 'Exclude=True' - Author - [George Sakkis](https://github.com/gsakkis) - PR ### Add Support For Indexed Custom Pydantic Fields - Author - [Adam Asay](https://github.com/aasay) - PR [1.23.1]: https://pypi.org/project/beanie/1.23.0 ## [1.23.0] - 2023-10-15 ### Refactor Encoder - Author - [George Sakkis](https://github.com/gsakkis) - PR ### Preserve Sort/Skip/Limit For Aggregations - Author - [George Sakkis](https://github.com/gsakkis) - PR ### Update Pre-Commit Hooks - Author - [SADIK KUZU](https://github.com/sadikkuzu) - PR ### Fixed Link Validation - Author - [Evgeniy Goncharuck](https://github.com/iterlace) - PR ### Fix: pydantic_core._pydantic_core.Url object is not iterable - Author - [Tomohiro Hiratsuka](https://github.com/tomohirohiratsuka) - PR ### Simplify And Fix DecimalAnnotation - Author - [George Sakkis](https://github.com/gsakkis) - PR - Issues: - [[BUG] Validation Error on parsing retrieved document's BSON Decimal128 field](https://github.com/roman-right/beanie/issues/691) ### Simplify BsonBinary - Author - [George Sakkis](https://github.com/gsakkis) - PR ### Minor Fixes - Author - [Roman Right](https://github.com/roman-right) - PR ### Replace Encoder With get_dict In The replace_one Method - Author - [Roman Right](https://github.com/roman-right) - PR [1.23.0]: https://pypi.org/project/beanie/1.23.0 ## [1.22.6] - 2023-09-16 ### Update Precommit Hooks & CI - Author - [George Sakkis](https://github.com/gsakkis) - PR [1.22.6]: https://pypi.org/project/beanie/1.22.6 ## [1.22.5] - 2023-09-13 ### Fix: Unify Methods for Retrieving Field's Extra Parameters During Backlink Processing - Author - [Roman Right](https://github.com/roman-right) - PR - Issues: - [[BUG] Optional[Backlink]](https://github.com/roman-right/beanie/issues/702) [1.22.5]: https://pypi.org/project/beanie/1.22.5 ## [1.22.4] - 2023-09-13 ### Fix Numpy Array Incompatability - Author - [Alex Lau](https://github.com/riven314) - PR [1.22.4]: https://pypi.org/project/beanie/1.22.4 ## [1.22.3] - 2023-09-13 ### Refactor: Simplify UpdateMany And UpdateOne __await__ Method - Author - [Muzaffer Cikay](https://github.com/cikay) - PR [1.22.3]: https://pypi.org/project/beanie/1.22.3 ## [1.22.2] - 2023-09-13 ### Fix get_field_type & Generalize extract_id_class - Author - [George Sakkis](https://github.com/gsakkis) - PR [1.22.2]: https://pypi.org/project/beanie/1.22.2 ## [1.22.1] - 2023-09-13 ### Fix | list_collection_names Requires Unnecessary Privileges - Author - [Marina](https://github.com/marinashe) - PR - Issues: - [[BUG] Can't use a View if the user doesn't have full read privileges to all collections](https://github.com/roman-right/beanie/issues/680) [1.22.1]: https://pypi.org/project/beanie/1.22.1 ## [1.22.0] - 2023-09-13 ### Fix | August 2023 - Author - [Roman Right](https://github.com/roman-right) - PR - Issues: - [[BUG] Issue with `List[Link[Type]]` when `fetch_all_links` is called](https://github.com/roman-right/beanie/issues/576) - [Loosen type requirement for `insert_many()`?](https://github.com/roman-right/beanie/issues/591) - [[BUG] Updating documents with a frozen BaseModel as field raises 
TypeError](https://github.com/roman-right/beanie/issues/599) - [[BUG] Not operator cant be on top level](https://github.com/roman-right/beanie/issues/600) - [[BUG] `Text` query doesn't work with `fetch_links=True`](https://github.com/roman-right/beanie/issues/606) - [[BUG] List type fields in updated model record do not get update.](https://github.com/roman-right/beanie/issues/629) - [[BUG] Undefined behavior when chaining update methods](https://github.com/roman-right/beanie/issues/646) - [[BUG] Revision Id is in Responsemodel](https://github.com/roman-right/beanie/issues/648) - [[BUG] Custom types like bson.Binary require `__get_pydantic_core_schema__`](https://github.com/roman-right/beanie/issues/651) - [[BUG] `validate_on_save` doesn't work with `Document.save()`](https://github.com/roman-right/beanie/issues/664) - [[BUG] Beanie persists `root` field](https://github.com/roman-right/beanie/issues/668) - [Beanie 1.21 still triggers many deprecation warnings with Pydantic v2](https://github.com/roman-right/beanie/issues/676) - [[BUG] TypeError: expected 1 argument, got 0 when beanie.Document has method wrapped in pydantic.validate_call](https://github.com/roman-right/beanie/issues/695) [1.22.0]: https://pypi.org/project/beanie/1.22.0 ## [1.21.0] - 2023-08-03 ### Pydantic bump | final - Author - [Roman Right](https://github.com/roman-right) - PR [1.21.0]: https://pypi.org/project/beanie/1.21.0 ## [1.21.0b1] - 2023-07-21 ### Bump pydantic | beta 1 - Author - [Roman Right](https://github.com/roman-right) - PR [1.21.0b1]: https://pypi.org/project/beanie/1.21.0b1 ## [1.21.0b0] - 2023-07-19 ### Bump pydantic | beta 0 - Author - [Roman Right](https://github.com/roman-right) - PR [1.21.0b0]: https://pypi.org/project/beanie/1.21.0b0 ## [1.20.0] - 2023-06-30 ### Docs: queue battery - Author - [Roman Right](https://github.com/roman-right) - PR [1.20.0]: https://pypi.org/project/beanie/1.20.0 ## [1.20.0b1] - 2023-06-09 ### Feature: custom init classmethod - Author - [Roman Right](https://github.com/roman-right) - PR [1.20.0b1]: https://pypi.org/project/beanie/1.20.0b1 ## [1.20.0b0] - 2023-06-09 ### Feature: optional batteries - Author - [Roman Right](https://github.com/roman-right) - PR [1.20.0b0]: https://pypi.org/project/beanie/1.20.0b0 ## [1.19.2] - 2023-05-25 ### Fix: issues opened before 2023.05 - Author - [Roman Right](https://github.com/roman-right) - PR [1.19.2]: https://pypi.org/project/beanie/1.19.2 ## [1.19.1] - 2023-05-22 ### Fix: update forward refs during nested links check - Author - [Roman Right](https://github.com/roman-right) - PR ### Fix: session in iterative transactions - Author - [Roman Right](https://github.com/roman-right) - PR [1.19.1]: https://pypi.org/project/beanie/1.19.1 ## [1.19.0] - 2023-05-05 ### Feat/back refs - Author - [Roman Right](https://github.com/roman-right) - PR ### Feat: add box operator - Author - [Anton Kriese](https://github.com/akriese) - PR ### Fix table of contents not showing all classes - Author - [Kai Schniedergers](https://github.com/kschniedergers) - PR ### Return bulkwriteresult response from motor - Author - [divyam234](https://github.com/divyam234) - PR ### Fix typing in 'document.get(...)' - Author - [Yallxe](https://github.com/yallxe) - PR ### Init view's cache - Author - [Antonio Eugenio Burriel](https://github.com/aeburriel) - PR ### Kwargs arguments for elemmatch operator - Author - [Roman Right](https://github.com/roman-right) - PR [1.19.0]: https://pypi.org/project/beanie/1.19.0 ## [1.18.1] - 2023-05-04 ### Keep nulls config - Author - 
[Roman Right](https://github.com/roman-right)
- PR

[1.18.1]: https://pypi.org/project/beanie/1.18.1

## [1.18.0] - 2023-03-31
### Prevent the models returned from find_all from being modified during iteration
- Author - [Harris Tsim](https://github.com/harris)
- PR

### Allow changing class_id and using name settings in UnionDoc
- Author - [설원준(Wonjoon Seol)/Dispatch squad](https://github.com/wonjoonSeol-WS)
- PR

### Fix: make `revision_id` not show in schema
- Author - [Ivan GJ](https://github.com/ivan-gj)
- PR

### Fix: added re.Pattern support to common encoder suite
- Author - [Ilia](https://github.com/Abashinos)
- PR

### Fix other issues
- Author - [Roman Right](https://github.com/roman-right)
- PR

[1.18.0]: https://pypi.org/project/beanie/1.18.0

## [1.18.0b1] - 2023-02-09
### Fix
- Don't create state on init for docs with custom id types
### Implementation
- Author - [Roman Right](https://github.com/roman-right)
- PR

[1.18.0b1]: https://pypi.org/project/beanie/1.18.0b1

## [1.18.0b0] - 2023-01-30
### Feature
- feat: convert updates to be atomic operations
### Implementation
- Author - [Roman Right](https://github.com/roman-right)
- PR

[1.18.0b0]: https://pypi.org/project/beanie/1.18.0b0

## [1.17.0] - 2023-01-19
### Feature
- Add links to views
### Implementation
- Author - [Sebastian Battle](https://github.com/sabattle)
- PR

[1.17.0]: https://pypi.org/project/beanie/1.17.0

## [1.16.8] - 2023-01-05
### Fix
- Already inserted Links will throw DuplicateKeyError on insert of the wrapping doc
### Implementation
- Author - [noaHson86](https://github.com/noaHson86)
- PR

## [1.16.7] - 2023-01-03
### Fix
- Sorting with multiple arguments
### Implementation
- PR

## [1.16.6] - 2022-12-27
### Feature
- Previous saved state
### Implementation
- Author - [Paul Renvoisé](https://github.com/paul-finary)
- PR

## [1.16.5] - 2022-12-27
### Deprecation
- Raises an exception if the `Collection` inner class is used, as it is no longer supported
### Backported to
- [1.15.5]
- [1.14.1]
### Implementation
- PR

## [1.16.4] - 2022-12-20
### Fix
- [[BUG] Initiating self-referencing documents with nested links breaks due to uncaught request loop](https://github.com/roman-right/beanie/issues/449)
- Nested lookups for direct links
### Implementation
- PR

## [1.16.3] - 2022-12-19
### Fix
- [[BUG] revision_id field saved in MongoDB using save()/replace() on an existing document even if use_revision is False](https://github.com/roman-right/beanie/issues/420)
### Implementation
- PR

## [1.16.2] - 2022-12-19
### Fix
- [[BUG] find_one projection link](https://github.com/roman-right/beanie/issues/383)
- [[BUG]: Link fields interference/contamination](https://github.com/roman-right/beanie/issues/433)
- [[BUG]: ElemMatch on Document property of Type List[Link] fails with IndexError in relations.py convert_ids()](https://github.com/roman-right/beanie/issues/439)
### Implementation
- PR

## [1.16.1] - 2022-12-17
### Feature
- Remove yarl dependency
### Implementation
- PR

## [1.16.0] - 2022-12-17
### Feature
- Support for fetching deep-nested Links
### Implementation
- Author - [Courtney Sanders](https://github.com/csanders-rga)
- PR

## [1.16.0b3] - 2022-11-28
### Feature
- Lazy parsing for find many
### Implementation
- PR

## [1.15.4] - 2022-11-18
### Fix
- Wrong inheritance behavior with non-rooted documents
### Implementation
- ISSUE

## [1.15.3] - 2022-11-10
### Fix
- Deepcopy dict before encoding it, to preserve the original
### Implementation
- ISSUE

## [1.15.2] - 2022-11-09
### Fix
- Use Settings inner class in migrations internals
- Fix inheritance: mark root docs with `_inheritance_inited = True`
### Implementation
- PR

## [1.15.1] - 2022-11-07
### Fix
- Pass pymongo kwargs to the bulk writer
### Implementation
- PR

## [1.15.0] - 2022-11-05
### Feature
- The sync version was moved to a separate project
### Breaking change
- The sync version is no longer included here. Please use [Bunnet](https://github.com/roman-right/bunnet) instead
### Implementation
- PR

## [1.14.0] - 2022-11-04
### Feature
- Multi-model behavior for inherited documents
### Breaking change
- The inner class `Collection` is no longer supported. Please use `Settings` instead.
### Implementation
- Author - [Vitaliy Ivanov](https://github.com/Vitalium)
- PR

## [1.13.1] - 2022-10-26
### Fix
- Remove redundant async things from sync interface
### Implementation
- ISSUE

## [1.13.0] - 2022-10-22
### Improvement
- Sync interface
### Implementation
- PR

## [1.12.1] - 2022-10-17
### Improvement
- Clone interface for query objects
### Implementation
- PR

## [1.12.0] - 2022-10-06
### Improvement
- Optional list of links field
### Implementation
- Author - [Alex Deng](https://github.com/rga-alex-deng)
- PR

## [1.11.12] - 2022-09-28
### Improvement
- Change before_event, after_event signature to be more pythonic
### Implementation
- DISCUSSION

## [1.11.11] - 2022-09-26
### Fix
- Remove prints
### Implementation
- ISSUE

## [1.11.10] - 2022-09-20
### Improvement
- Adding Update Action
### Implementation
- Author - [schwannden](https://github.com/schwannden)
- PR

## [1.11.9] - 2022-08-19
### Fix
- Move set state and swap revision to init to avoid problems with subdocs
- Issue
- Issue

## [1.11.8] - 2022-08-17
### Improvement
- Skip sync parameter for instance updates

## [1.11.7] - 2022-08-02
### Improvement
- Decimal128 encoding
### Implementation
- Author - [Teslim Olunlade](https://github.com/ogtega)
- PR

## [1.11.6] - 2022-06-24
### Fix
- Roll back projections fix, as it was valid

## [1.11.5] - 2022-06-24
### Fix
- Projection fix for aggregations

## [1.11.4] - 2022-06-13
### Improvement
- Link as FastAPI output

## [1.11.3] - 2022-06-10
### Improvement
- Motor3 support
### Implementation
- ISSUE

## [1.11.2] - 2022-06-06
### Fix
- Don't inherit excludes
### Implementation
- PR

## [1.11.1] - 2022-05-31
### Features
- Allow extra
- Distinct
### Implementation
- Author - [Robert Rosca](https://github.com/RobertRosca)
- PR
- Author - [Никита](https://github.com/gruianichita)
- PR

## [1.11.0] - 2022-05-06
### Features
- Multi-model mode
- Views

## [1.10.9] - 2022-05-06
### Improvement
- pymongo_kwargs for insert many

## [1.10.8] - 2022-04-13
### Fix
- Match step after limit step
### Implementation
- ISSUE

## [1.10.7] - 2022-04-12
### Fix
- Empty update fails when revision id is turned on
### Implementation
- ISSUE

## [1.10.6] - 2022-04-12
### Improvement
- Single syntax for find by id
### Implementation
- PR

## [1.10.5] - 2022-04-11
### Improvement
- Avoid creating redundant query object
### Implementation
- Author - [amos402](https://github.com/amos402)
- PR

## [1.10.4] - 2022-03-24
### Improvement
- Allow custom MigrationNode for build
### Implementation
- Author - [amos402](https://github.com/amos402)
- PR

## [1.10.3] - 2022-02-29
### Improvement
- Delete action
### Implementation
- ISSUE

## [1.10.2] - 2022-02-28
### Improvement
- Bulk writer for upsert
### Implementation
- ISSUE

## [1.10.1] - 2022-02-24
### Improvement
- Skip actions
### Implementation
- Author - [Paul Renvoisé](https://github.com/paul-finary)
- PR

## [1.10.0] - 2022-02-24
### Improvement
- Timeseries collections support
- Pymongo kwargs for find, aggregate, update and delete operations
### Implementation
- PR

## [1.9.2] - 2022-02-22
### Improvement
- First or None for find queries
### Implementation
- ISSUE

## [1.9.1] - 2022-02-11
### Improvement
- Add support for py.typed file
### Implementation
- Author - [Nicholas Smith](https://github.com/nzsmith1)
- PR

## [1.9.0] - 2022-02-11
### Breaking Change
- The `allow_index_dropping` parameter now defaults to `False`. Indexes will not be dropped by default
### Implementation
- Author - [Nicholas Smith](https://github.com/nzsmith1)
- PR

## [1.8.13] - 2022-02-10
### Improvement
- Add state_management_replace_objects setting
### Implementation
- Author - [Paul Renvoisé](https://github.com/paul-finary)
- PR

## [1.8.12] - 2022-01-06
### Improvement
- Add exclude_hidden to dict() to be able to keep hidden fields hidden when the exclude parameter is set
### Implementation
- Author - [Yallxe](https://github.com/yallxe)
- PR

## [1.8.11] - 2021-12-30
### Improvement
- Only safe pydantic versions are allowed. https://github.com/samuelcolvin/pydantic/security/advisories/GHSA-5jqp-qgf6-3pvh

## [1.8.10] - 2021-12-29
### Fix
- Revision didn't swap the previous revision id and the current one on getting objects from db

## [1.8.9] - 2021-12-23
### Improvement
- Deep search of updates for the `save_changes()` method
### Kudos
- Thanks to [Tigran Khazhakyan](https://github.com/tigrankh) for the deep search algorithm here

## [1.8.8] - 2021-12-17
### Added
- Search by linked documents' fields (for pre-fetching search only)

## [1.8.7] - 2021-12-12
### Fixed
- Binary encoder issue

## [1.8.6] - 2021-12-14
### Improved
- Encoder

## [1.8.5] - 2021-12-09
### Added
- `Optional[Link[Sample]]` is allowed field type syntax now

## [1.8.4] - 2021-12-12
### Fixed
- DateTime bson type

## [1.8.3] - 2021-12-07
### Added
- Subclasses inherit event-based actions

## [1.8.2] - 2021-12-04
### Fixed
- Encoder priority

## [1.8.1] - 2021-11-30
### Added
- Key-based call of subfields in the query builders

## [1.8.0] - 2021-11-30
### Added
- Relations
### Implementation
- PR

## [1.7.2] - 2021-11-03
### Fixed
- `revision_id` is hidden in the api schema
### Implementation
- ISSUE

## [1.7.1] - 2021-11-02
### Fixed
- `revision_id` is hidden in the outputs
### Implementation
- ISSUE

## [1.7.0] - 2021-10-12
### Added
- Cache
- Bulk write
- `exists` method for find queries
### Implementation
- PR
- PR
- PR

## [1.6.1] - 2021-10-06
### Update
- Customization support. It is possible to change the query builder classes used in classes inherited from the Document class
### Implementation
- PR

## [1.6.0] - 2021-09-30
### Update
- Validate on save
### Implementation
- PR

## [1.5.1] - 2021-09-27
### Update
- Simplification for the init_beanie function
### Implementation
- PR

## [1.5.0] - 2021-09-27
### Update
- Custom encoders to configure how specific types should be represented in the database
### Implementation
- Author - [Nazar Vovk](https://github.com/Vovcharaa)
- PR

## [1.4.0] - 2021-09-13
### Added
- Document state management
### Implementation
- PR

## [1.3.0] - 2021-09-08
### Added
- Active record pattern
### Implementation
- Issue

## [1.2.8] - 2021-09-01
### Fix
- Delete's return annotation
### Implementation
- PR

## [1.2.7] - 2021-09-01
### Update
- Annotations for update and delete
### Implementation
- Author - [Anthony Shaw](https://github.com/tonybaloney)
- PR

## [1.2.6] - 2021-08-25
### Fixed
- MongoDB 5.0 in GH actions
### Implementation
- PR

## [1.2.5] - 2021-07-21
### Fixed
- Indexed fields work with aliases now
### Implementation
- Author - [Kira](https://github.com/KiraPC)
- Issue

## [1.2.4] - 2021-07-13
### Fixed
- Aggregation preset method outputs
### Implementation
- Issue

## [1.2.3] - 2021-07-08
### Fixed
- Pyright issues
### Added
- Doc publishing on merge to the main branch
### Implementation
- Issue
- Issue

## [1.2.2] - 2021-07-06
### Fixed
- Bool types in search criteria
### Implementation
- Issue

## [1.2.1] - 2021-07-06
### Fixed
- Document, FindQuery, Aggregation typings
### Implementation
- Author - [Kira](https://github.com/KiraPC)
- Issue

## [1.2.0] - 2021-06-25
### Added
- Upsert
### Implementation
- Issue

## [1.1.6] - 2021-06-21
### Fix
- Pydantic dependency version ^1.5
### Implementation
- PR

## [1.1.5] - 2021-06-17
### Fix
- Convert document id to the right type in the `get()` method
### Implementation
- ISSUE

## [1.1.4] - 2021-06-15
### Changed
- Stricter flake8 and fixing resulting errors
### Implementation
- Author - [Joran van Apeldoorn](https://github.com/jorants)
- PR

## [1.1.3] - 2021-06-15
### Added
- MyPy to pre-commit
### Fixed
- Typing errors
### Implementation
- Author - [Joran van Apeldoorn](https://github.com/jorants)
- PR

## [1.1.2] - 2021-06-14
### Changed
- Skip migration test when transactions are not available
### Implementation
- Author - [Joran van Apeldoorn](https://github.com/jorants)
- PR

## [1.1.1] - 2021-06-14
### Added
- Save method
### Implementation
- Author - [Joran van Apeldoorn](https://github.com/jorants)
- PR

## [1.1.0] - 2021-06-02
### Added
- Custom id types.
### Implementation
- Issue

## [1.0.6] - 2021-06-01
### Fixed
- Typo in the module name.
### Implementation
- Author - [Joran van Apeldoorn](https://github.com/jorants)
- PR

## [1.0.5] - 2021-05-25
### Fixed
- Typing.
### Implementation
- PR

## [1.0.4] - 2021-05-18
### Fixed
- `aggregation_model` -> `projection_model`
### Implementation
- PR

## [1.0.3] - 2021-05-16
### Added
- Index kwargs in the Indexed field
### Implementation
- Author - [Michael duPont](https://github.com/flyinactor91)
- PR

## [1.0.2] - 2021-05-16
### Fixed
- Deprecated import
### Implementation
- Author - [Oliver Andrich](https://github.com/oliverandrich)
- PR

## [1.0.1] - 2021-05-14
### Fixed
- `Document` self annotation
### Implementation
- Issue

## [1.0.0] - 2021-05-10
### Added
- QueryBuilder
### Changed
- Document class was reworked. [Documentation](https://roman-right.github.io/beanie/api/document/)
### Implementation
- PR

## [0.4.3] - 2021-04-25
### Fixed
- PydanticObjectId openapi generation

## [0.4.2] - 2021-04-20
### Added
- Python ^3.6.1 support.
### Fixed
- Documents init order in migrations

## [0.4.1] - 2021-04-19
### Added
- Projections support to reduce database load
### Implementation
- Author - [Nicholas Smith](https://github.com/nzsmith1)
- Issue

## [0.4.0] - 2021-04-18
### Added
- [ODM Documentation](https://roman-right.github.io/beanie/documentation/odm/)
### Changed
- [Documentation](https://roman-right.github.io/beanie/)

## [0.4.0b1] - 2021-04-14
### Added
- Migrations
- `inspect_collection` Document method
- `count_documents` Document method
### Changed
- Session can be provided to most of the Document methods
### Removed
- Internal `DocumentMeta` class.

## [0.3.4] - 2021-04-09
### Changed
- `Indexed(...)` field supports index types.
### Implementation
- Author - [Joran van Apeldoorn](https://github.com/jorants)

## [0.3.3] - 2021-04-09
### Added
- Simple indexes via type hints.
### Implementation
- Author - [Joran van Apeldoorn](https://github.com/jorants)

## [0.3.2] - 2021-03-25
### Added
- `init_beanie` also supports lists of strings with model paths as the `document_models` parameter.
### Implementation
- Author - [Mohamed Nesredin](https://github.com/Mohamed-Kaizen)

## [0.3.1] - 2021-03-21
### Added
- `skip`, `limit` and `sort` parameters in the `find_many` and `find_all` methods. [Documentation](https://roman-right.github.io/beanie/#find-many-documents)

## [0.3.0] - 2021-03-19
### Added
- `Collection` - internal class of the `Document` to set up additional properties.
- Indexes support.
### Changed
- **Breaking change:** `init_beanie` is an async function now.
### Deprecated
- Internal `DocumentMeta` class. Will be removed in **0.4.0**.
[0.3.0]: https://pypi.org/project/beanie/0.3.0 [0.3.1]: https://pypi.org/project/beanie/0.3.1 [0.3.2]: https://pypi.org/project/beanie/0.3.2 [0.3.3]: https://pypi.org/project/beanie/0.3.3 [0.3.4]: https://pypi.org/project/beanie/0.3.4 [0.4.0b1]: https://pypi.org/project/beanie/0.4.0b1 [0.4.0]: https://pypi.org/project/beanie/0.4.0 [0.4.1]: https://pypi.org/project/beanie/0.4.1 [0.4.2]: https://pypi.org/project/beanie/0.4.2 [0.4.3]: https://pypi.org/project/beanie/0.4.3 [1.0.0]: https://pypi.org/project/beanie/1.0.0 [1.0.1]: https://pypi.org/project/beanie/1.0.1 [1.0.2]: https://pypi.org/project/beanie/1.0.2 [1.0.3]: https://pypi.org/project/beanie/1.0.3 [1.0.4]: https://pypi.org/project/beanie/1.0.4 [1.0.5]: https://pypi.org/project/beanie/1.0.5 [1.0.6]: https://pypi.org/project/beanie/1.0.6 [1.1.0]: https://pypi.org/project/beanie/1.1.0 [1.1.1]: https://pypi.org/project/beanie/1.1.1 [1.1.2]: https://pypi.org/project/beanie/1.1.2 [1.1.3]: https://pypi.org/project/beanie/1.1.3 [1.1.4]: https://pypi.org/project/beanie/1.1.4 [1.1.5]: https://pypi.org/project/beanie/1.1.5 [1.1.6]: https://pypi.org/project/beanie/1.1.6 [1.2.0]: https://pypi.org/project/beanie/1.2.0 [1.2.1]: https://pypi.org/project/beanie/1.2.1 [1.2.2]: https://pypi.org/project/beanie/1.2.2 [1.2.3]: https://pypi.org/project/beanie/1.2.3 [1.2.4]: https://pypi.org/project/beanie/1.2.4 [1.2.5]: https://pypi.org/project/beanie/1.2.5 [1.2.6]: https://pypi.org/project/beanie/1.2.6 [1.2.7]: https://pypi.org/project/beanie/1.2.7 [1.2.8]: https://pypi.org/project/beanie/1.2.8 [1.3.0]: https://pypi.org/project/beanie/1.3.0 [1.4.0]: https://pypi.org/project/beanie/1.4.0 [1.5.0]: https://pypi.org/project/beanie/1.5.0 [1.5.1]: https://pypi.org/project/beanie/1.5.1 [1.6.0]: https://pypi.org/project/beanie/1.6.0 [1.6.1]: https://pypi.org/project/beanie/1.6.1 [1.7.0]: https://pypi.org/project/beanie/1.7.0 [1.7.1]: https://pypi.org/project/beanie/1.7.1 [1.7.2]: https://pypi.org/project/beanie/1.7.2 [1.8.0]: https://pypi.org/project/beanie/1.8.0 [1.8.1]: https://pypi.org/project/beanie/1.8.1 [1.8.2]: https://pypi.org/project/beanie/1.8.2 [1.8.3]: https://pypi.org/project/beanie/1.8.3 [1.8.4]: https://pypi.org/project/beanie/1.8.4 [1.8.5]: https://pypi.org/project/beanie/1.8.5 [1.8.6]: https://pypi.org/project/beanie/1.8.6 [1.8.7]: https://pypi.org/project/beanie/1.8.7 [1.8.8]: https://pypi.org/project/beanie/1.8.8 [1.8.9]: https://pypi.org/project/beanie/1.8.9 [1.8.10]: https://pypi.org/project/beanie/1.8.10 [1.8.11]: https://pypi.org/project/beanie/1.8.11 [1.8.12]: https://pypi.org/project/beanie/1.8.12 [1.8.13]: https://pypi.org/project/beanie/1.8.13 [1.9.0]: https://pypi.org/project/beanie/1.9.0 [1.9.1]: https://pypi.org/project/beanie/1.9.1 [1.9.2]: https://pypi.org/project/beanie/1.9.2 [1.10.0]: https://pypi.org/project/beanie/1.10.0 [1.10.1]: https://pypi.org/project/beanie/1.10.1 [1.10.2]: https://pypi.org/project/beanie/1.10.2 [1.10.3]: https://pypi.org/project/beanie/1.10.3 [1.10.4]: https://pypi.org/project/beanie/1.10.4 [1.10.5]: https://pypi.org/project/beanie/1.10.5 [1.10.6]: https://pypi.org/project/beanie/1.10.6 [1.10.7]: https://pypi.org/project/beanie/1.10.7 [1.10.8]: https://pypi.org/project/beanie/1.10.8 [1.10.9]: https://pypi.org/project/beanie/1.10.9 [1.11.0]: https://pypi.org/project/beanie/1.11.0 [1.11.1]: https://pypi.org/project/beanie/1.11.1 [1.11.2]: https://pypi.org/project/beanie/1.11.2 [1.11.3]: https://pypi.org/project/beanie/1.11.3 [1.11.4]: https://pypi.org/project/beanie/1.11.4 [1.11.5]: 
https://pypi.org/project/beanie/1.11.5 [1.11.6]: https://pypi.org/project/beanie/1.11.6 [1.11.7]: https://pypi.org/project/beanie/1.11.7 [1.11.8]: https://pypi.org/project/beanie/1.11.8 [1.11.9]: https://pypi.org/project/beanie/1.11.9 [1.11.10]: https://pypi.org/project/beanie/1.11.10 [1.11.11]: https://pypi.org/project/beanie/1.11.11 [1.11.12]: https://pypi.org/project/beanie/1.11.12 [1.12.0]: https://pypi.org/project/beanie/1.12.0 [1.12.1]: https://pypi.org/project/beanie/1.12.1 [1.13.0]: https://pypi.org/project/beanie/1.13.0 [1.13.1]: https://pypi.org/project/beanie/1.13.1 [1.14.0]: https://pypi.org/project/beanie/1.14.0 [1.15.0]: https://pypi.org/project/beanie/1.15.0 [1.15.1]: https://pypi.org/project/beanie/1.15.1 [1.15.2]: https://pypi.org/project/beanie/1.15.2 [1.15.3]: https://pypi.org/project/beanie/1.15.3 [1.15.4]: https://pypi.org/project/beanie/1.15.4 [1.16.0b3]: https://pypi.org/project/beanie/1.16.0b3 [1.16.0]: https://pypi.org/project/beanie/1.16.0 [1.16.1]: https://pypi.org/project/beanie/1.16.1 [1.16.2]: https://pypi.org/project/beanie/1.16.2 [1.16.3]: https://pypi.org/project/beanie/1.16.3 [1.16.4]: https://pypi.org/project/beanie/1.16.4 [1.16.5]: https://pypi.org/project/beanie/1.16.5 [1.14.1]: https://pypi.org/project/beanie/1.14.1 [1.15.5]: https://pypi.org/project/beanie/1.15.5 [1.16.6]: https://pypi.org/project/beanie/1.16.6 [1.16.7]: https://pypi.org/project/beanie/1.16.7 [1.16.8]: https://pypi.org/project/beanie/1.16.8 python-beanie-1.29.0/docs/code-of-conduct.md000066400000000000000000000121541473701376500206100ustar00rootroot00000000000000# Contributor Covenant Code of Conduct ## Our Pledge We as members, contributors, and leaders pledge to make participation in our community a harassment-free experience for everyone, regardless of age, body size, visible or invisible disability, ethnicity, sex characteristics, gender identity and expression, level of experience, education, socio-economic status, nationality, personal appearance, race, religion, or sexual identity and orientation. We pledge to act and interact in ways that contribute to an open, welcoming, diverse, inclusive, and healthy community. ## Our Standards Examples of behavior that contributes to a positive environment for our community include: * Demonstrating empathy and kindness toward other people * Being respectful of differing opinions, viewpoints, and experiences * Giving and gracefully accepting constructive feedback * Accepting responsibility and apologizing to those affected by our mistakes, and learning from the experience * Focusing on what is best not just for us as individuals, but for the overall community Examples of unacceptable behavior include: * The use of sexualized language or imagery, and sexual attention or advances of any kind * Trolling, insulting or derogatory comments, and personal or political attacks * Public or private harassment * Publishing others' private information, such as a physical or email address, without their explicit permission * Other conduct which could reasonably be considered inappropriate in a professional setting ## Enforcement Responsibilities Community leaders are responsible for clarifying and enforcing our standards of acceptable behavior and will take appropriate and fair corrective action in response to any behavior that they deem inappropriate, threatening, offensive, or harmful. 
Community leaders have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, and will communicate reasons for moderation decisions when appropriate. ## Scope This Code of Conduct applies within all community spaces, and also applies when an individual is officially representing the community in public spaces. Examples of representing our community include using an official e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. ## Enforcement Instances of abusive, harassing, or otherwise unacceptable behavior may be reported to the community leaders responsible for enforcement at roman-right@protonmail.com. All complaints will be reviewed and investigated promptly and fairly. All community leaders are obligated to respect the privacy and security of the reporter of any incident. ## Enforcement Guidelines Community leaders will follow these Community Impact Guidelines in determining the consequences for any action they deem in violation of this Code of Conduct: ### 1. Correction **Community Impact**: Use of inappropriate language or other behavior deemed unprofessional or unwelcome in the community. **Consequence**: A private, written warning from community leaders, providing clarity around the nature of the violation and an explanation of why the behavior was inappropriate. A public apology may be requested. ### 2. Warning **Community Impact**: A violation through a single incident or series of actions. **Consequence**: A warning with consequences for continued behavior. No interaction with the people involved, including unsolicited interaction with those enforcing the Code of Conduct, for a specified period of time. This includes avoiding interactions in community spaces as well as external channels like social media. Violating these terms may lead to a temporary or permanent ban. ### 3. Temporary Ban **Community Impact**: A serious violation of community standards, including sustained inappropriate behavior. **Consequence**: A temporary ban from any sort of interaction or public communication with the community for a specified period of time. No public or private interaction with the people involved, including unsolicited interaction with those enforcing the Code of Conduct, is allowed during this period. Violating these terms may lead to a permanent ban. ### 4. Permanent Ban **Community Impact**: Demonstrating a pattern of violation of community standards, including sustained inappropriate behavior, harassment of an individual, or aggression toward or disparagement of classes of individuals. **Consequence**: A permanent ban from any sort of public interaction within the community. ## Attribution This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 2.0, available at https://www.contributor-covenant.org/version/2/0/code_of_conduct.html. Community Impact Guidelines were inspired by [Mozilla's code of conduct enforcement ladder](https://github.com/mozilla/diversity). [homepage]: https://www.contributor-covenant.org For answers to common questions about this code of conduct, see the FAQ at https://www.contributor-covenant.org/faq. Translations are available at https://www.contributor-covenant.org/translations. 
python-beanie-1.29.0/docs/development.md000066400000000000000000000075751473701376500201740ustar00rootroot00000000000000# Development Hopefully, you have landed here because you would like to help out with the development of Beanie. Whether through adding new features, fixing bugs, or extending documentation, your help is really appreciated! Please read this page carefully. If you have any questions, drop by on [the Discord](https://discord.com/invite/29mMrEBvr4). Also, please read the [Code of Conduct](code-of-conduct.md). ## Setting up the development environment We assume you are familiar with the general forking and pull request workflow for submitting to open-source projects. If not, don't worry, there are plenty of good guides available. Maybe check out [this one](https://www.atlassian.com/git/tutorials/comparing-workflows/forking-workflow). All the dependencies and build configs are set in the `pyproject.toml` file. There are three main dependency sections there: - dependencies: for the dependencies required to run Beanie - test: for the dependencies required to run tests - doc: for the dependencies required to build the documentation And there are other extra dependency sections for Beanie batteries. For example, the `queue` section contains dependencies that extend features of Beanie with a queue. To install all required dependencies, including test dependencies, in a virtual environment, run the following command in the root directory of the Beanie project: ```shell pip install -e .[test] ``` To install dependencies required for building the documentation, run: ```shell pip install -e .[doc] ``` ### Database connection To run tests and use Beanie in general, you will need an accessible MongoDB database. To use migrations, you will need a connection to a Replica Set or Mongos instance. All tests assume that the database is hosted locally on port `27017` and do not require authentication. ## Testing Beanie uses [pytest](https://docs.pytest.org) for unit testing. To ensure the stability of Beanie, each added feature must be tested in a separate unit test, even if it looks like other tests are covering it now. This strategy guarantees that: - All the features will be covered and stay covered. - There is independence from other features and test cases. To run the test suite, make sure that you have MongoDB running and run `pytest`. ## Submitting new code You can submit your changes through a pull request on GitHub. Please take into account the following sections. ### Use pre-commit To ensure code consistency, Beanie uses Black and Ruff through pre-commit. To set it up, run: ```shell pre-commit install ``` This will add the pre-commit command to your git's pre-commit hooks and make sure you never forget to run these. ### Single commit To make the pull request reviewing easier and keep the version tree clean, your pull request should consist of a single commit. It is natural that your branch might contain multiple commits, so you will need to squash these into a single commit. Instructions can be found [here](https://www.internalpointers.com/post/squash-commits-into-one-git) or [here](https://medium.com/@slamflipstrom/a-beginners-guide-to-squashing-commits-with-git-rebase-8185cf6e62ec). ### Add documentation Please write clear documentation for any new functionality you add. Docstrings will be converted to the API documentation, but more human-friendly documentation might also be needed! See the section below. 
## Working on the documentation The documentation is generated using `pydoc-markdown`. To see a preview of any edits you make, you can run: ```shell pydoc-markdown --server ``` and visit the printed address (usually `localhost:8000`) in your browser. Beware, the auto-recompiling might not work for everyone. This will automatically generate the API documentation from the source. All other documentation should be written by hand. The documentation is compiled using `mkdocs` behind the scenes. To change the table of contents or other options, check out `pydoc-markdown.yml`.python-beanie-1.29.0/docs/getting-started.md000066400000000000000000000051011473701376500207360ustar00rootroot00000000000000# Getting started ## Installing beanie You can simply install Beanie from the [PyPI](https://pypi.org/project/beanie/): ### PIP ```shell pip install beanie ``` ### Poetry ```shell poetry add beanie ``` ### Optional dependencies Beanie supports some optional dependencies from Motor (`pip` or `poetry` can be used). GSSAPI authentication requires `gssapi` extra dependency: ```bash pip install "beanie[gssapi]" ``` MONGODB-AWS authentication requires `aws` extra dependency: ```bash pip install "beanie[aws]" ``` Support for mongodb+srv:// URIs requires `srv` extra dependency: ```bash pip install "beanie[srv]" ``` OCSP requires `ocsp` extra dependency: ```bash pip install "beanie[ocsp]" ``` Wire protocol compression with snappy requires `snappy` extra dependency: ```bash pip install "beanie[snappy]" ``` Wire protocol compression with zstandard requires `zstd` extra dependency: ```bash pip install "beanie[zstd]" ``` Client-Side Field Level Encryption requires `encryption` extra dependency: ```bash pip install "beanie[encryption]" ``` You can install all dependencies automatically with the following command: ```bash pip install "beanie[gssapi,aws,ocsp,snappy,srv,zstd,encryption]" ``` ## Initialization Getting Beanie setup in your code is really easy: 1. Write your database model as a Pydantic class but use `beanie.Document` instead of `pydantic.BaseModel`. 2. Initialize Motor, as Beanie uses this as an async database engine under the hood. 3. Call `beanie.init_beanie` with the Motor client and list of Beanie models The code below should get you started and shows some of the field types that you can use with beanie. ```python from typing import Optional import motor.motor_asyncio from motor.motor_asyncio import AsyncIOMotorClient from pydantic import BaseModel from beanie import Document, Indexed, init_beanie class Category(BaseModel): name: str description: str # This is the model that will be saved to the database class Product(Document): name: str # You can use normal types just like in pydantic description: Optional[str] = None price: Indexed(float) # You can also specify that a field should correspond to an index category: Category # You can include pydantic models as well # Call this from within your event loop to get beanie setup. 
async def init(): # Create Motor client client = AsyncIOMotorClient("mongodb://user:pass@host:27017") # Init beanie with the Product document class await init_beanie(database=client.db_name, document_models=[Product]) ``` python-beanie-1.29.0/docs/index.md000066400000000000000000000125141473701376500167460ustar00rootroot00000000000000[![Beanie](https://raw.githubusercontent.com/roman-right/beanie/main/assets/logo/white_bg.svg)](https://github.com/roman-right/beanie) [![shields badge](https://shields.io/badge/-docs-blue)](https://beanie-odm.dev) [![pypi](https://img.shields.io/pypi/v/beanie.svg)](https://pypi.python.org/pypi/beanie) ## 📢 Important Update 📢 We are excited to announce that Beanie is transitioning from solo development to a team-based approach! This move will help us enhance the project with new features and more collaborative development. At this moment we are establishing a board of members that will decide all the future steps of the project. We are looking for contributors and maintainers to join the board. ### Join Us If you are interested in contributing or want to stay updated, please join our Discord channel. We're looking forward to your ideas and contributions! [Join our Discord](https://discord.gg/AwwTrbCASP) Let’s make Beanie better, together! ## Overview [Beanie](https://github.com/roman-right/beanie) - is an asynchronous Python object-document mapper (ODM) for MongoDB. Data models are based on [Pydantic](https://pydantic-docs.helpmanual.io/). When using Beanie each database collection has a corresponding `Document` that is used to interact with that collection. In addition to retrieving data, Beanie allows you to add, update, or delete documents from the collection as well. Beanie saves you time by removing boilerplate code, and it helps you focus on the parts of your app that actually matter. Data and schema migrations are supported by Beanie out of the box. 
There is a synchronous version of Beanie ODM - [Bunnet](https://github.com/roman-right/bunnet) ## Installation ### PIP ```shell pip install beanie ``` ### Poetry ```shell poetry add beanie ``` ## Example ```python import asyncio from typing import Optional from motor.motor_asyncio import AsyncIOMotorClient from pydantic import BaseModel from beanie import Document, Indexed, init_beanie class Category(BaseModel): name: str description: str class Product(Document): name: str # You can use normal types just like in pydantic description: Optional[str] = None price: Indexed(float) # You can also specify that a field should correspond to an index category: Category # You can include pydantic models as well # This is an asynchronous example, so we will access it from an async function async def example(): # Beanie uses Motor async client under the hood client = AsyncIOMotorClient("mongodb://user:pass@host:27017") # Initialize beanie with the Product document class await init_beanie(database=client.db_name, document_models=[Product]) chocolate = Category(name="Chocolate", description="A preparation of roasted and ground cacao seeds.") # Beanie documents work just like pydantic models tonybar = Product(name="Tony's", price=5.95, category=chocolate) # And can be inserted into the database await tonybar.insert() # You can find documents with pythonic syntax product = await Product.find_one(Product.price < 10) # And update them await product.set({Product.name:"Gold bar"}) if __name__ == "__main__": asyncio.run(example()) ``` ## Links ### Documentation - **[Doc](https://beanie-odm.dev/)** - Tutorial, API documentation, and development guidelines. ### Example Projects - **[fastapi-cosmos-beanie](https://github.com/tonybaloney/ants-azure-demos/tree/master/fastapi-cosmos-beanie)** - FastAPI + Beanie ODM + Azure Cosmos Demo Application by [Anthony Shaw](https://github.com/tonybaloney) - **[fastapi-beanie-jwt](https://github.com/flyinactor91/fastapi-beanie-jwt)** - Sample FastAPI server with JWT auth and Beanie ODM by [Michael duPont](https://github.com/flyinactor91) - **[Shortify](https://github.com/IHosseini083/Shortify)** - URL shortener RESTful API (FastAPI + Beanie ODM + JWT & OAuth2) by [ Iliya Hosseini](https://github.com/IHosseini083) - **[LCCN Predictor](https://github.com/baoliay2008/lccn_predictor)** - Leetcode contest rating predictor (FastAPI + Beanie ODM + React) by [L. Bao](https://github.com/baoliay2008) ### Articles - **[Announcing Beanie - MongoDB ODM](https://dev.to/romanright/announcing-beanie-mongodb-odm-56e)** - **[Build a Cocktail API with Beanie and MongoDB](https://developer.mongodb.com/article/beanie-odm-fastapi-cocktails/)** - **[MongoDB indexes with Beanie](https://dev.to/romanright/mongodb-indexes-with-beanie-43e8)** - **[Beanie Projections. 
Reducing network and database load.](https://dev.to/romanright/beanie-projections-reducing-network-and-database-load-3bih)** - **[Beanie 1.0 - Query Builder](https://dev.to/romanright/announcing-beanie-1-0-mongodb-odm-with-query-builder-4mbl)** - **[Beanie 1.8 - Relations, Cache, Actions and more!](https://dev.to/romanright/announcing-beanie-odm-18-relations-cache-actions-and-more-24ef)** ### Resources - **[GitHub](https://github.com/roman-right/beanie)** - GitHub page of the project - **[Changelog](https://beanie-odm.dev/changelog)** - list of all the valuable changes - **[Discord](https://discord.gg/AwwTrbCASP)** - ask your questions, share ideas or just say `Hello!!` ---- Supported by [JetBrains](https://jb.gg/OpenSource) [![JetBrains](https://raw.githubusercontent.com/roman-right/beanie/main/assets/logo/jetbrains.svg)](https://jb.gg/OpenSource) python-beanie-1.29.0/docs/publishing.md000066400000000000000000000044501473701376500200030ustar00rootroot00000000000000# Publishing a New Version of Beanie This guide provides step-by-step instructions on how to prepare and publish a new version of Beanie. Before starting, ensure that you have the necessary permissions to update the repository. ## 1. Prepare a New Version PR To publish a new version of Beanie, you need to create a pull request (PR) with the following updates: ### 1.1 Update the Version in `pyproject.toml` 1. Open the [`pyproject.toml`](https://github.com/BeanieODM/beanie/blob/main/pyproject.toml) file. 2. Update the `version` field to the new version number. ### 1.2 Update the Version in the `__init__.py` File 1. Open the [`__init__.py`](https://github.com/BeanieODM/beanie/blob/main/beanie/__init__.py) file. 2. Update the `__version__` variable to the new version number. ### 1.3 Update the Changelog To update the changelog, follow these steps: #### 1.3.1 Set the Version in the Changelog Script 1. Open the [`scripts/generate_changelog.py`](https://github.com/BeanieODM/beanie/blob/main/scripts/generate_changelog.py) file. 2. Set the `current_version` to the current version and `new_version` to the new version in the script. #### 1.3.2 Run the Changelog Script 1. Run the following command to generate the updated changelog: ```bash python scripts/generate_changelog.py ``` 2. The script will generate the changelog for the new version. #### 1.3.3 Update the Changelog File 1. Open the [`changelog.md`](https://github.com/BeanieODM/beanie/blob/main/docs/changelog.md) file. 2. Copy the generated changelog and paste it at the top of the `changelog.md` file. ### 1.4 Create and Submit the PR Once you have made the necessary updates, create a PR with a descriptive title and summary of the changes. Ensure that all checks pass before merging the PR. ## 2. Publishing the Version After the PR has been merged, respective GH action will publish it to the PyPI. ## 3. Create a Git Tag and GitHub Release After the version has been published: 1. Pull the latest changes from the `master` branch: ```bash git pull origin master ``` 2. Create a new Git tag with the version number: ```bash git tag -a v1.xx.y -m "Release v1.xx.y" ``` 3. Push the tag to the remote repository: ```bash git push origin v1.xx.y ``` 4. 
Create a new release on GitHub using the GitHub interface.

python-beanie-1.29.0/docs/tutorial/actions.md

# Event-based actions

You can register methods as pre- or post- actions for document events.

Currently supported events:
- Insert
- Replace
- Update
- SaveChanges
- Delete
- ValidateOnSave

Currently supported directions:
- `Before`
- `After`

Current operations creating events:
- `insert()` for Insert
- `replace()` for Replace
- `save()` triggers Insert if it is creating a new document, triggers Replace if it replaces an existing document
- `save_changes()` for SaveChanges
- `insert()`, `replace()`, `save_changes()`, and `save()` for ValidateOnSave
- `set()`, `update()` for Update
- `delete()` for Delete

To register an action, you can use the `@before_event` and `@after_event` decorators respectively:

```python
from beanie import Document, Insert, Replace, before_event, after_event


class Sample(Document):
    num: int
    name: str

    @before_event(Insert)
    def capitalize_name(self):
        self.name = self.name.capitalize()

    @after_event(Replace)
    def num_change(self):
        self.num -= 1
```

It is possible to register an action for several events:

```python
from beanie import Document, Insert, Replace, before_event


class Sample(Document):
    num: int
    name: str

    @before_event(Insert, Replace)
    def capitalize_name(self):
        self.name = self.name.capitalize()
```

This will capitalize the `name` field value before each document's Insert and Replace.

Both sync and async methods can be used as actions.

```python
from beanie import Document, Insert, Replace, after_event


class Sample(Document):
    num: int
    name: str

    @after_event(Insert, Replace)
    async def send_callback(self):
        # `client` is assumed to be defined elsewhere
        await client.send(self.id)
```

Actions can be selectively skipped by passing the `skip_actions` argument when calling the operations that trigger events. `skip_actions` accepts a list of directions and action names.

```python
from beanie import After, Before, Document, Insert, Replace, before_event, after_event


class Sample(Document):
    num: int
    name: str

    @before_event(Insert)
    def capitalize_name(self):
        self.name = self.name.capitalize()

    @before_event(Replace)
    def redact_name(self):
        self.name = "[REDACTED]"

    @after_event(Replace)
    def num_change(self):
        self.num -= 1


sample = Sample(num=1, name="sample")

# capitalize_name will not be executed
await sample.insert(skip_actions=['capitalize_name'])

# num_change will not be executed
await sample.replace(skip_actions=[After])

# redact_name and num_change will not be executed
await sample.replace(skip_actions=[Before, 'num_change'])
```

python-beanie-1.29.0/docs/tutorial/aggregate.md

# Aggregations

You can perform aggregation queries through beanie as well.

For example, to calculate the average:

```python
# With a search:
avg_price = await Product.find(
    Product.category.name == "Chocolate"
).avg(Product.price)

# Over the whole collection:
avg_price = await Product.avg(Product.price)
```

A full list of available methods can be found [here](../api-documentation/interfaces.md/#aggregatemethods).

You can also use the native PyMongo syntax by calling the `aggregate` method. However, as Beanie will not know what output to expect, you will have to supply a projection model yourself. If you do not supply a projection model, then a dictionary will be returned.
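For instance, a minimal sketch of the no-projection case (reusing the `Product` document from the earlier sections; the pipeline and result values are illustrative):

```python
# Without a projection model, plain dictionaries are returned
result = await Product.find(
    Product.category.name == "Chocolate"
).aggregate(
    [{"$group": {"_id": "$category.name", "total": {"$avg": "$price"}}}]
).to_list()
# e.g. [{"_id": "Chocolate", "total": 3.47}]
```

With a projection model, the same pipeline returns typed objects instead: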
```python
class OutputItem(BaseModel):
    id: str = Field(None, alias="_id")
    total: float


result = await Product.find(
    Product.category.name == "Chocolate").aggregate(
    [{"$group": {"_id": "$category.name", "total": {"$avg": "$price"}}}],
    projection_model=OutputItem
).to_list()
```

python-beanie-1.29.0/docs/tutorial/cache.md

# Cache

All query results can be cached locally. This feature must be explicitly turned on in the `Settings` inner class.

```python
class Sample(Document):
    num: int
    name: str

    class Settings:
        use_cache = True
```

Beanie uses an LRU cache with expiration time. You can set `capacity` (the maximum number of cached queries) and the expiration time in the `Settings` inner class.

```python
class Sample(Document):
    num: int
    name: str

    class Settings:
        use_cache = True
        cache_expiration_time = datetime.timedelta(seconds=10)
        cache_capacity = 5
```

Any query will be cached for this document class.

```python
# on the first call it will go to the database
samples = await Sample.find(Sample.num > 10).to_list()
# on the second - it will use cache instead
samples = await Sample.find(Sample.num > 10).to_list()

await asyncio.sleep(15)

# if the expiration time was reached it will go to the database again
samples = await Sample.find(Sample.num > 10).to_list()
```

python-beanie-1.29.0/docs/tutorial/defining-a-document.md

# Defining a document

The `Document` class in Beanie is responsible for mapping and handling the data from the collection. It is inherited from the `BaseModel` Pydantic class, so it follows the same data typing and parsing behavior.

```python
from typing import Optional

import pymongo
from pydantic import BaseModel

from beanie import Document, Indexed


class Category(BaseModel):
    name: str
    description: str


class Product(Document):  # This is the model
    name: str
    description: Optional[str] = None
    price: Indexed(float, pymongo.DESCENDING)
    category: Category

    class Settings:
        name = "products"
        indexes = [
            [
                ("name", pymongo.TEXT),
                ("description", pymongo.TEXT),
            ],
        ]
```

## Fields

As mentioned before, the `Document` class is inherited from the Pydantic `BaseModel` class. It uses all the same patterns as `BaseModel`. But it also has special types of fields:

- id
- Indexed

### id

The `id` field of the `Document` class reflects the unique `_id` field of the MongoDB document. Each object of the `Document` type has this field. The default type of it is [PydanticObjectId](../api-documentation/fields.md/#pydanticobjectid).

```python
class Sample(Document):
    num: int
    description: str


foo = await Sample.find_one(Sample.num > 5)

print(foo.id)  # This will print id

bar = await Sample.get(foo.id)  # get by id
```

If you prefer another type, you can set it up too.
For example, UUID: ```python from uuid import UUID, uuid4 from pydantic import Field class Sample(Document): id: UUID = Field(default_factory=uuid4) num: int description: str ``` ### Indexed To set up an index over a single field, the `Indexed` function can be used to wrap the type: ```python from beanie import Indexed class Sample(Document): num: Indexed(int) description: str ``` The `Indexed` function takes an optional argument `index_type`, which may be set to a pymongo index type: ```python class Sample(Document): description: Indexed(str, index_type=pymongo.TEXT) ``` The `Indexed` function also supports pymongo `IndexModel` kwargs arguments ([PyMongo Documentation](https://pymongo.readthedocs.io/en/stable/api/pymongo/operations.html#pymongo.operations.IndexModel)). For example, to create a `unique` index: ```python class Sample(Document): name: Indexed(str, unique=True) ``` ## Settings The inner class `Settings` is used to configure: - MongoDB collection name - Indexes - Encoders - Use of `revision_id` - Use of cache - Use of state management - Validation on save - Configure if nulls should be saved to the database - Configure nesting depth for linked documents on the fetch operation ### Collection name To set MongoDB collection name, you can use the `name` field of the `Settings` inner class. ```python class Sample(Document): num: int description: str class Settings: name = "samples" ``` ### Indexes The `indexes` field of the inner `Settings` class is responsible for the indexes' setup. It is a list where items can be: - Single key. Name of the document's field (this is equivalent to using the Indexed function described above) - List of (key, direction) pairs. Key - string, name of the document's field. Direction - pymongo direction ( example: `pymongo.ASCENDING`) - `pymongo.IndexModel` instance - the most flexible option. [PyMongo Documentation](https://pymongo.readthedocs.io/en/stable/api/pymongo/operations.html#pymongo.operations.IndexModel) ```python class DocumentTestModelWithIndex(Document): test_int: int test_list: List[SubDocument] test_str: str class Settings: indexes = [ "test_int", [ ("test_int", pymongo.ASCENDING), ("test_str", pymongo.DESCENDING), ], IndexModel( [("test_str", pymongo.DESCENDING)], name="test_string_index_DESCENDING", ), ] ``` ### Encoders The `bson_encoders` field of the inner `Settings` class defines how the Python types are going to be represented when saved in the database. The default conversions can be overridden with this. The `ip` field in the following example is converted to String by default: ```python from ipaddress import IPv4Address class Sample(Document): ip: IPv4Address ``` > **Note:** Default conversions are defined in `beanie.odm.utils.bson.ENCODERS_BY_TYPE`. However, if you want the `ip` field to be represented as Integer in the database, you need to override the default encoders like this: ```python from ipaddress import IPv4Address class Sample(Document): ip: IPv4Address class Settings: bson_encoders = { IPv4Address: int } ``` You can also define your own function for the encoding: ```python from ipaddress import IPv4Address def ipv4address_to_int(v: IPv4Address): return int(v) class Sample(Document): ip: IPv4Address class Settings: bson_encoders = { IPv4Address: ipv4address_to_int } ``` ### Keep nulls By default, Beanie saves fields with `None` value as `null` in the database. 
But if you don't want to save `null` values, you can set `keep_nulls` to `False` in the `Settings` class:

```python
class Sample(Document):
    num: int
    description: Optional[str] = None

    class Settings:
        keep_nulls = False
```

### Nested Documents Depth

It is possible to define nested linked documents with Beanie. Sometimes this can lead to infinite recursion. To prevent this, or to decrease the database load, you can limit the maximum nesting depth. By default, it is set to 3, which means it will fetch up to 3 levels of nested documents.

You can configure:

- maximum depth for all linked documents
- depth for a specific linked document

Maximum:

```python
class Sample(Document):
    num: int
    category: Link[Category]

    class Settings:
        max_nesting_depth = 2  # Maximum nesting depth for all linked documents of this model
```

Specific:

```python
class Sample(Document):
    num: int
    category: Link[Category]

    class Settings:
        max_nesting_depths_per_field = {
            "category": 1  # Nesting depth for a specific field
        }
```

Also, you can limit the nesting depth during find operations. You can read more about this [here](/tutorial/relations/#nested-links).

python-beanie-1.29.0/docs/tutorial/delete.md

# Delete documents

Beanie supports single and batch deletions:

## Single

```python
await Product.find_one(Product.name == "Milka").delete()

# Or
bar = await Product.find_one(Product.name == "Milka")
await bar.delete()
```

## Many

```python
await Product.find(Product.category.name == "Chocolate").delete()
```

## All

```python
await Product.delete_all()

# Or
await Product.all().delete()
```

python-beanie-1.29.0/docs/tutorial/find.md

To populate the database, please run the examples from the [previous section of the tutorial](inserting-into-the-database.md) as we will be using the same setup here.

## Finding documents

The basic syntax for finding multiple documents in the database is to call the class method `find()` or its synonym `find_many()` with some search criteria (see next section):

```python
findresult = Product.find(search_criteria)
```

This returns a `FindMany` object, which can be used to access the results in different ways. To loop through the results, use an `async for` loop:

```python
async for result in Product.find(search_criteria):
    print(result)
```

If you prefer a list of the results, then you can call the `to_list()` method:

```python
result = await Product.find(search_criteria).to_list()
```

To get the first document, you can use the `.first_or_none()` method. It returns the first found document or `None` if no documents were found.

```python
result = await Product.find(search_criteria).first_or_none()
```

### Search criteria

As search criteria, Beanie supports Python-based syntax. For comparisons, Python comparison operators can be used on the class fields (and nested fields):

```python
products = await Product.find(Product.price < 10).to_list()
```

This is supported for the following operators: `==`, `>`, `>=`, `<`, `<=`, `!=`.

Other MongoDB query operators can be used with the included wrappers. For example, the `$in` operator can be used as follows:

```python
from beanie.operators import In

products = await Product.find(
    In(Product.category.name, ["Chocolate", "Fruits"])
).to_list()
```

The whole list of the find query operators can be found [here](../api-documentation/operators/find.md).
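Wrappers can also be combined with the comparison syntax. A small sketch, assuming the same `Product` model (the `Or` wrapper comes from `beanie.operators`, like `In` above):

```python
from beanie.operators import Or

# matches products that are either cheap or belong to the "Fruits" category
products = await Product.find(
    Or(Product.price < 2, Product.category.name == "Fruits")
).to_list()
```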
For more complex cases native PyMongo syntax is also supported: ```python products = await Product.find({"price": 1000}).to_list() ``` ## Finding single documents Sometimes you will only need to find a single document. If you are searching by `id`, then you can use the [get](../api-documentation/document.md/#documentget) method: ```python bar = await Product.get("608da169eb9e17281f0ab2ff") ``` To find a single document via a single search criterion, you can use the [find_one](../api-documentation/interfaces.md/#findinterfacefind_one) method: ```python bar = await Product.find_one(Product.name == "Peanut Bar") ``` ## Syncing from the Database If you wish to apply changes from the database to the document, utilize the [sync](../api-documentation/document.md/#documentsync) method: ```python await bar.sync() ``` Two merging strategies are available: `local` and `remote`. ### Remote Merge Strategy The remote merge strategy replaces the local document with the one from the database, disregarding local changes: ```python from beanie import MergeStrategy await bar.sync(merge_strategy=MergeStrategy.remote) ``` The remote merge strategy is the default. ### Local Merge Strategy The local merge strategy retains changes made locally to the document and updates other fields from the database. **BE CAREFUL**: it may raise an `ApplyChangesException` in case of a merging conflict. ```python from beanie import MergeStrategy await bar.sync(merge_strategy=MergeStrategy.local) ``` ## More complex queries ### Multiple search criteria If you have multiple criteria to search against, you can pass them as separate arguments to any of the `find` functions: ```python chocolates = await Product.find( Product.category.name == "Chocolate", Product.price < 5 ).to_list() ``` Alternatively, you can chain `find` methods: ```python chocolates = await Product .find(Product.category.name == "Chocolate") .find(Product.price < 5).to_list() ``` ### Sorting Sorting can be done with the [sort](../api-documentation/query.md/#findmanysort) method. You can pass it one or multiple fields to sort by. You may optionally specify a `+` or `-` (denoting ascending and descending respectively). ```python chocolates = await Product.find( Product.category.name == "Chocolate").sort(-Product.price,+Product.name).to_list() ``` You can also specify fields as strings or as tuples: ```python chocolates = await Product.find( Product.category.name == "Chocolate").sort("-price","+name").to_list() chocolates = await Product.find( Product.category.name == "Chocolate").sort( [ (Product.price, pymongo.DESCENDING), (Product.name, pymongo.ASCENDING), ] ).to_list() ``` ### Skip and limit To skip a certain number of documents, or limit the total number of elements returned, the `skip` and `limit` methods can be used: ```python chocolates = await Product.find( Product.category.name == "Chocolate").skip(2).to_list() chocolates = await Product.find( Product.category.name == "Chocolate").limit(2).to_list() ``` ### Projections When only a part of a document is required, projections can save a lot of database bandwidth and processing. 
For simple projections we can just define a pydantic model with the required fields and pass it to the `project()` method:

```python
class ProductShortView(BaseModel):
    name: str
    price: float


chocolates = await Product.find(
    Product.category.name == "Chocolate").project(ProductShortView).to_list()
```

For more complex projections an inner `Settings` class with a `projection` field can be added:

```python
class ProductView(BaseModel):
    name: str
    category: str

    class Settings:
        projection = {"name": 1, "category": "$category.name"}


chocolates = await Product.find(
    Product.category.name == "Chocolate").project(ProductView).to_list()
```

### Finding all documents

If you ever want to find all documents, you can use the `find_all()` class method. This is equivalent to `find({})`.

python-beanie-1.29.0/docs/tutorial/indexes.md

## Indexes setup

There is more than one way to set up indexes using Beanie.

### Indexed function

To set up an index over a single field, the `Indexed` function can be used to wrap the type and does not require a `Settings` class:

```python
from typing import Annotated

from beanie import Document, Indexed


class Sample(Document):
    num: Annotated[int, Indexed()]
    description: str
```

The `Indexed` function takes an optional `index_type` argument, which may be set to a pymongo index type:

```python
from typing import Annotated

import pymongo

from beanie import Document, Indexed


class Sample(Document):
    description: Annotated[str, Indexed(index_type=pymongo.TEXT)]
```

The `Indexed` function also supports PyMongo's `IndexModel` kwargs arguments (see the [PyMongo Documentation](https://pymongo.readthedocs.io/en/stable/api/pymongo/operations.html#pymongo.operations.IndexModel) for details). For example, to create a `unique` index:

```python
from typing import Annotated

from beanie import Document, Indexed


class Sample(Document):
    name: Annotated[str, Indexed(unique=True)]
```

The `Indexed` function can also be used directly in the type annotation, by giving it the wrapped type as the first argument. Note that this might not work with some Pydantic V2 types, such as `UUID4` or `EmailStr`.

```python
from beanie import Document, Indexed


class Sample(Document):
    name: Indexed(str, unique=True)
```

### Multi-field indexes

The `indexes` field of the inner `Settings` class is responsible for more complex indexes. It is a list where items can be:

- Single key. Name of the document's field (this is equivalent to using the Indexed function described above without any additional arguments)
- List of (key, direction) pairs. Key - string, name of the document's field. Direction - pymongo direction (example: `pymongo.ASCENDING`)
- `pymongo.IndexModel` instance - the most flexible option. [PyMongo Documentation](https://pymongo.readthedocs.io/en/stable/api/pymongo/operations.html#pymongo.operations.IndexModel)

```python
import pymongo
from pymongo import IndexModel

from beanie import Document


class Sample(Document):
    test_int: int
    test_str: str

    class Settings:
        indexes = [
            "test_int",
            [
                ("test_int", pymongo.ASCENDING),
                ("test_str", pymongo.DESCENDING),
            ],
            IndexModel(
                [("test_str", pymongo.DESCENDING)],
                name="test_string_index_DESCENDING",
            ),
        ]
```
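As a further illustration, `IndexModel` kwargs pass standard MongoDB index options straight through, so a TTL index can be declared the same way. This is a hedged sketch; the `Event` model and the 3600-second lifetime are invented for the example:

```python
from datetime import datetime

import pymongo
from pymongo import IndexModel

from beanie import Document


class Event(Document):
    payload: str
    created_at: datetime

    class Settings:
        indexes = [
            # expireAfterSeconds is a standard MongoDB index option:
            # documents are removed roughly 3600s after `created_at`
            IndexModel(
                [("created_at", pymongo.ASCENDING)],
                expireAfterSeconds=3600,
            ),
        ]
```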
python-beanie-1.29.0/docs/tutorial/inheritance.md

## Inheritance for multi-model use case

Beanie `Documents` support inheritance like any other Python classes. But there are additional features available if you mark the root model with the parameter `is_root = True` in the inner Settings class. This behavior is similar to `UnionDoc`, but you don't need an additional entity. The parent `Document` acts like a "controller" that handles proper storing and fetching of different `Document` types. Also, the parent `Document` can have some shared attributes which are propagated to all children. All classes in the inheritance chain can be used as `Link` in foreign `Documents`.

Depending on the business logic, the parent `Document` can be like an "abstract" class that is not used to store objects of its type (like in the example below), as well as a full-fledged entity, like its children.

### Defining models

To set the root model you have to set `is_root = True` in the inner Settings class. All the inherited documents (on any level) will be stored in the same collection.

```py hl_lines="24 24"
from typing import Optional, List

from motor.motor_asyncio import AsyncIOMotorClient
from pydantic import BaseModel

from beanie import Document, Link, init_beanie


class Vehicle(Document):
    """Inheritance scheme below"""

    # Vehicle
    #   /   |   \
    #  /    |    \
    # Bicycle Bike Car
    #               \
    #                \
    #                Bus

    # shared attribute for all children
    color: str

    class Settings:
        is_root = True


class Fuelled(BaseModel):
    """Just a mixin"""

    fuel: Optional[str]


class Bicycle(Vehicle):
    """Derived from Vehicle, will use its collection"""

    frame: int
    wheels: int


class Bike(Vehicle, Fuelled):
    ...


class Car(Vehicle, Fuelled):
    body: str


class Bus(Car, Fuelled):
    """Inheritance chain is Vehicle -> Car -> Bus, it is also stored in the Vehicle collection"""

    seats: int


class Owner(Document):
    name: str  # added so the insert example below validates
    vehicles: Optional[List[Link[Vehicle]]]
```

### Inserts

Inserts work the same way as usual

```python
client = AsyncIOMotorClient()
await init_beanie(client.test_db, document_models=[Vehicle, Bicycle, Bike, Car, Bus, Owner])

bike_1 = await Bike(color='black', fuel='gasoline').insert()
car_1 = await Car(color='grey', body='sedan', fuel='gasoline').insert()
car_2 = await Car(color='white', body='crossover', fuel='diesel').insert()
bus_1 = await Bus(color='white', seats=80, body='bus', fuel='diesel').insert()
bus_2 = await Bus(color='yellow', seats=26, body='minibus', fuel='diesel').insert()

owner = await Owner(name='John', vehicles=[car_1, car_2, bus_1]).insert()
```

### Find operations

With the parameter `with_children = True` the find query results will contain all the children classes' objects.

```python
# this query returns vehicles of all types that have white color, because `with_children` is True
white_vehicles = await Vehicle.find(Vehicle.color == 'white', with_children=True).to_list()
# [
#    Bicycle(..., color='white', frame=54, wheels=29),
#    Car(fuel='diesel', ..., color='white', body='crossover'),
#    Bus(fuel='diesel', ..., color='white', body='bus', seats=80)
# ]
```

If the search is based on a child, the query returns this child type and all sub-children (with the parameter `with_children=True`)

```python
cars_and_buses = await Car.find(Car.fuel == 'diesel', with_children=True).to_list()
# [
#    Car(fuel='diesel', ..., color='white', body='crossover'),
#    Bus(fuel='diesel', ..., color='white', body='bus', seats=80),
#    Bus(fuel='diesel', ..., color='yellow', body='minibus', seats=26)
# ]
```

If you need to return objects of the specific class only, you can use this class for finding:

```python
# it is possible to limit the results to the Car type only
cars_only = await Car.find().to_list()
# [
#    Car(fuel='gasoline', ..., color='grey', body='sedan'),
#    Car(fuel='diesel', ..., color='white', body='crossover')
# ]
```

To get a single Document it is not necessary to know the type.
You can query using the parent class ```python await Vehicle.get(bus_2.id, with_children=True) # returns Bus instance: # Bus(fuel='diesel', ..., color='yellow', body='minibus', seats=26) ``` ### Relations Linked documents will be resolved into the respective classes ```python owner = await Owner.get(owner.id, fetch_links=True) print(owner.vehicles) # [ # Car(fuel='diesel', ..., color='white', body='crossover'), # Bus(fuel='diesel', ..., color='white', body='bus', seats=80), # Car(fuel='gasoline', ..., color='grey', body='sedan') # ] ``` The same result will be if the owner gets objects without fetching the links, and they will be fetched manually later ### Other All other operations work the same way as for simple Documents ```python await Bike.find().update({"$set": {Bike.color: 'yellow'}}) await Car.find_one(Car.body == 'sedan') ``` python-beanie-1.29.0/docs/tutorial/init.md000066400000000000000000000025301473701376500204420ustar00rootroot00000000000000Beanie uses Motor as an async database engine. To initialize previously created documents, you should provide a Motor database instance and a list of your document models to the `init_beanie(...)` function, as it is shown in the example: ```python from beanie import init_beanie, Document from motor.motor_asyncio import AsyncIOMotorClient class Sample(Document): name: str async def init(): # Create Motor client client = AsyncIOMotorClient( "mongodb://user:pass@host:27017" ) # Initialize beanie with the Sample document class and a database await init_beanie(database=client.db_name, document_models=[Sample]) ``` This creates the collection (if necessary) and sets up any indexes that are defined. `init_beanie` supports not only a list of classes as the document_models argument, but also strings with dot-separated paths: ```python await init_beanie( database=client.db_name, document_models=[ "app.models.DemoDocument", ], ) ``` ### Warning `init_beanie` supports the parameter named `allow_index_dropping` that will drop indexes from your collections. `allow_index_dropping` is by default set to `False`. If you set this to `True`, ensure that you are not managing your indexes in another manner. If you are, these will be deleted when setting `allow_index_dropping=True`.python-beanie-1.29.0/docs/tutorial/insert.md000066400000000000000000000031611473701376500210040ustar00rootroot00000000000000# Insert the documents Beanie documents behave just like pydantic models (because they subclass `pydantic.BaseModel`). Hence, a document can be created in a similar fashion to pydantic: ```python from typing import Optional from pydantic import BaseModel from beanie import Document, Indexed class Category(BaseModel): name: str description: str class Product(Document): # This is the model name: str description: Optional[str] = None price: Indexed(float) category: Category class Settings: name = "products" chocolate = Category(name="Chocolate", description="A preparation of roasted and ground cacao seeds.") tonybar = Product(name="Tony's", price=5.95, category=chocolate) marsbar = Product(name="Mars", price=1, category=chocolate) ``` This however does not save the documents to the database yet. ## Insert a single document To insert a document into the database, you can call either `insert()` or `create()` on it (they are synonyms): ```python await tonybar.insert() await marsbar.create() # does exactly the same as insert() ``` You can also call `save()`, which behaves in the same manner for new documents, but will also update existing documents. 
See the [section on updating](updating-&-deleting.md) of this tutorial for more details.

If you prefer, you can also call the `insert_one` class method:

```python
await Product.insert_one(tonybar)
```

## Inserting many documents

To reduce the number of database queries, similarly typed documents should be inserted together by calling the class method `insert_many`:

```python
await Product.insert_many([tonybar, marsbar])
```
python-beanie-1.29.0/docs/tutorial/lazy_parse.md000066400000000000000000000015471473701376500216570ustar00rootroot00000000000000## Using Lazy Parsing in Queries

Lazy parsing allows you to skip the parsing and validation process for documents and instead call it on demand for each field separately. This can be useful for optimizing performance in certain scenarios.

To use lazy parsing in your queries, you can pass the `lazy_parse=True` parameter to your find method.

Here's an example of how to use lazy parsing in a find query:

```python
await Sample.find(Sample.number == 10, lazy_parse=True).to_list()
```

By setting `lazy_parse=True`, the parsing and validation process will be skipped and instead invoked on demand when the respective fields are accessed. This can potentially improve the performance of your query by reducing the amount of processing required upfront. However, keep in mind that using lazy parsing may also introduce some additional overhead when accessing the fields later on.
python-beanie-1.29.0/docs/tutorial/migrations.md000066400000000000000000000140761473701376500216610ustar00rootroot00000000000000## Attention!

## Create

To create a new migration, run:

```shell
beanie new-migration -n migration_name -p relative/path/to/migrations/directory/
```

It will create a file named `*_migration_name.py` in the directory `relative/path/to/migrations/directory/`.

The migration file contains two classes: `Forward` and `Backward`. Each one contains instructions to roll the migration forward and backward, respectively.

## Run

**Attention**: By default, migrations use transactions. This approach only works with **MongoDB replica sets**. If you prefer to run migrations without transactions, pass the `--no-use-transaction` flag to the `migrate` command. However, be aware that this approach is risky, as there is no way to roll back migrations without transactions.

To roll one forward migration, run:

```shell
beanie migrate -uri 'mongodb+srv://user:pass@host' -db db -p relative/path/to/migrations/directory/ --distance 1
```

To roll all forward migrations, run:

```shell
beanie migrate -uri 'mongodb://user:pass@host' -db db -p relative/path/to/migrations/directory/
```

To roll one backward migration, run:

```shell
beanie migrate -uri 'mongodb+srv://user:pass@host' -db db -p relative/path/to/migrations/directory/ --distance 1 --backward
```

To roll all backward migrations, run:

```shell
beanie migrate -uri 'mongodb+srv://user:pass@host' -db db -p relative/path/to/migrations/directory/ --backward
```

To show the help message with all the parameters and descriptions, run:

```shell
beanie migrate --help
```

## Migration types

A migration class contains instructions - decorated async functions. There are two types of instructions:

- Iterative migration - an instruction that iterates over all the documents of the `input_document` collection and updates them. The most convenient type; it should be used in 99% of cases.
- Free fall migration - an instruction where the user can write any logic. The most flexible type, but verbose.
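Whichever type you use, the decorated instruction functions described in the next sections live inside the `Forward` and `Backward` classes of the migration file; a sketch of that skeleton, with hypothetical names:

```python
# *_migration_name.py — hypothetical skeleton of a generated migration file
class Forward:
    ...  # decorated async instruction functions go here


class Backward:
    ...  # decorated async instruction functions go here
```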
### Iterative migrations

To mark a function as an iterative migration, the `@iterative_migration()` decorator must be used. The function itself must accept typed `input_document` and `output_document` arguments. Like here:

```python
@iterative_migration()
async def name_to_title(
    self, input_document: OldNote, output_document: Note
):
```

#### A simple example of field name changing

Take the following models:

```python
class Tag(BaseModel):
    color: str
    name: str


class OldNote(Document):
    name: str
    tag: Tag

    class Settings:
        name = "notes"


class Note(Document):
    title: str
    tag: Tag

    class Settings:
        name = "notes"
```

To migrate from `OldNote` to `Note`, the field `name` has to be renamed to `title`.

Forward migration:

```python
class Forward:
    @iterative_migration()
    async def name_to_title(
        self, input_document: OldNote, output_document: Note
    ):
        output_document.title = input_document.name
```

Backward migration:

```python
class Backward:
    @iterative_migration()
    async def title_to_name(
        self, input_document: Note, output_document: OldNote
    ):
        output_document.name = input_document.title
```

And a little more complex example:

```python
from pydantic.main import BaseModel

from beanie import Document, iterative_migration


class OldTag(BaseModel):
    color: str
    name: str


class Tag(BaseModel):
    color: str
    title: str


class OldNote(Document):
    title: str
    tag: OldTag

    class Settings:
        name = "notes"


class Note(Document):
    title: str
    tag: Tag

    class Settings:
        name = "notes"


class Forward:
    @iterative_migration()
    async def change_color(
        self, input_document: OldNote, output_document: Note
    ):
        output_document.tag.title = input_document.tag.name


class Backward:
    @iterative_migration()
    async def change_title(
        self, input_document: Note, output_document: OldNote
    ):
        output_document.tag.name = input_document.tag.title
```

All the examples of migrations can be found at this [link](https://github.com/roman-right/beanie/tree/main/tests/migrations/migrations_for_test)

### Free fall migrations

This is a much more flexible migration type, which allows the implementation of any migration logic. But at the same time, it is more verbose.

To mark a function as a free fall migration, the `@free_fall_migration()` decorator with the list of Document classes must be used. The function itself accepts `session` as an argument, which is used to roll back the migration in case something goes wrong. To be able to roll back, please pass the session to the Document methods.
Like here:

```python
@free_fall_migration(document_models=[OldNote, Note])
async def name_to_title(self, session):
    async for old_note in OldNote.find_all():
        new_note = Note(
            id=old_note.id, title=old_note.name, tag=old_note.tag
        )
        await new_note.replace(session=session)
```

#### The same example as for the iterative migration, but with the free fall migration type

```python
from pydantic.main import BaseModel

from beanie import Document, free_fall_migration


class Tag(BaseModel):
    color: str
    name: str


class OldNote(Document):
    name: str
    tag: Tag

    class Settings:
        name = "notes"


class Note(Document):
    title: str
    tag: Tag

    class Settings:
        name = "notes"


class Forward:
    @free_fall_migration(document_models=[OldNote, Note])
    async def name_to_title(self, session):
        async for old_note in OldNote.find_all():
            new_note = Note(
                id=old_note.id, title=old_note.name, tag=old_note.tag
            )
            await new_note.replace(session=session)


class Backward:
    @free_fall_migration(document_models=[OldNote, Note])
    async def title_to_name(self, session):
        async for old_note in Note.find_all():
            new_note = OldNote(
                id=old_note.id, name=old_note.title, tag=old_note.tag
            )
            await new_note.replace(session=session)
```

All the examples of migrations can be found at this [link](https://github.com/roman-right/beanie/tree/main/tests/migrations/migrations_for_test)
python-beanie-1.29.0/docs/tutorial/multi-model.md000066400000000000000000000033221473701376500217270ustar00rootroot00000000000000# Multi-model pattern

Documents with different schemas can be stored in a single collection and managed correctly. The `UnionDoc` class is used for this. It supports the `find` and `aggregate` methods. For `find`, it will parse all the found documents into the respective `Document` classes.

Documents with `union_doc` in their settings can still be used in `find` and other queries. Queries of one such class will not see the data of the others.

## Example

Create documents:

```python
from beanie import Document, UnionDoc


class Parent(UnionDoc):  # Union
    class Settings:
        name = "union_doc_collection"  # Collection name
        class_id = "_class_id"  # _class_id is the default beanie internal field used to filter children Documents


class One(Document):
    int_field: int = 0
    shared: int = 0

    class Settings:
        name = "One"  # Name used to filter union documents of class 'One'; defaults to the class name
        union_doc = Parent


class Two(Document):
    str_field: str = "test"
    shared: int = 0

    class Settings:
        union_doc = Parent
```

The schemas can be incompatible.

Insert documents:

```python
await One().insert()
await One().insert()
await One().insert()
await Two().insert()
```

Find all the documents of the first type:

```python
docs = await One.all().to_list()
print(len(docs))

>> 3  # It found only documents of class One
```

Of the second type:

```python
docs = await Two.all().to_list()
print(len(docs))

>> 1  # It found only documents of class Two
```

Of both:

```python
docs = await Parent.all().to_list()
print(len(docs))

>> 4  # instances of both classes will be in the output here
```

Aggregations will work separately for these two document classes too.
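For example, the same pipeline produces different results depending on the class it runs against; a sketch, assuming the four documents inserted above:

```python
# Aggregating over One sees only the three One documents
one_totals = await One.aggregate(
    [{"$group": {"_id": None, "total": {"$sum": "$shared"}}}]
).to_list()

# The same pipeline over Parent runs across all documents in the collection
all_totals = await Parent.aggregate(
    [{"$group": {"_id": None, "total": {"$sum": "$shared"}}}]
).to_list()
```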
python-beanie-1.29.0/docs/tutorial/on_save_validation.md000066400000000000000000000014721473701376500233470ustar00rootroot00000000000000# On save validation

Pydantic has a very useful config to validate values on assignment - `validate_assignment = True`. Unfortunately, this is an expensive operation and doesn't fit some use cases.

Instead, you can validate all the values before saving the document (`insert`, `replace`, `save`, `save_changes`) with the beanie config `validate_on_save`.

This feature must be turned on explicitly in the `Settings` inner class:

```python
class Sample(Document):
    num: int
    name: str

    class Settings:
        validate_on_save = True
```

If any field has a wrong value, it will raise an error on write operations (`insert`, `replace`, `save`, `save_changes`).

```python
sample = await Sample.find_one(Sample.name == "Test")
sample.num = "wrong value type"

# Next call will raise an error
await sample.replace()
```
python-beanie-1.29.0/docs/tutorial/relations.md000066400000000000000000000155021473701376500215020ustar00rootroot00000000000000# Relations

Documents can contain links to other documents in their fields.

*Only top-level fields are fully supported for now.*

The following field types are supported:

- `Link[...]`
- `Optional[Link[...]]`
- `List[Link[...]]`
- `Optional[List[Link[...]]]`

Also, backward links are supported:

- `BackLink[...]`
- `Optional[BackLink[...]]`
- `List[BackLink[...]]`
- `Optional[List[BackLink[...]]]`

Direct link to the document:

```python
from beanie import Document, Link


class Door(Document):
    height: int = 2
    width: int = 1


class House(Document):
    name: str
    door: Link[Door]
```

Optional direct link to the document:

```python
from typing import Optional

from beanie import Document, Link


class Door(Document):
    height: int = 2
    width: int = 1


class House(Document):
    name: str
    door: Optional[Link[Door]]
```

List of the links:

```python
from typing import List

from beanie import Document, Link


class Window(Document):
    x: int = 10
    y: int = 10


class House(Document):
    name: str
    door: Link[Door]
    windows: List[Link[Window]]
```

Optional list of the links:

```python
from typing import List, Optional

from beanie import Document, Link


class Window(Document):
    x: int = 10
    y: int = 10


class Yard(Document):
    v: int = 10
    y: int = 10


class House(Document):
    name: str
    door: Link[Door]
    windows: List[Link[Window]]
    yards: Optional[List[Link[Yard]]]
```

Other link patterns are not supported at this moment. If you need something more specific for your use-case, please open an issue on the GitHub page: <https://github.com/roman-right/beanie>

## Write

The following write methods support relations:

- `insert(...)`
- `replace(...)`
- `save(...)`

To apply a write method to the linked documents, you should pass the respective `link_rule` argument:

```python
house.windows = [Window(x=100, y=100)]
house.name = "NEW NAME"

# The next call will insert a new window object and replace the house instance with updated data
await house.save(link_rule=WriteRules.WRITE)

# `insert` and `replace` methods will work the same way
```

Otherwise, Beanie can ignore internal links with the `link_rule` parameter `WriteRules.DO_NOTHING`:

```python
house.door.height = 3
house.name = "NEW NAME"

# The next call will just replace the house instance with new data, but the linked door object will not be synced
await house.replace(link_rule=WriteRules.DO_NOTHING)

# `insert` and `save` methods will work the same way
```

## Fetch

### Prefetch

You can fetch linked documents at the find query step using the `fetch_links` parameter:

```python
houses = await House.find(
    House.name == "test",
    fetch_links=True
).to_list()
```

Supported find methods:

- `find`
- `find_one`
- `get`

Beanie uses a single aggregation query under the hood to fetch all the linked documents, which makes this operation very efficient.
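The same parameter works for the single-document methods too; a minimal sketch, where `house_id` is a hypothetical id obtained from an earlier insert:

```python
house = await House.find_one(House.name == "test", fetch_links=True)
house = await House.get(house_id, fetch_links=True)  # house_id is assumed to exist
```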
If a direct link refers to a non-existent document, it will remain a `Link` object after fetching. For fields holding lists of links, fetching will ignore non-existent documents.

#### Search by linked documents fields

If the `fetch_links` parameter is set to `True`, searching by the fields of linked documents is available.

By a field of the direct link:

```python
houses = await House.find(
    House.door.height == 2,
    fetch_links=True
).to_list()
```

By a list of links:

```python
houses = await House.find(
    House.windows.x > 10,
    fetch_links=True
).to_list()
```

Searching by `id` of the linked documents uses the following syntax:

```python
houses = await House.find(
    House.door.id == PydanticObjectId("DOOR_ID_HERE")
).to_list()
```

This works the same way with `fetch_links` set to either `True` or `False`, and for both the `find_many` and `find_one` methods.

#### Nested links

With Beanie you can set up nested links. A document can even link to itself. This can lead to infinite recursion. To prevent this, or to decrease the database load, you can limit the nesting depth during find operations.

```python
from beanie import Document, Link
from typing import Optional


class SelfLinkedSample(Document):
    name: str
    left: Optional[Link["SelfLinkedSample"]]
    right: Optional[Link["SelfLinkedSample"]]
```

You can set the depth for all linked documents, independently of the field:

```python
await SelfLinkedSample.find(
    SelfLinkedSample.name == "test",
    fetch_links=True,
    nesting_depth=2
).to_list()
```

Or you can set the depth for a specific field:

```python
await SelfLinkedSample.find(
    SelfLinkedSample.name == "test",
    fetch_links=True,
    nesting_depths_per_field={
        "left": 1,
        "right": 2
    }
).to_list()
```

Also, you can set the maximum nesting depth at the document definition level. You can read more about this [here](/tutorial/defining-a-document/#nested-documents-depth).

### On-demand fetch

If you don't use prefetching, linked documents will be presented as objects of the `Link` class. You can fetch them manually afterwards. To fetch all the linked documents, you can use the `fetch_all_links` method:

```python
await house.fetch_all_links()
```

It will fetch all the linked documents and replace the `Link` objects with them.

Alternatively, you can fetch a single field:

```python
await house.fetch_link(House.door)
```

This will fetch the Door object and put it into the `door` field of the `house` object.

## Delete

The delete method works the same way as the write operations, but it uses its own rules.

To delete all the links on document deletion, use the `DeleteRules.DELETE_LINKS` value for the `link_rule` parameter:

```python
await house.delete(link_rule=DeleteRules.DELETE_LINKS)
```

To keep the linked documents, you can use the `DO_NOTHING` rule:

```python
await house.delete(link_rule=DeleteRules.DO_NOTHING)
```

## Back Links

To initialize a back link, you need a document with a direct link, or a list of links, to the current document.

```python
from typing import List

from beanie import Document, BackLink, Link
from pydantic import Field


class House(Document):
    name: str
    door: Link["Door"]
    owners: List[Link["Person"]]


class Door(Document):
    height: int = 2
    width: int = 1
    house: BackLink[House] = Field(original_field="door")


class Person(Document):
    name: str
    house: List[BackLink[House]] = Field(original_field="owners")
```

The `original_field` parameter is required for the back link field.

Back links support all the operations that normal links support, but they are virtual.
This means that when searching the database, you will need to include `fetch_links=True` (see [Finding documents](/tutorial/finding-documents)), or you will receive an empty `BackLink` virtual object. It is not possible to `fetch()` this virtual link after the initial search.

## Limitations

- Find operations with the `fetch_links` parameter cannot be chained with the `delete` and `update` methods.
python-beanie-1.29.0/docs/tutorial/revision.md000066400000000000000000000022111473701376500213310ustar00rootroot00000000000000# Revision

This feature helps with concurrent operations. It stores a `revision_id` together with the document and changes it on each document update. If an application with an older local copy of the document tries to change it, an exception will be raised. Only when the local copy is synced with the database will the application be allowed to change the data. This helps to avoid data losses.

### Be aware: the revision id feature may work incorrectly with BulkWriter.

### Usage

This feature must be explicitly turned on in the `Settings` inner class:

```python
class Sample(Document):
    num: int
    name: str

    class Settings:
        use_revision = True
```

Any changing operation will check whether the local copy of the document has the up-to-date `revision_id` value:

```python
s = await Sample.find_one(Sample.name == "TestName")
s.num = 10

# If a concurrent process already changed the doc, the next operation will raise an error
await s.replace()
```

If you want to ignore the revision and apply all the changes even if the local copy is outdated, you can use the `ignore_revision` parameter:

```python
await s.replace(ignore_revision=True)
```
python-beanie-1.29.0/docs/tutorial/state_management.md000066400000000000000000000067221473701376500230220ustar00rootroot00000000000000# State Management

Beanie can keep the document state synced with the database in order to find local changes and save only them.

This feature must be explicitly turned on in the `Settings` inner class:

```python
class Sample(Document):
    num: int
    name: str

    class Settings:
        use_state_management = True
```

By default (with `use_state_management = True`), Beanie keeps the current changes (not yet saved to the database); with `state_management_save_previous = True`, it additionally keeps the previous changes (already saved to the database).

```python
class Sample(Document):
    num: int
    name: str

    class Settings:
        use_state_management = True
        state_management_save_previous = True
```

Every new save overrides the previous changes and clears the current changes.

## Saving changes

To save only the changed values, the `save_changes()` method should be used:

```python
s = await Sample.find_one(Sample.name == "Test")
s.num = 100
await s.save_changes()
```

The `save_changes()` method can only be used with already inserted documents.
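A minimal sketch of the intended flow (assuming the `Sample` model defined above):

```python
s = Sample(num=1, name="Test")
await s.insert()        # the document must be persisted first
s.num = 100
await s.save_changes()  # writes only the changed field
```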
## Interacting with changes

Beanie exposes several methods that can be used to interact with the saved changes:

```python
s = await Sample.find_one(Sample.name == "Test")
s.is_changed == False
s.get_changes() == {}

s.num = 200
s.is_changed == True
s.get_changes() == {"num": 200}

s.rollback()
s.is_changed == False
s.get_changes() == {}
```

And similar methods can be used with the previous changes that have been saved to the database, if `state_management_save_previous` is set to `True`:

```python
s = await Sample.find_one(Sample.name == "Test")
s.num = 200
await s.save_changes()
s.has_changed == True
s.get_previous_changes() == {"num": 200}
s.get_changes() == {}
```

## Options

By default, state management will merge the changes made to nested objects, which is fine for most cases, as it is non-destructive and does not re-assign the whole object if only one of its attributes changed:

```python
from typing import Dict


class Item(Document):
    name: str
    attributes: Dict[str, float]

    class Settings:
        use_state_management = True
```

```python
i = Item(name="Test", attributes={"attribute_1": 1.0, "attribute_2": 2.0})
await i.insert()
i.attributes = {"attribute_1": 1.0}
await i.save_changes()
# Changes will consist of: {"attributes.attribute_1": 1.0}
# Keeping attribute_2
```

However, there are some cases where you would want to replace the whole object when one of its attributes changed. You can enable the `state_management_replace_objects` attribute in your model's `Settings` inner class:

```python
from typing import Dict


class Item(Document):
    name: str
    attributes: Dict[str, float]

    class Settings:
        use_state_management = True
        state_management_replace_objects = True
```

With this setting activated, the whole object will be overridden when one attribute of the nested object is changed:

```python
i = Item(name="Test", attributes={"attribute_1": 1.0, "attribute_2": 2.0})
await i.insert()
i.attributes["attribute_1"] = 1.0
await i.save_changes()
# Changes will consist of: {"attributes.attribute_1": 1.0, "attributes.attribute_2": 2.0}
# Keeping attribute_2
```

When the whole object is assigned, the whole nested object will be overridden:

```python
i = Item(name="Test", attributes={"attribute_1": 1.0, "attribute_2": 2.0})
await i.insert()
i.attributes = {"attribute_1": 1.0}
await i.save_changes()
# Changes will consist of: {"attributes": {"attribute_1": 1.0}}
# Removing attribute_2
```
python-beanie-1.29.0/docs/tutorial/time_series.md000066400000000000000000000020411473701376500220060ustar00rootroot00000000000000# Time series

You can set up a timeseries collection using the inner `Settings` class.

**Be aware: timeseries collections are supported by MongoDB 5.0 and higher only. The fields `bucket_max_span_seconds` and `bucket_rounding_seconds`, however, require MongoDB 6.3 or higher.**

```python
from datetime import datetime

from beanie import Document, TimeSeriesConfig, Granularity
from pydantic import Field


class Sample(Document):
    ts: datetime = Field(default_factory=datetime.now)
    meta: str

    class Settings:
        timeseries = TimeSeriesConfig(
            time_field="ts",                # Required
            meta_field="meta",              # Optional
            granularity=Granularity.hours,  # Optional
            bucket_max_span_seconds=3600,   # Optional
            bucket_rounding_seconds=3600,   # Optional
            expire_after_seconds=2          # Optional
        )
```

TimeSeriesConfig fields reflect the respective parameters of the MongoDB timeseries creation function.
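For reference, this corresponds roughly to creating the collection directly through the driver; a sketch of the equivalent call (assuming `db` is a Motor database handle, and omitting the bucket fields, which replace `granularity` on MongoDB 6.3+):

```python
await db.create_collection(
    "Sample",
    timeseries={
        "timeField": "ts",       # maps from time_field
        "metaField": "meta",     # maps from meta_field
        "granularity": "hours",  # maps from granularity
    },
    expireAfterSeconds=2,        # maps from expire_after_seconds
)
```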
MongoDB documentation: https://docs.mongodb.com/manual/core/timeseries-collections/
python-beanie-1.29.0/docs/tutorial/update.md000066400000000000000000000060211473701376500207600ustar00rootroot00000000000000# Updating & Deleting

Now that we know how to find documents, how do we change them or delete them?

## Saving changes to existing documents

The easiest way to change a document in the database is to use either the `replace` or `save` method on an altered document. These methods both write the document to the database, but `replace` will raise an exception when the document does not exist yet, while `save` will insert the document.

Using the `save()` method:

```python
bar = await Product.find_one(Product.name == "Mars")
bar.price = 10
await bar.save()
```

Otherwise, use the `replace()` method, which throws:

- a `ValueError` if the document does not have an `id` yet, or
- a `beanie.exceptions.DocumentNotFound` if it does, but the `id` is not present in the collection

```python
bar.price = 10
try:
    await bar.replace()
except (ValueError, beanie.exceptions.DocumentNotFound):
    print("Can't replace a non existing document")
```

Note that these methods require multiple queries to the database and replace the entire document with the new version. A more tailored solution can often be created by applying update queries directly at the database level.

## Update queries

Update queries can be performed on the result of a `find` or `find_one` query, or on a document that was returned from an earlier query. Simpler updates can be performed using the `set`, `inc`, and `current_date` methods:

```python
bar = await Product.find_one(Product.name == "Mars")
await bar.set({Product.name: "Gold bar"})
bar = await Product.find(Product.price > 0.5).inc({Product.price: 1})
```

More complex update operations can be performed by calling `update()` with an update operator, similar to find queries:

```python
await Product.find_one(Product.name == "Tony's").update(Set({Product.price: 3.33}))
```

The whole list of the update query operators can be found [here](../api-documentation/operators/update.md).

Native MongoDB syntax is also supported:

```python
await Product.find_one(Product.name == "Tony's").update({"$set": {Product.price: 3.33}})
```

## Upsert

To insert a document when no documents are matched against the search criteria, the `upsert` method can be used:

```python
await Product.find_one(Product.name == "Tony's").upsert(
    Set({Product.price: 3.33}),
    on_insert=Product(name="Tony's", price=3.33, category=chocolate)
)
```

## Deleting documents

Deleting objects works just like updating them: you simply call `delete()` on the found documents:

```python
bar = await Product.find_one(Product.name == "Milka")
await bar.delete()

await Product.find_one(Product.name == "Milka").delete()

await Product.find(Product.category.name == "Chocolate").delete()
```

## Response Type

For the object methods `update` and `upsert`, you can use the `response_type` parameter to specify the type of response. The options are:

- `UpdateResponse.UPDATE_RESULT` - returns the result of the update operation.
- `UpdateResponse.NEW_DOCUMENT` - returns the newly updated document.
- `UpdateResponse.OLD_DOCUMENT` - returns the document before the update.
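For example, a sketch that returns the document as it looks after the update (assuming `UpdateResponse` is importable from `beanie` and `Set` from `beanie.operators`; check the import paths in your version):

```python
from beanie import UpdateResponse
from beanie.operators import Set

product = await Product.find_one(Product.name == "Tony's").update(
    Set({Product.price: 3.33}),
    response_type=UpdateResponse.NEW_DOCUMENT,  # return the updated document
)
```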
python-beanie-1.29.0/docs/tutorial/views.md000066400000000000000000000040701473701376500206350ustar00rootroot00000000000000# Views

Virtual views are aggregation pipelines stored in MongoDB that act as collections for reading operations. You can use the `View` class the same way as `Document` for `find` and `aggregate` operations.

## Examples

Create a view:

```python
from pydantic import Field
from beanie import Document, View


class Bike(Document):
    type: str
    frame_size: int
    is_new: bool


class Metrics(View):
    type: str = Field(alias="_id")
    number: int
    new: int

    class Settings:
        source = Bike
        pipeline = [
            {
                "$group": {
                    "_id": "$type",
                    "number": {"$sum": 1},
                    "new": {"$sum": {"$cond": ["$is_new", 1, 0]}}
                }
            },
        ]
```

Initialize Beanie:

```python
from motor.motor_asyncio import AsyncIOMotorClient
from beanie import init_beanie


async def main():
    uri = "mongodb://beanie:beanie@localhost:27017"
    client = AsyncIOMotorClient(uri)
    db = client.bikes
    await init_beanie(
        database=db,
        document_models=[Bike, Metrics],
        recreate_views=True,
    )
```

Create bikes:

```python
await Bike(type="Mountain", frame_size=54, is_new=True).insert()
await Bike(type="Mountain", frame_size=60, is_new=False).insert()
await Bike(type="Road", frame_size=52, is_new=True).insert()
await Bike(type="Road", frame_size=54, is_new=True).insert()
await Bike(type="Road", frame_size=58, is_new=False).insert()
```

Find metrics for `type == "Road"`:

```python
results = await Metrics.find(Metrics.type == "Road").to_list()
print(results)

>> [Metrics(type='Road', number=3, new=2)]
```

Aggregate over metrics to get the count of all the new bikes:

```python
results = await Metrics.aggregate([{
    "$group": {
        "_id": None,
        "new_total": {"$sum": "$new"}
    }
}]).to_list()
print(results)

>> [{'_id': None, 'new_total': 3}]
```

The same number can be obtained more concisely by using the find query aggregation syntactic sugar:

```python
results = await Metrics.all().sum(Metrics.new)
print(results)

>> 3
```
python-beanie-1.29.0/pydoc-markdown.yml000066400000000000000000000073501473701376500200470ustar00rootroot00000000000000loaders:
  - type: python
processors:
  - type: filter
  - type: smart
  - type: crossref
hooks:
  pre-render:
    - mkdir -p docs/build/content
    - cp -r docs/assets docs/build/content
    - cp docs/CNAME docs/build/content
renderer:
  type: mkdocs
  output_directory: docs/build
  mkdocs_config:
    site_name: Beanie Documentation
    site_url: https://beanie-odm.dev/
    theme:
      name: material
      palette:
        - scheme: default
          primary: white
          toggle:
            icon: material/toggle-switch
            name: Switch to dark mode
        - scheme: slate
          toggle:
            icon: material/toggle-switch-off-outline
            name: Switch to light mode
      logo: assets/logo.svg
      favicon: assets/favicon.png
    extra_css:
      - assets/color_scheme.css
    markdown_extensions:
      - pymdownx.highlight
      - pymdownx.superfences
    repo_url: https://github.com/roman-right/beanie
  markdown:
    use_fixed_header_levels: true
    header_level_by_type:
      Module: 2
      Class: 3
      Method: 4
      Function: 4
      Data: 4
    descriptive_class_title: false
    descriptive_module_title: false
    add_method_class_prefix: true
    add_member_class_prefix: true
    docstrings_as_blockquote: true
  pages:
    - title: Overview
      name: index
      source: docs/index.md
    - title: Getting started
      source: docs/getting-started.md
    - title: Tutorial
      children:
        - title: Defining a document
          source: docs/tutorial/defining-a-document.md
        - title: Initialization
          source: docs/tutorial/init.md
        - title: Inserting into the database
          source: docs/tutorial/insert.md
        - title: Finding documents
          source: docs/tutorial/find.md
        - title: Lazy parsing
          source: docs/tutorial/lazy_parse.md
        - title: Updating & Deleting
          source: docs/tutorial/update.md
        - title: Indexes
          source: docs/tutorial/indexes.md
        - title: Multi-model pattern
          source: docs/tutorial/multi-model.md
        - title: Inheritance
          source: docs/tutorial/inheritance.md
- title: Aggregation source: docs/tutorial/aggregate.md - title: Relations source: docs/tutorial/relations.md - title: Views source: docs/tutorial/views.md - title: Time Series source: docs/tutorial/time_series.md - title: Event-based actions source: docs/tutorial/actions.md - title: Cache source: docs/tutorial/cache.md - title: Revision source: docs/tutorial/revision.md - title: State Management source: docs/tutorial/state_management.md - title: On save validation source: docs/tutorial/on_save_validation.md - title: Migrations source: docs/tutorial/migrations.md - title: Batteries children: - title: Queue source: docs/batteries/queue.md - title: API Documentation children: - title: Document contents: - beanie.odm.documents.* - title: Query contents: - beanie.odm.queries.* - title: Interfaces contents: - beanie.odm.interfaces.* - title: Operators children: - title: Find contents: - beanie.odm.operators.find.* - title: Update contents: - beanie.odm.operators.update.* - title: Fields contents: - beanie.odm.fields.* - title: Development source: docs/development.md - title: Code of conduct source: docs/code-of-conduct.md - title: Changelog source: docs/changelog.md python-beanie-1.29.0/pyproject.toml000066400000000000000000000057131473701376500173040ustar00rootroot00000000000000[build-system] requires = ["flit_core >=3.2,<4"] build-backend = "flit_core.buildapi" [project] name = "beanie" version = "1.29.0" description = "Asynchronous Python ODM for MongoDB" readme = "README.md" requires-python = ">=3.8,<4.0" license = { file="LICENSE" } authors = [ {name = "Roman Right", email = "roman-right@protonmail.com"} ] keywords = ["mongodb", "odm", "orm", "pydantic", "mongo", "async", "python"] classifiers = [ "License :: OSI Approved :: Apache Software License", "Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "Operating System :: OS Independent", "Topic :: Database", "Topic :: Software Development :: Libraries :: Python Modules", "Typing :: Typed", ] dependencies = [ "pydantic>=1.10.18,<3.0", "motor>=2.5.0,<4.0.0", "click>=7", "toml", "lazy-model==0.2.0", "typing-extensions>=4.7", ] [project.optional-dependencies] test = [ "pre-commit>=3.5.0", "pytest>=8.3.3", "pytest-asyncio>=0.24.0", "pytest-cov>=5.0.0", "dnspython>=2.1.0", "pyright>=0", "asgi-lifespan>=1.0.1", "httpx>=0.23.0", "fastapi>=0.100", "pydantic-settings>=2", "pydantic-extra-types>=2", "pydantic[email]", ] doc = [ "Pygments>=2.8.0", "Markdown>=3.3", "pydoc-markdown>=4.8", "mkdocs>=1.4", "mkdocs-material>=9.0", "jinja2>=3.0.3" ] queue = ["beanie-batteries-queue>=0.2"] ci = [ "toml", "requests", "types-requests", ] aws = ["motor[aws]>=2.5.0,<4.0.0"] encryption = ["motor[encryption]>=2.5.0,<4.0.0"] gssapi = ["motor[gssapi]>=2.5.0,<4.0.0"] ocsp = ["motor[ocsp]>=2.5.0,<4.0.0"] snappy = ["motor[snappy]>=2.5.0,<4.0.0"] zstd = ["motor[zstd]>=2.5.0,<4.0.0"] [project.urls] homepage = "https://beanie-odm.dev" repository = "https://github.com/roman-right/beanie" [project.scripts] beanie = "beanie.executors.migrate:migrations" # TOOLS [tool.coverage.run] branch = true source = ["beanie"] [tool.coverage.report] ignore_errors = true show_missing = true fail_under = 80 exclude_lines = [ 'pragma: no cover', 'if TYPE_CHECKING:', 'if typing.TYPE_CHECKING:', 'if __name__ == .__main__.:' ] [tool.pytest.ini_options] minversion = "8.0" addopts = "--cov" testpaths = [ "tests", ] filterwarnings = [ "error", "ignore::DeprecationWarning", "ignore::UserWarning", ] asyncio_mode = "auto" asyncio_default_fixture_loop_scope = "function" 
[tool.beanie.migrations] path = "beanie/example_migration" database_name = "beanie_db" [tool.mypy] ignore_missing_imports = true [[tool.mypy.overrides]] module = "toml" ignore_missing_imports = true [tool.pyright] include = ["tests/typing", "beanie"] [tool.ruff] line-length = 79 fix = true target-version = "py312" include = ["**/*.py", ".github/**/*.py"] [tool.ruff.lint] ignore = ["E501"] extend-select = ["I001"] per-file-ignores = { "tests/*" = ["E711"] } # Allow unused variables when underscore-prefixed. dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$" python-beanie-1.29.0/scripts/000077500000000000000000000000001473701376500160515ustar00rootroot00000000000000python-beanie-1.29.0/scripts/generate_changelog.py000066400000000000000000000043571473701376500222350ustar00rootroot00000000000000import subprocess from dataclasses import dataclass from datetime import datetime from typing import List import requests # type: ignore @dataclass class PullRequest: number: int title: str user: str user_url: str url: str class ChangelogGenerator: def __init__( self, username: str, repository: str, current_version: str, new_version: str, ): self.username = username self.repository = repository self.base_url = f"https://api.github.com/repos/{username}/{repository}" self.current_version = current_version self.new_version = new_version self.commits = self.get_commits_after_tag(current_version) self.prs = [self.get_pr_for_commit(commit) for commit in self.commits] def get_commits_after_tag(self, tag: str) -> List[str]: result = subprocess.run( ["git", "log", f"{tag}..HEAD", "--pretty=format:%H"], stdout=subprocess.PIPE, text=True, ) return result.stdout.split() def get_pr_for_commit(self, commit_sha: str) -> PullRequest: url = f"{self.base_url}/commits/{commit_sha}/pulls" response = requests.get(url) response.raise_for_status() pr_data = response.json()[0] return PullRequest( number=pr_data["number"], title=pr_data["title"], user=pr_data["user"]["login"], user_url=pr_data["user"]["html_url"], url=pr_data["html_url"], ) def generate_changelog(self) -> str: markdown = f"\n## [{self.new_version}] - {datetime.now().strftime('%Y-%m-%d')}\n" for pr in self.prs: markdown += ( f"### {pr.title.capitalize()}\n" f"- Author - [{pr.user}]({pr.user_url})\n" f"- PR <{pr.url}>\n" ) markdown += f"\n[{self.new_version}]: https://pypi.org/project/{self.repository}/{self.new_version}\n" return markdown if __name__ == "__main__": generator = ChangelogGenerator( username="BeanieODM", repository="beanie", current_version="1.29.0", new_version="1.30.0", ) changelog = generator.generate_changelog() print(changelog) python-beanie-1.29.0/scripts/publish_docs.sh000066400000000000000000000003371473701376500210660ustar00rootroot00000000000000pydoc-markdown cd docs/build remote_repo="https://x-access-token:${GITHUB_TOKEN}@${GITHUB_DOMAIN:-"github.com"}/${GITHUB_REPOSITORY}.git" git remote rm origin git remote add origin "${remote_repo}" mkdocs gh-deploy --forcepython-beanie-1.29.0/tests/000077500000000000000000000000001473701376500155245ustar00rootroot00000000000000python-beanie-1.29.0/tests/__init__.py000066400000000000000000000000001473701376500176230ustar00rootroot00000000000000python-beanie-1.29.0/tests/conftest.py000066400000000000000000000011151473701376500177210ustar00rootroot00000000000000import motor.motor_asyncio import pytest from beanie.odm.utils.pydantic import IS_PYDANTIC_V2 if IS_PYDANTIC_V2: from pydantic_settings import BaseSettings else: from pydantic import BaseSettings class Settings(BaseSettings): 
mongodb_dsn: str = "mongodb://localhost:27017/beanie_db" mongodb_db_name: str = "beanie_db" @pytest.fixture def settings(): return Settings() @pytest.fixture() def cli(settings): return motor.motor_asyncio.AsyncIOMotorClient(settings.mongodb_dsn) @pytest.fixture() def db(cli, settings): return cli[settings.mongodb_db_name] python-beanie-1.29.0/tests/fastapi/000077500000000000000000000000001473701376500171535ustar00rootroot00000000000000python-beanie-1.29.0/tests/fastapi/__init__.py000066400000000000000000000000001473701376500212520ustar00rootroot00000000000000python-beanie-1.29.0/tests/fastapi/app.py000066400000000000000000000013731473701376500203110ustar00rootroot00000000000000from contextlib import asynccontextmanager import motor.motor_asyncio from fastapi import FastAPI from beanie import init_beanie from tests.conftest import Settings from tests.fastapi.models import ( DoorAPI, House, HouseAPI, Person, RoofAPI, WindowAPI, ) from tests.fastapi.routes import house_router @asynccontextmanager async def live_span(_: FastAPI): # CREATE MOTOR CLIENT client = motor.motor_asyncio.AsyncIOMotorClient(Settings().mongodb_dsn) # INIT BEANIE await init_beanie( client.beanie_db, document_models=[House, Person, HouseAPI, WindowAPI, DoorAPI, RoofAPI], ) yield app = FastAPI(lifespan=live_span) # ADD ROUTES app.include_router(house_router, prefix="/v1", tags=["house"]) python-beanie-1.29.0/tests/fastapi/conftest.py000066400000000000000000000015551473701376500213600ustar00rootroot00000000000000import pytest from asgi_lifespan import LifespanManager from httpx import ASGITransport, AsyncClient from tests.fastapi.app import app from tests.fastapi.models import ( DoorAPI, House, HouseAPI, Person, RoofAPI, WindowAPI, ) @pytest.fixture(autouse=True) async def api_client(clean_db): """api client fixture.""" async with LifespanManager(app, startup_timeout=100, shutdown_timeout=100): server_name = "https://localhost" async with AsyncClient( transport=ASGITransport(app=app), base_url=server_name ) as ac: yield ac @pytest.fixture(autouse=True) async def clean_db(db): models = [House, Person, HouseAPI, WindowAPI, DoorAPI, RoofAPI] yield None for model in models: await model.get_motor_collection().drop() await model.get_motor_collection().drop_indexes() python-beanie-1.29.0/tests/fastapi/models.py000066400000000000000000000013111473701376500210040ustar00rootroot00000000000000from typing import List from pydantic import Field from beanie import Document, Indexed, Link from beanie.odm.fields import BackLink from beanie.odm.utils.pydantic import IS_PYDANTIC_V2 class WindowAPI(Document): x: int y: int class DoorAPI(Document): t: int = 10 class RoofAPI(Document): r: int = 100 class HouseAPI(Document): windows: List[Link[WindowAPI]] name: Indexed(str) height: Indexed(int) = 2 class House(Document): name: str owner: Link["Person"] class Person(Document): name: str house: BackLink[House] = ( Field(json_schema_extra={"original_field": "owner"}) if IS_PYDANTIC_V2 else Field(original_field="owner") ) python-beanie-1.29.0/tests/fastapi/routes.py000066400000000000000000000040511473701376500210460ustar00rootroot00000000000000from typing import Optional from fastapi import APIRouter, Body, status from pydantic import BaseModel from beanie import PydanticObjectId, WriteRules from beanie.odm.utils.pydantic import IS_PYDANTIC_V2 from tests.fastapi.models import House, HouseAPI, Person, WindowAPI house_router = APIRouter() if not IS_PYDANTIC_V2: from fastapi.encoders import ENCODERS_BY_TYPE from pydantic.json import ENCODERS_BY_TYPE 
as PYDANTIC_ENCODERS_BY_TYPE ENCODERS_BY_TYPE.update(PYDANTIC_ENCODERS_BY_TYPE) class WindowInput(BaseModel): id: PydanticObjectId @house_router.post("/windows/", response_model=WindowAPI) async def create_window(window: WindowAPI): await window.create() return window @house_router.post("/windows_2/") async def create_window_2(window: WindowAPI): return await window.save() @house_router.get("/windows/{id}", response_model=Optional[WindowAPI]) async def get_window(id: PydanticObjectId): return await WindowAPI.get(id) @house_router.post("/houses/", response_model=HouseAPI) async def create_house(window: WindowAPI): house = HouseAPI(name="test_name", windows=[window]) await house.insert(link_rule=WriteRules.WRITE) return house @house_router.post("/houses_with_window_link/", response_model=HouseAPI) async def create_houses_with_window_link(window: WindowInput): validator = ( HouseAPI.model_validate if IS_PYDANTIC_V2 else HouseAPI.parse_obj ) house = validator( dict(name="test_name", windows=[WindowAPI.link_from_id(window.id)]) ) await house.insert(link_rule=WriteRules.WRITE) return house @house_router.post("/houses_2/", response_model=HouseAPI) async def create_houses_2(house: HouseAPI): await house.insert(link_rule=WriteRules.WRITE) return house @house_router.post( "/house", response_model=House, status_code=status.HTTP_201_CREATED, ) async def create_house_new(house: House = Body(...)): person = Person(name="Bob") house.owner = person await house.save(link_rule=WriteRules.WRITE) await house.sync() return house python-beanie-1.29.0/tests/fastapi/test_api.py000066400000000000000000000043261473701376500213420ustar00rootroot00000000000000from tests.fastapi.models import WindowAPI async def test_create_window(api_client): payload = {"x": 10, "y": 20} resp = await api_client.post("/v1/windows/", json=payload) resp_json = resp.json() assert resp_json["x"] == 10 assert resp_json["y"] == 20 async def test_get_window(api_client): payload = {"x": 10, "y": 20} data1 = ( (await api_client.post("/v1/windows/", json=payload)) .raise_for_status() .json() ) window_id = data1["_id"] data2 = ( (await api_client.get(f"/v1/windows/{window_id}")) .raise_for_status() .json() ) assert data2 == data1 async def test_create_house(api_client): payload = {"x": 10, "y": 20} resp = await api_client.post("/v1/houses/", json=payload) resp_json = resp.json() assert len(resp_json["windows"]) == 1 async def test_create_house_with_window_link(api_client): payload = {"x": 10, "y": 20} resp = await api_client.post("/v1/windows/", json=payload) window_id = resp.json()["_id"] payload = {"id": window_id} resp = await api_client.post("/v1/houses_with_window_link/", json=payload) resp_json = resp.json() assert resp_json["windows"][0]["collection"] == "WindowAPI" async def test_create_house_2(api_client): window = WindowAPI(x=10, y=10) await window.insert() payload = {"name": "TEST", "windows": [str(window.id)]} resp = await api_client.post("/v1/houses_2/", json=payload) resp_json = resp.json() assert len(resp_json["windows"]) == 1 async def test_revision_id(api_client): payload = {"x": 10, "y": 20} resp = await api_client.post("/v1/windows_2/", json=payload) resp_json = resp.json() assert "revision_id" not in resp_json assert resp_json == {"x": 10, "y": 20, "_id": resp_json["_id"]} async def test_create_house_new(api_client): payload = { "name": "FreshHouse", "owner": {"name": "will_be_overridden_to_Bob"}, } resp = await api_client.post("/v1/house", json=payload) resp_json = resp.json() assert resp_json["name"] == payload["name"] 
assert resp_json["owner"]["name"] == payload["owner"]["name"][-3:] assert resp_json["owner"]["house"]["collection"] == "House" python-beanie-1.29.0/tests/fastapi/test_openapi_schema_generation.py000066400000000000000000000007261473701376500257570ustar00rootroot00000000000000from json import dumps from fastapi.openapi.utils import get_openapi from tests.fastapi.app import app def test_openapi_schema_generation(): openapi_schema_json_str = dumps( get_openapi( title=app.title, version=app.version, openapi_version=app.openapi_version, description=app.description, routes=app.routes, ), ) assert openapi_schema_json_str is not None python-beanie-1.29.0/tests/migrations/000077500000000000000000000000001473701376500177005ustar00rootroot00000000000000python-beanie-1.29.0/tests/migrations/__init__.py000066400000000000000000000000001473701376500217770ustar00rootroot00000000000000python-beanie-1.29.0/tests/migrations/conftest.py000066400000000000000000000005641473701376500221040ustar00rootroot00000000000000import pytest from beanie import init_beanie from beanie.migrations.models import MigrationLog @pytest.fixture(autouse=True) async def init(db): await init_beanie( database=db, document_models=[ MigrationLog, ], ) @pytest.fixture(autouse=True) async def remove_migrations_log(db, init): await MigrationLog.delete_all() python-beanie-1.29.0/tests/migrations/iterative/000077500000000000000000000000001473701376500216745ustar00rootroot00000000000000python-beanie-1.29.0/tests/migrations/iterative/__init__.py000066400000000000000000000000001473701376500237730ustar00rootroot00000000000000python-beanie-1.29.0/tests/migrations/iterative/test_change_subfield.py000066400000000000000000000033251473701376500264120ustar00rootroot00000000000000import pytest from pydantic.main import BaseModel from beanie import init_beanie from beanie.executors.migrate import MigrationSettings, run_migrate from beanie.migrations.models import RunningDirections from beanie.odm.documents import Document from beanie.odm.models import InspectionStatuses class OldTag(BaseModel): color: str name: str class Tag(BaseModel): color: str title: str class OldNote(Document): title: str tag: OldTag class Settings: name = "notes" class Note(Document): title: str tag: Tag class Settings: name = "notes" @pytest.fixture() async def notes(db): await init_beanie(database=db, document_models=[OldNote]) await OldNote.delete_all() for i in range(10): note = OldNote(title=str(i), tag=OldTag(name="test", color="red")) await note.insert() yield await OldNote.delete_all() async def test_migration_change_subfield_value(settings, notes, db): migration_settings = MigrationSettings( connection_uri=settings.mongodb_dsn, database_name=settings.mongodb_db_name, path="tests/migrations/migrations_for_test/change_subfield", ) await run_migrate(migration_settings) await init_beanie(database=db, document_models=[Note]) inspection = await Note.inspect_collection() assert inspection.status == InspectionStatuses.OK note = await Note.find_one({}) assert note.tag.title == "test" migration_settings.direction = RunningDirections.BACKWARD await run_migrate(migration_settings) inspection = await OldNote.inspect_collection() assert inspection.status == InspectionStatuses.OK note = await OldNote.find_one({}) assert note.tag.name == "test" python-beanie-1.29.0/tests/migrations/iterative/test_change_value.py000066400000000000000000000032641473701376500257330ustar00rootroot00000000000000import pytest from pydantic.main import BaseModel from beanie import init_beanie from 
beanie.executors.migrate import MigrationSettings, run_migrate from beanie.migrations.models import RunningDirections from beanie.odm.documents import Document from beanie.odm.models import InspectionStatuses class Tag(BaseModel): color: str name: str class Note(Document): title: str tag: Tag class Settings: name = "notes" @pytest.fixture() async def notes(db): await init_beanie(database=db, document_models=[Note]) await Note.delete_all() for i in range(10): note = Note(title=str(i), tag=Tag(name="test", color="red")) await note.insert() yield await Note.delete_all() async def test_migration_change_value(settings, notes, db): migration_settings = MigrationSettings( connection_uri=settings.mongodb_dsn, database_name=settings.mongodb_db_name, path="tests/migrations/migrations_for_test/change_value", ) await run_migrate(migration_settings) await init_beanie(database=db, document_models=[Note]) inspection = await Note.inspect_collection() assert inspection.status == InspectionStatuses.OK note = await Note.find_one({"title": "five"}) assert note is not None note = await Note.find_one({"title": "5"}) assert note is None migration_settings.direction = RunningDirections.BACKWARD await run_migrate(migration_settings) inspection = await Note.inspect_collection() assert inspection.status == InspectionStatuses.OK note = await Note.find_one({"title": "5"}) assert note is not None note = await Note.find_one({"title": "five"}) assert note is None python-beanie-1.29.0/tests/migrations/iterative/test_change_value_subfield.py000066400000000000000000000030471473701376500276070ustar00rootroot00000000000000import pytest from pydantic.main import BaseModel from beanie import init_beanie from beanie.executors.migrate import MigrationSettings, run_migrate from beanie.migrations.models import RunningDirections from beanie.odm.documents import Document from beanie.odm.models import InspectionStatuses class Tag(BaseModel): color: str name: str class Note(Document): title: str tag: Tag class Settings: name = "notes" @pytest.fixture() async def notes(db): await init_beanie(database=db, document_models=[Note]) await Note.delete_all() for i in range(10): note = Note(title=str(i), tag=Tag(name="test", color="red")) await note.insert() yield await Note.delete_all() async def test_migration_change_subfield_value(settings, notes, db): migration_settings = MigrationSettings( connection_uri=settings.mongodb_dsn, database_name=settings.mongodb_db_name, path="tests/migrations/migrations_for_test/change_subfield_value", ) await run_migrate(migration_settings) await init_beanie(database=db, document_models=[Note]) inspection = await Note.inspect_collection() assert inspection.status == InspectionStatuses.OK note = await Note.find_one({}) assert note.tag.color == "blue" migration_settings.direction = RunningDirections.BACKWARD await run_migrate(migration_settings) inspection = await Note.inspect_collection() assert inspection.status == InspectionStatuses.OK note = await Note.find_one({}) assert note.tag.color == "red" python-beanie-1.29.0/tests/migrations/iterative/test_pack_unpack.py000066400000000000000000000033261473701376500255700ustar00rootroot00000000000000import pytest from pydantic.main import BaseModel from beanie import init_beanie from beanie.executors.migrate import MigrationSettings, run_migrate from beanie.migrations.models import RunningDirections from beanie.odm.documents import Document from beanie.odm.models import InspectionStatuses class OldTag(BaseModel): color: str name: str class Tag(BaseModel): color: str 
name: str class OldNote(Document): title: str tag_name: str tag_color: str class Settings: name = "notes" class Note(Document): title: str tag: Tag class Settings: name = "notes" @pytest.fixture() async def notes(db): await init_beanie(database=db, document_models=[OldNote]) await OldNote.delete_all() for i in range(10): note = OldNote(title=str(i), tag_name="test", tag_color="red") await note.insert() yield await OldNote.delete_all() async def test_migration_pack_unpack(settings, notes, db): migration_settings = MigrationSettings( connection_uri=settings.mongodb_dsn, database_name=settings.mongodb_db_name, path="tests/migrations/migrations_for_test/pack_unpack", ) await run_migrate(migration_settings) await init_beanie(database=db, document_models=[Note]) inspection = await Note.inspect_collection() assert inspection.status == InspectionStatuses.OK note = await Note.find_one({}) assert note.tag.name == "test" migration_settings.direction = RunningDirections.BACKWARD await run_migrate(migration_settings) inspection = await OldNote.inspect_collection() assert inspection.status == InspectionStatuses.OK note = await OldNote.find_one({}) assert note.tag_name == "test" python-beanie-1.29.0/tests/migrations/iterative/test_rename_field.py000066400000000000000000000031741473701376500257240ustar00rootroot00000000000000import pytest from pydantic.main import BaseModel from beanie import init_beanie from beanie.executors.migrate import MigrationSettings, run_migrate from beanie.migrations.models import RunningDirections from beanie.odm.documents import Document from beanie.odm.models import InspectionStatuses class Tag(BaseModel): color: str name: str class OldNote(Document): name: str tag: Tag class Settings: name = "notes" class Note(Document): title: str tag: Tag class Settings: name = "notes" @pytest.fixture() async def notes(db): await init_beanie(database=db, document_models=[OldNote]) await OldNote.delete_all() for i in range(10): note = OldNote(name=str(i), tag=Tag(name="test", color="red")) await note.insert() yield # await OldNote.delete_all() async def test_migration_rename_field(settings, notes, db): migration_settings = MigrationSettings( connection_uri=settings.mongodb_dsn, database_name=settings.mongodb_db_name, path="tests/migrations/migrations_for_test/rename_field", ) await run_migrate(migration_settings) await init_beanie(database=db, document_models=[Note]) inspection = await Note.inspect_collection() assert inspection.status == InspectionStatuses.OK note = await Note.find_one({}) assert note.title == "0" migration_settings.direction = RunningDirections.BACKWARD await run_migrate(migration_settings) inspection = await OldNote.inspect_collection() assert inspection.status == InspectionStatuses.OK note = await OldNote.find_one({}) assert note.name == "0" python-beanie-1.29.0/tests/migrations/migrations_for_test/000077500000000000000000000000001473701376500237615ustar00rootroot00000000000000python-beanie-1.29.0/tests/migrations/migrations_for_test/break/000077500000000000000000000000001473701376500250455ustar00rootroot00000000000000python-beanie-1.29.0/tests/migrations/migrations_for_test/break/20210413211219_break.py000066400000000000000000000016721473701376500301450ustar00rootroot00000000000000from pydantic.main import BaseModel from beanie import Document, Indexed, PydanticObjectId, iterative_migration class Tag(BaseModel): color: str name: str class OldNote(Document): name: Indexed(str, unique=True) tag: Tag class Settings: name = "notes" class Note(Document): name: Indexed(str, 
unique=True) title: str tag: Tag class Settings: name = "notes" fixed_id = PydanticObjectId("6076f1f3e4b7f6b7a0f6e5a0") class Forward: @iterative_migration(batch_size=2) async def name_to_title( self, input_document: OldNote, output_document: Note ): output_document.title = input_document.name if output_document.title > "5": output_document.name = "5" class Backward: @iterative_migration() async def title_to_name( self, input_document: Note, output_document: OldNote ): output_document.name = input_document.title python-beanie-1.29.0/tests/migrations/migrations_for_test/change_subfield/000077500000000000000000000000001473701376500270635ustar00rootroot0000000000000020210413152406_change_subfield.py000066400000000000000000000014331473701376500341170ustar00rootroot00000000000000python-beanie-1.29.0/tests/migrations/migrations_for_test/change_subfieldfrom pydantic.main import BaseModel from beanie import Document, iterative_migration class OldTag(BaseModel): color: str name: str class Tag(BaseModel): color: str title: str class OldNote(Document): title: str tag: OldTag class Settings: name = "notes" class Note(Document): title: str tag: Tag class Settings: name = "notes" class Forward: @iterative_migration() async def change_color( self, input_document: OldNote, output_document: Note ): output_document.tag.title = input_document.tag.name class Backward: @iterative_migration() async def change_title( self, input_document: Note, output_document: OldNote ): output_document.tag.name = input_document.tag.title python-beanie-1.29.0/tests/migrations/migrations_for_test/change_subfield_value/000077500000000000000000000000001473701376500302575ustar00rootroot0000000000000020210413143405_change_subfield_value.py000066400000000000000000000010671473701376500365110ustar00rootroot00000000000000python-beanie-1.29.0/tests/migrations/migrations_for_test/change_subfield_valuefrom pydantic.main import BaseModel from beanie import Document, iterative_migration class Tag(BaseModel): color: str name: str class Note(Document): title: str tag: Tag class Settings: name = "notes" class Forward: @iterative_migration() async def change_color(self, input_document: Note, output_document: Note): output_document.tag.color = "blue" class Backward: @iterative_migration() async def change_title(self, input_document: Note, output_document: Note): output_document.tag.color = "red" python-beanie-1.29.0/tests/migrations/migrations_for_test/change_value/000077500000000000000000000000001473701376500264025ustar00rootroot0000000000000020210413115234_change_value.py000066400000000000000000000012101473701376500327440ustar00rootroot00000000000000python-beanie-1.29.0/tests/migrations/migrations_for_test/change_valuefrom pydantic.main import BaseModel from beanie import Document, iterative_migration class Tag(BaseModel): color: str name: str class Note(Document): title: str tag: Tag class Settings: name = "notes" class Forward: @iterative_migration() async def change_title(self, input_document: Note, output_document: Note): if input_document.title == "5": output_document.title = "five" class Backward: @iterative_migration() async def change_title(self, input_document: Note, output_document: Note): if input_document.title == "five": output_document.title = "5" 
python-beanie-1.29.0/tests/migrations/migrations_for_test/free_fall/000077500000000000000000000000001473701376500257005ustar00rootroot00000000000000python-beanie-1.29.0/tests/migrations/migrations_for_test/free_fall/20210413210446_free_fall.py000066400000000000000000000017521473701376500316330ustar00rootroot00000000000000from pydantic.main import BaseModel from beanie import Document, free_fall_migration class Tag(BaseModel): color: str name: str class OldNote(Document): name: str tag: Tag class Settings: name = "notes" class Note(Document): title: str tag: Tag class Settings: name = "notes" class Forward: @free_fall_migration(document_models=[OldNote, Note]) async def name_to_title(self, session): async for old_note in OldNote.find_all(): new_note = Note( id=old_note.id, title=old_note.name, tag=old_note.tag ) await new_note.replace(session=session) class Backward: @free_fall_migration(document_models=[OldNote, Note]) async def title_to_name(self, session): async for old_note in Note.find_all(): new_note = OldNote( id=old_note.id, name=old_note.title, tag=old_note.tag ) await new_note.replace(session=session) python-beanie-1.29.0/tests/migrations/migrations_for_test/many_migrations/000077500000000000000000000000001473701376500271615ustar00rootroot00000000000000python-beanie-1.29.0/tests/migrations/migrations_for_test/many_migrations/20210413170640_1.py000066400000000000000000000012061473701376500313300ustar00rootroot00000000000000from pydantic.main import BaseModel from beanie import Document, iterative_migration class Tag(BaseModel): color: str name: str class Note(Document): title: str tag: Tag class Settings: name = "notes" class Forward: @iterative_migration() async def change_title(self, input_document: Note, output_document: Note): if input_document.title == "1": output_document.title = "one" class Backward: @iterative_migration() async def change_title(self, input_document: Note, output_document: Note): if input_document.title == "one": output_document.title = "1" python-beanie-1.29.0/tests/migrations/migrations_for_test/many_migrations/20210413170645_2.py000066400000000000000000000012061473701376500313360ustar00rootroot00000000000000from pydantic.main import BaseModel from beanie import Document, iterative_migration class Tag(BaseModel): color: str name: str class Note(Document): title: str tag: Tag class Settings: name = "notes" class Forward: @iterative_migration() async def change_title(self, input_document: Note, output_document: Note): if input_document.title == "2": output_document.title = "two" class Backward: @iterative_migration() async def change_title(self, input_document: Note, output_document: Note): if input_document.title == "two": output_document.title = "2" 20210413170700_3_skip_backward.py000066400000000000000000000006721473701376500341420ustar00rootroot00000000000000python-beanie-1.29.0/tests/migrations/migrations_for_test/many_migrationsfrom pydantic.main import BaseModel from beanie import Document, iterative_migration class Tag(BaseModel): color: str name: str class Note(Document): title: str tag: Tag class Settings: name = "notes" class Forward: @iterative_migration() async def change_title(self, input_document: Note, output_document: Note): if input_document.title == "3": output_document.title = "three" 20210413170709_3_skip_forward.py000066400000000000000000000006731473701376500340420ustar00rootroot00000000000000python-beanie-1.29.0/tests/migrations/migrations_for_test/many_migrationsfrom pydantic.main import BaseModel from beanie import Document, 
iterative_migration class Tag(BaseModel): color: str name: str class Note(Document): title: str tag: Tag class Settings: name = "notes" class Backward: @iterative_migration() async def change_title(self, input_document: Note, output_document: Note): if input_document.title == "three": output_document.title = "3" python-beanie-1.29.0/tests/migrations/migrations_for_test/many_migrations/20210413170728_4_5.py000066400000000000000000000021041473701376500315640ustar00rootroot00000000000000from pydantic.main import BaseModel from beanie import Document, iterative_migration class Tag(BaseModel): color: str name: str class Note(Document): title: str tag: Tag class Settings: name = "notes" class Forward: @iterative_migration() async def change_title_4( self, input_document: Note, output_document: Note ): if input_document.title == "4": output_document.title = "four" @iterative_migration() async def change_title_5( self, input_document: Note, output_document: Note ): if input_document.title == "5": output_document.title = "five" class Backward: @iterative_migration() async def change_title_5( self, input_document: Note, output_document: Note ): if input_document.title == "five": output_document.title = "5" @iterative_migration() async def change_title_4( self, input_document: Note, output_document: Note ): if input_document.title == "four": output_document.title = "4" python-beanie-1.29.0/tests/migrations/migrations_for_test/many_migrations/20210413170734_6_7.py000066400000000000000000000021041473701376500315650ustar00rootroot00000000000000from pydantic.main import BaseModel from beanie import Document, iterative_migration class Tag(BaseModel): color: str name: str class Note(Document): title: str tag: Tag class Settings: name = "notes" class Forward: @iterative_migration() async def change_title_6( self, input_document: Note, output_document: Note ): if input_document.title == "6": output_document.title = "six" @iterative_migration() async def change_title_7( self, input_document: Note, output_document: Note ): if input_document.title == "7": output_document.title = "seven" class Backward: @iterative_migration() async def change_title_7( self, input_document: Note, output_document: Note ): if input_document.title == "seven": output_document.title = "7" @iterative_migration() async def change_title_6( self, input_document: Note, output_document: Note ): if input_document.title == "six": output_document.title = "6" python-beanie-1.29.0/tests/migrations/migrations_for_test/pack_unpack/000077500000000000000000000000001473701376500262405ustar00rootroot00000000000000python-beanie-1.29.0/tests/migrations/migrations_for_test/pack_unpack/20210413135927_pack_unpack.py000066400000000000000000000015031473701376500325370ustar00rootroot00000000000000from pydantic.main import BaseModel from beanie import Document, iterative_migration class Tag(BaseModel): color: str name: str class OldNote(Document): title: str tag_name: str tag_color: str class Settings: name = "notes" class Note(Document): title: str tag: Tag class Settings: name = "notes" class Forward: @iterative_migration() async def pack(self, input_document: OldNote, output_document: Note): output_document.tag = Tag( name=input_document.tag_name, color=input_document.tag_color ) class Backward: @iterative_migration() async def unpack(self, input_document: Note, output_document: OldNote): output_document.tag_name = input_document.tag.name output_document.tag_color = input_document.tag.color 
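# Illustrative sketch (not part of the original migration module): rolling the
# pack/unpack migration above back by flipping the settings direction, mirroring
# what the tests do. It assumes the forward run already happened; the connection
# URI and database name are assumptions for demonstration.
if __name__ == "__main__":
    import asyncio

    from beanie.executors.migrate import MigrationSettings, run_migrate
    from beanie.migrations.models import RunningDirections

    async def _roll_back_example() -> None:
        settings = MigrationSettings(
            connection_uri="mongodb://localhost:27017",  # assumed local server
            database_name="beanie_example",  # hypothetical database name
            path="tests/migrations/migrations_for_test/pack_unpack",
        )
        settings.direction = RunningDirections.BACKWARD  # run Backward.unpack
        await run_migrate(settings)

    asyncio.run(_roll_back_example())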
python-beanie-1.29.0/tests/migrations/migrations_for_test/remove_index/000077500000000000000000000000001473701376500264455ustar00rootroot0000000000000020210414135045_remove_index.py000066400000000000000000000012761473701376500330710ustar00rootroot00000000000000python-beanie-1.29.0/tests/migrations/migrations_for_test/remove_indexfrom pydantic.main import BaseModel from beanie import Document, iterative_migration class Tag(BaseModel): color: str name: str class OldNote(Document): title: str tag: Tag class Settings: name = "notes" indexes = ["title"] class Note(Document): title: str tag: Tag class Settings: name = "notes" indexes = [ "_id", ] class Forward: @iterative_migration() async def name_to_title( self, input_document: OldNote, output_document: Note ): ... class Backward: @iterative_migration() async def title_to_name( self, input_document: Note, output_document: OldNote ): ... python-beanie-1.29.0/tests/migrations/migrations_for_test/rename_field/000077500000000000000000000000001473701376500263735ustar00rootroot0000000000000020210407203225_rename_field.py000066400000000000000000000013201473701376500327310ustar00rootroot00000000000000python-beanie-1.29.0/tests/migrations/migrations_for_test/rename_fieldfrom pydantic.main import BaseModel from beanie import Document, iterative_migration class Tag(BaseModel): color: str name: str class OldNote(Document): name: str tag: Tag class Settings: name = "notes" class Note(Document): title: str tag: Tag class Settings: name = "notes" class Forward: @iterative_migration() async def name_to_title( self, input_document: OldNote, output_document: Note ): output_document.title = input_document.name class Backward: @iterative_migration() async def title_to_name( self, input_document: Note, output_document: OldNote ): output_document.name = input_document.title python-beanie-1.29.0/tests/migrations/models.py000066400000000000000000000000001473701376500215230ustar00rootroot00000000000000python-beanie-1.29.0/tests/migrations/test_break.py000066400000000000000000000033301473701376500223740ustar00rootroot00000000000000import pytest from pydantic.main import BaseModel from beanie import Indexed, init_beanie from beanie.executors.migrate import MigrationSettings, run_migrate from beanie.odm.documents import Document from beanie.odm.models import InspectionStatuses class Tag(BaseModel): color: str name: str class OldNote(Document): name: Indexed(str, unique=True) tag: Tag class Settings: name = "notes" class Note(Document): name: Indexed(str, unique=True) title: str tag: Tag class Settings: name = "notes" @pytest.fixture() async def notes(db): await init_beanie(database=db, document_models=[OldNote]) await OldNote.delete_all() for i in range(10): note = OldNote(name=str(i), tag=Tag(name="test", color="red")) await note.insert() yield await OldNote.delete_all() await OldNote.get_motor_collection().drop() await OldNote.get_motor_collection().drop_indexes() @pytest.mark.skip("TODO: Fix this test") async def test_migration_break(settings, notes, db): migration_settings = MigrationSettings( connection_uri=settings.mongodb_dsn, database_name=settings.mongodb_db_name, path="tests/migrations/migrations_for_test/break", ) with pytest.raises(Exception): await run_migrate(migration_settings) await init_beanie(database=db, document_models=[OldNote]) inspection = await OldNote.inspect_collection() assert inspection.status == InspectionStatuses.OK notes = await OldNote.get_motor_collection().find().to_list(length=100) names = set(n["name"] for n in notes) assert names == 
{"0", "1", "2", "3", "4", "5", "6", "7", "8", "9"} for note in notes: assert "title" not in note python-beanie-1.29.0/tests/migrations/test_directions.py000066400000000000000000000156541473701376500234670ustar00rootroot00000000000000import pytest from pydantic.main import BaseModel from beanie import init_beanie from beanie.executors.migrate import MigrationSettings, run_migrate from beanie.migrations.models import RunningDirections from beanie.odm.documents import Document class Tag(BaseModel): color: str name: str class Note(Document): title: str tag: Tag class Settings: name = "notes" @pytest.fixture() async def notes(db): await init_beanie(database=db, document_models=[Note]) await Note.delete_all() for i in range(1, 8): note = Note(title=str(i), tag=Tag(name="test", color="red")) await note.insert() yield i await Note.delete_all() async def test_migration_by_one(settings, notes, db): migration_settings = MigrationSettings( connection_uri=settings.mongodb_dsn, database_name=settings.mongodb_db_name, path="tests/migrations/migrations_for_test/many_migrations", distance=1, ) await init_beanie(database=db, document_models=[Note]) await run_migrate(migration_settings) async for note in Note.find_all(): assert note.title in ["one", "2", "3", "4", "5", "6", "7"] await run_migrate(migration_settings) async for note in Note.find_all(): assert note.title in ["one", "two", "3", "4", "5", "6", "7"] await run_migrate(migration_settings) async for note in Note.find_all(): assert note.title in ["one", "two", "three", "4", "5", "6", "7"] await run_migrate(migration_settings) async for note in Note.find_all(): assert note.title in ["one", "two", "three", "4", "5", "6", "7"] await run_migrate(migration_settings) async for note in Note.find_all(): assert note.title in ["one", "two", "three", "four", "five", "6", "7"] await run_migrate(migration_settings) async for note in Note.find_all(): assert note.title in [ "one", "two", "three", "four", "five", "six", "seven", ] await run_migrate(migration_settings) migration_settings.direction = RunningDirections.BACKWARD await run_migrate(migration_settings) async for note in Note.find_all(): assert note.title in ["one", "two", "three", "four", "five", "6", "7"] await run_migrate(migration_settings) async for note in Note.find_all(): assert note.title in ["one", "two", "three", "4", "5", "6", "7"] await run_migrate(migration_settings) async for note in Note.find_all(): assert note.title in ["one", "two", "3", "4", "5", "6", "7"] await run_migrate(migration_settings) async for note in Note.find_all(): assert note.title in ["one", "two", "3", "4", "5", "6", "7"] await run_migrate(migration_settings) async for note in Note.find_all(): assert note.title in ["one", "2", "3", "4", "5", "6", "7"] await run_migrate(migration_settings) async for note in Note.find_all(): assert note.title in ["1", "2", "3", "4", "5", "6", "7"] await run_migrate(migration_settings) async def test_migration_by_two(settings, notes, db): migration_settings = MigrationSettings( connection_uri=settings.mongodb_dsn, database_name=settings.mongodb_db_name, path="tests/migrations/migrations_for_test/many_migrations", distance=2, ) await init_beanie(database=db, document_models=[Note]) await run_migrate(migration_settings) async for note in Note.find_all(): assert note.title in ["one", "two", "3", "4", "5", "6", "7"] await run_migrate(migration_settings) async for note in Note.find_all(): assert note.title in ["one", "two", "three", "4", "5", "6", "7"] await run_migrate(migration_settings) async for 
note in Note.find_all(): assert note.title in [ "one", "two", "three", "four", "five", "six", "seven", ] await run_migrate(migration_settings) migration_settings.direction = RunningDirections.BACKWARD await run_migrate(migration_settings) async for note in Note.find_all(): assert note.title in ["one", "two", "three", "4", "5", "6", "7"] await run_migrate(migration_settings) async for note in Note.find_all(): assert note.title in ["one", "two", "3", "4", "5", "6", "7"] await run_migrate(migration_settings) async for note in Note.find_all(): assert note.title in ["1", "2", "3", "4", "5", "6", "7"] await run_migrate(migration_settings) async def test_migration_by_10(settings, notes, db): migration_settings = MigrationSettings( connection_uri=settings.mongodb_dsn, database_name=settings.mongodb_db_name, path="tests/migrations/migrations_for_test/many_migrations", distance=10, ) await init_beanie(database=db, document_models=[Note]) await run_migrate(migration_settings) async for note in Note.find_all(): assert note.title in [ "one", "two", "three", "four", "five", "six", "seven", ] await run_migrate(migration_settings) async for note in Note.find_all(): assert note.title in [ "one", "two", "three", "four", "five", "six", "seven", ] migration_settings.direction = RunningDirections.BACKWARD await run_migrate(migration_settings) async for note in Note.find_all(): assert note.title in ["1", "2", "3", "4", "5", "6", "7"] await run_migrate(migration_settings) async for note in Note.find_all(): assert note.title in ["1", "2", "3", "4", "5", "6", "7"] async def test_migration_all(settings, notes, db): migration_settings = MigrationSettings( connection_uri=settings.mongodb_dsn, database_name=settings.mongodb_db_name, path="tests/migrations/migrations_for_test/many_migrations", ) await init_beanie(database=db, document_models=[Note]) await run_migrate(migration_settings) async for note in Note.find_all(): assert note.title in [ "one", "two", "three", "four", "five", "six", "seven", ] await run_migrate(migration_settings) async for note in Note.find_all(): assert note.title in [ "one", "two", "three", "four", "five", "six", "seven", ] migration_settings.direction = RunningDirections.BACKWARD await run_migrate(migration_settings) async for note in Note.find_all(): assert note.title in ["1", "2", "3", "4", "5", "6", "7"] await run_migrate(migration_settings) async for note in Note.find_all(): assert note.title in ["1", "2", "3", "4", "5", "6", "7"] python-beanie-1.29.0/tests/migrations/test_free_fall.py000066400000000000000000000052541473701376500232360ustar00rootroot00000000000000import pytest from pydantic.main import BaseModel from beanie import init_beanie from beanie.executors.migrate import MigrationSettings, run_migrate from beanie.migrations.models import RunningDirections from beanie.odm.documents import Document from beanie.odm.models import InspectionStatuses class Tag(BaseModel): color: str name: str class OldNote(Document): name: str tag: Tag class Settings: name = "notes" class Note(Document): title: str tag: Tag class Settings: name = "notes" @pytest.fixture() async def notes(db): await init_beanie(database=db, document_models=[OldNote]) await OldNote.delete_all() for i in range(10): note = OldNote(name=str(i), tag=Tag(name="test", color="red")) await note.insert() yield await OldNote.delete_all() async def test_migration_free_fall(settings, notes, db): if not db.client.is_mongos and not len(db.client.nodes) > 1: return pytest.skip( "MongoDB server does not support transactions as it is 
neither a mongos instance nor a replica set." ) migration_settings = MigrationSettings( connection_uri=settings.mongodb_dsn, database_name=settings.mongodb_db_name, path="tests/migrations/migrations_for_test/free_fall", ) await run_migrate(migration_settings) await init_beanie(database=db, document_models=[Note]) inspection = await Note.inspect_collection() assert inspection.status == InspectionStatuses.OK note = await Note.find_one({}) assert note.title == "0" migration_settings.direction = RunningDirections.BACKWARD await run_migrate(migration_settings) inspection = await OldNote.inspect_collection() assert inspection.status == InspectionStatuses.OK note = await OldNote.find_one({}) assert note.name == "0" async def test_migration_free_fall_no_use_transactions(settings, notes, db): migration_settings = MigrationSettings( connection_uri=settings.mongodb_dsn, database_name=settings.mongodb_db_name, path="tests/migrations/migrations_for_test/free_fall", use_transaction=False, ) await run_migrate(migration_settings) await init_beanie(database=db, document_models=[Note]) inspection = await Note.inspect_collection() assert inspection.status == InspectionStatuses.OK note = await Note.find_one({}) assert note.title == "0" migration_settings.direction = RunningDirections.BACKWARD await run_migrate(migration_settings) inspection = await OldNote.inspect_collection() assert inspection.status == InspectionStatuses.OK note = await OldNote.find_one({}) assert note.name == "0" python-beanie-1.29.0/tests/migrations/test_remove_indexes.py000066400000000000000000000054411473701376500243310ustar00rootroot00000000000000import pytest from motor.motor_asyncio import AsyncIOMotorCollection from pydantic.main import BaseModel from beanie import init_beanie from beanie.executors.migrate import MigrationSettings, run_migrate from beanie.odm.documents import Document class Tag(BaseModel): color: str name: str class OldNote(Document): title: str tag: Tag class Settings: name = "notes" indexes = ["title"] class Note(Document): title: str tag: Tag class Settings: name = "notes" @pytest.fixture() async def notes(db): await init_beanie(database=db, document_models=[OldNote]) await OldNote.delete_all() for i in range(10): note = OldNote(title=str(i), tag=Tag(name="test", color="red")) await note.insert() yield await OldNote.delete_all() async def test_remove_index_allowed(settings, notes, db): migration_settings = MigrationSettings( connection_uri=settings.mongodb_dsn, database_name=settings.mongodb_db_name, path="tests/migrations/migrations_for_test/remove_index", allow_index_dropping=True, ) await run_migrate(migration_settings) await init_beanie( database=db, document_models=[Note], allow_index_dropping=False ) collection: AsyncIOMotorCollection = Note.get_motor_collection() index_info = await collection.index_information() assert index_info == { "_id_": {"key": [("_id", 1)], "v": 2}, } async def test_remove_index_default(settings, notes, db): migration_settings = MigrationSettings( connection_uri=settings.mongodb_dsn, database_name=settings.mongodb_db_name, path="tests/migrations/migrations_for_test/remove_index", ) await run_migrate(migration_settings) await init_beanie( database=db, document_models=[Note], allow_index_dropping=False ) collection: AsyncIOMotorCollection = Note.get_motor_collection() index_info = await collection.index_information() assert index_info == { "_id_": {"key": [("_id", 1)], "v": 2}, "title_1": {"key": [("title", 1)], "v": 2}, } async def test_remove_index_not_allowed(settings, notes, db): 
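# With allow_index_dropping=False the migration run must keep the pre-existing
# 'title' index, so both '_id_' and 'title_1' are expected to survive below.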
migration_settings = MigrationSettings( connection_uri=settings.mongodb_dsn, database_name=settings.mongodb_db_name, path="tests/migrations/migrations_for_test/remove_index", allow_index_dropping=False, ) await run_migrate(migration_settings) await init_beanie( database=db, document_models=[Note], allow_index_dropping=False ) collection: AsyncIOMotorCollection = Note.get_motor_collection() index_info = await collection.index_information() assert index_info == { "_id_": {"key": [("_id", 1)], "v": 2}, "title_1": {"key": [("title", 1)], "v": 2}, } python-beanie-1.29.0/tests/odm/000077500000000000000000000000001473701376500163035ustar00rootroot00000000000000python-beanie-1.29.0/tests/odm/__init__.py000066400000000000000000000000001473701376500204020ustar00rootroot00000000000000python-beanie-1.29.0/tests/odm/conftest.py000066400000000000000000000246511473701376500205120ustar00rootroot00000000000000import warnings from datetime import datetime, timedelta, timezone from random import randint from typing import List import pytest from beanie.odm.utils.init import init_beanie from tests.odm.models import ( ADocument, BDocument, Bicycle, Bike, BsonRegexDoc, Bus, Car, Doc2NonRoot, DocNonRoot, DocumentForEncodingTest, DocumentForEncodingTestDate, DocumentMultiModelOne, DocumentMultiModelTwo, DocumentTestModel, DocumentTestModelFailInspection, DocumentTestModelIndexFlagsAnnotated, DocumentTestModelWithComplexIndex, DocumentTestModelWithCustomCollectionName, DocumentTestModelWithIndexFlags, DocumentTestModelWithIndexFlagsAliases, DocumentTestModelWithLink, DocumentTestModelWithSimpleIndex, DocumentTestModelWithSoftDelete, DocumentToBeLinked, DocumentToTestSync, DocumentUnion, DocumentWithActions, DocumentWithActions2, DocumentWithBackLink, DocumentWithBackLinkForNesting, DocumentWithBsonBinaryField, DocumentWithBsonEncodersFiledsTypes, DocumentWithComplexDictKey, DocumentWithCustomFiledsTypes, DocumentWithCustomIdInt, DocumentWithCustomIdUUID, DocumentWithCustomInit, DocumentWithDecimalField, DocumentWithDeprecatedHiddenField, DocumentWithEnumKeysDict, DocumentWithExtras, DocumentWithHttpUrlField, DocumentWithIndexedObjectId, DocumentWithIndexMerging1, DocumentWithIndexMerging2, DocumentWithKeepNullsFalse, DocumentWithLink, DocumentWithLinkForNesting, DocumentWithList, DocumentWithListBackLink, DocumentWithListLink, DocumentWithListOfLinks, DocumentWithOptionalBackLink, DocumentWithOptionalListBackLink, DocumentWithPydanticConfig, DocumentWithRevisionTurnedOn, DocumentWithRootModelAsAField, DocumentWithStringField, DocumentWithTextIndexAndLink, DocumentWithTimeStampToTestConsistency, DocumentWithTurnedOffStateManagement, DocumentWithTurnedOnReplaceObjects, DocumentWithTurnedOnSavePrevious, DocumentWithTurnedOnStateManagement, DocumentWithTurnedOnStateManagementWithCustomId, DocumentWithValidationOnSave, DocWithCallWrapper, Door, GeoObject, House, HouseWithRevision, InheritedDocumentWithActions, LinkDocumentForTextSeacrh, Lock, LockWithRevision, LongSelfLink, LoopedLinksA, LoopedLinksB, NativeRegexDoc, Nested, Option1, Option2, Owner, PackageElemMatch, Region, Roof, RootDocument, Sample, SampleLazyParsing, SampleWithMutableObjects, SelfLinked, StateAndDecimalFieldModel, SubDocument, UsersAddresses, Vehicle, Window, WindowWithRevision, WindowWithValidationOnSave, Yard, YardWithRevision, ) from tests.odm.views import ViewForTest, ViewForTestWithLink TESTING_MODELS = [ DocumentWithExtras, DocumentWithPydanticConfig, DocumentTestModel, DocumentTestModelWithSoftDelete, DocumentTestModelWithLink, 
DocumentTestModelWithCustomCollectionName, DocumentTestModelWithSimpleIndex, DocumentTestModelWithIndexFlags, DocumentTestModelWithIndexFlagsAliases, DocumentTestModelIndexFlagsAnnotated, DocumentTestModelWithComplexIndex, DocumentTestModelFailInspection, DocumentWithBsonEncodersFiledsTypes, DocumentWithCustomFiledsTypes, DocumentWithCustomIdUUID, DocumentWithCustomIdInt, Sample, DocumentWithActions, DocumentWithTurnedOnStateManagement, DocumentWithTurnedOnReplaceObjects, DocumentWithTurnedOnSavePrevious, DocumentWithTurnedOffStateManagement, DocumentWithValidationOnSave, DocumentWithRevisionTurnedOn, DocumentWithHttpUrlField, House, Window, WindowWithValidationOnSave, Door, Roof, Yard, Lock, InheritedDocumentWithActions, DocumentForEncodingTest, DocumentForEncodingTestDate, DocumentWithStringField, ViewForTest, ViewForTestWithLink, DocumentMultiModelOne, DocumentMultiModelTwo, DocumentUnion, HouseWithRevision, WindowWithRevision, LockWithRevision, YardWithRevision, DocumentWithActions2, Vehicle, Bicycle, Bike, Car, Bus, Owner, SampleWithMutableObjects, DocNonRoot, Doc2NonRoot, SampleLazyParsing, RootDocument, ADocument, BDocument, StateAndDecimalFieldModel, Region, UsersAddresses, SelfLinked, LoopedLinksA, LoopedLinksB, DocumentWithTurnedOnStateManagementWithCustomId, DocumentWithDecimalField, DocumentWithKeepNullsFalse, PackageElemMatch, DocumentWithLink, DocumentWithBackLink, DocumentWithListLink, DocumentWithListBackLink, DocumentWithListOfLinks, DocumentToBeLinked, DocumentWithTimeStampToTestConsistency, DocumentWithIndexMerging1, DocumentWithIndexMerging2, DocumentWithCustomInit, DocumentWithTextIndexAndLink, LinkDocumentForTextSeacrh, DocumentWithList, DocumentWithBsonBinaryField, DocumentWithRootModelAsAField, DocWithCallWrapper, DocumentWithOptionalBackLink, DocumentWithOptionalListBackLink, DocumentWithComplexDictKey, DocumentWithIndexedObjectId, DocumentToTestSync, DocumentWithLinkForNesting, DocumentWithBackLinkForNesting, DocumentWithEnumKeysDict, LongSelfLink, BsonRegexDoc, NativeRegexDoc, ] @pytest.fixture def point(): return { "longitude": 13.404954, "latitude": 52.520008, } @pytest.fixture async def preset_documents(point): docs = [] for i in range(10): timestamp = datetime.now(tz=timezone.utc) - timedelta(days=i) integer_1: int = i // 3 integer_2: int = i // 2 float_num = integer_1 + 0.3 string: str = f"test_{integer_1}" option_1 = Option1(s="TEST") option_2 = Option2(f=3.14) union = option_1 if i % 2 else option_2 optional = option_2 if not i % 3 else None geo = GeoObject( coordinates=[ point["longitude"] + i / 10, point["latitude"] + i / 10, ] ) nested = Nested( integer=integer_2, option_1=option_1, union=union, optional=optional, ) const = "TEST" sample = Sample( timestamp=timestamp, increment=i, integer=integer_1, float_num=float_num, string=string, nested=nested, optional=optional, union=union, geo=geo, const=const, ) docs.append(sample) await Sample.insert_many(documents=docs) @pytest.fixture() def sample_doc_not_saved(point): nested = Nested( integer=0, option_1=Option1(s="TEST"), union=Option1(s="TEST"), optional=None, ) geo = GeoObject( coordinates=[ point["longitude"], point["latitude"], ] ) return Sample( timestamp=datetime.now(tz=timezone.utc), increment=0, integer=0, float_num=0, string="TEST_NOT_SAVED", nested=nested, optional=None, union=Option1(s="TEST"), geo=geo, ) @pytest.fixture() async def session(cli): s = await cli.start_session() yield s await s.end_session() @pytest.fixture() async def deprecated_init_beanie(db): for model in TESTING_MODELS: # 
crude clear from init await model.get_motor_collection().drop() await model.get_motor_collection().drop_indexes() with warnings.catch_warnings(record=True) as w: warnings.simplefilter("always") await init_beanie( database=db, document_models=[DocumentWithDeprecatedHiddenField], ) assert len(w) == 1 assert issubclass(w[-1].category, DeprecationWarning) assert ( "DocumentWithDeprecatedHiddenField: 'hidden=True' is deprecated, please use 'exclude=True'" in str(w[-1].message) ) @pytest.fixture(autouse=True) async def init(db): await init_beanie( database=db, document_models=TESTING_MODELS, ) try: yield None finally: for model in TESTING_MODELS: await model.get_motor_collection().drop() await model.get_motor_collection().drop_indexes() @pytest.fixture def document_not_inserted(): return DocumentTestModel( test_int=42, test_list=[SubDocument(test_str="foo"), SubDocument(test_str="bar")], test_doc=SubDocument(test_str="foobar"), test_str="kipasa", ) @pytest.fixture def documents_not_inserted(): def generate_documents( number: int, test_str: str = None, random: bool = False ) -> List[DocumentTestModel]: return [ DocumentTestModel( test_int=randint(0, 1000000) if random else i, test_list=[ SubDocument(test_str="foo"), SubDocument(test_str="bar"), ], test_doc=SubDocument(test_str="foobar"), test_str="kipasa" if test_str is None else test_str, ) for i in range(number) ] return generate_documents @pytest.fixture def document_soft_delete_not_inserted(): return DocumentTestModelWithSoftDelete( test_int=randint(0, 1000000), test_str="kipasa", ) @pytest.fixture def documents_soft_delete_not_inserted(): docs = [] for i in range(3): docs.append( DocumentTestModelWithSoftDelete( test_int=randint(0, 1000000), test_str="kipasa", ) ) return docs @pytest.fixture async def document(document_not_inserted) -> DocumentTestModel: return await document_not_inserted.insert() @pytest.fixture def documents(documents_not_inserted): async def generate_documents( number: int, test_str: str = None, random: bool = False ): result = await DocumentTestModel.insert_many( documents_not_inserted(number, test_str, random) ) return result.inserted_ids return generate_documents @pytest.fixture def documents_with_links(documents): async def generate_documents(): await documents(15) results = await DocumentTestModel.all().to_list() for document in results: await DocumentTestModelWithLink(test_link=document).insert() return generate_documents python-beanie-1.29.0/tests/odm/custom_types/000077500000000000000000000000001473701376500210415ustar00rootroot00000000000000python-beanie-1.29.0/tests/odm/custom_types/__init__.py000066400000000000000000000000001473701376500231400ustar00rootroot00000000000000python-beanie-1.29.0/tests/odm/custom_types/test_bson_binary.py000066400000000000000000000015421473701376500247610ustar00rootroot00000000000000import bson import pytest from beanie import BsonBinary from beanie.odm.utils.pydantic import get_model_dump, parse_model from tests.odm.models import DocumentWithBsonBinaryField @pytest.mark.parametrize("binary_field", [bson.Binary(b"test"), b"test"]) async def test_bson_binary(binary_field): doc = DocumentWithBsonBinaryField(binary_field=binary_field) await doc.insert() assert doc.binary_field == BsonBinary(b"test") new_doc = await DocumentWithBsonBinaryField.get(doc.id) assert new_doc.binary_field == BsonBinary(b"test") @pytest.mark.parametrize("binary_field", [bson.Binary(b"test"), b"test"]) def test_bson_binary_roundtrip(binary_field): doc = DocumentWithBsonBinaryField(binary_field=binary_field) 
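# Round trip through a plain dict: dumping the model and parsing it back
# should preserve the BsonBinary payload byte-for-byte.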
doc_dict = get_model_dump(doc) new_doc = parse_model(DocumentWithBsonBinaryField, doc_dict) assert new_doc == doc python-beanie-1.29.0/tests/odm/custom_types/test_decimal_annotation.py000066400000000000000000000010501473701376500262760ustar00rootroot00000000000000from decimal import Decimal from beanie.odm.utils.pydantic import IS_PYDANTIC_V2 from tests.odm.models import DocumentWithDecimalField def test_decimal_deserialize(): m = DocumentWithDecimalField(amt=Decimal("1.4")) if IS_PYDANTIC_V2: m_json = m.model_dump_json() m_from_json = DocumentWithDecimalField.model_validate_json(m_json) else: m_json = m.json() m_from_json = DocumentWithDecimalField.parse_raw(m_json) assert isinstance(m_from_json.amt, Decimal) assert m_from_json.amt == Decimal("1.4") python-beanie-1.29.0/tests/odm/documents/000077500000000000000000000000001473701376500203045ustar00rootroot00000000000000python-beanie-1.29.0/tests/odm/documents/__init__.py000066400000000000000000000000001473701376500224030ustar00rootroot00000000000000python-beanie-1.29.0/tests/odm/documents/test_aggregate.py000066400000000000000000000043221473701376500236440ustar00rootroot00000000000000from pydantic import Field from pydantic.main import BaseModel from tests.odm.models import DocumentTestModel async def test_aggregate(documents): await documents(4, "uno") await documents(2, "dos") await documents(1, "cuatro") result = await DocumentTestModel.aggregate( [{"$group": {"_id": "$test_str", "total": {"$sum": "$test_int"}}}] ).to_list() assert len(result) == 3 assert {"_id": "cuatro", "total": 0} in result assert {"_id": "dos", "total": 1} in result assert {"_id": "uno", "total": 6} in result async def test_aggregate_with_filter(documents): await documents(4, "uno") await documents(2, "dos") await documents(1, "cuatro") result = ( await DocumentTestModel.find(DocumentTestModel.test_int >= 1) .aggregate( [{"$group": {"_id": "$test_str", "total": {"$sum": "$test_int"}}}] ) .to_list() ) assert len(result) == 2 assert {"_id": "dos", "total": 1} in result assert {"_id": "uno", "total": 6} in result async def test_aggregate_with_item_model(documents): class OutputItem(BaseModel): id: str = Field(None, alias="_id") total: int await documents(4, "uno") await documents(2, "dos") await documents(1, "cuatro") ids = [] async for i in DocumentTestModel.aggregate( [{"$group": {"_id": "$test_str", "total": {"$sum": "$test_int"}}}], projection_model=OutputItem, ): if i.id == "cuatro": assert i.total == 0 elif i.id == "dos": assert i.total == 1 elif i.id == "uno": assert i.total == 6 else: raise KeyError ids.append(i.id) assert set(ids) == {"cuatro", "dos", "uno"} async def test_aggregate_with_session(documents, session): await documents(4, "uno") await documents(2, "dos") await documents(1, "cuatro") result = await DocumentTestModel.aggregate( [{"$group": {"_id": "$test_str", "total": {"$sum": "$test_int"}}}], session=session, ).to_list() assert len(result) == 3 assert {"_id": "cuatro", "total": 0} in result assert {"_id": "dos", "total": 1} in result assert {"_id": "uno", "total": 6} in result python-beanie-1.29.0/tests/odm/documents/test_bulk_write.py000066400000000000000000000165351473701376500240760ustar00rootroot00000000000000import pytest from pymongo.errors import BulkWriteError from beanie.odm.bulk import BulkWriter from beanie.odm.operators.update.general import Set from tests.odm.models import ( DocumentMultiModelOne, DocumentMultiModelTwo, DocumentTestModel, DocumentUnion, SubDocument, ) async def test_insert(documents_not_inserted): documents = 
documents_not_inserted(2) async with BulkWriter() as bulk_writer: await DocumentTestModel.insert_one( documents[0], bulk_writer=bulk_writer ) await DocumentTestModel.insert_one( documents[1], bulk_writer=bulk_writer ) new_documents = await DocumentTestModel.find_all().to_list() assert len(new_documents) == 2 async def test_update(documents): await documents(5) doc = await DocumentTestModel.find_one(DocumentTestModel.test_int == 0) doc.test_int = 100 async with BulkWriter() as bulk_writer: await doc.save_changes(bulk_writer=bulk_writer) await DocumentTestModel.find_one( DocumentTestModel.test_int == 1 ).update( Set({DocumentTestModel.test_int: 1000}), bulk_writer=bulk_writer ) await DocumentTestModel.find(DocumentTestModel.test_int < 100).update( Set({DocumentTestModel.test_int: 2000}), bulk_writer=bulk_writer ) assert len(await DocumentTestModel.find_all().to_list()) == 5 assert ( len( await DocumentTestModel.find( DocumentTestModel.test_int == 100 ).to_list() ) == 1 ) assert ( len( await DocumentTestModel.find( DocumentTestModel.test_int == 1000 ).to_list() ) == 1 ) assert ( len( await DocumentTestModel.find( DocumentTestModel.test_int == 2000 ).to_list() ) == 3 ) async def test_unordered_update(documents, document): await documents(5) doc = await DocumentTestModel.find_one(DocumentTestModel.test_int == 0) doc.test_int = 100 with pytest.raises(BulkWriteError): async with BulkWriter(ordered=False) as bulk_writer: await DocumentTestModel.insert_one( document, bulk_writer=bulk_writer ) await doc.save_changes(bulk_writer=bulk_writer) assert len(await DocumentTestModel.find_all().to_list()) == 6 assert ( len( await DocumentTestModel.find( DocumentTestModel.test_int == 100 ).to_list() ) == 1 ) async def test_delete(documents): await documents(5) doc = await DocumentTestModel.find_one(DocumentTestModel.test_int == 0) async with BulkWriter() as bulk_writer: await doc.delete(bulk_writer=bulk_writer) await DocumentTestModel.find_one( DocumentTestModel.test_int == 1 ).delete(bulk_writer=bulk_writer) await DocumentTestModel.find(DocumentTestModel.test_int < 4).delete( bulk_writer=bulk_writer ) assert len(await DocumentTestModel.find_all().to_list()) == 1 async def test_replace(documents, document_not_inserted): await documents(5) doc = await DocumentTestModel.find_one(DocumentTestModel.test_int == 0) doc.test_int = 100 async with BulkWriter() as bulk_writer: await doc.replace(bulk_writer=bulk_writer) document_not_inserted.test_int = 100 await DocumentTestModel.find_one( DocumentTestModel.test_int == 1 ).replace_one(document_not_inserted, bulk_writer=bulk_writer) assert len(await DocumentTestModel.find_all().to_list()) == 5 assert ( len( await DocumentTestModel.find( DocumentTestModel.test_int == 100 ).to_list() ) == 2 ) async def test_internal_error(document): with pytest.raises(BulkWriteError): async with BulkWriter() as bulk_writer: await DocumentTestModel.insert_one( document, bulk_writer=bulk_writer ) async def test_native_upsert_found(documents, document_not_inserted): await documents(5) document_not_inserted.test_int = -1000 async with BulkWriter() as bulk_writer: await DocumentTestModel.find_one( DocumentTestModel.test_int == 1 ).update_one( { "$addToSet": { "test_list": { "$each": [ SubDocument(test_str="TEST_ONE"), SubDocument(test_str="TEST_TWO"), ] } }, "$setOnInsert": {}, }, bulk_writer=bulk_writer, upsert=True, ) await bulk_writer.commit() doc = await DocumentTestModel.find_one(DocumentTestModel.test_int == 1) assert len(doc.test_list) == 4 async def 
test_native_upsert_not_found(documents, document_not_inserted): await documents(5) document_not_inserted.test_int = -1000 async with BulkWriter() as bulk_writer: await DocumentTestModel.find_one( DocumentTestModel.test_int == -1000 ).update_one( { "$addToSet": { "test_list": { "$each": [ SubDocument(test_str="TEST_ONE"), SubDocument(test_str="TEST_TWO"), ] } }, "$setOnInsert": {"TEST": "VALUE"}, }, bulk_writer=bulk_writer, upsert=True, ) await bulk_writer.commit() assert await DocumentTestModel.count() == 6 async def test_different_models_same_collection(): async with BulkWriter() as bulk_writer: await DocumentMultiModelOne.insert_one( DocumentMultiModelOne(), bulk_writer=bulk_writer ) await DocumentMultiModelTwo.insert_one( DocumentMultiModelTwo(), bulk_writer=bulk_writer ) assert len(await DocumentUnion.find(with_children=True).to_list()) == 2 async def test_empty_operations(): bulk = BulkWriter() await DocumentMultiModelOne.insert_one( DocumentMultiModelOne(), bulk_writer=bulk ) await DocumentMultiModelOne.insert_one( DocumentMultiModelOne(), bulk_writer=bulk ) assert len(bulk.operations) == 2 bulk.operations = [] bulk_result = await bulk.commit() assert bulk_result == None assert len(await DocumentMultiModelOne.find().to_list()) == 0 async def test_ordered_bulk(documents): await documents(1) doc = await DocumentMultiModelOne.insert_one(DocumentMultiModelOne()) assert doc assert doc.id with pytest.raises(BulkWriteError): async with BulkWriter(ordered=True) as bulk_writer: doc1 = DocumentMultiModelOne() doc1.id = doc.id await DocumentMultiModelOne.insert_one( doc1, bulk_writer=bulk_writer ) await DocumentMultiModelOne.insert_one( DocumentMultiModelOne(), bulk_writer=bulk_writer ) assert len(await DocumentMultiModelOne.find_all().to_list()) == 1 async def test_bulk_writer(): assert isinstance(DocumentMultiModelOne.bulk_writer(), BulkWriter) assert isinstance(DocumentUnion.bulk_writer(), BulkWriter) python-beanie-1.29.0/tests/odm/documents/test_count.py000066400000000000000000000012231473701376500230430ustar00rootroot00000000000000from tests.odm.models import DocumentTestModel async def test_count(documents): await documents(4, "uno", True) c = await DocumentTestModel.count() assert c == 4 async def test_count_with_filter_query(documents): await documents(4, "uno", True) await documents(2, "dos", True) await documents(1, "cuatro", True) c = await DocumentTestModel.find_many({"test_str": "dos"}).count() assert c == 2 async def test_count_with_limit(documents): await documents(5, "five", True) c = await DocumentTestModel.find_all().limit(1).count() assert c == 1 d = await DocumentTestModel.find_all().count() assert d == 5 python-beanie-1.29.0/tests/odm/documents/test_create.py000066400000000000000000000070741473701376500231700ustar00rootroot00000000000000import pytest from pymongo.errors import DuplicateKeyError from beanie.odm.fields import PydanticObjectId from tests.odm.models import ( DocumentTestModel, DocumentWithKeepNullsFalse, ModelWithOptionalField, ) async def test_insert_one(document_not_inserted): result = await DocumentTestModel.insert_one(document_not_inserted) document = await DocumentTestModel.get(result.id) assert document is not None assert document.test_int == document_not_inserted.test_int assert document.test_list == document_not_inserted.test_list assert document.test_str == document_not_inserted.test_str async def test_insert_many(documents_not_inserted): await DocumentTestModel.insert_many(documents_not_inserted(10)) documents = await 
DocumentTestModel.find_all().to_list() assert len(documents) == 10 async def test_create(document_not_inserted): await document_not_inserted.insert() assert isinstance(document_not_inserted.id, PydanticObjectId) async def test_create_twice(document_not_inserted): await document_not_inserted.insert() with pytest.raises(DuplicateKeyError): await document_not_inserted.insert() async def test_insert_one_with_session(document_not_inserted, session): result = await DocumentTestModel.insert_one( document_not_inserted, session=session ) document = await DocumentTestModel.get(result.id, session=session) assert document is not None assert document.test_int == document_not_inserted.test_int assert document.test_list == document_not_inserted.test_list assert document.test_str == document_not_inserted.test_str async def test_insert_many_with_session(documents_not_inserted, session): await DocumentTestModel.insert_many( documents_not_inserted(10), session=session ) documents = await DocumentTestModel.find_all(session=session).to_list() assert len(documents) == 10 async def test_create_with_session(document_not_inserted, session): await document_not_inserted.insert(session=session) assert isinstance(document_not_inserted.id, PydanticObjectId) async def test_insert_keep_nulls_false(): model = ModelWithOptionalField(i=10) doc = DocumentWithKeepNullsFalse(m=model) await doc.insert() new_doc = await DocumentWithKeepNullsFalse.get(doc.id) assert new_doc.m.i == 10 assert new_doc.m.s is None assert new_doc.o is None raw_data = ( await DocumentWithKeepNullsFalse.get_motor_collection().find_one( {"_id": doc.id} ) ) assert raw_data == { "_id": doc.id, "m": {"i": 10}, } async def test_insert_many_keep_nulls_false(): models = [ModelWithOptionalField(i=10), ModelWithOptionalField(i=11)] docs = [DocumentWithKeepNullsFalse(m=m) for m in models] await DocumentWithKeepNullsFalse.insert_many(docs) new_docs = await DocumentWithKeepNullsFalse.find_all().to_list() assert len(new_docs) == 2 assert new_docs[0].m.i == 10 assert new_docs[0].m.s is None assert new_docs[0].o is None assert new_docs[1].m.i == 11 assert new_docs[1].m.s is None assert new_docs[1].o is None raw_data = ( await DocumentWithKeepNullsFalse.get_motor_collection().find_one( {"_id": new_docs[0].id} ) ) assert raw_data == { "_id": new_docs[0].id, "m": {"i": 10}, } raw_data = ( await DocumentWithKeepNullsFalse.get_motor_collection().find_one( {"_id": new_docs[1].id} ) ) assert raw_data == { "_id": new_docs[1].id, "m": {"i": 11}, } python-beanie-1.29.0/tests/odm/documents/test_delete.py000066400000000000000000000032241473701376500231600ustar00rootroot00000000000000from tests.odm.models import DocumentTestModel async def test_delete_one(documents): await documents(4, "uno") await documents(2, "dos") await documents(1, "cuatro") await DocumentTestModel.find_one({"test_str": "uno"}).delete() documents = await DocumentTestModel.find_all().to_list() assert len(documents) == 6 async def test_delete_one_not_found(documents): await documents(4, "uno") await documents(2, "dos") await documents(1, "cuatro") await DocumentTestModel.find_one({"test_str": "wrong"}).delete() documents = await DocumentTestModel.find_all().to_list() assert len(documents) == 7 async def test_delete_many(documents): await documents(4, "uno") await documents(2, "dos") await documents(1, "cuatro") await DocumentTestModel.find_many({"test_str": "uno"}).delete() documents = await DocumentTestModel.find_all().to_list() assert len(documents) == 3 async def test_delete_many_not_found(documents): await 
documents(4, "uno") await documents(2, "dos") await documents(1, "cuatro") await DocumentTestModel.find_many({"test_str": "wrong"}).delete() documents = await DocumentTestModel.find_all().to_list() assert len(documents) == 7 async def test_delete_all(documents): await documents(4, "uno") await documents(2, "dos") await documents(1, "cuatro") await DocumentTestModel.delete_all() documents = await DocumentTestModel.find_all().to_list() assert len(documents) == 0 async def test_delete(document): doc_id = document.id await document.delete() new_document = await DocumentTestModel.get(doc_id) assert new_document is None python-beanie-1.29.0/tests/odm/documents/test_distinct.py000066400000000000000000000023211473701376500235340ustar00rootroot00000000000000from tests.odm.models import DocumentTestModel async def test_distinct_unique(documents, document_not_inserted): await documents(1, "uno") await documents(2, "dos") await documents(3, "cuatro") expected_result = ["cuatro", "dos", "uno"] unique_test_strs = await DocumentTestModel.distinct("test_str", {}) assert unique_test_strs == expected_result document_not_inserted.test_str = "uno" await document_not_inserted.insert() another_unique_test_strs = await DocumentTestModel.distinct("test_str", {}) assert another_unique_test_strs == expected_result async def test_distinct_different_value(documents, document_not_inserted): await documents(1, "uno") await documents(2, "dos") await documents(3, "cuatro") expected_result = ["cuatro", "dos", "uno"] unique_test_strs = await DocumentTestModel.distinct("test_str", {}) assert unique_test_strs == expected_result document_not_inserted.test_str = "diff_val" await document_not_inserted.insert() another_unique_test_strs = await DocumentTestModel.distinct("test_str", {}) assert not another_unique_test_strs == expected_result assert another_unique_test_strs == ["cuatro", "diff_val", "dos", "uno"] python-beanie-1.29.0/tests/odm/documents/test_exists.py000066400000000000000000000007561473701376500232440ustar00rootroot00000000000000from tests.odm.models import DocumentTestModel async def test_count_with_filter_query(documents): await documents(4, "uno", True) await documents(2, "dos", True) await documents(1, "cuatro", True) e = await DocumentTestModel.find_many({"test_str": "dos"}).exists() assert e is True e = await DocumentTestModel.find_one({"test_str": "dos"}).exists() assert e is True e = await DocumentTestModel.find_many({"test_str": "wrong"}).exists() assert e is False python-beanie-1.29.0/tests/odm/documents/test_find.py000066400000000000000000000123501473701376500226360ustar00rootroot00000000000000import pymongo from beanie.odm.fields import PydanticObjectId from tests.odm.models import DocumentTestModel async def test_get(document): new_document = await DocumentTestModel.get(document.id) assert new_document == document async def test_get_not_found(document): new_document = await DocumentTestModel.get(PydanticObjectId()) assert new_document is None async def test_find_one(documents): inserted_one = await documents(1, "kipasa") await documents(10, "smthe else") expected_doc_id = PydanticObjectId(inserted_one[0]) new_document = await DocumentTestModel.find_one({"test_str": "kipasa"}) assert new_document.id == expected_doc_id async def test_find_one_not_found(documents): await documents(10, "smthe else") new_document = await DocumentTestModel.find_one({"test_str": "wrong"}) assert new_document is None async def test_find_one_more_than_one_found(documents): await documents(10, "one") await documents(10, "two") 
new_document = await DocumentTestModel.find_one({"test_str": "one"}) assert new_document.test_str == "one" async def test_find_all(documents): await documents(4, "uno") await documents(2, "dos") await documents(1, "cuatro") result = await DocumentTestModel.find_all().to_list() assert len(result) == 7 async def test_find_all_limit(documents): await documents(4, "uno") await documents(2, "dos") await documents(1, "cuatro") result = await DocumentTestModel.find_all(limit=5).to_list() assert len(result) == 5 async def test_find_all_skip(documents): await documents(4, "uno") await documents(2, "dos") await documents(1, "cuatro") result = await DocumentTestModel.find_all(skip=1).to_list() assert len(result) == 6 async def test_find_all_sort(documents): await documents(4, "uno", True) await documents(2, "dos", True) await documents(1, "cuatro", True) result = await DocumentTestModel.find_all( sort=[ ("test_str", pymongo.ASCENDING), ("test_int", pymongo.DESCENDING), ] ).to_list() assert result[0].test_str == "cuatro" assert result[1].test_str == result[2].test_str == "dos" assert ( result[3].test_str == result[4].test_str == result[5].test_str == result[6].test_str == "uno" ) assert result[1].test_int >= result[2].test_int assert ( result[3].test_int >= result[4].test_int >= result[5].test_int >= result[6].test_int ) async def test_find_many(documents): await documents(4, "uno") await documents(2, "dos") await documents(1, "cuatro") result = await DocumentTestModel.find_many( DocumentTestModel.test_str == "uno" ).to_list() assert len(result) == 4 async def test_find_many_limit(documents): await documents(4, "uno") await documents(2, "dos") await documents(1, "cuatro") result = await DocumentTestModel.find_many( {"test_str": "uno"}, limit=2 ).to_list() assert len(result) == 2 async def test_find_many_skip(documents): await documents(4, "uno") await documents(2, "dos") await documents(1, "cuatro") result = await DocumentTestModel.find_many( {"test_str": "uno"}, skip=1 ).to_list() assert len(result) == 3 async def test_find_many_sort(documents): await documents(4, "uno", True) await documents(2, "dos", True) await documents(1, "cuatro", True) result = await DocumentTestModel.find_many( {"test_str": "uno"}, sort="test_int" ).to_list() assert ( result[0].test_int <= result[1].test_int <= result[2].test_int <= result[3].test_int ) result = await DocumentTestModel.find_many( {"test_str": "uno"}, sort=[("test_int", pymongo.DESCENDING)] ).to_list() assert ( result[0].test_int >= result[1].test_int >= result[2].test_int >= result[3].test_int ) async def test_find_many_not_found(documents): await documents(4, "uno") await documents(2, "dos") await documents(1, "cuatro") result = await DocumentTestModel.find_many({"test_str": "wrong"}).to_list() assert len(result) == 0 async def test_get_with_session(document, session): new_document = await DocumentTestModel.get(document.id, session=session) assert new_document == document async def test_find_one_with_session(documents, session): inserted_one = await documents(1, "kipasa") await documents(10, "smthe else") expected_doc_id = PydanticObjectId(inserted_one[0]) new_document = await DocumentTestModel.find_one( {"test_str": "kipasa"}, session=session ) assert new_document.id == expected_doc_id async def test_find_all_with_session(documents, session): await documents(4, "uno") await documents(2, "dos") await documents(1, "cuatro") result = await DocumentTestModel.find_all(session=session).to_list() assert len(result) == 7 async def 
test_find_many_with_session(documents, session): await documents(4, "uno") await documents(2, "dos") await documents(1, "cuatro") result = await DocumentTestModel.find_many( {"test_str": "uno"}, session=session ).to_list() assert len(result) == 4 python-beanie-1.29.0/tests/odm/documents/test_inheritance.py000066400000000000000000000101071473701376500242050ustar00rootroot00000000000000from beanie import Link from tests.odm.models import ( Bicycle, Bike, Bus, Car, Doc2NonRoot, DocNonRoot, Owner, Vehicle, ) class TestInheritance: async def test_inheritance(self, db): bicycle_1 = await Bicycle(color="white", frame=54, wheels=29).insert() bicycle_2 = await Bicycle(color="red", frame=52, wheels=28).insert() bike_1 = await Bike(color="black", fuel="gasoline").insert() car_1 = await Car(color="grey", body="sedan", fuel="gasoline").insert() car_2 = await Car( color="white", body="crossover", fuel="diesel" ).insert() bus_1 = await Bus( color="white", seats=80, body="bus", fuel="diesel" ).insert() bus_2 = await Bus( color="yellow", seats=26, body="minibus", fuel="diesel" ).insert() white_vehicles = await Vehicle.find( Vehicle.color == "white", with_children=True ).to_list() cars_only = await Car.find().to_list() cars_and_buses = await Car.find( Car.fuel == "diesel", with_children=True ).to_list() big_bicycles = await Bicycle.find(Bicycle.wheels > 28).to_list() await Bike.find().update({"$set": {Bike.color: "yellow"}}) sedan = await Car.find_one(Car.body == "sedan") sedan.color = "yellow" await sedan.save() # get using Vehicle should return Bike instance updated_bike = await Vehicle.get(bike_1.id, with_children=True) assert isinstance(sedan, Car) assert isinstance(updated_bike, Bike) assert updated_bike.color == "yellow" assert Bus._parent is Car assert len(big_bicycles) == 1 assert big_bicycles[0].wheels > 28 assert len(white_vehicles) == 3 assert len(cars_only) == 2 assert {Car, Bus} == set(i.__class__ for i in cars_and_buses) assert {Bicycle, Car, Bus} == set(i.__class__ for i in white_vehicles) white_vehicles_2 = await Car.find(Vehicle.color == "white").to_list() assert len(white_vehicles_2) == 1 for i in cars_and_buses: assert i.fuel == "diesel" for e in (bicycle_1, bicycle_2, bike_1, car_1, car_2, bus_1, bus_2): assert isinstance(e, Vehicle) await e.delete() async def test_links(self, db): car_1 = await Car(color="grey", body="sedan", fuel="gasoline").insert() car_2 = await Car( color="white", body="crossover", fuel="diesel" ).insert() bus_1 = await Bus( color="white", seats=80, body="bus", fuel="diesel" ).insert() owner = await Owner(name="John").insert() owner.vehicles = [car_1, car_2, bus_1] await owner.save() # re-fetch from DB w/o links owner = await Owner.get(owner.id) assert {Link} == set(i.__class__ for i in owner.vehicles) await owner.fetch_all_links() assert {Car, Bus} == set(i.__class__ for i in owner.vehicles) # re-fetch from DB with resolved links owner = await Owner.get(owner.id, fetch_links=True) assert {Car, Bus} == set(i.__class__ for i in owner.vehicles) for e in (owner, car_1, car_2, bus_1): await e.delete() def test_non_root_inheritance(self): assert DocNonRoot._class_id is None assert Doc2NonRoot._class_id is None assert DocNonRoot.get_collection_name() == "DocNonRoot" assert Doc2NonRoot.get_collection_name() == "Doc2NonRoot" def test_class_ids(self): assert Vehicle._class_id == "Vehicle" assert Vehicle.get_collection_name() == "Vehicle" assert Car._class_id == "Vehicle.Car" assert Car.get_collection_name() == "Vehicle" assert Bus._class_id == "Vehicle.Car.Bus" assert 
Bus.get_collection_name() == "Vehicle" assert Bike._class_id == "Vehicle.Bike" assert Bike.get_collection_name() == "Vehicle" assert Bicycle._class_id == "Vehicle.Bicycle" assert Bicycle.get_collection_name() == "Vehicle" assert Owner._class_id is None python-beanie-1.29.0/tests/odm/documents/test_init.py000066400000000000000000000256411473701376500226700ustar00rootroot00000000000000import pytest from motor.motor_asyncio import AsyncIOMotorCollection from pymongo import IndexModel from beanie import Document, Indexed, init_beanie from beanie.exceptions import CollectionWasNotInitialized from beanie.odm.utils.projection import get_projection from tests.odm.models import ( Color, DocumentTestModel, DocumentTestModelIndexFlagsAnnotated, DocumentTestModelStringImport, DocumentTestModelWithComplexIndex, DocumentTestModelWithCustomCollectionName, DocumentTestModelWithDroppedIndex, DocumentTestModelWithIndexFlags, DocumentTestModelWithIndexFlagsAliases, DocumentTestModelWithSimpleIndex, DocumentWithCustomInit, DocumentWithIndexMerging2, DocumentWithLink, DocumentWithListLink, DocumentWithUnionTypeExpressionOptionalBackLink, ) async def test_init_collection_was_not_initialized(): class NewDocument(Document): test_str: str with pytest.raises(CollectionWasNotInitialized): NewDocument(test_str="test") async def test_init_connection_string(settings): class NewDocumentCS(Document): test_str: str await init_beanie( connection_string=settings.mongodb_dsn, document_models=[NewDocumentCS] ) assert ( NewDocumentCS.get_motor_collection().database.name == settings.mongodb_dsn.split("/")[-1] ) async def test_init_wrong_params(settings, db): class NewDocumentCS(Document): test_str: str with pytest.raises(ValueError): await init_beanie( database=db, connection_string=settings.mongodb_dsn, document_models=[NewDocumentCS], ) with pytest.raises(ValueError): await init_beanie(document_models=[NewDocumentCS]) with pytest.raises(ValueError): await init_beanie(connection_string=settings.mongodb_dsn) async def test_collection_with_custom_name(): collection: AsyncIOMotorCollection = ( DocumentTestModelWithCustomCollectionName.get_motor_collection() ) assert collection.name == "custom" async def test_simple_index_creation(): collection: AsyncIOMotorCollection = ( DocumentTestModelWithSimpleIndex.get_motor_collection() ) index_info = await collection.index_information() assert index_info["test_int_1"] == {"key": [("test_int", 1)], "v": 2} assert index_info["test_str_text"]["key"] == [ ("_fts", "text"), ("_ftsx", 1), ] async def test_flagged_index_creation(): collection: AsyncIOMotorCollection = ( DocumentTestModelWithIndexFlags.get_motor_collection() ) index_info = await collection.index_information() assert index_info["test_int_1"] == { "key": [("test_int", 1)], "sparse": True, "v": 2, } assert index_info["test_str_-1"] == { "key": [("test_str", -1)], "unique": True, "v": 2, } async def test_flagged_index_creation_with_alias(): collection: AsyncIOMotorCollection = ( DocumentTestModelWithIndexFlagsAliases.get_motor_collection() ) index_info = await collection.index_information() assert index_info["testInt_1"] == { "key": [("testInt", 1)], "sparse": True, "v": 2, } assert index_info["testStr_-1"] == { "key": [("testStr", -1)], "unique": True, "v": 2, } async def test_annotated_index_creation(): collection: AsyncIOMotorCollection = ( DocumentTestModelIndexFlagsAnnotated.get_motor_collection() ) index_info = await collection.index_information() assert index_info["str_index_text"]["key"] == [ ("_fts", "text"), ("_ftsx", 1), ] 
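# The remaining Annotated index definitions produce plain ascending
# single-field indexes; the UUID-based ones are additionally expected
# to carry the unique flag, as asserted below.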
assert index_info["str_index_annotated_1"] == { "key": [("str_index_annotated", 1)], "v": 2, } assert index_info["uuid_index_annotated_1"] == { "key": [("uuid_index_annotated", 1)], "unique": True, "v": 2, } if "uuid_index" in index_info: assert index_info["uuid_index"] == { "key": [("uuid_index", 1)], "unique": True, "v": 2, } async def test_complex_index_creation(): collection: AsyncIOMotorCollection = ( DocumentTestModelWithComplexIndex.get_motor_collection() ) index_info = await collection.index_information() assert index_info == { "_id_": {"key": [("_id", 1)], "v": 2}, "test_int_1": {"key": [("test_int", 1)], "v": 2}, "test_int_1_test_str_-1": { "key": [("test_int", 1), ("test_str", -1)], "v": 2, }, "test_string_index_DESCENDING": {"key": [("test_str", -1)], "v": 2}, } async def test_index_dropping_is_allowed(db): await init_beanie( database=db, document_models=[DocumentTestModelWithComplexIndex] ) collection: AsyncIOMotorCollection = ( DocumentTestModelWithComplexIndex.get_motor_collection() ) await init_beanie( database=db, document_models=[DocumentTestModelWithDroppedIndex], allow_index_dropping=True, ) collection: AsyncIOMotorCollection = ( DocumentTestModelWithComplexIndex.get_motor_collection() ) index_info = await collection.index_information() assert index_info == { "_id_": {"key": [("_id", 1)], "v": 2}, "test_int_1": {"key": [("test_int", 1)], "v": 2}, } async def test_index_dropping_is_not_allowed(db): await init_beanie( database=db, document_models=[DocumentTestModelWithComplexIndex] ) await init_beanie( database=db, document_models=[DocumentTestModelWithDroppedIndex], allow_index_dropping=False, ) collection: AsyncIOMotorCollection = ( DocumentTestModelWithComplexIndex.get_motor_collection() ) index_info = await collection.index_information() assert index_info == { "_id_": {"key": [("_id", 1)], "v": 2}, "test_int_1": {"key": [("test_int", 1)], "v": 2}, "test_int_1_test_str_-1": { "key": [("test_int", 1), ("test_str", -1)], "v": 2, }, "test_string_index_DESCENDING": {"key": [("test_str", -1)], "v": 2}, } async def test_index_dropping_is_not_allowed_as_default(db): await init_beanie( database=db, document_models=[DocumentTestModelWithComplexIndex] ) await init_beanie( database=db, document_models=[DocumentTestModelWithDroppedIndex], ) collection: AsyncIOMotorCollection = ( DocumentTestModelWithComplexIndex.get_motor_collection() ) index_info = await collection.index_information() assert index_info == { "_id_": {"key": [("_id", 1)], "v": 2}, "test_int_1": {"key": [("test_int", 1)], "v": 2}, "test_int_1_test_str_-1": { "key": [("test_int", 1), ("test_str", -1)], "v": 2, }, "test_string_index_DESCENDING": {"key": [("test_str", -1)], "v": 2}, } async def test_document_string_import(db): await init_beanie( database=db, document_models=[ "tests.odm.models.DocumentTestModelStringImport", ], ) document = DocumentTestModelStringImport(test_int=1) assert document.id is None await document.insert() assert document.id is not None with pytest.raises(ValueError): await init_beanie( database=db, document_models=[ "tests", ], ) with pytest.raises(AttributeError): await init_beanie( database=db, document_models=[ "tests.wrong", ], ) async def test_projection(): projection = get_projection(DocumentTestModel) assert projection == { "_id": 1, "test_int": 1, "test_list": 1, "test_str": 1, "test_doc": 1, "revision_id": 1, } async def test_index_recreation(db): class Sample1(Document): name: Indexed(str, unique=True) class Settings: name = "sample" class Sample2(Document): name: str status: str = 
"active" class Settings: indexes = [ IndexModel( "name", unique=True, partialFilterExpression={"is_active": {"$eq": "active"}}, ), ] name = "sample" await db.drop_collection("sample") await init_beanie( database=db, document_models=[Sample1], ) await init_beanie( database=db, document_models=[Sample2], allow_index_dropping=True ) await db.drop_collection("sample") async def test_merge_indexes(): assert ( await DocumentWithIndexMerging2.get_motor_collection().index_information() == { "_id_": {"key": [("_id", 1)], "v": 2}, "s0_1": {"key": [("s0", 1)], "v": 2}, "s1_1": {"key": [("s1", 1)], "v": 2}, "s2_-1": {"key": [("s2", -1)], "v": 2}, "s3_index": {"key": [("s3", -1)], "v": 2}, "s4_index": {"key": [("s4", 1)], "v": 2}, } ) async def test_custom_init(): assert DocumentWithCustomInit.s == "TEST2" async def test_index_on_custom_types(db): class Sample1(Document): name: Indexed(Color, unique=True) class Settings: name = "sample" await db.drop_collection("sample") await init_beanie( database=db, document_models=[Sample1], ) await db.drop_collection("sample") async def test_init_document_with_union_type_expression_optional_back_link(db): await init_beanie( database=db, document_models=[ DocumentWithUnionTypeExpressionOptionalBackLink, DocumentWithListLink, DocumentWithLink, ], ) assert ( DocumentWithUnionTypeExpressionOptionalBackLink.get_link_fields().keys() == { "back_link_list", "back_link", } ) async def test_init_document_can_inhert_and_extend_settings(db): class Sample1(Document): class Settings: name = "sample1" bson_encoders = {Color: lambda x: x.value} class Sample2(Sample1): class Settings(Sample1.Settings): name = "sample2" await init_beanie( database=db, document_models=[Sample2], ) assert Sample2.get_settings().bson_encoders != {} assert Sample2.get_settings().name == "sample2" async def test_init_beanie_with_skip_indexes(db): class NewDocument(Document): test_str: str class Settings: indexes = ["test_str"] await init_beanie( database=db, document_models=[NewDocument], skip_indexes=True, ) # To force collection creation await NewDocument(test_str="Roman Right").save() collection: AsyncIOMotorCollection = NewDocument.get_motor_collection() index_info = await collection.index_information() assert len(index_info) == 1 # Only the default _id index should be present python-beanie-1.29.0/tests/odm/documents/test_inspect.py000066400000000000000000000016541473701376500233700ustar00rootroot00000000000000from beanie.odm.models import InspectionStatuses from tests.odm.models import DocumentTestModel, DocumentTestModelFailInspection async def test_inspect_ok(documents): await documents(10, "smth") result = await DocumentTestModel.inspect_collection() assert result.status == InspectionStatuses.OK assert result.errors == [] async def test_inspect_fail(documents): await documents(10, "smth") result = await DocumentTestModelFailInspection.inspect_collection() assert result.status == InspectionStatuses.FAIL assert len(result.errors) == 10 assert ( "1 validation error for DocumentTestModelFailInspection" in result.errors[0].error ) async def test_inspect_ok_with_session(documents, session): await documents(10, "smth") result = await DocumentTestModel.inspect_collection(session=session) assert result.status == InspectionStatuses.OK assert result.errors == [] python-beanie-1.29.0/tests/odm/documents/test_multi_model.py000066400000000000000000000045061473701376500242340ustar00rootroot00000000000000from tests.odm.models import ( DocumentMultiModelOne, DocumentMultiModelTwo, DocumentUnion, ) class 
TestMultiModel: async def test_multi_model(self): doc_1 = await DocumentMultiModelOne().insert() doc_2 = await DocumentMultiModelTwo().insert() new_doc_1 = await DocumentMultiModelOne.get(doc_1.id) new_doc_2 = await DocumentMultiModelTwo.get(doc_2.id) assert new_doc_1 is not None assert new_doc_2 is not None new_doc_1 = await DocumentMultiModelTwo.get(doc_1.id) new_doc_2 = await DocumentMultiModelOne.get(doc_2.id) assert new_doc_1 is None assert new_doc_2 is None new_docs_1 = await DocumentMultiModelOne.find({}).to_list() new_docs_2 = await DocumentMultiModelTwo.find({}).to_list() assert len(new_docs_1) == 1 assert len(new_docs_2) == 1 await DocumentMultiModelOne.update_all({"$set": {"shared": 100}}) new_doc_1 = await DocumentMultiModelOne.get(doc_1.id) new_doc_2 = await DocumentMultiModelTwo.get(doc_2.id) assert new_doc_1.shared == 100 assert new_doc_2.shared == 0 async def test_union_doc(self): await DocumentMultiModelOne().insert() await DocumentMultiModelTwo().insert() await DocumentMultiModelOne().insert() await DocumentMultiModelTwo().insert() docs = await DocumentUnion.all().to_list() assert isinstance(docs[0], DocumentMultiModelOne) assert isinstance(docs[1], DocumentMultiModelTwo) assert isinstance(docs[2], DocumentMultiModelOne) assert isinstance(docs[3], DocumentMultiModelTwo) async def test_union_doc_aggregation(self): await DocumentMultiModelOne().insert() await DocumentMultiModelTwo().insert() await DocumentMultiModelOne().insert() await DocumentMultiModelTwo().insert() docs = await DocumentUnion.aggregate( [{"$match": {"$expr": {"$eq": ["$int_filed", 0]}}}] ).to_list() assert len(docs) == 2 async def test_union_doc_link(self): doc_1 = await DocumentMultiModelOne().insert() await DocumentMultiModelTwo(linked_doc=doc_1).insert() docs = await DocumentMultiModelTwo.find({}, fetch_links=True).to_list() assert isinstance(docs[0].linked_doc, DocumentMultiModelOne) python-beanie-1.29.0/tests/odm/documents/test_pydantic_config.py000066400000000000000000000003751473701376500250620ustar00rootroot00000000000000import pytest from pydantic import ValidationError from tests.odm.models import DocumentWithPydanticConfig def test_pydantic_config(): doc = DocumentWithPydanticConfig(num_1=2) with pytest.raises(ValidationError): doc.num_1 = "wrong" python-beanie-1.29.0/tests/odm/documents/test_pydantic_extras.py000066400000000000000000000014551473701376500251230ustar00rootroot00000000000000import pytest from tests.odm.models import ( DocumentWithExtras, DocumentWithExtrasKw, DocumentWithPydanticConfig, ) async def test_pydantic_extras(): doc = DocumentWithExtras(num_1=2) doc.extra_value = "foo" await doc.save() loaded_doc = await DocumentWithExtras.get(doc.id) assert loaded_doc.extra_value == "foo" @pytest.mark.skip(reason="setting extra to allow via class kwargs not working") async def test_pydantic_extras_kw(): doc = DocumentWithExtrasKw(num_1=2) doc.extra_value = "foo" await doc.save() loaded_doc = await DocumentWithExtras.get(doc.id) assert loaded_doc.extra_value == "foo" async def test_fail_with_no_extras(): doc = DocumentWithPydanticConfig(num_1=2) with pytest.raises(ValueError): doc.extra_value = "foo" python-beanie-1.29.0/tests/odm/documents/test_replace.py000066400000000000000000000021411473701376500233260ustar00rootroot00000000000000from tests.odm.models import Sample async def test_replace_one(preset_documents): count_1_before = await Sample.find_many(Sample.integer == 1).count() count_2_before = await Sample.find_many(Sample.integer == 2).count() a_2 = await 
Sample.find_one(Sample.integer == 2) await Sample.find_one(Sample.integer == 1).replace_one(a_2) count_1_after = await Sample.find_many(Sample.integer == 1).count() count_2_after = await Sample.find_many(Sample.integer == 2).count() assert count_1_after == count_1_before - 1 assert count_2_after == count_2_before + 1 async def test_replace_self(preset_documents): count_1_before = await Sample.find_many(Sample.integer == 1).count() count_2_before = await Sample.find_many(Sample.integer == 2).count() a_1 = await Sample.find_one(Sample.integer == 1) a_1.integer = 2 await a_1.replace() count_1_after = await Sample.find_many(Sample.integer == 1).count() count_2_after = await Sample.find_many(Sample.integer == 2).count() assert count_1_after == count_1_before - 1 assert count_2_after == count_2_before + 1 python-beanie-1.29.0/tests/odm/documents/test_revision.py000066400000000000000000000105041473701376500235530ustar00rootroot00000000000000import pytest from pymongo.errors import BulkWriteError from beanie import BulkWriter from beanie.exceptions import RevisionIdWasChanged from beanie.odm.operators.update.general import Inc from tests.odm.models import ( DocumentWithRevisionTurnedOn, LockWithRevision, WindowWithRevision, ) async def test_replace(): doc = DocumentWithRevisionTurnedOn(num_1=1, num_2=2) await doc.insert() doc.num_1 = 2 await doc.replace() doc.num_2 = 3 await doc.replace() for i in range(5): found_doc = await DocumentWithRevisionTurnedOn.get(doc.id) found_doc.num_1 += 1 await found_doc.replace() doc.revision_id = "wrong" doc.num_1 = 4 with pytest.raises(RevisionIdWasChanged): await doc.replace() await doc.replace(ignore_revision=True) await doc.replace() async def test_update(): doc = DocumentWithRevisionTurnedOn(num_1=1, num_2=2) await doc.insert() await doc.update(Inc({DocumentWithRevisionTurnedOn.num_1: 1})) await doc.update(Inc({DocumentWithRevisionTurnedOn.num_1: 1})) for i in range(5): found_doc = await DocumentWithRevisionTurnedOn.get(doc.id) await found_doc.update(Inc({DocumentWithRevisionTurnedOn.num_1: 1})) doc.revision_id = "wrong" with pytest.raises(RevisionIdWasChanged): await doc.update(Inc({DocumentWithRevisionTurnedOn.num_1: 1})) await doc.update( Inc({DocumentWithRevisionTurnedOn.num_1: 1}), ignore_revision=True ) await doc.update(Inc({DocumentWithRevisionTurnedOn.num_1: 1})) async def test_save_changes(): doc = DocumentWithRevisionTurnedOn(num_1=1, num_2=2) await doc.insert() doc.num_1 = 2 await doc.save_changes() doc.num_2 = 3 await doc.save_changes() for i in range(5): found_doc = await DocumentWithRevisionTurnedOn.get(doc.id) found_doc.num_1 += 1 await found_doc.save_changes() doc.revision_id = "wrong" doc.num_1 = 4 with pytest.raises(RevisionIdWasChanged): await doc.save_changes() await doc.save_changes(ignore_revision=True) await doc.save_changes() async def test_save(): doc = DocumentWithRevisionTurnedOn(num_1=1, num_2=2) doc.num_1 = 2 await doc.save() doc.num_2 = 3 await doc.save() for i in range(5): found_doc = await DocumentWithRevisionTurnedOn.get(doc.id) found_doc.num_1 += 1 await found_doc.save() doc.revision_id = "wrong" doc.num_1 = 4 with pytest.raises(RevisionIdWasChanged): await doc.save() await doc.save(ignore_revision=True) await doc.save() async def test_update_bulk_writer(): doc = DocumentWithRevisionTurnedOn(num_1=1, num_2=2) await doc.save() doc.num_1 = 2 async with BulkWriter() as bulk_writer: await doc.save(bulk_writer=bulk_writer) doc = await DocumentWithRevisionTurnedOn.get(doc.id) doc.num_2 = 3 async with BulkWriter() as bulk_writer: 
await doc.save(bulk_writer=bulk_writer) doc = await DocumentWithRevisionTurnedOn.get(doc.id) for i in range(5): found_doc = await DocumentWithRevisionTurnedOn.get(doc.id) found_doc.num_1 += 1 async with BulkWriter() as bulk_writer: await found_doc.save(bulk_writer=bulk_writer) doc.revision_id = "wrong" doc.num_1 = 4 with pytest.raises(BulkWriteError): async with BulkWriter() as bulk_writer: await doc.save(bulk_writer=bulk_writer) async with BulkWriter() as bulk_writer: await doc.save(bulk_writer=bulk_writer, ignore_revision=True) async def test_empty_update(): doc = DocumentWithRevisionTurnedOn(num_1=1, num_2=2) await doc.insert() # This fails with RevisionIdWasChanged await doc.update({"$set": {"num_1": 1}}) async def test_save_changes_when_there_were_no_changes(): doc = DocumentWithRevisionTurnedOn(num_1=1, num_2=2) await doc.insert() revision = doc.revision_id await doc.save_changes() assert doc.revision_id == revision await DocumentWithRevisionTurnedOn.get(doc.id) assert doc.revision_id == revision async def test_revision_id_for_link(): lock = LockWithRevision(k=1) await lock.insert() lock_rev_id = lock.revision_id window = WindowWithRevision(x=0, y=0, lock=lock) await window.insert() assert lock.revision_id == lock_rev_id python-beanie-1.29.0/tests/odm/documents/test_soft_delete.py000066400000000000000000000064501473701376500242170ustar00rootroot00000000000000from tests.odm.models import DocumentTestModelWithSoftDelete async def test_get_item(document_soft_delete_not_inserted): # insert a document with soft delete result = await document_soft_delete_not_inserted.insert() # get from db by id document = await DocumentTestModelWithSoftDelete.get(document_id=result.id) assert document.is_deleted() is False assert document.deleted_at is None assert document.test_int == result.test_int assert document.test_str == result.test_str # # delete the document await document.delete() assert document.is_deleted() is True # check if document exist with `.get()` document = await DocumentTestModelWithSoftDelete.get(document_id=result.id) assert document is None # check document exist in trashed results = ( await DocumentTestModelWithSoftDelete.find_many_in_all().to_list() ) assert len(results) == 1 async def test_find_one(document_soft_delete_not_inserted): result = await document_soft_delete_not_inserted.insert() # # delete the document await result.delete() # check if document exist with `.find_one()` document = await DocumentTestModelWithSoftDelete.find_one( DocumentTestModelWithSoftDelete.id == result.id ) assert document is None async def test_find(documents_soft_delete_not_inserted): # insert 3 documents inserted_docs = [] for doc in documents_soft_delete_not_inserted: result = await doc.insert() inserted_docs.append(result) # use `.find_many()` to get them all results = await DocumentTestModelWithSoftDelete.find().to_list() assert len(results) == 3 # delete one of them await inserted_docs[0].delete() # check items in with `.find_many()` results = await DocumentTestModelWithSoftDelete.find_many().to_list() assert len(results) == 2 founded_documents_id = [doc.id for doc in results] assert inserted_docs[0].id not in founded_documents_id # check in trashed items results = ( await DocumentTestModelWithSoftDelete.find_many_in_all().to_list() ) assert len(results) == 3 async def test_find_many(documents_soft_delete_not_inserted): # insert 2 documents item_1 = await documents_soft_delete_not_inserted[0].insert() item_2 = await documents_soft_delete_not_inserted[1].insert() # use `.find_many()` to get 
them all results = await DocumentTestModelWithSoftDelete.find_many().to_list() assert len(results) == 2 # delete one of them await item_1.delete() # check items in with `.find_many()` results = await DocumentTestModelWithSoftDelete.find_many().to_list() assert len(results) == 1 assert results[0].id == item_2.id # check in trashed items results = ( await DocumentTestModelWithSoftDelete.find_many_in_all().to_list() ) assert len(results) == 2 async def test_hard_delete(document_soft_delete_not_inserted): result = await document_soft_delete_not_inserted.insert() await result.hard_delete() # check items in with `.find_many()` results = await DocumentTestModelWithSoftDelete.find_many().to_list() assert len(results) == 0 # check in trashed results = ( await DocumentTestModelWithSoftDelete.find_many_in_all().to_list() ) assert len(results) == 0 python-beanie-1.29.0/tests/odm/documents/test_sync.py000066400000000000000000000026731473701376500227010ustar00rootroot00000000000000import pytest from beanie.exceptions import ApplyChangesException from beanie.odm.documents import MergeStrategy from tests.odm.models import DocumentToTestSync class TestSync: async def test_merge_remote(self): doc = DocumentToTestSync() await doc.insert() doc2 = await DocumentToTestSync.get(doc.id) doc2.s = "foo" doc.i = 100 await doc.save() await doc2.sync() assert doc2.s == "TEST" assert doc2.i == 100 async def test_merge_local(self): doc = DocumentToTestSync(d={"option_1": {"s": "foo"}}) await doc.insert() doc2 = await DocumentToTestSync.get(doc.id) doc2.s = "foo" doc2.n.option_1.s = "bar" doc2.d["option_1"]["s"] = "bar" doc.i = 100 await doc.save() await doc2.sync(merge_strategy=MergeStrategy.local) assert doc2.s == "foo" assert doc2.n.option_1.s == "bar" assert doc2.d["option_1"]["s"] == "bar" assert doc2.i == 100 async def test_merge_local_impossible_apply_changes(self): doc = DocumentToTestSync(d={"option_1": {"s": "foo"}}) await doc.insert() doc2 = await DocumentToTestSync.get(doc.id) doc2.d["option_1"]["s"] = {"foo": "bar"} doc.d = {"option_1": "nothing"} await doc.save() with pytest.raises(ApplyChangesException): await doc2.sync(merge_strategy=MergeStrategy.local) python-beanie-1.29.0/tests/odm/documents/test_update.py000066400000000000000000000235051473701376500232040ustar00rootroot00000000000000import pytest from beanie.exceptions import ( DocumentNotFound, ReplaceError, ) from beanie.odm.fields import PydanticObjectId from beanie.odm.utils.pydantic import IS_PYDANTIC_V2 from tests.odm.models import ( DocumentTestModel, DocumentWithKeepNullsFalse, DocumentWithList, ModelWithOptionalField, Sample, ) # REPLACE # # async def test_replace_one(document): # new_doc = DocumentTestModel( # test_int=0, test_str="REPLACED_VALUE", test_list=[] # ) # await DocumentTestModel.replace_one({"_id": document.id}, new_doc) # new_document = await DocumentTestModel.get(document.id) # assert new_document.test_str == "REPLACED_VALUE" async def test_replace_many(documents): await documents(10, "foo") created_documents = await DocumentTestModel.find_many( {"test_str": "foo"} ).to_list() to_replace = [] for document in created_documents[:5]: document.test_str = "REPLACED_VALUE" to_replace.append(document) await DocumentTestModel.replace_many(to_replace) replaced_documetns = await DocumentTestModel.find_many( {"test_str": "REPLACED_VALUE"} ).to_list() assert len(replaced_documetns) == 5 async def test_replace_many_not_all_the_docs_found(documents): await documents(10, "foo") created_documents = await DocumentTestModel.find_many( 
{"test_str": "foo"} ).to_list() to_replace = [] created_documents[0].id = PydanticObjectId() for document in created_documents[:5]: document.test_str = "REPLACED_VALUE" to_replace.append(document) with pytest.raises(ReplaceError): await DocumentTestModel.replace_many(to_replace) async def test_replace(document): update_data = {"test_str": "REPLACED_VALUE"} if IS_PYDANTIC_V2: new_doc = document.model_copy(update=update_data) else: new_doc = document.copy(update=update_data) # pydantic v1 doesn't copy excluded fields new_doc.test_list = document.test_list # document.test_str = "REPLACED_VALUE" await new_doc.replace() new_document = await DocumentTestModel.get(document.id) assert new_document.test_str == "REPLACED_VALUE" async def test_replace_not_saved(document_not_inserted): with pytest.raises(ValueError): await document_not_inserted.replace() async def test_replace_not_found(document_not_inserted): document_not_inserted.id = PydanticObjectId() with pytest.raises(DocumentNotFound): await document_not_inserted.replace() # SAVE async def test_save(document): update_data = {"test_str": "REPLACED_VALUE"} if IS_PYDANTIC_V2: new_doc = document.model_copy(update=update_data) else: new_doc = document.copy(update=update_data) # document.test_str = "REPLACED_VALUE" await new_doc.save() new_document = await DocumentTestModel.get(document.id) assert new_document.test_str == "REPLACED_VALUE" async def test_save_not_saved(document_not_inserted): await document_not_inserted.save() assert ( hasattr(document_not_inserted, "id") and document_not_inserted.id is not None ) from_db = await DocumentTestModel.get(document_not_inserted.id) assert from_db == document_not_inserted async def test_save_not_found(document_not_inserted): document_not_inserted.id = PydanticObjectId() await document_not_inserted.save() assert ( hasattr(document_not_inserted, "id") and document_not_inserted.id is not None ) from_db = await DocumentTestModel.get(document_not_inserted.id) assert from_db == document_not_inserted # UPDATE async def test_update_one(document): await DocumentTestModel.find_one( {"_id": document.id, "test_list.test_str": "foo"} ).update({"$set": {"test_list.$.test_str": "foo_foo"}}) new_document = await DocumentTestModel.get(document.id) assert new_document.test_list[0].test_str == "foo_foo" async def test_update_many(documents): await documents(10, "foo") await documents(7, "bar") await DocumentTestModel.find_many({"test_str": "foo"}).update( {"$set": {"test_str": "bar"}} ) bar_documetns = await DocumentTestModel.find_many( {"test_str": "bar"} ).to_list() assert len(bar_documetns) == 17 foo_documetns = await DocumentTestModel.find_many( {"test_str": "foo"} ).to_list() assert len(foo_documetns) == 0 async def test_update_all(documents): await documents(10, "foo") await documents(7, "bar") await DocumentTestModel.update_all( {"$set": {"test_str": "smth_else"}}, ) bar_documetns = await DocumentTestModel.find_many( {"test_str": "bar"} ).to_list() assert len(bar_documetns) == 0 foo_documetns = await DocumentTestModel.find_many( {"test_str": "foo"} ).to_list() assert len(foo_documetns) == 0 smth_else_documetns = await DocumentTestModel.find_many( {"test_str": "smth_else"} ).to_list() assert len(smth_else_documetns) == 17 async def test_save_keep_nulls_false(): model = ModelWithOptionalField(i=10, s="TEST_MODEL") doc = DocumentWithKeepNullsFalse(m=model, o="TEST_DOCUMENT") await doc.insert() doc.o = None doc.m.s = None await doc.save() from_db = await DocumentWithKeepNullsFalse.get(doc.id) assert from_db.o is None 
assert from_db.m.s is None raw_data = ( await DocumentWithKeepNullsFalse.get_motor_collection().find_one( {"_id": doc.id} ) ) assert raw_data == {"_id": doc.id, "m": {"i": 10}} async def test_save_changes_keep_nulls_false(): model = ModelWithOptionalField(i=10, s="TEST_MODEL") doc = DocumentWithKeepNullsFalse(m=model, o="TEST_DOCUMENT") await doc.insert() doc.o = None doc.m.s = None await doc.save_changes() from_db = await DocumentWithKeepNullsFalse.get(doc.id) assert from_db.o is None assert from_db.m.s is None raw_data = ( await DocumentWithKeepNullsFalse.get_motor_collection().find_one( {"_id": doc.id} ) ) assert raw_data == {"_id": doc.id, "m": {"i": 10}} # WITH SESSION # async def test_update_with_session(document: DocumentTestModel, session): # buf_len = len(document.test_list) # to_insert = SubDocument(test_str="test") # await document.update( # update_query={"$push": {"test_list": to_insert.dict()}}, # session=session, # ) # new_document = await DocumentTestModel.get(document.id, session=session) # assert len(new_document.test_list) == buf_len + 1 # # # async def test_replace_one_with_session(document, session): # new_doc = DocumentTestModel( # test_int=0, test_str="REPLACED_VALUE", test_list=[] # ) # await DocumentTestModel.replace_one( # {"_id": document.id}, new_doc, session=session # ) # new_document = await DocumentTestModel.get(document.id, session=session) # assert new_document.test_str == "REPLACED_VALUE" # # # async def test_replace_with_session(document, session): # update_data = {"test_str": "REPLACED_VALUE"} # new_doc: DocumentTestModel = document.copy(update=update_data) # # document.test_str = "REPLACED_VALUE" # await new_doc.replace(session=session) # new_document = await DocumentTestModel.get(document.id, session=session) # assert new_document.test_str == "REPLACED_VALUE" # # # async def test_update_one_with_session(document, session): # await DocumentTestModel.update_one( # update_query={"$set": {"test_list.$.test_str": "foo_foo"}}, # filter_query={"_id": document.id, "test_list.test_str": "foo"}, # session=session, # ) # new_document = await DocumentTestModel.get(document.id, session=session) # assert new_document.test_list[0].test_str == "foo_foo" # # # async def test_update_many_with_session(documents, session): # await documents(10, "foo") # await documents(7, "bar") # await DocumentTestModel.update_many( # update_query={"$set": {"test_str": "bar"}}, # filter_query={"test_str": "foo"}, # session=session, # ) # bar_documetns = await DocumentTestModel.find_many( # {"test_str": "bar"}, session=session # ).to_list() # assert len(bar_documetns) == 17 # foo_documetns = await DocumentTestModel.find_many( # {"test_str": "foo"}, session=session # ).to_list() # assert len(foo_documetns) == 0 # # # async def test_update_all_with_session(documents, session): # await documents(10, "foo") # await documents(7, "bar") # await DocumentTestModel.update_all( # update_query={"$set": {"test_str": "smth_else"}}, session=session # ) # bar_documetns = await DocumentTestModel.find_many( # {"test_str": "bar"}, session=session # ).to_list() # assert len(bar_documetns) == 0 # foo_documetns = await DocumentTestModel.find_many( # {"test_str": "foo"}, session=session # ).to_list() # assert len(foo_documetns) == 0 # smth_else_documetns = await DocumentTestModel.find_many( # {"test_str": "smth_else"}, session=session # ).to_list() # assert len(smth_else_documetns) == 17 async def test_update_list(): test_record = DocumentWithList(list_values=["1", "2", "3"]) test_record = await 
test_record.insert() if IS_PYDANTIC_V2: update_data = test_record.model_dump() else: update_data = test_record.dict() update_data["list_values"] = ["5", "6", "7"] updated_test_record = await test_record.update({"$set": update_data}) assert updated_test_record.list_values == update_data["list_values"] async def test_update_using_pipeline(preset_documents): await Sample.all().update( [{"$set": {"integer": 10000}}, {"$set": {"string": "TEST3"}}] ) all_docs = await Sample.find_many({}).to_list() for doc in all_docs: assert doc.integer == 10000 assert doc.string == "TEST3" python-beanie-1.29.0/tests/odm/documents/test_validation_on_save.py000066400000000000000000000044531473701376500255670ustar00rootroot00000000000000from typing import Optional import pytest from bson import ObjectId from pydantic import BaseModel, ValidationError from beanie import PydanticObjectId from beanie.odm.utils.pydantic import IS_PYDANTIC_V2 from tests.odm.models import ( DocumentWithValidationOnSave, Lock, WindowWithValidationOnSave, ) async def test_validate_on_insert(): doc = DocumentWithValidationOnSave(num_1=1, num_2=2) doc.num_1 = "wrong_value" with pytest.raises(ValidationError): await doc.insert() async def test_validate_on_replace(): doc = DocumentWithValidationOnSave(num_1=1, num_2=2) await doc.insert() doc.num_1 = "wrong_value" with pytest.raises(ValidationError): await doc.replace() async def test_validate_on_save_changes(): doc = DocumentWithValidationOnSave(num_1=1, num_2=2) await doc.insert() doc.num_1 = "wrong_value" with pytest.raises(ValidationError): await doc.save_changes() async def test_validate_on_save_keep_the_id_type(): class UpdateModel(BaseModel): num_1: Optional[int] = None related: Optional[PydanticObjectId] = None doc = DocumentWithValidationOnSave(num_1=1, num_2=2) await doc.insert() update = UpdateModel(related=PydanticObjectId()) if IS_PYDANTIC_V2: doc = doc.model_copy(update=update.model_dump(exclude_unset=True)) else: doc = doc.copy(update=update.dict(exclude_unset=True)) doc.num_2 = 1000 await doc.save() in_db = await DocumentWithValidationOnSave.get_motor_collection().find_one( {"_id": doc.id} ) assert isinstance(in_db["related"], ObjectId) new_doc = await DocumentWithValidationOnSave.get(doc.id) assert isinstance(new_doc.related, PydanticObjectId) async def test_validate_on_save_action(): doc = DocumentWithValidationOnSave(num_1=1, num_2=2) await doc.insert() assert doc.num_2 == 3 async def test_validate_on_save_skip_action(): doc = DocumentWithValidationOnSave(num_1=1, num_2=2) await doc.insert(skip_actions=["num_2_plus_1"]) assert doc.num_2 == 2 async def test_validate_on_save_dbref(): lock = Lock(k=1) await lock.insert() window = WindowWithValidationOnSave( x=1, y=1, lock=lock.to_ref(), # this is what exactly we want to test ) await window.insert() python-beanie-1.29.0/tests/odm/models.py000066400000000000000000000610361473701376500201460ustar00rootroot00000000000000import datetime import sys from enum import Enum from ipaddress import ( IPv4Address, IPv4Interface, IPv4Network, IPv6Address, IPv6Interface, IPv6Network, ) from pathlib import Path from typing import ( Any, Callable, ClassVar, Dict, List, Optional, Set, Tuple, Type, Union, ) from uuid import UUID, uuid4 import pymongo from bson import Regex from pydantic import ( UUID4, BaseModel, ConfigDict, EmailStr, Field, HttpUrl, PrivateAttr, SecretBytes, SecretStr, ) from pydantic_core import core_schema from pymongo import IndexModel from typing_extensions import Annotated from beanie import ( DecimalAnnotation, Document, 
DocumentWithSoftDelete, Indexed, Insert, Replace, Save, Update, ValidateOnSave, ) from beanie.odm.actions import Delete, after_event, before_event from beanie.odm.custom_types import re from beanie.odm.custom_types.bson.binary import BsonBinary from beanie.odm.fields import BackLink, Link, PydanticObjectId from beanie.odm.settings.timeseries import TimeSeriesConfig from beanie.odm.union_doc import UnionDoc from beanie.odm.utils.pydantic import IS_PYDANTIC_V2 if IS_PYDANTIC_V2: from pydantic import RootModel, validate_call if sys.version_info >= (3, 10): def type_union(A, B): return A | B else: def type_union(A, B): return Union[A, B] class Color: def __init__(self, value): self.value = value def as_rgb(self): return self.value def as_hex(self): return self.value @classmethod def _validate(cls, value: Any) -> "Color": if isinstance(value, Color): return value if isinstance(value, dict): return Color(value["value"]) return Color(value) if IS_PYDANTIC_V2: @classmethod def __get_pydantic_core_schema__( cls, _source_type: Type[Any], _handler: Callable[[Any], core_schema.CoreSchema], ) -> core_schema.CoreSchema: return core_schema.json_or_python_schema( json_schema=core_schema.str_schema(), python_schema=core_schema.no_info_plain_validator_function( cls._validate ), ) else: @classmethod def __get_validators__(cls): yield cls._validate class Extra(str, Enum): allow = "allow" class Option2(BaseModel): f: float class Option1(BaseModel): s: str class Nested(BaseModel): integer: int option_1: Option1 union: Union[Option1, Option2] optional: Optional[Option2] = None class GeoObject(BaseModel): type: str = "Point" coordinates: Tuple[float, float] class Sample(Document): timestamp: datetime.datetime increment: Indexed(int) integer: Indexed(int) float_num: float string: str nested: Nested optional: Optional[Option2] = None union: Union[Option1, Option2] geo: GeoObject const: str = "TEST" class DocumentTestModelWithSoftDelete(DocumentWithSoftDelete): test_int: int test_str: str class SubDocument(BaseModel): test_str: str test_int: int = 42 class DocumentTestModel(Document): test_int: int test_doc: SubDocument test_str: str test_list: List[SubDocument] = Field(exclude=True) class Settings: use_cache = True cache_expiration_time = datetime.timedelta(seconds=10) cache_capacity = 5 use_state_management = True class DocumentTestModelWithLink(Document): test_link: Link[DocumentTestModel] class Settings: use_cache = True cache_expiration_time = datetime.timedelta(seconds=10) cache_capacity = 5 use_state_management = True class DocumentTestModelWithCustomCollectionName(Document): test_int: int test_list: List[SubDocument] test_str: str class Settings: name = "custom" class_id = "different_class_id" class DocumentTestModelWithSimpleIndex(Document): test_int: Indexed(int) test_list: List[SubDocument] test_str: Indexed(str, index_type=pymongo.TEXT) class DocumentTestModelWithIndexFlags(Document): test_int: Indexed(int, sparse=True) test_str: Indexed(str, index_type=pymongo.DESCENDING, unique=True) class DocumentTestModelWithIndexFlagsAliases(Document): test_int: Indexed(int, sparse=True) = Field(alias="testInt") test_str: Indexed(str, index_type=pymongo.DESCENDING, unique=True) = Field( alias="testStr" ) class DocumentTestModelIndexFlagsAnnotated(Document): str_index: Indexed(str, index_type=pymongo.TEXT) str_index_annotated: Indexed(str, index_type=pymongo.ASCENDING) uuid_index_annotated: Annotated[UUID4, Indexed(unique=True)] if not IS_PYDANTIC_V2: # The UUID4 type raises a ValueError with the current # 
implementation of Indexed when using Pydantic v2. uuid_index: Indexed(UUID4, unique=True) class DocumentTestModelWithComplexIndex(Document): test_int: int test_list: List[SubDocument] test_str: str class Settings: name = "docs_with_index" indexes = [ "test_int", [ ("test_int", pymongo.ASCENDING), ("test_str", pymongo.DESCENDING), ], IndexModel( [("test_str", pymongo.DESCENDING)], name="test_string_index_DESCENDING", ), ] class DocumentTestModelWithDroppedIndex(Document): test_int: int test_list: List[SubDocument] test_str: str class Settings: name = "docs_with_index" indexes = [ "test_int", ] class DocumentTestModelStringImport(Document): test_int: int class DocumentTestModelFailInspection(Document): test_int_2: int class Settings: name = "DocumentTestModel" class DocumentWithDeprecatedHiddenField(Document): if IS_PYDANTIC_V2: test_hidden: List[str] = Field(json_schema_extra={"hidden": True}) else: test_hidden: List[str] = Field(hidden=True) class DocumentWithCustomIdUUID(Document): id: UUID = Field(default_factory=uuid4) name: str class DocumentWithCustomIdInt(Document): id: int name: str class DocumentWithCustomFiledsTypes(Document): color: Color decimal: DecimalAnnotation secret_bytes: SecretBytes secret_string: SecretStr ipv4address: IPv4Address ipv4interface: IPv4Interface ipv4network: IPv4Network ipv6address: IPv6Address ipv6interface: IPv6Interface ipv6network: IPv6Network timedelta: datetime.timedelta set_type: Set[str] tuple_type: Tuple[int, str] path: Path class Settings: bson_encoders = {Color: vars} if IS_PYDANTIC_V2: model_config = ConfigDict( arbitrary_types_allowed=True, ) else: class Config: arbitrary_types_allowed = True class DocumentWithBsonEncodersFiledsTypes(Document): color: Color timestamp: datetime.datetime class Settings: bson_encoders = { Color: lambda c: c.as_rgb(), datetime.datetime: lambda o: o.isoformat(timespec="microseconds"), } if IS_PYDANTIC_V2: model_config = ConfigDict( arbitrary_types_allowed=True, ) else: class Config: arbitrary_types_allowed = True class DocumentWithActions(Document): name: str num_1: int = 0 num_2: int = 10 num_3: int = 100 _private_num: int = PrivateAttr(default=100) class Inner: inner_num_1 = 0 inner_num_2 = 0 @before_event(Insert) def capitalize_name(self): self.name = self.name.capitalize() @before_event([Insert, Replace, Save]) async def add_one(self): self.num_1 += 1 @after_event(Insert) def num_2_change(self): self.num_2 -= 1 @after_event(Replace) def num_3_change(self): self.num_3 -= 1 @before_event(Delete) def inner_num_to_one(self): self.Inner.inner_num_1 = 1 @after_event(Delete) def inner_num_to_two(self): self.Inner.inner_num_2 = 2 @before_event(Update) def inner_num_to_one_2(self): self._private_num += 1 @after_event(Update) def inner_num_to_two_2(self): self.num_2 -= 1 class DocumentWithActions2(Document): name: str num_1: int = 0 num_2: int = 10 num_3: int = 100 _private_num: int = PrivateAttr(default=100) class Inner: inner_num_1 = 0 inner_num_2 = 0 @before_event(Insert) def capitalize_name(self): self.name = self.name.capitalize() @before_event(Insert, Replace, Save) async def add_one(self): self.num_1 += 1 @after_event(Insert) def num_2_change(self): self.num_2 -= 1 @after_event(Replace) def num_3_change(self): self.num_3 -= 1 @before_event(Delete) def inner_num_to_one(self): self.Inner.inner_num_1 = 1 @after_event(Delete) def inner_num_to_two(self): self.Inner.inner_num_2 = 2 @before_event(Update) def inner_num_to_one_2(self): self._private_num += 1 @after_event(Update) def inner_num_to_two_2(self): self.num_2 -= 1 
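# ---------------------------------------------------------------------------
# Editorial sketch (not part of the original models module): how the event
# hooks on DocumentWithActions / DocumentWithActions2 above are typically
# exercised from an async context.
#
#     doc = DocumentWithActions2(name="test")
#     await doc.insert()
#     # before_event(Insert) capitalized the name and incremented num_1;
#     # after_event(Insert) decremented num_2.
#     assert doc.name == "Test"
#     assert doc.num_1 == 1 and doc.num_2 == 9
#
# Note that before_event/after_event accept either a list of event types
# (DocumentWithActions) or several positional event types
# (DocumentWithActions2); the two classes are otherwise equivalent.
# ---------------------------------------------------------------------------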
class InheritedDocumentWithActions(DocumentWithActions): ... class InternalDoc(BaseModel): _private_field: str = PrivateAttr(default="TEST_PRIVATE") num: int = 100 string: str = "test" lst: List[int] = [1, 2, 3, 4, 5] def change_private(self): self._private_field = "PRIVATE_CHANGED" def get_private(self): return self._private_field class DocumentWithTurnedOnStateManagement(Document): num_1: int num_2: int internal: InternalDoc class Settings: use_state_management = True class DocumentWithTurnedOnStateManagementWithCustomId(Document): id: int num_1: int num_2: int class Settings: use_state_management = True class DocumentWithTurnedOnReplaceObjects(Document): num_1: int num_2: int internal: InternalDoc class Settings: use_state_management = True state_management_replace_objects = True class DocumentWithTurnedOnSavePrevious(Document): num_1: int num_2: int internal: InternalDoc class Settings: use_state_management = True state_management_save_previous = True class DocumentWithTurnedOffStateManagement(Document): num_1: int num_2: int class DocumentWithValidationOnSave(Document): num_1: int num_2: int related: PydanticObjectId = Field(default_factory=PydanticObjectId) @after_event(ValidateOnSave) def num_2_plus_1(self): self.num_2 += 1 class Settings: validate_on_save = True use_state_management = True class DocumentWithRevisionTurnedOn(Document): num_1: int num_2: int class Settings: use_revision = True use_state_management = True class DocumentWithPydanticConfig(Document): if IS_PYDANTIC_V2: model_config = ConfigDict(validate_assignment=True) else: class Config: validate_assignment = True num_1: int class DocumentWithExtras(Document): if IS_PYDANTIC_V2: model_config = ConfigDict(extra="allow") else: class Config: extra = "allow" num_1: int class DocumentWithExtrasKw(Document, extra="allow"): num_1: int class Yard(Document): v: int w: int class Lock(Document): k: int class Window(Document): x: int y: int lock: Optional[Link[Lock]] = None class WindowWithValidationOnSave(Document): x: int y: int lock: Optional[Link[Lock]] = None class Settings: validate_on_save = True class Door(Document): t: int = 10 window: Optional[Link[Window]] = None locks: Optional[List[Link[Lock]]] = None class Roof(Document): r: int = 100 class House(Document): windows: List[Link[Window]] door: Link[Door] roof: Optional[Link[Roof]] = None yards: Optional[List[Link[Yard]]] = None height: Indexed(int) = 2 name: Indexed(str) = Field(exclude=True) if IS_PYDANTIC_V2: model_config = ConfigDict( extra="allow", ) else: class Config: extra = Extra.allow class DocumentForEncodingTest(Document): bytes_field: Optional[bytes] = None datetime_field: Optional[datetime.datetime] = None class DocumentWithTimeseries(Document): ts: datetime.datetime = Field(default_factory=datetime.datetime.now) class Settings: timeseries = TimeSeriesConfig(time_field="ts", expire_after_seconds=2) class DocumentWithStringField(Document): string_field: str class DocumentForEncodingTestDate(Document): date_field: datetime.date = Field(default_factory=datetime.date.today) class DocumentUnion(UnionDoc): class Settings: name = "multi_model" class_id = "123" class DocumentMultiModelOne(Document): int_filed: int = 0 shared: int = 0 class Settings: union_doc = DocumentUnion name = "multi_one" class_id = "123" class DocumentMultiModelTwo(Document): str_filed: str = "test" shared: int = 0 linked_doc: Optional[Link[DocumentMultiModelOne]] = None class Settings: union_doc = DocumentUnion name = "multi_two" class_id = "123" class YardWithRevision(Document): v: int w: 
int class Settings: use_revision = True use_state_management = True class LockWithRevision(Document): k: int class Settings: use_revision = True use_state_management = True class WindowWithRevision(Document): x: int y: int lock: Link[LockWithRevision] class Settings: use_revision = True use_state_management = True class HouseWithRevision(Document): windows: List[Link[WindowWithRevision]] class Settings: use_revision = True use_state_management = True # classes for inheritance test class Vehicle(Document): """Root parent for testing flat inheritance""" # Vehicle # / | \ # / | \ # Bicycle Bike Car # \ # \ # Bus color: str @after_event(Insert) def on_object_create(self): # this event will be triggered for all children too (self will have corresponding type) ... class Settings: is_root = True class Bicycle(Vehicle): frame: int wheels: int class Fuelled(BaseModel): """Just a mixin""" fuel: Optional[str] = None class Car(Vehicle, Fuelled): body: str class Bike(Vehicle, Fuelled): ... class Bus(Car, Fuelled): seats: int class Owner(Document): name: str vehicles: List[Link[Vehicle]] = [] class MixinNonRoot(BaseModel): id: int = Field(..., ge=1, le=254) class MyDocNonRoot(Document): class Settings: use_state_management = True class DocNonRoot(MixinNonRoot, MyDocNonRoot): name: str class Doc2NonRoot(MyDocNonRoot): name: str class Child(BaseModel): child_field: str class SampleWithMutableObjects(Document): d: Dict[str, Child] lst: List[Child] class SampleLazyParsing(Document): i: int s: str lst: List[int] = Field( [], ) if IS_PYDANTIC_V2: model_config = ConfigDict( validate_assignment=True, ) else: class Config: validate_assignment = True class Settings: lazy_parsing = True use_state_management = True class RootDocument(Document): name: str link_root: Link[Document] class ADocument(RootDocument): surname: str link_a: Link[Document] class Settings: name = "B" class BDocument(RootDocument): email: str link_b: Link[Document] class Settings: name = "B" class StateAndDecimalFieldModel(Document): amt: DecimalAnnotation other_amt: DecimalAnnotation = Field( decimal_places=1, multiple_of=0.5, default=0 ) class Settings: name = "amounts" use_revision = True use_state_management = True class Region(Document): state: Optional[str] = "TEST" city: Optional[str] = "TEST" district: Optional[str] = "TEST" class UsersAddresses(Document): region_id: Optional[Link[Region]] = None phone_number: Optional[str] = None street: Optional[str] = None class AddressView(BaseModel): id: Optional[PydanticObjectId] = Field(alias="_id", default=None) phone_number: Optional[str] = None street: Optional[str] = None state: Optional[str] = None city: Optional[str] = None district: Optional[str] = None class Settings: projection = { "id": "$_id", "phone_number": 1, "street": 1, "sub_district": "$region_id.sub_district", "city": "$region_id.city", "state": "$region_id.state", } class SelfLinked(Document): item: Optional[Link["SelfLinked"]] = None s: str class Settings: max_nesting_depth = 2 class LoopedLinksA(Document): b: Link["LoopedLinksB"] s: str class Settings: max_nesting_depths_per_field = {"b": 2} class LoopedLinksB(Document): a: Optional[Link[LoopedLinksA]] = None s: str class DocWithCollectionInnerClass(Document): s: str class Collection: name = "test" class DocumentWithDecimalField(Document): amt: DecimalAnnotation other_amt: DecimalAnnotation = Field( decimal_places=1, multiple_of=0.5, default=0 ) if IS_PYDANTIC_V2: model_config = ConfigDict( validate_assignment=True, ) else: class Config: validate_assignment = True class 
Settings: name = "amounts" use_revision = True use_state_management = True indexes = [ pymongo.IndexModel( keys=[("amt", pymongo.ASCENDING)], name="amt_ascending" ), pymongo.IndexModel( keys=[("other_amt", pymongo.DESCENDING)], name="other_amt_descending", ), ] class ModelWithOptionalField(BaseModel): s: Optional[str] = None i: int class DocumentWithKeepNullsFalse(Document): o: Optional[str] = None m: ModelWithOptionalField class Settings: keep_nulls = False use_state_management = True class ReleaseElemMatch(BaseModel): major_ver: int minor_ver: int build_ver: int class PackageElemMatch(Document): releases: List[ReleaseElemMatch] = [] class DocumentWithLink(Document): link: Link["DocumentWithBackLink"] s: str = "TEST" class DocumentWithOptionalLink(Document): link: Optional[Link["DocumentWithBackLink"]] s: str = "TEST" class DocumentWithBackLink(Document): if IS_PYDANTIC_V2: back_link: BackLink[DocumentWithLink] = Field( json_schema_extra={"original_field": "link"}, ) else: back_link: BackLink[DocumentWithLink] = Field(original_field="link") i: int = 1 class DocumentWithOptionalBackLink(Document): if IS_PYDANTIC_V2: back_link: Optional[BackLink[DocumentWithLink]] = Field( json_schema_extra={"original_field": "link"}, ) else: back_link: Optional[BackLink[DocumentWithLink]] = Field( original_field="link" ) i: int = 1 class DocumentWithListLink(Document): link: List[Link["DocumentWithListBackLink"]] s: str = "TEST" class DocumentWithListBackLink(Document): if IS_PYDANTIC_V2: back_link: List[BackLink[DocumentWithListLink]] = Field( json_schema_extra={"original_field": "link"}, ) else: back_link: List[BackLink[DocumentWithListLink]] = Field( original_field="link" ) i: int = 1 class DocumentWithOptionalListBackLink(Document): if IS_PYDANTIC_V2: back_link: Optional[List[BackLink[DocumentWithListLink]]] = Field( json_schema_extra={"original_field": "link"}, ) else: back_link: Optional[List[BackLink[DocumentWithListLink]]] = Field( original_field="link" ) i: int = 1 class DocumentWithUnionTypeExpressionOptionalBackLink(Document): if IS_PYDANTIC_V2: back_link_list: type_union( List[BackLink[DocumentWithListLink]], None ) = Field(json_schema_extra={"original_field": "link"}) back_link: type_union(BackLink[DocumentWithLink], None) = Field( json_schema_extra={"original_field": "link"} ) else: back_link_list: type_union( List[BackLink[DocumentWithListLink]], None ) = Field(original_field="link") back_link: type_union(BackLink[DocumentWithLink], None) = Field( original_field="link" ) i: int = 1 class DocumentToBeLinked(Document): s: str = "TEST" class DocumentWithListOfLinks(Document): links: List[Link[DocumentToBeLinked]] s: str = "TEST" class DocumentWithTimeStampToTestConsistency(Document): ts: datetime.datetime = Field( default_factory=lambda: datetime.datetime.now(datetime.timezone.utc) ) class DocumentWithIndexMerging1(Document): class Settings: indexes = [ "s1", [ ("s2", pymongo.ASCENDING), ], IndexModel( [("s3", pymongo.ASCENDING)], name="s3_index", ), IndexModel( [("s4", pymongo.ASCENDING)], name="s4_index", ), ] class DocumentWithIndexMerging2(DocumentWithIndexMerging1): class Settings: merge_indexes = True indexes = [ "s0", "s1", [ ("s2", pymongo.DESCENDING), ], IndexModel( [("s3", pymongo.DESCENDING)], name="s3_index", ), ] class DocumentWithCustomInit(Document): s: ClassVar[str] = "TEST" @classmethod async def custom_init(cls): cls.s = "TEST2" class LinkDocumentForTextSeacrh(Document): i: int class DocumentWithTextIndexAndLink(Document): s: str link: Link[LinkDocumentForTextSeacrh] class 
Settings: indexes = [ pymongo.IndexModel( [("s", pymongo.TEXT)], name="text_index", ) ] class DocumentWithList(Document): list_values: List[str] class DocumentWithBsonBinaryField(Document): binary_field: BsonBinary if IS_PYDANTIC_V2: Pets = RootModel[List[str]] else: Pets = List[str] class DocumentWithRootModelAsAField(Document): pets: Pets class DocWithCallWrapper(Document): name: str if IS_PYDANTIC_V2: @validate_call def foo(self, bar: str) -> None: print(f"foo {bar}") class DocumentWithHttpUrlField(Document): url_field: HttpUrl class DocumentWithComplexDictKey(Document): dict_field: Dict[UUID, datetime.datetime] class DocumentWithIndexedObjectId(Document): pyid: Indexed(PydanticObjectId) uuid: Annotated[UUID4, Indexed(unique=True)] email: Annotated[EmailStr, Indexed(unique=True)] class DocumentToTestSync(Document): s: str = "TEST" i: int = 1 n: Nested = Nested( integer=1, option_1=Option1(s="test"), union=Option1(s="test") ) o: Optional[Option2] = None d: Dict[str, Any] = {} class Settings: use_state_management = True class DocumentWithLinkForNesting(Document): link: Link["DocumentWithBackLinkForNesting"] s: str class Settings: max_nesting_depths_per_field = {"link": 0} class DocumentWithBackLinkForNesting(Document): if IS_PYDANTIC_V2: back_link: BackLink[DocumentWithLinkForNesting] = Field( json_schema_extra={"original_field": "link"}, ) else: back_link: BackLink[DocumentWithLinkForNesting] = Field( original_field="link" ) i: int class Settings: max_nesting_depths_per_field = {"back_link": 5} class LongSelfLink(Document): link: Optional[Link["LongSelfLink"]] = None class Settings: max_nesting_depth = 50 class DictEnum(str, Enum): RED = "Red" BLUE = "Blue" class DocumentWithEnumKeysDict(Document): color: Dict[DictEnum, str] class BsonRegexDoc(Document): regex: Optional[Regex] = None if IS_PYDANTIC_V2: model_config = ConfigDict( arbitrary_types_allowed=True, ) else: class Config: arbitrary_types_allowed = True class NativeRegexDoc(Document): regex: Optional[re.Pattern] python-beanie-1.29.0/tests/odm/operators/000077500000000000000000000000001473701376500203215ustar00rootroot00000000000000python-beanie-1.29.0/tests/odm/operators/__init__.py000066400000000000000000000000001473701376500224200ustar00rootroot00000000000000python-beanie-1.29.0/tests/odm/operators/find/000077500000000000000000000000001473701376500212415ustar00rootroot00000000000000python-beanie-1.29.0/tests/odm/operators/find/__init__.py000066400000000000000000000000001473701376500233400ustar00rootroot00000000000000python-beanie-1.29.0/tests/odm/operators/find/test_array.py000066400000000000000000000013321473701376500237670ustar00rootroot00000000000000from beanie.odm.operators.find.array import All, ElemMatch, Size from tests.odm.models import PackageElemMatch, Sample async def test_all(): q = All(Sample.integer, [1, 2, 3]) assert q == {"integer": {"$all": [1, 2, 3]}} async def test_elem_match(): q = ElemMatch(Sample.integer, {"a": "b"}) assert q == {"integer": {"$elemMatch": {"a": "b"}}} async def test_size(): q = Size(Sample.integer, 4) assert q == {"integer": {"$size": 4}} async def test_elem_match_nested(): q = ElemMatch( PackageElemMatch.releases, major_ver=7, minor_ver=1, build_ver=0 ) assert q == { "releases": { "$elemMatch": {"major_ver": 7, "minor_ver": 1, "build_ver": 0} } } python-beanie-1.29.0/tests/odm/operators/find/test_bitwise.py000066400000000000000000000012401473701376500243150ustar00rootroot00000000000000from beanie.odm.operators.find.bitwise import ( BitsAllClear, BitsAllSet, BitsAnyClear, BitsAnySet, ) from 
tests.odm.models import Sample async def test_bits_all_clear(): q = BitsAllClear(Sample.integer, "smth") assert q == {"integer": {"$bitsAllClear": "smth"}} async def test_bits_all_set(): q = BitsAllSet(Sample.integer, "smth") assert q == {"integer": {"$bitsAllSet": "smth"}} async def test_any_clear(): q = BitsAnyClear(Sample.integer, "smth") assert q == {"integer": {"$bitsAnyClear": "smth"}} async def test_any_set(): q = BitsAnySet(Sample.integer, "smth") assert q == {"integer": {"$bitsAnySet": "smth"}} python-beanie-1.29.0/tests/odm/operators/find/test_comparison.py000066400000000000000000000035061473701376500250300ustar00rootroot00000000000000from beanie.odm.operators.find.comparison import ( GT, GTE, LT, LTE, NE, Eq, In, NotIn, ) from tests.odm.models import Sample async def test_eq(): q = Sample.integer == 1 assert q == {"integer": 1} q = Eq(Sample.integer, 1) assert q == {"integer": 1} q = Eq("integer", 1) assert q == {"integer": 1} async def test_gt(): q = Sample.integer > 1 assert q == {"integer": {"$gt": 1}} q = GT(Sample.integer, 1) assert q == {"integer": {"$gt": 1}} q = GT("integer", 1) assert q == {"integer": {"$gt": 1}} async def test_gte(): q = Sample.integer >= 1 assert q == {"integer": {"$gte": 1}} q = GTE(Sample.integer, 1) assert q == {"integer": {"$gte": 1}} q = GTE("integer", 1) assert q == {"integer": {"$gte": 1}} async def test_in(): q = In(Sample.integer, [1]) assert q == {"integer": {"$in": [1]}} q = In(Sample.integer, [1]) assert q == {"integer": {"$in": [1]}} async def test_lt(): q = Sample.integer < 1 assert q == {"integer": {"$lt": 1}} q = LT(Sample.integer, 1) assert q == {"integer": {"$lt": 1}} q = LT("integer", 1) assert q == {"integer": {"$lt": 1}} async def test_lte(): q = Sample.integer <= 1 assert q == {"integer": {"$lte": 1}} q = LTE(Sample.integer, 1) assert q == {"integer": {"$lte": 1}} q = LTE("integer", 1) assert q == {"integer": {"$lte": 1}} async def test_ne(): q = Sample.integer != 1 assert q == {"integer": {"$ne": 1}} q = NE(Sample.integer, 1) assert q == {"integer": {"$ne": 1}} q = NE("integer", 1) assert q == {"integer": {"$ne": 1}} async def test_nin(): q = NotIn(Sample.integer, [1]) assert q == {"integer": {"$nin": [1]}} q = NotIn(Sample.integer, [1]) assert q == {"integer": {"$nin": [1]}} python-beanie-1.29.0/tests/odm/operators/find/test_element.py000066400000000000000000000007411473701376500243050ustar00rootroot00000000000000from beanie.odm.operators.find.element import Exists, Type from tests.odm.models import Sample async def test_exists(): q = Exists(Sample.integer, True) assert q == {"integer": {"$exists": True}} q = Exists(Sample.integer, False) assert q == {"integer": {"$exists": False}} q = Exists(Sample.integer) assert q == {"integer": {"$exists": True}} async def test_type(): q = Type(Sample.integer, "smth") assert q == {"integer": {"$type": "smth"}} python-beanie-1.29.0/tests/odm/operators/find/test_evaluation.py000066400000000000000000000034651473701376500250310ustar00rootroot00000000000000from beanie.odm.operators.find.evaluation import ( Expr, JsonSchema, Mod, RegEx, Text, Where, ) from tests.odm.models import Sample async def test_expr(): q = Expr({"a": "B"}) assert q == {"$expr": {"a": "B"}} async def test_json_schema(): q = JsonSchema({"a": "B"}) assert q == {"$jsonSchema": {"a": "B"}} async def test_mod(): q = Mod(Sample.integer, 3, 2) assert q == {"integer": {"$mod": [3, 2]}} async def test_regex(): q = RegEx(Sample.integer, "smth") assert q == {"integer": {"$regex": "smth"}} q = RegEx(Sample.integer, "smth", "options") 
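# Editorial sketch (not part of the original test): the optional third
# argument to RegEx carries MongoDB regex flags, e.g.
#     RegEx(Sample.string, "^test", "i")  # hypothetical case-insensitive match
# which serializes to {"string": {"$regex": "^test", "$options": "i"}}.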
assert q == {"integer": {"$regex": "smth", "$options": "options"}} async def test_text(): q = Text("something") assert q == { "$text": { "$search": "something", "$caseSensitive": False, "$diacriticSensitive": False, } } q = Text("something", case_sensitive=True) assert q == { "$text": { "$search": "something", "$caseSensitive": True, "$diacriticSensitive": False, } } q = Text("something", diacritic_sensitive=True) assert q == { "$text": { "$search": "something", "$caseSensitive": False, "$diacriticSensitive": True, } } q = Text("something", diacritic_sensitive=None) assert q == { "$text": { "$search": "something", "$caseSensitive": False, } } q = Text("something", language="test") assert q == { "$text": { "$search": "something", "$caseSensitive": False, "$diacriticSensitive": False, "$language": "test", } } async def test_where(): q = Where("test") assert q == {"$where": "test"} python-beanie-1.29.0/tests/odm/operators/find/test_geospatial.py000066400000000000000000000055001473701376500250020ustar00rootroot00000000000000from beanie.odm.operators.find.geospatial import ( Box, GeoIntersects, GeoWithin, Near, NearSphere, ) from tests.odm.models import Sample async def test_geo_intersects(): q = GeoIntersects( Sample.geo, geo_type="Polygon", coordinates=[[1, 1], [2, 2], [3, 3]] ) assert q == { "geo": { "$geoIntersects": { "$geometry": { "type": "Polygon", "coordinates": [[1, 1], [2, 2], [3, 3]], } } } } async def test_geo_within(): q = GeoWithin( Sample.geo, geo_type="Polygon", coordinates=[[1, 1], [2, 2], [3, 3]] ) assert q == { "geo": { "$geoWithin": { "$geometry": { "type": "Polygon", "coordinates": [[1, 1], [2, 2], [3, 3]], } } } } async def test_box(): q = Box(Sample.geo, lower_left=[1, 3], upper_right=[2, 4]) assert q == {"geo": {"$geoWithin": {"$box": [[1, 3], [2, 4]]}}} async def test_near(): q = Near(Sample.geo, longitude=1.1, latitude=2.2) assert q == { "geo": { "$near": { "$geometry": {"type": "Point", "coordinates": [1.1, 2.2]} } } } q = Near(Sample.geo, longitude=1.1, latitude=2.2, max_distance=1) assert q == { "geo": { "$near": { "$geometry": {"type": "Point", "coordinates": [1.1, 2.2]}, "$maxDistance": 1, } } } q = Near( Sample.geo, longitude=1.1, latitude=2.2, max_distance=1, min_distance=0.5, ) assert q == { "geo": { "$near": { "$geometry": {"type": "Point", "coordinates": [1.1, 2.2]}, "$maxDistance": 1, "$minDistance": 0.5, } } } async def test_near_sphere(): q = NearSphere(Sample.geo, longitude=1.1, latitude=2.2) assert q == { "geo": { "$nearSphere": { "$geometry": {"type": "Point", "coordinates": [1.1, 2.2]} } } } q = NearSphere(Sample.geo, longitude=1.1, latitude=2.2, max_distance=1) assert q == { "geo": { "$nearSphere": { "$geometry": {"type": "Point", "coordinates": [1.1, 2.2]}, "$maxDistance": 1, } } } q = NearSphere( Sample.geo, longitude=1.1, latitude=2.2, max_distance=1, min_distance=0.5, ) assert q == { "geo": { "$nearSphere": { "$geometry": {"type": "Point", "coordinates": [1.1, 2.2]}, "$maxDistance": 1, "$minDistance": 0.5, } } } python-beanie-1.29.0/tests/odm/operators/find/test_logical.py000066400000000000000000000021601473701376500242630ustar00rootroot00000000000000import pytest from beanie.odm.operators.find.logical import And, Nor, Not, Or from tests.odm.models import Sample async def test_and(): q = And(Sample.integer == 1) assert q == {"integer": 1} q = And(Sample.integer == 1, Sample.nested.integer > 3) assert q == {"$and": [{"integer": 1}, {"nested.integer": {"$gt": 3}}]} async def test_not(preset_documents): q = Not(Sample.integer == 1) assert q == 
{"integer": {"$not": {"$eq": 1}}} docs = await Sample.find(q).to_list() assert len(docs) == 7 with pytest.raises(AttributeError): q = Not(And(Sample.integer == 1, Sample.nested.integer > 3)) await Sample.find(q).to_list() async def test_nor(): q = Nor(Sample.integer == 1) assert q == {"$nor": [{"integer": 1}]} q = Nor(Sample.integer == 1, Sample.nested.integer > 3) assert q == {"$nor": [{"integer": 1}, {"nested.integer": {"$gt": 3}}]} async def test_or(): q = Or(Sample.integer == 1) assert q == {"integer": 1} q = Or(Sample.integer == 1, Sample.nested.integer > 3) assert q == {"$or": [{"integer": 1}, {"nested.integer": {"$gt": 3}}]} python-beanie-1.29.0/tests/odm/operators/update/000077500000000000000000000000001473701376500216035ustar00rootroot00000000000000python-beanie-1.29.0/tests/odm/operators/update/__init__.py000066400000000000000000000000001473701376500237020ustar00rootroot00000000000000python-beanie-1.29.0/tests/odm/operators/update/test_array.py000066400000000000000000000011761473701376500243370ustar00rootroot00000000000000from beanie.odm.operators.update.array import ( AddToSet, Pop, Pull, PullAll, Push, ) from tests.odm.models import Sample def test_add_to_set(): q = AddToSet({Sample.integer: 2}) assert q == {"$addToSet": {"integer": 2}} def test_pop(): q = Pop({Sample.integer: 2}) assert q == {"$pop": {"integer": 2}} def test_pull(): q = Pull({Sample.integer: 2}) assert q == {"$pull": {"integer": 2}} def test_push(): q = Push({Sample.integer: 2}) assert q == {"$push": {"integer": 2}} def test_pull_all(): q = PullAll({Sample.integer: 2}) assert q == {"$pullAll": {"integer": 2}} python-beanie-1.29.0/tests/odm/operators/update/test_bitwise.py000066400000000000000000000002641473701376500246640ustar00rootroot00000000000000from beanie.odm.operators.update.bitwise import Bit from tests.odm.models import Sample def test_bit(): q = Bit({Sample.integer: 2}) assert q == {"$bit": {"integer": 2}} python-beanie-1.29.0/tests/odm/operators/update/test_general.py000066400000000000000000000020741473701376500246340ustar00rootroot00000000000000from beanie.odm.operators.update.general import ( CurrentDate, Inc, Max, Min, Mul, Rename, Set, SetOnInsert, Unset, ) from tests.odm.models import Sample def test_set(): q = Set({Sample.integer: 2}) assert q == {"$set": {"integer": 2}} def test_current_date(): q = CurrentDate({Sample.integer: 2}) assert q == {"$currentDate": {"integer": 2}} def test_inc(): q = Inc({Sample.integer: 2}) assert q == {"$inc": {"integer": 2}} def test_min(): q = Min({Sample.integer: 2}) assert q == {"$min": {"integer": 2}} def test_max(): q = Max({Sample.integer: 2}) assert q == {"$max": {"integer": 2}} def test_mul(): q = Mul({Sample.integer: 2}) assert q == {"$mul": {"integer": 2}} def test_rename(): q = Rename({Sample.integer: 2}) assert q == {"$rename": {"integer": 2}} def test_set_on_insert(): q = SetOnInsert({Sample.integer: 2}) assert q == {"$setOnInsert": {"integer": 2}} def test_unset(): q = Unset({Sample.integer: 2}) assert q == {"$unset": {"integer": 2}} python-beanie-1.29.0/tests/odm/query/000077500000000000000000000000001473701376500174505ustar00rootroot00000000000000python-beanie-1.29.0/tests/odm/query/__init__.py000066400000000000000000000000001473701376500215470ustar00rootroot00000000000000python-beanie-1.29.0/tests/odm/query/test_aggregate.py000066400000000000000000000147021473701376500230130ustar00rootroot00000000000000import pytest from pydantic import Field from pydantic.main import BaseModel from pymongo.errors import OperationFailure from beanie.odm.enums 
import SortDirection from beanie.odm.utils.find import construct_lookup_queries from tests.odm.models import DocumentWithTextIndexAndLink, Sample async def test_aggregate(preset_documents): q = Sample.aggregate( [{"$group": {"_id": "$string", "total": {"$sum": "$integer"}}}] ) assert q.get_aggregation_pipeline() == [ {"$group": {"_id": "$string", "total": {"$sum": "$integer"}}} ] result = await q.to_list() assert len(result) == 4 assert {"_id": "test_3", "total": 3} in result assert {"_id": "test_1", "total": 3} in result assert {"_id": "test_0", "total": 0} in result assert {"_id": "test_2", "total": 6} in result async def test_aggregate_with_filter(preset_documents): q = Sample.find(Sample.increment >= 4).aggregate( [{"$group": {"_id": "$string", "total": {"$sum": "$integer"}}}] ) assert q.get_aggregation_pipeline() == [ {"$match": {"increment": {"$gte": 4}}}, {"$group": {"_id": "$string", "total": {"$sum": "$integer"}}}, ] result = await q.to_list() assert len(result) == 3 assert {"_id": "test_1", "total": 2} in result assert {"_id": "test_2", "total": 6} in result assert {"_id": "test_3", "total": 3} in result async def test_aggregate_with_sort_skip(preset_documents): q = Sample.find(sort="_id", skip=2).aggregate( [{"$group": {"_id": "$string", "total": {"$sum": "$integer"}}}] ) assert q.get_aggregation_pipeline() == [ {"$group": {"_id": "$string", "total": {"$sum": "$integer"}}}, {"$sort": {"_id": SortDirection.ASCENDING}}, {"$skip": 2}, ] assert await q.to_list() == [ {"_id": "test_2", "total": 6}, {"_id": "test_3", "total": 3}, ] async def test_aggregate_with_sort_limit(preset_documents): q = Sample.find(sort="_id", limit=2).aggregate( [{"$group": {"_id": "$string", "total": {"$sum": "$integer"}}}] ) assert q.get_aggregation_pipeline() == [ {"$group": {"_id": "$string", "total": {"$sum": "$integer"}}}, {"$sort": {"_id": SortDirection.ASCENDING}}, {"$limit": 2}, ] assert await q.to_list() == [ {"_id": "test_0", "total": 0}, {"_id": "test_1", "total": 3}, ] async def test_aggregate_with_projection_model(preset_documents): class OutputItem(BaseModel): id: str = Field(None, alias="_id") total: int ids = [] q = Sample.find(Sample.increment >= 4).aggregate( [{"$group": {"_id": "$string", "total": {"$sum": "$integer"}}}], projection_model=OutputItem, ) assert q.get_aggregation_pipeline() == [ {"$match": {"increment": {"$gte": 4}}}, {"$group": {"_id": "$string", "total": {"$sum": "$integer"}}}, {"$project": {"_id": 1, "total": 1}}, ] async for i in q: if i.id == "test_1": assert i.total == 2 elif i.id == "test_2": assert i.total == 6 elif i.id == "test_3": assert i.total == 3 else: raise KeyError ids.append(i.id) assert set(ids) == {"test_1", "test_2", "test_3"} async def test_aggregate_with_session(preset_documents, session): q = Sample.find(Sample.increment >= 4).aggregate( [{"$group": {"_id": "$string", "total": {"$sum": "$integer"}}}], session=session, ) assert q.session == session q = Sample.find(Sample.increment >= 4, session=session).aggregate( [{"$group": {"_id": "$string", "total": {"$sum": "$integer"}}}] ) assert q.session == session result = await q.to_list() assert len(result) == 3 assert {"_id": "test_1", "total": 2} in result assert {"_id": "test_2", "total": 6} in result assert {"_id": "test_3", "total": 3} in result async def test_aggregate_pymongo_kwargs(preset_documents): with pytest.raises(OperationFailure): await ( Sample.find(Sample.increment >= 4) .aggregate( [ { "$group": { "_id": "$string", "total": {"$sum": "$integer"}, } } ], wrong=True, ) .to_list() ) async def 
test_clone(preset_documents): q = Sample.find(Sample.increment >= 4).aggregate( [{"$group": {"_id": "$string", "total": {"$sum": "$integer"}}}] ) new_q = q.clone() new_q.aggregation_pipeline.append({"a": "b"}) assert q.get_aggregation_pipeline() == [ {"$match": {"increment": {"$gte": 4}}}, {"$group": {"_id": "$string", "total": {"$sum": "$integer"}}}, ] assert new_q.get_aggregation_pipeline() == [ {"$match": {"increment": {"$gte": 4}}}, {"$group": {"_id": "$string", "total": {"$sum": "$integer"}}}, {"a": "b"}, ] @pytest.mark.parametrize("text_query_count", [0, 1, 2]) @pytest.mark.parametrize("non_text_query_count", [0, 1, 2]) async def test_with_text_queries( text_query_count: int, non_text_query_count: int ): text_query = {"$text": {"$search": "text_search"}} non_text_query = {"s": "test_string"} aggregation_pipeline = [{"$count": "count"}] queries = [] if text_query_count: queries.append(text_query) if text_query_count > 1: queries.append(text_query) if non_text_query_count: queries.append(non_text_query) if non_text_query_count > 1: queries.append(non_text_query) query = DocumentWithTextIndexAndLink.find(*queries, fetch_links=True) expected_aggregation_pipeline = [] if text_query_count: expected_aggregation_pipeline.append( {"$match": text_query} if text_query_count == 1 else {"$match": {"$and": [text_query, text_query]}} ) expected_aggregation_pipeline.extend( construct_lookup_queries(query.document_model) ) if non_text_query_count: expected_aggregation_pipeline.append( {"$match": non_text_query} if non_text_query_count == 1 else {"$match": {"$and": [non_text_query, non_text_query]}} ) expected_aggregation_pipeline.extend(aggregation_pipeline) assert ( query.build_aggregation_pipeline(*aggregation_pipeline) == expected_aggregation_pipeline ) python-beanie-1.29.0/tests/odm/query/test_aggregate_methods.py000066400000000000000000000042001473701376500245260ustar00rootroot00000000000000from tests.odm.models import Sample async def test_sum(preset_documents, session): n = await Sample.find_many(Sample.integer == 1).sum(Sample.increment) assert n == 12 n = await Sample.find_many(Sample.integer == 1).sum( Sample.increment, session=session ) assert n == 12 async def test_sum_without_docs(session): n = await Sample.find_many(Sample.integer == 1).sum(Sample.increment) assert n is None n = await Sample.find_many(Sample.integer == 1).sum( Sample.increment, session=session ) assert n is None async def test_avg(preset_documents, session): n = await Sample.find_many(Sample.integer == 1).avg(Sample.increment) assert n == 4 n = await Sample.find_many(Sample.integer == 1).avg( Sample.increment, session=session ) assert n == 4 async def test_avg_without_docs(session): n = await Sample.find_many(Sample.integer == 1).avg(Sample.increment) assert n is None n = await Sample.find_many(Sample.integer == 1).avg( Sample.increment, session=session ) assert n is None async def test_max(preset_documents, session): n = await Sample.find_many(Sample.integer == 1).max(Sample.increment) assert n == 5 n = await Sample.find_many(Sample.integer == 1).max( Sample.increment, session=session ) assert n == 5 async def test_max_without_docs(session): n = await Sample.find_many(Sample.integer == 1).max(Sample.increment) assert n is None n = await Sample.find_many(Sample.integer == 1).max( Sample.increment, session=session ) assert n is None async def test_min(preset_documents, session): n = await Sample.find_many(Sample.integer == 1).min(Sample.increment) assert n == 3 n = await Sample.find_many(Sample.integer == 1).min( 
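# Note: the sum/avg/max/min helpers exercised in this file are convenience wrappers that run a $group aggregation under the hood; a rough hand-written equivalent (a sketch, not necessarily the exact pipeline Beanie builds) is: await Sample.find_many(Sample.integer == 1).aggregate([{"$group": {"_id": None, "min": {"$min": "$increment"}}}]).to_list()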
Sample.increment, session=session ) assert n == 3 async def test_min_without_docs(session): n = await Sample.find_many(Sample.integer == 1).min(Sample.increment) assert n is None n = await Sample.find_many(Sample.integer == 1).min( Sample.increment, session=session ) assert n is None python-beanie-1.29.0/tests/odm/query/test_delete.py000066400000000000000000000072061473701376500223300ustar00rootroot00000000000000import pytest from beanie.odm.queries.delete import DeleteMany from tests.odm.models import Sample async def test_delete_many(preset_documents): count_before = await Sample.count() count_find = ( await Sample.find_many(Sample.integer > 1) .find_many(Sample.nested.optional == None) .count() ) # noqa delete_result = ( await Sample.find_many(Sample.integer > 1) .find_many(Sample.nested.optional == None) .delete() ) # noqa count_deleted = delete_result.deleted_count count_after = await Sample.count() assert count_before - count_find == count_after assert count_after + count_deleted == count_before assert isinstance( Sample.find_many(Sample.integer > 1) .find_many(Sample.nested.optional == None) .delete_many(), DeleteMany, ) # noqa async def test_delete_all(preset_documents): count_before = await Sample.count() delete_result = await Sample.delete_all() count_deleted = delete_result.deleted_count count_after = await Sample.count() assert count_after == 0 assert count_after + count_deleted == count_before async def test_delete_self(preset_documents): count_before = await Sample.count() result = ( await Sample.find_many(Sample.integer > 1) .find_many(Sample.nested.optional == None) .to_list() ) # noqa a = result[0] delete_result = await a.delete() count_deleted = delete_result.deleted_count count_after = await Sample.count() assert count_before == count_after + 1 assert count_deleted == 1 async def test_delete_one(preset_documents): count_before = await Sample.count() delete_result = ( await Sample.find_one(Sample.integer > 1) .find_one(Sample.nested.optional == None) .delete() ) # noqa count_after = await Sample.count() count_deleted = delete_result.deleted_count assert count_before == count_after + 1 assert count_deleted == 1 count_before = await Sample.count() delete_result = ( await Sample.find_one(Sample.integer > 1) .find_one(Sample.nested.optional == None) .delete_one() ) # noqa count_deleted = delete_result.deleted_count count_after = await Sample.count() assert count_before == count_after + 1 assert count_deleted == 1 async def test_delete_many_with_session(preset_documents, session): count_before = await Sample.count() count_find = ( await Sample.find_many(Sample.integer > 1) .find_many(Sample.nested.optional == None) .count() ) # noqa q = ( Sample.find_many(Sample.integer > 1) .find_many(Sample.nested.optional == None) .delete(session=session) ) # noqa assert q.session == session q = ( Sample.find_many(Sample.integer > 1) .find_many(Sample.nested.optional == None) .delete() .set_session(session=session) ) # noqa assert q.session == session delete_result = await q count_deleted = delete_result.deleted_count count_after = await Sample.count() assert count_before - count_find == count_after assert count_after + count_deleted == count_before async def test_delete_pymongo_kwargs(preset_documents): with pytest.raises(TypeError): await Sample.find_many(Sample.increment > 4).delete(wrong="integer_1") delete_result = await Sample.find_many(Sample.increment > 4).delete( hint="integer_1" ) assert delete_result is not None delete_result = await Sample.find_one(Sample.increment > 4).delete( 
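# As the TypeError check above suggests, extra keyword arguments are forwarded to the underlying PyMongo delete call, which rejects unknown names while accepting supported options; e.g. (sketch): await Sample.find_many(Sample.increment > 4).delete(hint="integer_1")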
hint="integer_1" ) assert delete_result is not None python-beanie-1.29.0/tests/odm/query/test_find.py000066400000000000000000000264321473701376500220100ustar00rootroot00000000000000import datetime from enum import Enum import pytest from pydantic import BaseModel from beanie.odm.enums import SortDirection from tests.odm.models import ( Color, DocumentWithBsonEncodersFiledsTypes, House, Sample, ) async def test_find_query(): q = Sample.find_many(Sample.integer == 1).get_filter_query() assert q == {"integer": 1} q = Sample.find_many( Sample.integer == 1, Sample.nested.integer >= 2 ).get_filter_query() assert q == {"$and": [{"integer": 1}, {"nested.integer": {"$gte": 2}}]} q = ( Sample.find_many(Sample.integer == 1) .find_many(Sample.nested.integer >= 2) .get_filter_query() ) assert q == {"$and": [{"integer": 1}, {"nested.integer": {"$gte": 2}}]} q = Sample.find().get_filter_query() assert q == {} async def test_find_many(preset_documents): result = ( await Sample.find_many(Sample.integer > 1) .find_many(Sample.nested.optional == None) .to_list() ) # noqa assert len(result) == 2 for a in result: assert a.integer > 1 assert a.nested.optional is None len_result = 0 async for a in Sample.find_many(Sample.integer > 1).find_many( Sample.nested.optional == None ): # noqa assert a in result len_result += 1 assert len_result == len(result) async def test_find_many_skip(preset_documents): q = Sample.find_many(Sample.integer > 1, skip=2) assert q.skip_number == 2 q = Sample.find_many(Sample.integer > 1).skip(2) assert q.skip_number == 2 result = ( await Sample.find_many(Sample.increment > 2) .find_many(Sample.nested.optional == None) .skip(1) .to_list() ) assert len(result) == 3 for sample in result: assert sample.increment > 2 assert sample.nested.optional is None len_result = 0 async for sample in ( Sample.find_many(Sample.increment > 2) .find_many(Sample.nested.optional == None) .skip(1) ): # noqa assert sample in result len_result += 1 assert len_result == len(result) async def test_find_many_limit(preset_documents): q = Sample.find_many(Sample.integer > 1, limit=2) assert q.limit_number == 2 q = Sample.find_many(Sample.integer > 1).limit(2) assert q.limit_number == 2 result = ( await Sample.find_many(Sample.increment > 2) .find_many(Sample.nested.optional == None) .sort(Sample.increment) .limit(2) .to_list() ) # noqa assert len(result) == 2 for a in result: assert a.increment > 2 assert a.nested.optional is None len_result = 0 async for a in ( Sample.find_many(Sample.increment > 2) .find(Sample.nested.optional == None) .sort(Sample.increment) .limit(2) ): # noqa assert a in result len_result += 1 assert len_result == len(result) async def test_find_all(preset_documents): result = await Sample.find_all().to_list() assert len(result) == 10 len_result = 0 async for a in Sample.find_all(): assert a in result len_result += 1 assert len_result == len(result) async def test_find_one(preset_documents): a = await Sample.find_one(Sample.integer > 1).find_one( Sample.nested.optional == None ) # noqa assert a.integer > 1 assert a.nested.optional is None a = await Sample.find_one(Sample.integer > 100).find_one( Sample.nested.optional == None ) # noqa assert a is None async def test_get(preset_documents): a = await Sample.find_one(Sample.integer > 1).find_one( Sample.nested.optional == None ) # noqa assert a.integer > 1 assert a.nested.optional is None new_a = await Sample.get(a.id) assert new_a == a # check for another type new_a = await Sample.get(str(a.id)) assert new_a == a async def 
test_sort(preset_documents): q = Sample.find_many(Sample.integer > 1, sort="-integer") assert q.sort_expressions == [("integer", SortDirection.DESCENDING)] q = Sample.find_many(Sample.integer > 1, sort="integer") assert q.sort_expressions == [("integer", SortDirection.ASCENDING)] q = Sample.find_many(Sample.integer > 1).sort("-integer") assert q.sort_expressions == [("integer", SortDirection.DESCENDING)] q = ( Sample.find_many(Sample.integer > 1) .find_many(Sample.integer < 100) .sort("-integer") ) assert q.sort_expressions == [("integer", SortDirection.DESCENDING)] result = await Sample.find_many( Sample.integer > 1, sort="-integer" ).to_list() i_buf = None for a in result: if i_buf is None: i_buf = a.integer assert i_buf >= a.integer i_buf = a.integer result = await Sample.find_many( Sample.integer > 1, sort="+integer" ).to_list() i_buf = None for a in result: if i_buf is None: i_buf = a.integer assert i_buf <= a.integer i_buf = a.integer result = await Sample.find_many( Sample.integer > 1, sort="integer" ).to_list() i_buf = None for a in result: if i_buf is None: i_buf = a.integer assert i_buf <= a.integer i_buf = a.integer result = await Sample.find_many( Sample.integer > 1, sort=-Sample.integer ).to_list() i_buf = None for a in result: if i_buf is None: i_buf = a.integer assert i_buf >= a.integer i_buf = a.integer result = ( await Sample.find_many(Sample.integer > 1) .sort([Sample.const, -Sample.integer]) .to_list() ) i_buf = None for a in result: if i_buf is None: i_buf = a.integer assert i_buf >= a.integer i_buf = a.integer with pytest.raises(TypeError): Sample.find_many(Sample.integer > 1, sort=1) async def test_find_many_with_projection(preset_documents): class SampleProjection(BaseModel): string: str integer: int result = ( await Sample.find_many(Sample.integer > 1) .find_many(Sample.nested.optional == None) .project(projection_model=SampleProjection) .to_list() ) assert result == [ SampleProjection(string="test_2", integer=2), SampleProjection(string="test_2", integer=2), ] result = ( await Sample.find_many(Sample.integer > 1) .find_many( Sample.nested.optional == None, projection_model=SampleProjection ) .to_list() ) assert result == [ SampleProjection(string="test_2", integer=2), SampleProjection(string="test_2", integer=2), ] async def test_find_many_with_custom_projection(preset_documents): class SampleProjection(BaseModel): string: str i: int class Settings: projection = {"string": 1, "i": "$nested.integer"} result = ( await Sample.find_many(Sample.integer > 1) .find_many(Sample.nested.optional == None) .project(projection_model=SampleProjection) .sort(Sample.nested.integer) .to_list() ) assert result == [ SampleProjection(string="test_2", i=3), SampleProjection(string="test_2", i=4), ] async def test_find_many_with_session(preset_documents, session): q_1 = ( Sample.find_many(Sample.integer > 1) .find_many(Sample.nested.optional == None) .set_session(session) ) assert q_1.session == session q_2 = Sample.find_many(Sample.integer > 1).find_many( Sample.nested.optional == None, session=session ) assert q_2.session == session result = await q_2.to_list() assert len(result) == 2 for a in result: assert a.integer > 1 assert a.nested.optional is None len_result = 0 async for a in Sample.find_many(Sample.integer > 1).find_many( Sample.nested.optional == None ): # noqa assert a in result len_result += 1 assert len_result == len(result) async def test_bson_encoders_filed_types(): custom = DocumentWithBsonEncodersFiledsTypes( color="7fffd4", 
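# The model constructed here presumably declares custom encoders in its Settings, along the lines of this sketch (the real definition lives in tests.odm.models): class Settings: bson_encoders = {Color: lambda c: c.as_hex()} ; that mapping is what lets Color round-trip through MongoDB in the assertions below.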
timestamp=datetime.datetime.now(tz=datetime.timezone.utc), ) c = await custom.insert() c_fromdb = await DocumentWithBsonEncodersFiledsTypes.find_one( DocumentWithBsonEncodersFiledsTypes.color == Color("7fffd4") ) assert c_fromdb.color.as_hex() == c.color.as_hex() async def test_find_by_datetime(preset_documents): datetime_1 = datetime.datetime.now( tz=datetime.timezone.utc ) - datetime.timedelta(days=7) datetime_2 = datetime.datetime.now( tz=datetime.timezone.utc ) - datetime.timedelta(days=2) docs = await Sample.find( Sample.timestamp >= datetime_1, Sample.timestamp <= datetime_2, ).to_list() assert len(docs) == 5 async def test_find_first_or_none(preset_documents): doc = ( await Sample.find(Sample.increment > 1) .sort(-Sample.increment) .first_or_none() ) assert doc.increment == 9 doc = ( await Sample.find(Sample.increment > 9) .sort(-Sample.increment) .first_or_none() ) assert doc is None async def test_find_pymongo_kwargs(preset_documents): with pytest.raises(TypeError): await Sample.find_many(Sample.increment > 1, wrong=100).to_list() await Sample.find_many( Sample.increment > 1, Sample.integer > 1, allow_disk_use=True ).to_list() await Sample.find_many( Sample.increment > 1, Sample.integer > 1, hint="integer_1" ).to_list() await House.find_many( House.height > 1, fetch_links=True, hint="height_1" ).to_list() await House.find_many( House.height > 1, fetch_links=True, allowDiskUse=True ).to_list() await Sample.find_one( Sample.increment > 1, Sample.integer > 1, hint="integer_1" ) await House.find_one(House.height > 1, fetch_links=True, hint="height_1") def test_find_clone(): q = ( Sample.find_many(Sample.integer == 1) .find_many(Sample.nested.integer >= 2) .sort(Sample.integer) .limit(100) ) new_q = q.clone() new_q.find(Sample.nested.integer >= 100).sort(Sample.string).limit(10) assert q.get_filter_query() == { "$and": [{"integer": 1}, {"nested.integer": {"$gte": 2}}] } assert q.sort_expressions == [("integer", SortDirection.ASCENDING)] assert q.limit_number == 100 assert new_q.get_filter_query() == { "$and": [ {"integer": 1}, {"nested.integer": {"$gte": 2}}, {"nested.integer": {"$gte": 100}}, ] } assert new_q.sort_expressions == [ ("integer", SortDirection.ASCENDING), ("string", SortDirection.ASCENDING), ] assert new_q.limit_number == 10 async def test_find_many_with_enum_in_query(preset_documents): class TestEnum(str, Enum): INTEGER = Sample.integer SAMPLE_NESTED_OPTIONAL = Sample.nested.optional CONST = "const" CONST_VALUE = "TEST" filter_query = { TestEnum.INTEGER: {"$gt": 1}, TestEnum.SAMPLE_NESTED_OPTIONAL: {"$type": "null"}, TestEnum.CONST: TestEnum.CONST_VALUE, } result = await Sample.find_many(filter_query).to_list() assert len(result) == 2 python-beanie-1.29.0/tests/odm/query/test_update.py000066400000000000000000000160651473701376500223530ustar00rootroot00000000000000import asyncio import pytest from beanie.odm.operators.update.general import Max, Set from beanie.odm.queries.update import UpdateResponse from tests.odm.models import Sample async def test_update_query(): q = ( Sample.find_many(Sample.integer == 1) .update(Set({Sample.integer: 10})) .update_query ) assert q == {"$set": {"integer": 10}} q = ( Sample.find_many(Sample.integer == 1) .update(Max({Sample.integer: 10}), Set({Sample.optional: None})) .update_query ) assert q == {"$max": {"integer": 10}, "$set": {"optional": None}} q = ( Sample.find_many(Sample.integer == 1) .update(Set({Sample.integer: 10}), Set({Sample.optional: None})) .update_query ) assert q == {"$set": {"optional": None}} q = ( 
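# Chained .update(...) calls merge their operator documents into a single update query: distinct operators such as $max and $set coexist, while repeating the same operator overwrites matching keys, as the assertions in this test demonstrate.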
Sample.find_many(Sample.integer == 1) .update(Max({Sample.integer: 10})) .update(Set({Sample.optional: None})) .update_query ) assert q == {"$max": {"integer": 10}, "$set": {"optional": None}} q = ( Sample.find_many(Sample.integer == 1) .update(Set({Sample.integer: 10})) .update(Set({Sample.optional: None})) .update_query ) assert q == {"$set": {"optional": None}} with pytest.raises(TypeError): Sample.find_many(Sample.integer == 1).update(40).update_query async def test_update_many(preset_documents): await ( Sample.find_many(Sample.increment > 4) .find_many(Sample.nested.optional == None) .update(Set({Sample.increment: 100})) ) # noqa result = await Sample.find_many(Sample.increment == 100).to_list() assert len(result) == 3 for sample in result: assert sample.increment == 100 async def test_update_many_linked_method(preset_documents): await ( Sample.find_many(Sample.increment > 4) .find_many(Sample.nested.optional == None) .update_many(Set({Sample.increment: 100})) ) # noqa result = await Sample.find_many(Sample.increment == 100).to_list() assert len(result) == 3 for sample in result: assert sample.increment == 100 async def test_update_all(preset_documents): await Sample.update_all(Set({Sample.integer: 100})) result = await Sample.find_all().to_list() for sample in result: assert sample.integer == 100 await Sample.find_all().update(Set({Sample.integer: 101})) result = await Sample.find_all().to_list() for sample in result: assert sample.integer == 101 async def test_update_one(preset_documents): await Sample.find_one(Sample.integer == 1).update( Set({Sample.integer: 100}) ) result = await Sample.find_many(Sample.integer == 100).to_list() assert len(result) == 1 assert result[0].integer == 100 await Sample.find_one(Sample.integer == 1).update_one( Set({Sample.integer: 101}) ) result = await Sample.find_many(Sample.integer == 101).to_list() assert len(result) == 1 assert result[0].integer == 101 async def test_update_self(preset_documents): sample = await Sample.find_one(Sample.integer == 1) await sample.update(Set({Sample.integer: 100})) assert sample.integer == 100 result = await Sample.find_many(Sample.integer == 100).to_list() assert len(result) == 1 assert result[0].integer == 100 async def test_update_many_with_session(preset_documents, session): q = ( Sample.find_many(Sample.increment > 4) .find_many(Sample.nested.optional == None) .update(Set({Sample.increment: 100})) .set_session(session=session) ) assert q.session == session q = ( Sample.find_many(Sample.increment > 4) .find_many(Sample.nested.optional == None) .update(Set({Sample.increment: 100}), session=session) ) assert q.session == session q = ( Sample.find_many(Sample.increment > 4) .find_many(Sample.nested.optional == None, session=session) .update(Set({Sample.increment: 100})) ) assert q.session == session await q # noqa result = await Sample.find_many(Sample.increment == 100).to_list() assert len(result) == 3 for sample in result: assert sample.increment == 100 async def test_update_many_upsert_with_insert( preset_documents, sample_doc_not_saved ): await Sample.find_many(Sample.integer > 100000).upsert( Set({Sample.integer: 100}), on_insert=sample_doc_not_saved ) await asyncio.sleep(2) new_docs = await Sample.find_many( Sample.string == sample_doc_not_saved.string ).to_list() assert len(new_docs) == 1 doc = new_docs[0] assert doc.integer == sample_doc_not_saved.integer async def test_update_many_upsert_without_insert( preset_documents, sample_doc_not_saved ): await Sample.find_many(Sample.integer > 1).upsert( 
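# upsert() applies the update where the filter matches; only when nothing matches (as in the "> 100000" case above) is the on_insert document inserted instead. Here the filter does match existing documents, so no new document should appear, which the follow-up find verifies.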
Set({Sample.integer: 100}), on_insert=sample_doc_not_saved ) await asyncio.sleep(2) new_docs = await Sample.find_many( Sample.string == sample_doc_not_saved.string ).to_list() assert len(new_docs) == 0 async def test_update_one_upsert_with_insert( preset_documents, sample_doc_not_saved ): await Sample.find_one(Sample.integer > 100000).upsert( Set({Sample.integer: 100}), on_insert=sample_doc_not_saved ) new_docs = await Sample.find_many( Sample.string == sample_doc_not_saved.string ).to_list() assert len(new_docs) == 1 doc = new_docs[0] assert doc.integer == sample_doc_not_saved.integer async def test_update_one_upsert_without_insert( preset_documents, sample_doc_not_saved ): await Sample.find_one(Sample.integer > 1).upsert( Set({Sample.integer: 100}), on_insert=sample_doc_not_saved ) new_docs = await Sample.find_many( Sample.string == sample_doc_not_saved.string ).to_list() assert len(new_docs) == 0 async def test_update_one_upsert_without_insert_return_doc( preset_documents, sample_doc_not_saved ): result = await Sample.find_one(Sample.integer > 1).upsert( Set({Sample.integer: 100}), on_insert=sample_doc_not_saved, response_type=UpdateResponse.NEW_DOCUMENT, ) assert isinstance(result, Sample) new_docs = await Sample.find_many( Sample.string == sample_doc_not_saved.string ).to_list() assert len(new_docs) == 0 async def test_update_pymongo_kwargs(preset_documents): with pytest.raises(TypeError): await Sample.find_many(Sample.increment > 4).update( Set({Sample.increment: 100}), wrong="integer_1" ) await Sample.find_many(Sample.increment > 4).update( Set({Sample.increment: 100}), hint="integer_1" ) await Sample.find_one(Sample.increment > 4).update( Set({Sample.increment: 100}), hint="integer_1" ) def test_clone(): q = Sample.find_many(Sample.integer == 1).update(Set({Sample.integer: 10})) new_q = q.clone() new_q.update(Max({Sample.integer: 10})) assert q.update_query == {"$set": {"integer": 10}} assert new_q.update_query == { "$max": {"integer": 10}, "$set": {"integer": 10}, } python-beanie-1.29.0/tests/odm/query/test_update_methods.py000066400000000000000000000040661473701376500240740ustar00rootroot00000000000000from beanie.odm.operators.update.general import Max from beanie.odm.queries.update import UpdateMany, UpdateQuery from tests.odm.models import Sample async def test_set(session): q = Sample.find_many(Sample.integer == 1).set( {Sample.integer: 100}, session=session ) assert isinstance(q, UpdateQuery) assert isinstance(q, UpdateMany) assert q.session == session assert q.update_query == {"$set": {"integer": 100}} q = ( Sample.find_many(Sample.integer == 1) .update(Max({Sample.integer: 10})) .set({Sample.integer: 100}) ) assert isinstance(q, UpdateQuery) assert isinstance(q, UpdateMany) assert q.update_query == { "$max": {"integer": 10}, "$set": {"integer": 100}, } async def test_current_date(session): q = Sample.find_many(Sample.integer == 1).current_date( {Sample.timestamp: "timestamp"}, session=session ) assert isinstance(q, UpdateQuery) assert isinstance(q, UpdateMany) assert q.session == session assert q.update_query == {"$currentDate": {"timestamp": "timestamp"}} q = ( Sample.find_many(Sample.integer == 1) .update(Max({Sample.integer: 10})) .current_date({Sample.timestamp: "timestamp"}) ) assert isinstance(q, UpdateQuery) assert isinstance(q, UpdateMany) assert q.update_query == { "$max": {"integer": 10}, "$currentDate": {"timestamp": "timestamp"}, } async def test_inc(session): q = Sample.find_many(Sample.integer == 1).inc( {Sample.integer: 100}, session=session ) assert 
isinstance(q, UpdateQuery) assert isinstance(q, UpdateMany) assert q.session == session assert q.update_query == {"$inc": {"integer": 100}} q = ( Sample.find_many(Sample.integer == 1) .update(Max({Sample.integer: 10})) .inc({Sample.integer: 100}) ) assert isinstance(q, UpdateQuery) assert isinstance(q, UpdateMany) assert q.update_query == { "$max": {"integer": 10}, "$inc": {"integer": 100}, } python-beanie-1.29.0/tests/odm/test_actions.py000066400000000000000000000076741473701376500213720ustar00rootroot00000000000000import pytest from beanie import After, Before from tests.odm.models import ( DocumentWithActions, DocumentWithActions2, InheritedDocumentWithActions, ) class TestActions: @pytest.mark.parametrize( "doc_class", [ DocumentWithActions, DocumentWithActions2, InheritedDocumentWithActions, ], ) async def test_actions_insert(self, doc_class): test_name = f"test_actions_insert_{doc_class}" sample = doc_class(name=test_name) await sample.insert() assert sample.name != test_name assert sample.name == test_name.capitalize() assert sample.num_1 == 1 assert sample.num_2 == 9 @pytest.mark.parametrize( "doc_class", [ DocumentWithActions, DocumentWithActions2, InheritedDocumentWithActions, ], ) async def test_actions_replace(self, doc_class): test_name = f"test_actions_replace_{doc_class}" sample = doc_class(name=test_name) await sample.insert() await sample.replace() assert sample.num_1 == 2 assert sample.num_3 == 99 @pytest.mark.parametrize( "doc_class", [ DocumentWithActions, DocumentWithActions2, InheritedDocumentWithActions, ], ) async def test_skip_actions_insert(self, doc_class): test_name = f"test_skip_actions_insert_{doc_class}" sample = doc_class(name=test_name) await sample.insert(skip_actions=[After, "capitalize_name"]) # capitalize_name has been skipped assert sample.name == test_name # add_one has not been skipped assert sample.num_1 == 1 # num_2_change has been skipped assert sample.num_2 == 10 @pytest.mark.parametrize( "doc_class", [ DocumentWithActions, DocumentWithActions2, InheritedDocumentWithActions, ], ) async def test_skip_actions_replace(self, doc_class): test_name = f"test_skip_actions_replace{doc_class}" sample = doc_class(name=test_name) await sample.insert() await sample.replace(skip_actions=[Before, "num_3_change"]) # add_one has been skipped assert sample.num_1 == 1 # num_3_change has been skipped assert sample.num_3 == 100 @pytest.mark.parametrize( "doc_class", [ DocumentWithActions, DocumentWithActions2, InheritedDocumentWithActions, ], ) async def test_actions_delete(self, doc_class): test_name = f"test_actions_delete_{doc_class}" sample = doc_class(name=test_name) await sample.delete() assert sample.Inner.inner_num_1 == 1 assert sample.Inner.inner_num_2 == 2 @pytest.mark.parametrize( "doc_class", [ DocumentWithActions, DocumentWithActions2, InheritedDocumentWithActions, ], ) async def test_actions_update(self, doc_class): test_name = f"test_actions_update_{doc_class}" sample = doc_class(name=test_name) await sample.insert() await sample.update({"$set": {"name": "new_name"}}) assert sample.name == "new_name" assert sample.num_1 == 1 assert sample.num_2 == 9 assert sample._private_num == 101 await sample.set({"name": "awesome_name"}) assert sample._private_num == 102 assert sample.num_2 == 9 assert sample.name == "awesome_name" @pytest.mark.parametrize( "doc_class", [ DocumentWithActions, DocumentWithActions2, InheritedDocumentWithActions, ], ) async def test_actions_save(self, doc_class): test_name = f"test_actions_save_{doc_class}" sample = 
doc_class(name=test_name) await sample.save() assert sample.num_1 == 1 python-beanie-1.29.0/tests/odm/test_beanie_object_dumping.py000066400000000000000000000023421473701376500242110ustar00rootroot00000000000000import pytest from pydantic import BaseModel, Field from beanie import Link, PydanticObjectId from beanie.odm.utils.pydantic import IS_PYDANTIC_V2 from tests.odm.models import DocumentTestModelWithSoftDelete class TestModel(BaseModel): my_id: PydanticObjectId = Field(default_factory=PydanticObjectId) fake_doc: Link[DocumentTestModelWithSoftDelete] def data_maker(): return TestModel( my_id="5f4e3f3b7c0c9d001f7d4c8e", fake_doc=DocumentTestModelWithSoftDelete( test_int=1, test_str="test", id="5f4e3f3b7c0c9d001f7d4c8f" ), ) @pytest.mark.skipif( not IS_PYDANTIC_V2, reason="model dumping support is more complete with pydantic v2", ) def test_id_types_preserved_when_dumping_to_python(): dumped = data_maker().model_dump(mode="python") assert isinstance(dumped["my_id"], PydanticObjectId) assert isinstance(dumped["fake_doc"]["id"], PydanticObjectId) @pytest.mark.skipif( not IS_PYDANTIC_V2, reason="model dumping support is more complete with pydantic v2", ) def test_id_types_serialized_when_dumping_to_json(): dumped = data_maker().model_dump(mode="json") assert isinstance(dumped["my_id"], str) assert isinstance(dumped["fake_doc"]["id"], str) python-beanie-1.29.0/tests/odm/test_cache.py000066400000000000000000000057161473701376500207700ustar00rootroot00000000000000import asyncio from tests.odm.models import DocumentTestModel async def test_find_one(documents): await documents(5) doc = await DocumentTestModel.find_one(DocumentTestModel.test_int == 1) await DocumentTestModel.find_one(DocumentTestModel.test_int == 1).set( {DocumentTestModel.test_str: "NEW_VALUE"} ) new_doc = await DocumentTestModel.find_one(DocumentTestModel.test_int == 1) assert doc == new_doc new_doc = await DocumentTestModel.find_one( DocumentTestModel.test_int == 1, ignore_cache=True ) assert doc != new_doc await asyncio.sleep(10) new_doc = await DocumentTestModel.find_one(DocumentTestModel.test_int == 1) assert doc != new_doc async def test_find_many(documents): await documents(5) docs = await DocumentTestModel.find( DocumentTestModel.test_int > 1 ).to_list() await DocumentTestModel.find(DocumentTestModel.test_int > 1).set( {DocumentTestModel.test_str: "NEW_VALUE"} ) new_docs = await DocumentTestModel.find( DocumentTestModel.test_int > 1 ).to_list() assert docs == new_docs new_docs = await DocumentTestModel.find( DocumentTestModel.test_int > 1, ignore_cache=True ).to_list() assert docs != new_docs await asyncio.sleep(10) new_docs = await DocumentTestModel.find( DocumentTestModel.test_int > 1 ).to_list() assert docs != new_docs async def test_aggregation(documents): await documents(5) docs = await DocumentTestModel.aggregate( [{"$group": {"_id": "$test_str", "total": {"$sum": "$test_int"}}}] ).to_list() await DocumentTestModel.find(DocumentTestModel.test_int > 1).set( {DocumentTestModel.test_str: "NEW_VALUE"} ) new_docs = await DocumentTestModel.aggregate( [{"$group": {"_id": "$test_str", "total": {"$sum": "$test_int"}}}] ).to_list() assert docs == new_docs new_docs = await DocumentTestModel.aggregate( [{"$group": {"_id": "$test_str", "total": {"$sum": "$test_int"}}}], ignore_cache=True, ).to_list() assert docs != new_docs await asyncio.sleep(10) new_docs = await DocumentTestModel.aggregate( [{"$group": {"_id": "$test_str", "total": {"$sum": "$test_int"}}}] ).to_list() assert docs != new_docs async def 
test_capacity(documents): await documents(10) docs = [] for i in range(10): docs.append( await DocumentTestModel.find_one(DocumentTestModel.test_int == i) ) await DocumentTestModel.find_one(DocumentTestModel.test_int == 1).set( {DocumentTestModel.test_str: "NEW_VALUE"} ) await DocumentTestModel.find_one(DocumentTestModel.test_int == 9).set( {DocumentTestModel.test_str: "NEW_VALUE"} ) new_doc = await DocumentTestModel.find_one(DocumentTestModel.test_int == 1) assert docs[1] != new_doc new_doc = await DocumentTestModel.find_one(DocumentTestModel.test_int == 9) assert docs[9] == new_doc python-beanie-1.29.0/tests/odm/test_concurrency.py000066400000000000000000000016531473701376500222530ustar00rootroot00000000000000import asyncio import motor.motor_asyncio from beanie import Document, init_beanie class SampleModel(Document): s: str = "TEST" i: int = 10 class SampleModel2(SampleModel): ... class SampleModel3(SampleModel2): ... class TestConcurrency: async def test_without_init(self, settings): for i in range(10): cli = motor.motor_asyncio.AsyncIOMotorClient(settings.mongodb_dsn) cli.get_io_loop = asyncio.get_running_loop db = cli[settings.mongodb_db_name] await init_beanie( db, document_models=[SampleModel3, SampleModel, SampleModel2] ) async def insert_find(): await SampleModel2().insert() docs = await SampleModel2.find(SampleModel2.i == 10).to_list() return docs await asyncio.gather(*[insert_find() for _ in range(10)]) await SampleModel2.delete_all() python-beanie-1.29.0/tests/odm/test_consistency.py000066400000000000000000000033121473701376500222540ustar00rootroot00000000000000from beanie.odm.operators.update.general import Set from tests.odm.models import ( DocumentTestModel, DocumentWithTimeStampToTestConsistency, ) class TestResponseOfTheChangingOperations: async def test_insert(self, document_not_inserted): result = await document_not_inserted.insert() assert isinstance(result, DocumentTestModel) async def test_update(self, document): result = await document.update(Set({"test_int": 43})) assert isinstance(result, DocumentTestModel) async def test_save(self, document, document_not_inserted): document.test_int = 43 result = await document.save() assert isinstance(result, DocumentTestModel) document_not_inserted.test_int = 43 result = await document_not_inserted.save() assert isinstance(result, DocumentTestModel) async def test_save_changes(self, document): document.test_int = 43 result = await document.save_changes() assert isinstance(result, DocumentTestModel) async def test_replace(self, document): result = await document.replace() assert isinstance(result, DocumentTestModel) async def test_set(self, document): result = await document.set({"test_int": 43}) assert isinstance(result, DocumentTestModel) async def test_inc(self, document): result = await document.inc({"test_int": 1}) assert isinstance(result, DocumentTestModel) async def test_current_date(self): document = DocumentWithTimeStampToTestConsistency() await document.insert() result = await document.current_date({"ts": True}) assert isinstance(result, DocumentWithTimeStampToTestConsistency) python-beanie-1.29.0/tests/odm/test_cursor.py000066400000000000000000000005561473701376500212370ustar00rootroot00000000000000from tests.odm.models import DocumentTestModel async def test_to_list(documents): await documents(10) result = await DocumentTestModel.find_all().to_list() assert len(result) == 10 async def test_async_for(documents): await documents(10) async for document in DocumentTestModel.find_all(): assert document.test_int in 
list(range(10)) python-beanie-1.29.0/tests/odm/test_deprecated.py000066400000000000000000000006401473701376500220140ustar00rootroot00000000000000import pytest from beanie import init_beanie from beanie.exceptions import Deprecation from tests.odm.models import DocWithCollectionInnerClass class TestDeprecations: async def test_doc_with_inner_collection_class_init(self, db): with pytest.raises(Deprecation): await init_beanie( database=db, document_models=[DocWithCollectionInnerClass], ) python-beanie-1.29.0/tests/odm/test_encoder.py000066400000000000000000000142201473701376500213320ustar00rootroot00000000000000import re from datetime import date, datetime from enum import Enum from uuid import uuid4 import pytest from bson import Binary, Regex from pydantic import AnyUrl from beanie.odm.utils.encoder import Encoder from beanie.odm.utils.pydantic import IS_PYDANTIC_V2 from tests.odm.models import ( BsonRegexDoc, Child, DictEnum, DocumentForEncodingTest, DocumentForEncodingTestDate, DocumentWithComplexDictKey, DocumentWithDecimalField, DocumentWithEnumKeysDict, DocumentWithHttpUrlField, DocumentWithKeepNullsFalse, DocumentWithStringField, ModelWithOptionalField, NativeRegexDoc, SampleWithMutableObjects, ) async def test_encode_datetime(): assert isinstance(Encoder().encode(datetime.now()), datetime) doc = DocumentForEncodingTest(datetime_field=datetime.now()) await doc.insert() new_doc = await DocumentForEncodingTest.get(doc.id) assert isinstance(new_doc.datetime_field, datetime) async def test_encode_date(): assert isinstance(Encoder().encode(datetime.now()), datetime) doc = DocumentForEncodingTestDate() await doc.insert() new_doc = await DocumentForEncodingTestDate.get(doc.id) assert new_doc.date_field == doc.date_field assert isinstance(new_doc.date_field, date) async def test_encode_regex(): raw_regex = r"^AA.*CC$" case_sensitive_regex = re.compile(raw_regex) case_insensitive_regex = re.compile(raw_regex, re.I) assert isinstance(Encoder().encode(case_sensitive_regex), Regex) assert isinstance(Encoder().encode(case_insensitive_regex), Regex) matching_doc = DocumentWithStringField(string_field="AABBCC") ignore_case_matching_doc = DocumentWithStringField(string_field="aabbcc") non_matching_doc = DocumentWithStringField(string_field="abc") for doc in (matching_doc, ignore_case_matching_doc, non_matching_doc): await doc.insert() assert {matching_doc.id, ignore_case_matching_doc.id} == { doc.id async for doc in DocumentWithStringField.find( DocumentWithStringField.string_field == case_insensitive_regex ) } assert {matching_doc.id} == { doc.id async for doc in DocumentWithStringField.find( DocumentWithStringField.string_field == case_sensitive_regex ) } def test_encode_with_custom_encoder(): assert isinstance( Encoder(custom_encoders={datetime: str}).encode(datetime.now()), str ) async def test_bytes(): encoded_b = Encoder().encode(b"test") assert isinstance(encoded_b, Binary) assert encoded_b.subtype == 0 doc = DocumentForEncodingTest(bytes_field=b"test") await doc.insert() new_doc = await DocumentForEncodingTest.get(doc.id) assert isinstance(new_doc.bytes_field, bytes) async def test_bytes_already_binary(): b = Binary(b"123", 3) encoded_b = Encoder().encode(b) assert isinstance(encoded_b, Binary) assert encoded_b.subtype == 3 async def test_mutable_objects_on_save(): instance = SampleWithMutableObjects( d={"Bar": Child(child_field="Foo")}, lst=[Child(child_field="Bar")] ) await instance.save() assert isinstance(instance.d["Bar"], Child) assert isinstance(instance.lst[0], Child) async def 
test_decimal(): test_amts = DocumentWithDecimalField(amt=1, other_amt=2) await test_amts.insert() obj = await DocumentWithDecimalField.get(test_amts.id) assert obj.amt == 1 assert obj.other_amt == 2 test_amts.amt = 6 await test_amts.save_changes() obj = await DocumentWithDecimalField.get(test_amts.id) assert obj.amt == 6 test_amts = (await DocumentWithDecimalField.find_all().to_list())[0] test_amts.other_amt = 7 await test_amts.save_changes() obj = await DocumentWithDecimalField.get(test_amts.id) assert obj.other_amt == 7 def test_keep_nulls_false(): model = ModelWithOptionalField(i=10) doc = DocumentWithKeepNullsFalse(m=model) encoder = Encoder(keep_nulls=False, to_db=True) encoded_doc = encoder.encode(doc) assert encoded_doc == {"m": {"i": 10}} @pytest.mark.skipif(not IS_PYDANTIC_V2, reason="Test only for Pydantic v2") def test_should_encode_pydantic_v2_url_correctly(): url = AnyUrl("https://example.com") encoder = Encoder() encoded_url = encoder.encode(url) assert isinstance(encoded_url, str) # pydantic v2 adds a trailing slash to a naked url. see https://github.com/pydantic/pydantic/issues/6943 assert encoded_url == "https://example.com/" async def test_should_be_able_to_save_retrieve_doc_with_url(): doc = DocumentWithHttpUrlField(url_field="https://example.com") assert isinstance(doc.url_field, AnyUrl) await doc.save() new_doc = await DocumentWithHttpUrlField.find_one( DocumentWithHttpUrlField.id == doc.id ) assert isinstance(new_doc.url_field, AnyUrl) assert new_doc.url_field == doc.url_field async def test_dict_with_complex_key(): assert isinstance(Encoder().encode({uuid4(): datetime.now()}), dict) uuid = uuid4() # reset microseconds, because MongoDB does not preserve microsecond precision dt = datetime.now().replace(microsecond=0) doc = DocumentWithComplexDictKey(dict_field={uuid: dt}) await doc.insert() new_doc = await DocumentWithComplexDictKey.get(doc.id) assert isinstance(new_doc.dict_field, dict) assert new_doc.dict_field.get(uuid) == dt async def test_dict_with_enum_keys(): doc = DocumentWithEnumKeysDict(color={DictEnum.RED: "favorite"}) await doc.save() assert isinstance(doc.color, dict) for key in doc.color: assert isinstance(key, Enum) assert key == DictEnum.RED async def test_native_regex(): regex = re.compile(r"^1?$|^(11+?)\1+$", (re.I | re.M | re.S) ^ re.UNICODE) doc = await NativeRegexDoc(regex=regex).insert() new_doc = await NativeRegexDoc.get(doc.id) assert new_doc.regex == regex assert new_doc.regex.pattern == r"^1?$|^(11+?)\1+$" assert new_doc.regex.flags == int(re.I | re.M | re.S ^ re.UNICODE) async def test_bson_regex(): regex = Regex(r"^1?$|^(11+?)\1+$") doc = await BsonRegexDoc(regex=regex).insert() new_doc = await BsonRegexDoc.get(doc.id) assert new_doc.regex == Regex(pattern=r"^1?$|^(11+?)\1+$") python-beanie-1.29.0/tests/odm/test_expression_fields.py000066400000000000000000000043311473701376500234420ustar00rootroot00000000000000from beanie.odm.enums import SortDirection from beanie.odm.operators.find.comparison import In, NotIn from tests.odm.models import Sample def test_nesting(): assert Sample.id == "_id" q = Sample.find_many(Sample.integer == 1) assert q.get_filter_query() == {"integer": 1} assert Sample.integer == "integer" q = Sample.find_many(Sample.nested.integer == 1) assert q.get_filter_query() == {"nested.integer": 1} assert Sample.nested.integer == "nested.integer" q = Sample.find_many(Sample.union.s == "test") assert q.get_filter_query() == {"union.s": "test"} assert Sample.union.s == "union.s" q = Sample.find_many(Sample.nested.optional == None) # noqa assert 
q.get_filter_query() == {"nested.optional": None} assert Sample.nested.optional == "nested.optional" q = Sample.find_many(Sample.nested.integer == 1).find_many( Sample.nested.union.s == "test" ) assert q.get_filter_query() == { "$and": [{"nested.integer": 1}, {"nested.union.s": "test"}] } def test_eq(): q = Sample.find_many(Sample.integer == 1) assert q.get_filter_query() == {"integer": 1} def test_gt(): q = Sample.find_many(Sample.integer > 1) assert q.get_filter_query() == {"integer": {"$gt": 1}} def test_gte(): q = Sample.find_many(Sample.integer >= 1) assert q.get_filter_query() == {"integer": {"$gte": 1}} def test_in(): q = Sample.find_many(In(Sample.integer, [1, 2, 3, 4])) assert dict(q.get_filter_query()) == {"integer": {"$in": [1, 2, 3, 4]}} def test_lt(): q = Sample.find_many(Sample.integer < 1) assert q.get_filter_query() == {"integer": {"$lt": 1}} def test_lte(): q = Sample.find_many(Sample.integer <= 1) assert q.get_filter_query() == {"integer": {"$lte": 1}} def test_ne(): q = Sample.find_many(Sample.integer != 1) assert q.get_filter_query() == {"integer": {"$ne": 1}} def test_nin(): q = Sample.find_many(NotIn(Sample.integer, [1, 2, 3, 4])) assert dict(q.get_filter_query()) == {"integer": {"$nin": [1, 2, 3, 4]}} def test_pos(): q = +Sample.integer assert q == ("integer", SortDirection.ASCENDING) def test_neg(): q = -Sample.integer assert q == ("integer", SortDirection.DESCENDING) python-beanie-1.29.0/tests/odm/test_fields.py000066400000000000000000000137021473701376500211650ustar00rootroot00000000000000import datetime from decimal import Decimal from pathlib import Path from typing import AbstractSet, Mapping from uuid import uuid4 import pytest from pydantic import BaseModel, ValidationError from beanie import Document from beanie.exceptions import CollectionWasNotInitialized from beanie.odm.fields import PydanticObjectId from beanie.odm.utils.dump import get_dict from beanie.odm.utils.encoder import Encoder from beanie.odm.utils.pydantic import IS_PYDANTIC_V2 from tests.odm.models import ( DocumentTestModel, DocumentTestModelIndexFlagsAnnotated, DocumentWithBsonEncodersFiledsTypes, DocumentWithCustomFiledsTypes, DocumentWithDeprecatedHiddenField, Sample, ) class M(BaseModel): p: PydanticObjectId def test_pydantic_object_id_wrong_input(): with pytest.raises(ValidationError): M(p="test") def test_pydantic_object_id_bytes_input(): p = PydanticObjectId() m = M(p=str(p).encode("utf-8")) assert m.p == p with pytest.raises(ValidationError): M(p=b"test") async def test_bson_encoders_filed_types(): custom = DocumentWithBsonEncodersFiledsTypes( color="7fffd4", timestamp=datetime.datetime.now(tz=datetime.timezone.utc), ) encoded = get_dict(custom) assert isinstance(encoded["timestamp"], str) c = await custom.insert() c_fromdb = await DocumentWithBsonEncodersFiledsTypes.get(c.id) assert c_fromdb.color.as_hex() == c.color.as_hex() assert isinstance(c_fromdb.timestamp, datetime.datetime) assert c_fromdb.timestamp, custom.timestamp async def test_custom_filed_types(): custom1 = DocumentWithCustomFiledsTypes( color="#753c38", decimal=500, secret_bytes=b"secret_bytes", secret_string="super_secret_password", ipv4address="127.0.0.1", ipv4interface="192.0.2.5/24", ipv4network="192.0.2.0/24", ipv6address="::abc:7:def", ipv6interface="2001:db00::2/24", ipv6network="2001:db00::0/24", timedelta=4782453, set_type={"one", "two", "three"}, tuple_type=tuple([3, "string"]), path="/etc/hosts", ) custom2 = DocumentWithCustomFiledsTypes( color="magenta", decimal=Decimal("3.14") + Decimal(10) ** 
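# (this second decimal works out to Decimal("3.14") + 1e-18, precision beyond what a binary float can represent exactly, which is what the encode/decode round-trip assertions below exercise)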
Decimal(-18), secret_bytes=b"secret_bytes", secret_string="super_secret_password", ipv4address="127.0.0.1", ipv4interface="192.0.2.5/24", ipv4network="192.0.2.0/24", ipv6address="::abc:7:def", ipv6interface="2001:db00::2/24", ipv6network="2001:db00::0/24", timedelta=4782453, set_type=["one", "two", "three"], tuple_type=[3, "three"], path=Path("C:\\Windows"), ) c1 = await custom1.insert() c2 = await custom2.insert() c1_fromdb = await DocumentWithCustomFiledsTypes.get(c1.id) c2_fromdb = await DocumentWithCustomFiledsTypes.get(c2.id) assert set(c1_fromdb.set_type) == set(c1.set_type) assert set(c2_fromdb.set_type) == set(c2.set_type) c1_fromdb.set_type = c2_fromdb.set_type = c1.set_type = c2.set_type = None c1_fromdb.revision_id = None c2_fromdb.revision_id = None c1_encoded = Encoder().encode(c1) c1_fromdb_encoded = Encoder().encode(c1_fromdb) c2_encoded = Encoder().encode(c2) c2_fromdb_encoded = Encoder().encode(c2_fromdb) assert c1_fromdb_encoded == c1_encoded assert c2_fromdb_encoded == c2_encoded assert Decimal(str(custom1.decimal)) == Decimal( str(c1_encoded.get("decimal")) ) assert Decimal(str(custom2.decimal)) == Decimal( str(c2_encoded.get("decimal")) ) async def test_excluded(document): document = await DocumentTestModel.find_one() if IS_PYDANTIC_V2: assert "test_list" not in document.model_dump() else: assert "test_list" not in document.dict() async def test_hidden(deprecated_init_beanie): document = DocumentWithDeprecatedHiddenField(test_hidden=["abc", "def"]) await document.insert() document = await DocumentWithDeprecatedHiddenField.find_one() if IS_PYDANTIC_V2: assert "test_hidden" not in document.model_dump() else: assert "test_hidden" not in document.dict() def test_revision_id_not_in_schema(): """Check if there is a `revision_id` slipping into the schema.""" class Foo(Document): """Dummy document.""" bar: int = 3 if IS_PYDANTIC_V2: schema = Foo.model_json_schema() else: schema = Foo.schema() assert "revision_id" not in schema["properties"] # check that the document has not been initialized, # as otherwise the `revision_id` is normally gone from the schema. with pytest.raises(CollectionWasNotInitialized): Foo.get_settings() @pytest.mark.parametrize("exclude", [{"test_int"}, {"test_doc": {"test_int"}}]) async def test_param_exclude(document, exclude): document = await DocumentTestModel.find_one() if IS_PYDANTIC_V2: doc_dict = document.model_dump(exclude=exclude) else: doc_dict = document.dict(exclude=exclude) if isinstance(exclude, AbstractSet): for k in exclude: assert k not in doc_dict elif isinstance(exclude, Mapping): for k, v in exclude.items(): if isinstance(v, bool) and v: assert k not in doc_dict elif isinstance(v, AbstractSet): for another_k in v: assert another_k not in doc_dict[k] def test_expression_fields(): assert Sample.nested.integer == "nested.integer" assert Sample.nested["integer"] == "nested.integer" def test_indexed_field() -> None: """Test that fields can be declared and instantiated with Indexed() and Annotated[..., Indexed()].""" # No error should be raised if the document is properly initialized # and `Indexed` is implemented correctly. 
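# For context, a hedged sketch of how such fields are presumably declared on the model (the real definitions live in tests.odm.models): str_index: Indexed(str) and str_index_annotated: Annotated[str, Indexed()]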
DocumentTestModelIndexFlagsAnnotated( str_index="test", str_index_annotated="test", uuid_index=uuid4(), uuid_index_annotated=uuid4(), ) python-beanie-1.29.0/tests/odm/test_id.py000066400000000000000000000031171473701376500203120ustar00rootroot00000000000000from uuid import UUID import pytest from pydantic import BaseModel from beanie import PydanticObjectId from beanie.odm.utils.pydantic import IS_PYDANTIC_V2 from tests.odm.models import DocumentWithCustomIdInt, DocumentWithCustomIdUUID class A(BaseModel): id: PydanticObjectId async def test_uuid_id(): doc = DocumentWithCustomIdUUID(name="TEST") await doc.insert() new_doc = await DocumentWithCustomIdUUID.get(doc.id) assert isinstance(new_doc.id, UUID) async def test_integer_id(): doc = DocumentWithCustomIdInt(name="TEST", id=1) await doc.insert() new_doc = await DocumentWithCustomIdInt.get(doc.id) assert isinstance(new_doc.id, int) @pytest.mark.skipif( not IS_PYDANTIC_V2, reason="supports only pydantic v2", ) async def test_pydantic_object_id_validation_json(): deserialized = A.model_validate_json('{"id": "5eb7cf5a86d9755df3a6c593"}') assert isinstance(deserialized.id, PydanticObjectId) assert str(deserialized.id) == "5eb7cf5a86d9755df3a6c593" assert deserialized.id == PydanticObjectId("5eb7cf5a86d9755df3a6c593") @pytest.mark.skipif( not IS_PYDANTIC_V2, reason="supports only pydantic v2", ) @pytest.mark.parametrize( "data", [ "5eb7cf5a86d9755df3a6c593", PydanticObjectId("5eb7cf5a86d9755df3a6c593"), ], ) async def test_pydantic_object_id_serialization(data): deserialized = A(**{"id": data}) assert isinstance(deserialized.id, PydanticObjectId) assert str(deserialized.id) == "5eb7cf5a86d9755df3a6c593" assert deserialized.id == PydanticObjectId("5eb7cf5a86d9755df3a6c593") python-beanie-1.29.0/tests/odm/test_json_schema_generation.py000066400000000000000000000131331473701376500244210ustar00rootroot00000000000000from uuid import uuid4 import pytest from beanie.odm.utils.pydantic import IS_PYDANTIC_V2 from tests.odm.models import ( DocumentWithBackLink, DocumentWithDecimalField, DocumentWithIndexedObjectId, DocumentWithLink, DocumentWithListBackLink, DocumentWithListLink, DocumentWithOptionalLink, ) def test_schema_export_of_model_with_decimal_field(): doc = DocumentWithDecimalField(amt=0.1, other_amt=3.5) if IS_PYDANTIC_V2: json_schema = doc.model_json_schema() assert json_schema["properties"]["amt"]["anyOf"][0]["type"] == "number" assert json_schema["properties"]["amt"]["anyOf"][1]["type"] == "string" assert ( json_schema["properties"]["other_amt"]["anyOf"][0]["type"] == "number" ) assert ( json_schema["properties"]["other_amt"]["anyOf"][1]["type"] == "string" ) else: json_schema = doc.schema() assert json_schema["properties"]["amt"]["type"] == "number" assert json_schema["properties"]["other_amt"]["type"] == "number" def test_schema_export_of_model_with_pydanticobjectid(): doc = DocumentWithIndexedObjectId( pyid="5f8d0a8b0b7e3a1e4c9f4b1e", uuid=uuid4(), email="test@test.com" ) if IS_PYDANTIC_V2: json_schema = doc.model_json_schema() assert json_schema["properties"]["_id"]["anyOf"][0]["type"] == "string" assert json_schema["properties"]["pyid"]["type"] == "string" else: json_schema = doc.schema() assert json_schema["properties"]["_id"]["type"] == "string" assert json_schema["properties"]["pyid"]["type"] == "string" def test_schema_export_of_model_with_link(): if IS_PYDANTIC_V2: json_schema = DocumentWithLink.model_json_schema() link_alternate_representation = json_schema["properties"]["link"][ "anyOf" ] else: json_schema = 
DocumentWithLink.schema() link_alternate_representation = json_schema["definitions"][ "DocumentWithLink" ]["properties"]["link"]["anyOf"] assert link_alternate_representation[0]["type"] == "object" assert link_alternate_representation[1]["type"] == "object" @pytest.mark.skipif( not IS_PYDANTIC_V2, reason="schema dumping support is more complete with pydantic v2", ) def test_schema_export_of_model_with_optional_link(): if IS_PYDANTIC_V2: json_schema = DocumentWithOptionalLink.model_json_schema() else: json_schema = DocumentWithOptionalLink.schema() link_alternate_representation = json_schema["properties"]["link"]["anyOf"] assert link_alternate_representation[0]["type"] == "object" assert link_alternate_representation[1]["type"] == "object" assert link_alternate_representation[2]["type"] == "null" def test_schema_export_of_model_with_list_link(): if IS_PYDANTIC_V2: json_schema = DocumentWithListLink.model_json_schema() link_alternate_representation = json_schema["properties"]["link"][ "items" ]["anyOf"] link_definition = json_schema["properties"]["link"]["type"] else: json_schema = DocumentWithListLink.schema() link_alternate_representation = json_schema["definitions"][ "DocumentWithListLink" ]["properties"]["link"]["items"]["anyOf"] link_definition = json_schema["definitions"]["DocumentWithListLink"][ "properties" ]["link"]["type"] assert link_definition == "array" assert link_alternate_representation[0]["type"] == "object" assert link_alternate_representation[1]["type"] == "object" def test_schema_export_of_model_with_backlink(): if IS_PYDANTIC_V2: json_schema = DocumentWithBackLink.model_json_schema() back_link_definition = json_schema["properties"]["back_link"]["type"] else: json_schema = DocumentWithBackLink.schema() back_link_definition = json_schema["definitions"][ "DocumentWithBackLink" ]["properties"]["back_link"]["anyOf"][1]["type"] assert back_link_definition == "object" def test_schema_export_of_model_with_list_backlink(): if IS_PYDANTIC_V2: json_schema = DocumentWithListBackLink.model_json_schema() assert json_schema["properties"]["back_link"]["type"] == "array" assert ( json_schema["properties"]["back_link"]["items"]["type"] == "object" ) else: json_schema = DocumentWithListBackLink.schema() assert ( json_schema["definitions"]["DocumentWithListBackLink"][ "properties" ]["back_link"]["type"] == "array" ) assert ( json_schema["definitions"]["DocumentWithListBackLink"][ "properties" ]["back_link"]["items"]["anyOf"][1]["type"] == "object" ) @pytest.mark.skipif( not IS_PYDANTIC_V2, reason="schema dumping support is more complete with pydantic v2", ) @pytest.mark.parametrize( "model_type", [ DocumentWithDecimalField, DocumentWithIndexedObjectId, DocumentWithLink, DocumentWithOptionalLink, DocumentWithListLink, DocumentWithBackLink, DocumentWithListBackLink, ], ) def test_json_serialization_of_model(model_type): validation_schema = model_type.model_json_schema(mode="serialization") assert validation_schema is not None assert isinstance(validation_schema, dict) python-beanie-1.29.0/tests/odm/test_lazy_parsing.py000066400000000000000000000035701473701376500224230ustar00rootroot00000000000000import pytest from beanie.odm.utils.dump import get_dict from beanie.odm.utils.parsing import parse_obj from tests.odm.models import SampleLazyParsing @pytest.fixture async def docs(): for i in range(10): await SampleLazyParsing(i=i, s=str(i)).insert() class TestLazyParsing: async def test_find_all(self, docs): found_docs = await SampleLazyParsing.all(lazy_parse=True).to_list() saved_state = 
found_docs[0].get_saved_state() assert "_id" in saved_state del saved_state["_id"] assert found_docs[0].get_saved_state() == {} assert found_docs[0].i == 0 assert found_docs[0].s == "0" assert found_docs[1]._store["i"] == 1 assert found_docs[1]._store["s"] == "1" assert get_dict(found_docs[2])["i"] == 2 assert get_dict(found_docs[2])["s"] == "2" async def test_find_many(self, docs): found_docs = await SampleLazyParsing.find( SampleLazyParsing.i <= 5, lazy_parse=True ).to_list() saved_state = found_docs[0].get_saved_state() assert "_id" in saved_state del saved_state["_id"] assert found_docs[0].get_saved_state() == {} assert found_docs[0].i == 0 assert found_docs[0].s == "0" assert found_docs[1]._store["i"] == 1 assert found_docs[1]._store["s"] == "1" assert get_dict(found_docs[2])["i"] == 2 assert get_dict(found_docs[2])["s"] == "2" async def test_save_changes(self, docs): found_docs = await SampleLazyParsing.all(lazy_parse=True).to_list() doc = found_docs[0] doc.i = 1000 await doc.save_changes() new_doc = await SampleLazyParsing.find_one(SampleLazyParsing.s == "0") assert new_doc.i == 1000 async def test_default_list(self): res = parse_obj(SampleLazyParsing, {"i": "1", "s": "1"}) assert res.lst == [] python-beanie-1.29.0/tests/odm/test_relations.py000066400000000000000000000771161473701376500217300ustar00rootroot00000000000000from typing import List import pytest from pydantic.fields import Field from beanie import Document, init_beanie from beanie.exceptions import DocumentWasNotSaved from beanie.odm.fields import ( BackLink, DeleteRules, Link, WriteRules, ) from beanie.odm.utils.pydantic import ( IS_PYDANTIC_V2, get_model_fields, parse_model, ) from beanie.operators import In, Or from tests.odm.models import ( AddressView, ADocument, BDocument, DocumentToBeLinked, DocumentWithBackLink, DocumentWithBackLinkForNesting, DocumentWithLink, DocumentWithLinkForNesting, DocumentWithListBackLink, DocumentWithListLink, DocumentWithListOfLinks, DocumentWithTextIndexAndLink, Door, House, LinkDocumentForTextSeacrh, Lock, LongSelfLink, LoopedLinksA, LoopedLinksB, Region, Roof, RootDocument, SelfLinked, UsersAddresses, Window, Yard, ) def lock_not_inserted_fn(): return Lock(k=10) @pytest.fixture def locks_not_inserted(): return [Lock(k=10001), Lock(k=20002)] @pytest.fixture def window_not_inserted(): return Window(x=10, y=10, lock=lock_not_inserted_fn()) @pytest.fixture def windows_not_inserted(): return [ Window( x=10, y=10, lock=lock_not_inserted_fn(), ), Window( x=11, y=11, lock=lock_not_inserted_fn(), ), ] @pytest.fixture def door_not_inserted(window_not_inserted, locks_not_inserted): return Door(t=10, window=window_not_inserted, locks=locks_not_inserted) @pytest.fixture def house_not_inserted(windows_not_inserted, door_not_inserted): return House( windows=windows_not_inserted, door=door_not_inserted, name="test" ) @pytest.fixture async def house(house_not_inserted): return await house_not_inserted.insert(link_rule=WriteRules.WRITE) @pytest.fixture async def houses(): for i in range(10): roof = Roof() if i % 2 == 0 else None if i % 2 == 0: yards = [Yard(v=10, w=10 + i), Yard(v=11, w=10 + i)] else: yards = None house = await House( door=Door( t=i, window=Window(x=20, y=21 + i, lock=Lock(k=20 + i)) if i % 2 == 0 else None, locks=[Lock(k=20 + i)], ), windows=[ Window(x=10, y=10 + i, lock=Lock(k=10 + i)), Window(x=11, y=11 + i, lock=Lock(k=11 + i)), ], yards=yards, roof=roof, name="test", height=i, ).insert(link_rule=WriteRules.WRITE) if i == 9: await house.windows[0].delete() await 
house.windows[1].lock.delete() await house.door.delete() class TestInsert: async def test_rule_do_nothing(self, house_not_inserted): with pytest.raises(DocumentWasNotSaved): await house_not_inserted.insert() async def test_rule_write(self, house_not_inserted): await house_not_inserted.insert(link_rule=WriteRules.WRITE) locks = await Lock.all().to_list() assert len(locks) == 5 windows = await Window.all().to_list() assert len(windows) == 3 doors = await Door.all().to_list() assert len(doors) == 1 houses = await House.all().to_list() assert len(houses) == 1 async def test_insert_with_link( self, house_not_inserted, door_not_inserted, window_not_inserted, locks_not_inserted, ): lock_links = [] for lock in locks_not_inserted: lock = await lock.insert() link = Lock.link_from_id(lock.id) lock_links.append(link) door_not_inserted.locks = lock_links door_window_lock = await lock_not_inserted_fn().insert() door_window_lock_link = Lock.link_from_id(door_window_lock.id) window_not_inserted.lock = door_window_lock_link door_window = await window_not_inserted.insert() door_window_link = Window.link_from_id(door_window.id) door_not_inserted.window = door_window_link door = await door_not_inserted.insert() door_link = Door.link_from_id(door.id) house_not_inserted.door = door_link house = parse_model(House, house_not_inserted) await house.insert(link_rule=WriteRules.WRITE) if IS_PYDANTIC_V2: json_str = house.model_dump_json() else: json_str = house.json() assert json_str is not None async def test_multi_insert_links(self): house = House(name="random", windows=[], door=Door()) window = await Window(x=13, y=23).insert() assert window.id house.windows.append(window) house = await house.insert(link_rule=WriteRules.WRITE) new_window_1 = Window(x=11, y=22) assert new_window_1.id is None house.windows.append(new_window_1) new_window_2 = Window(x=12, y=23) assert new_window_2.id is None house.windows.append(new_window_2) await house.save(link_rule=WriteRules.WRITE) for win in house.windows: assert isinstance(win, Window) assert win.id assert new_window_1.id is not None assert new_window_2.id is not None async def test_fetch_after_insert(self, house_not_inserted): # TODO: what is the point of this test if nothing was inserted to DB? 
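        # On an unsaved document every linked value is still a plain
        # in-memory object rather than a Link, so fetch_all_links() has
        # nothing to resolve and this mostly checks that the call does not
        # raise. A more telling variant (hypothetical, not part of the
        # suite) would persist the tree first and re-fetch it:
        #
        #     house = await house_not_inserted.insert(
        #         link_rule=WriteRules.WRITE
        #     )
        #     fresh = await House.get(house.id)
        #     await fresh.fetch_all_links()
        #     assert isinstance(fresh.door, Door)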
await house_not_inserted.fetch_all_links() class TestFind: async def test_prefetch_find_many(self, houses): items = await House.find(House.height > 2).sort(House.height).to_list() assert len(items) == 7 for window in items[0].windows: assert isinstance(window, Link) assert items[0].yards is None for yard in items[1].yards: assert isinstance(yard, Link) assert isinstance(items[0].door, Link) assert items[0].roof is None assert isinstance(items[1].roof, Link) items = ( await House.find(House.height > 2, fetch_links=True) .sort(House.height) .to_list() ) assert len(items) == 7 for window in items[0].windows: assert isinstance(window, Window) assert isinstance(window.lock, Lock) assert items[0].yards == [] for yard in items[1].yards: assert isinstance(yard, Yard) assert isinstance(items[0].door, Door) assert isinstance(items[1].door.window, Window) assert items[0].door.window is None assert isinstance(items[1].door.window.lock, Lock) for lock in items[0].door.locks: assert isinstance(lock, Lock) assert items[0].roof is None assert isinstance(items[1].roof, Roof) houses = await House.find_many( House.height == 9, fetch_links=True ).to_list() assert len(houses[0].windows) == 1 assert isinstance(houses[0].windows[0].lock, Link) assert isinstance(houses[0].door, Link) await houses[0].fetch_link(House.door) assert isinstance(houses[0].door, Link) houses = await House.find_many( House.door.t > 5, fetch_links=True ).to_list() assert len(houses) == 3 houses = await House.find_many( House.windows.y == 15, fetch_links=True ).to_list() assert len(houses) == 2 houses = await House.find_many( House.height > 5, limit=3, fetch_links=True ).to_list() assert len(houses) == 3 async def test_prefect_count(self, houses): c = await House.find(House.door.t > 5, fetch_links=True).count() assert c == 3 c = await House.find_one(House.door.t > 5, fetch_links=True).count() assert c == 3 async def test_prefetch_find_one(self, house): house = await House.find_one(House.name == "test") for window in house.windows: assert isinstance(window, Link) assert isinstance(house.door, Link) house = await House.find_one(House.name == "test", fetch_links=True) for window in house.windows: assert isinstance(window, Window) assert isinstance(house.door, Door) house = await House.get(house.id, fetch_links=True) for window in house.windows: assert isinstance(window, Window) assert isinstance(house.door, Door) async def test_fetch(self, house): house = await House.find_one(House.name == "test") for window in house.windows: assert isinstance(window, Link) assert isinstance(house.door, Link) await house.fetch_all_links() for window in house.windows: assert isinstance(window, Window) assert isinstance(window.lock, Lock) assert isinstance(house.door, Door) assert isinstance(house.door.window, Window) for lock in house.door.locks: assert isinstance(lock, Lock) house = await House.find_one(House.name == "test") assert isinstance(house.door, Link) await house.fetch_link(House.door) assert isinstance(house.door, Door) assert isinstance(house.door.window, Window) for lock in house.door.locks: assert isinstance(lock, Lock) for window in house.windows: assert isinstance(window, Link) await house.fetch_link(House.windows) for window in house.windows: assert isinstance(window, Window) assert isinstance(window.lock, Lock) async def test_find_by_id_of_the_linked_docs(self, house): house_lst_1 = await House.find( House.door.id == house.door.id ).to_list() house_lst_2 = await House.find( House.door.id == house.door.id, fetch_links=True ).to_list() assert 
len(house_lst_1) == 1 assert len(house_lst_2) == 1 house_1 = await House.find_one(House.door.id == house.door.id) house_2 = await House.find_one( House.door.id == house.door.id, fetch_links=True ) assert house_1 is not None assert house_2 is not None async def test_find_by_id_list_of_the_linked_docs(self, houses): items = ( await House.find(House.height < 3, fetch_links=True) .sort(House.height) .to_list() ) assert len(items) == 3 house_lst_1 = await House.find( Or( House.door.id == items[0].door.id, In(House.door.id, [items[1].door.id, items[2].door.id]), ) ).to_list() house_lst_2 = await House.find( Or( House.door.id == items[0].door.id, In(House.door.id, [items[1].door.id, items[2].door.id]), ), fetch_links=True, ).to_list() assert len(house_lst_1) == 3 assert len(house_lst_2) == 3 async def test_fetch_list_with_some_prefetched(self): docs = [] for i in range(10): doc = DocumentToBeLinked() await doc.save() docs.append(doc) doc_with_links = DocumentWithListOfLinks(links=docs) await doc_with_links.save() doc_with_links = await DocumentWithListOfLinks.get( doc_with_links.id, fetch_links=False ) doc_with_links.links[-1] = await doc_with_links.links[-1].fetch() await doc_with_links.fetch_all_links() for link in doc_with_links.links: assert isinstance(link, DocumentToBeLinked) assert len(doc_with_links.links) == 10 # test order for i in range(10): assert doc_with_links.links[i].id == docs[i].id async def test_text_search(self): doc = DocumentWithTextIndexAndLink( s="hello world", link=LinkDocumentForTextSeacrh(i=1) ) await doc.insert(link_rule=WriteRules.WRITE) doc2 = DocumentWithTextIndexAndLink( s="hi world", link=LinkDocumentForTextSeacrh(i=2) ) await doc2.insert(link_rule=WriteRules.WRITE) docs = await DocumentWithTextIndexAndLink.find( {"$text": {"$search": "hello"}}, fetch_links=True ).to_list() assert len(docs) == 1 async def test_self_nesting_find_parameters(self): self_linked_doc = LongSelfLink() await self_linked_doc.insert(link_rule=WriteRules.WRITE) self_linked_doc.link = self_linked_doc await self_linked_doc.save() self_linked_doc = await LongSelfLink.find_one( nesting_depth=4, fetch_links=True ) assert self_linked_doc.link.link.link.link.id == self_linked_doc.id assert isinstance(self_linked_doc.link.link.link.link.link, Link) self_linked_doc = await LongSelfLink.find_one( nesting_depth=0, fetch_links=True ) assert isinstance(self_linked_doc.link, Link) async def test_nesting_find_parameters(self): back_link_doc = DocumentWithBackLinkForNesting(i=1) await back_link_doc.insert() link_doc = DocumentWithLinkForNesting(link=back_link_doc, s="TEST") await link_doc.insert() doc = await DocumentWithBackLinkForNesting.find_one( DocumentWithBackLinkForNesting.i == 1, fetch_links=True, nesting_depths_per_field={"back_link": 2}, ) assert doc.back_link.link.id == doc.id assert isinstance(doc.back_link.link.back_link, BackLink) class TestReplace: async def test_do_nothing(self, house): house.door.t = 100 await house.replace() new_house = await House.get(house.id, fetch_links=True) assert new_house.door.t == 10 async def test_write(self, house): house.door.t = 100 await house.replace(link_rule=WriteRules.WRITE) new_house = await House.get(house.id, fetch_links=True) assert new_house.door.t == 100 class TestSave: async def test_do_nothing(self, house): house.door.t = 100 await house.save() new_house = await House.get(house.id, fetch_links=True) assert new_house.door.t == 10 async def test_write(self, house): house.door.t = 100 new_window = Window(x=100, y=100, lock=Lock(k=100)) house.windows 
= [new_window] assert new_window.id is None await house.save(link_rule=WriteRules.WRITE) new_house = await House.get(house.id, fetch_links=True) assert new_house.door.t == 100 for window in new_house.windows: assert window.x == 100 assert window.y == 100 assert window.id is not None assert isinstance(window.lock, Lock) assert window.lock.k == 100 assert window.lock.id is not None assert new_window.id is not None class TestDelete: async def test_do_nothing(self, house): await house.delete() door = await Door.get(house.door.id) assert door is not None windows = await Window.all().to_list() assert windows is not None locks = await Lock.all().to_list() assert locks is not None async def test_delete_links(self, house): await house.delete(link_rule=DeleteRules.DELETE_LINKS) door = await Door.get(house.door.id) assert door is None windows = await Window.all().to_list() assert windows == [] locks = await Lock.all().to_list() assert locks == [] class TestOther: async def test_query_composition(self): SYS = {"id", "revision_id"} # Simple fields are initialized using the pydantic model_fields internal property; # such fields are properly isolated when multiple inheritance is involved. assert set(get_model_fields(RootDocument).keys()) == SYS | { "name", "link_root", } assert set(get_model_fields(ADocument).keys()) == SYS | { "name", "link_root", "surname", "link_a", } assert set(get_model_fields(BDocument).keys()) == SYS | { "name", "link_root", "email", "link_b", } # Whereas Document.init_fields() has a bug that prevents proper link inheritance when parent # documents are initialized. Furthermore, for some reason BDocument._link_fields is not deterministic assert set(RootDocument._link_fields.keys()) == {"link_root"} assert set(ADocument._link_fields.keys()) == {"link_root", "link_a"} assert set(BDocument._link_fields.keys()) == {"link_root", "link_b"} async def test_with_projection(self): await UsersAddresses(region_id=Region()).insert( link_rule=WriteRules.WRITE ) res = await UsersAddresses.find_one(fetch_links=True).project( AddressView ) assert res.id is not None assert res.state == "TEST" assert res.city == "TEST" async def test_self_linked(self): await SelfLinked(item=SelfLinked(s="2"), s="1").insert( link_rule=WriteRules.WRITE ) res = await SelfLinked.find_one(fetch_links=True) assert isinstance(res, SelfLinked) assert res.item is None await SelfLinked.delete_all() await SelfLinked( item=SelfLinked( item=SelfLinked(item=SelfLinked(s="4"), s="3"), s="2" ), s="1", ).insert(link_rule=WriteRules.WRITE) res = await SelfLinked.find_one(SelfLinked.s == "1", fetch_links=True) assert isinstance(res, SelfLinked) assert isinstance(res.item, SelfLinked) assert isinstance(res.item.item, SelfLinked) assert isinstance(res.item.item.item, Link) async def test_looped_links(self): await LoopedLinksA( b=LoopedLinksB( a=LoopedLinksA( b=LoopedLinksB( s="4", ), s="3", ), s="2", ), s="1", ).insert(link_rule=WriteRules.WRITE) res = await LoopedLinksA.find_one( LoopedLinksA.s == "1", fetch_links=True ) assert isinstance(res, LoopedLinksA) assert isinstance(res.b, LoopedLinksB) assert isinstance(res.b.a, LoopedLinksA) assert isinstance(res.b.a.b, Link) await LoopedLinksA( b=LoopedLinksB(s="a2"), s="a1", ).insert(link_rule=WriteRules.WRITE) res = await LoopedLinksA.find_one( LoopedLinksA.s == "a1", fetch_links=True ) assert isinstance(res, LoopedLinksA) assert isinstance(res.b, LoopedLinksB) assert res.b.a is None async def test_with_chaining_aggregation(self): region = Region() await region.insert() for i in range(10): await
UsersAddresses(region_id=region).insert() region_2 = Region() await region_2.insert() for i in range(10): await UsersAddresses(region_id=region_2).insert() addresses_count = ( await UsersAddresses.find( UsersAddresses.region_id.id == region.id, fetch_links=True ) .aggregate([{"$count": "count"}]) .to_list() ) assert addresses_count[0] == {"count": 10} async def test_with_chaining_aggregation_and_text_search(self): # ARRANGE NUM_DOCS = 10 NUM_WITH_LOWER = 5 linked_document = LinkDocumentForTextSeacrh(i=1) await linked_document.insert() for i in range(NUM_DOCS): await DocumentWithTextIndexAndLink( s="lower" if i < NUM_WITH_LOWER else "UPPER", link=linked_document, ).insert() linked_document_2 = LinkDocumentForTextSeacrh(i=2) await linked_document_2.insert() for i in range(NUM_DOCS): await DocumentWithTextIndexAndLink( s="lower" if i < NUM_WITH_LOWER else "UPPER", link=linked_document_2, ).insert() # ACT query = DocumentWithTextIndexAndLink.find( {"$text": {"$search": "lower"}}, DocumentWithTextIndexAndLink.link.i == 1, fetch_links=True, ) # Test both aggregation and count methods document_count_aggregation = await query.aggregate( [{"$count": "count"}] ).to_list() document_count = await query.count() # ASSERT assert document_count_aggregation[0] == {"count": NUM_WITH_LOWER} assert document_count == NUM_WITH_LOWER async def test_with_extra_allow(self, houses): res = await House.find(fetch_links=True).to_list() assert get_model_fields(res[0]).keys() == { "id", "revision_id", "windows", "door", "roof", "yards", "name", "height", } res = await House.find_one(fetch_links=True) assert get_model_fields(res).keys() == { "id", "revision_id", "windows", "door", "roof", "yards", "name", "height", } @pytest.fixture() async def link_and_backlink_doc_pair(): back_link_doc = DocumentWithBackLink() await back_link_doc.insert() link_doc = DocumentWithLink(link=back_link_doc) await link_doc.insert() return link_doc, back_link_doc @pytest.fixture() async def list_link_and_list_backlink_doc_pair(): back_link_doc = DocumentWithListBackLink() await back_link_doc.insert() link_doc = DocumentWithListLink(link=[back_link_doc]) await link_doc.insert() return link_doc, back_link_doc class TestFindBackLinks: async def test_prefetch_direct(self, link_and_backlink_doc_pair): link_doc, back_link_doc = link_and_backlink_doc_pair back_link_doc = await DocumentWithBackLink.get( back_link_doc.id, fetch_links=True ) assert back_link_doc.back_link.id == link_doc.id assert back_link_doc.back_link.link.id == back_link_doc.id async def test_prefetch_list(self, list_link_and_list_backlink_doc_pair): link_doc, back_link_doc = list_link_and_list_backlink_doc_pair back_link_doc = await DocumentWithListBackLink.get( back_link_doc.id, fetch_links=True ) assert back_link_doc.back_link[0].id == link_doc.id assert back_link_doc.back_link[0].link[0].id == back_link_doc.id async def test_nesting(self): back_link_doc = DocumentWithBackLinkForNesting(i=1) await back_link_doc.insert() link_doc = DocumentWithLinkForNesting(link=back_link_doc, s="TEST") await link_doc.insert() doc = await DocumentWithLinkForNesting.get( link_doc.id, fetch_links=True ) assert isinstance(doc.link, Link) doc.link = await doc.link.fetch() assert doc.link.i == 1 back_link_doc = await DocumentWithBackLinkForNesting.get( back_link_doc.id, fetch_links=True ) assert ( back_link_doc.back_link.link.back_link.link.back_link.id == link_doc.id ) assert isinstance( back_link_doc.back_link.link.back_link.link.back_link.link, Link ) class TestReplaceBackLinks: async def 
test_do_nothing(self, link_and_backlink_doc_pair): link_doc, back_link_doc = link_and_backlink_doc_pair back_link_doc.back_link.s = "new value" await back_link_doc.replace() new_back_link_doc = await DocumentWithBackLink.get( back_link_doc.id, fetch_links=True ) assert new_back_link_doc.back_link.s == "TEST" async def test_do_nothing_list(self, list_link_and_list_backlink_doc_pair): link_doc, back_link_doc = list_link_and_list_backlink_doc_pair back_link_doc = await DocumentWithListBackLink.get( back_link_doc.id, fetch_links=True ) for lnk in back_link_doc.back_link: lnk.s = "new value" await back_link_doc.replace() new_back_link_doc = await DocumentWithListBackLink.get( back_link_doc.id, fetch_links=True ) for lnk in new_back_link_doc.back_link: assert lnk.s == "TEST" async def test_write(self, link_and_backlink_doc_pair): link_doc, back_link_doc = link_and_backlink_doc_pair back_link_doc = await DocumentWithBackLink.get( back_link_doc.id, fetch_links=True ) back_link_doc.back_link.s = "new value" await back_link_doc.replace(link_rule=WriteRules.WRITE) new_back_link_doc = await DocumentWithBackLink.get( back_link_doc.id, fetch_links=True ) assert new_back_link_doc.back_link.s == "new value" async def test_do_nothing_write_list( self, list_link_and_list_backlink_doc_pair ): link_doc, back_link_doc = list_link_and_list_backlink_doc_pair back_link_doc = await DocumentWithListBackLink.get( back_link_doc.id, fetch_links=True ) for lnk in back_link_doc.back_link: lnk.s = "new value" await back_link_doc.replace(link_rule=WriteRules.WRITE) new_back_link_doc = await DocumentWithListBackLink.get( back_link_doc.id, fetch_links=True ) for lnk in new_back_link_doc.back_link: assert lnk.s == "new value" class TestSaveBackLinks: async def test_do_nothing(self, link_and_backlink_doc_pair): link_doc, back_link_doc = link_and_backlink_doc_pair back_link_doc.back_link.s = "new value" await back_link_doc.save() new_back_link_doc = await DocumentWithBackLink.get( back_link_doc.id, fetch_links=True ) assert new_back_link_doc.back_link.s == "TEST" async def test_do_nothing_list(self, list_link_and_list_backlink_doc_pair): link_doc, back_link_doc = list_link_and_list_backlink_doc_pair back_link_doc = await DocumentWithListBackLink.get( back_link_doc.id, fetch_links=True ) for lnk in back_link_doc.back_link: lnk.s = "new value" await back_link_doc.save() new_back_link_doc = await DocumentWithListBackLink.get( back_link_doc.id, fetch_links=True ) for lnk in new_back_link_doc.back_link: assert lnk.s == "TEST" async def test_write(self, link_and_backlink_doc_pair): link_doc, back_link_doc = link_and_backlink_doc_pair back_link_doc = await DocumentWithBackLink.get( back_link_doc.id, fetch_links=True ) back_link_doc.back_link.s = "new value" await back_link_doc.save(link_rule=WriteRules.WRITE) new_back_link_doc = await DocumentWithBackLink.get( back_link_doc.id, fetch_links=True ) assert new_back_link_doc.back_link.s == "new value" async def test_write_list(self, list_link_and_list_backlink_doc_pair): link_doc, back_link_doc = list_link_and_list_backlink_doc_pair back_link_doc = await DocumentWithListBackLink.get( back_link_doc.id, fetch_links=True ) for lnk in back_link_doc.back_link: lnk.s = "new value" await back_link_doc.save(link_rule=WriteRules.WRITE) new_back_link_doc = await DocumentWithListBackLink.get( back_link_doc.id, fetch_links=True ) for lnk in new_back_link_doc.back_link: assert lnk.s == "new value" class HouseForReversedOrderInit(Document): name: str door: Link["DoorForReversedOrderInit"] owners: 
List[Link["PersonForReversedOrderInit"]] class DoorForReversedOrderInit(Document): height: int = 2 width: int = 1 if IS_PYDANTIC_V2: house: BackLink[HouseForReversedOrderInit] = Field( json_schema_extra={"original_field": "door"} ) else: house: BackLink[HouseForReversedOrderInit] = Field( original_field="door" ) class PersonForReversedOrderInit(Document): name: str if IS_PYDANTIC_V2: house: List[BackLink[HouseForReversedOrderInit]] = Field( json_schema_extra={"original_field": "owners"} ) else: house: List[BackLink[HouseForReversedOrderInit]] = Field( original_field="owners" ) class TestDeleteBackLinks: async def test_do_nothing(self, link_and_backlink_doc_pair): link_doc, back_link_doc = link_and_backlink_doc_pair back_link_doc = await DocumentWithBackLink.get( back_link_doc.id, fetch_links=True ) await back_link_doc.delete() new_link_doc = await DocumentWithLink.get( link_doc.id, fetch_links=True ) assert new_link_doc is not None async def test_do_nothing_list(self, list_link_and_list_backlink_doc_pair): link_doc, back_link_doc = list_link_and_list_backlink_doc_pair back_link_doc = await DocumentWithListBackLink.get( back_link_doc.id, fetch_links=True ) await back_link_doc.delete() new_link_doc = await DocumentWithListLink.get( link_doc.id, fetch_links=True ) assert new_link_doc is not None async def test_delete_links(self, link_and_backlink_doc_pair): link_doc, back_link_doc = link_and_backlink_doc_pair back_link_doc = await DocumentWithBackLink.get( back_link_doc.id, fetch_links=True ) await back_link_doc.delete(link_rule=DeleteRules.DELETE_LINKS) new_link_doc = await DocumentWithLink.get( link_doc.id, fetch_links=True ) assert new_link_doc is None async def test_delete_links_list( self, list_link_and_list_backlink_doc_pair ): link_doc, back_link_doc = list_link_and_list_backlink_doc_pair back_link_doc = await DocumentWithListBackLink.get( back_link_doc.id, fetch_links=True ) await back_link_doc.delete(link_rule=DeleteRules.DELETE_LINKS) new_link_doc = await DocumentWithListLink.get( link_doc.id, fetch_links=True ) assert new_link_doc is None async def test_init_reversed_order(self, db): await init_beanie( database=db, document_models=[ DoorForReversedOrderInit, HouseForReversedOrderInit, PersonForReversedOrderInit, ], ) class TestBuildAggregations: async def test_find_aggregate_without_fetch_links(self, houses): door = await Door.find_one() aggregation = House.find(House.door.id == door.id).aggregate( [ {"$group": {"_id": "$height", "count": {"$sum": 1}}}, ] ) assert aggregation.get_aggregation_pipeline() == [ {"$match": {"door.$id": door.id}}, {"$group": {"_id": "$height", "count": {"$sum": 1}}}, ] result = await aggregation.to_list() assert result == [{"_id": 0, "count": 1}] async def test_find_aggregate_with_fetch_links(self, houses): door = await Door.find_one() aggregation = House.find( House.door.id == door.id, fetch_links=True ).aggregate( [ {"$group": {"_id": "$height", "count": {"$sum": 1}}}, ] ) assert len(aggregation.get_aggregation_pipeline()) == 12 assert aggregation.get_aggregation_pipeline()[10:] == [ {"$match": {"door._id": door.id}}, {"$group": {"_id": "$height", "count": {"$sum": 1}}}, ] result = await aggregation.to_list() assert result == [{"_id": 0, "count": 1}] python-beanie-1.29.0/tests/odm/test_root_models.py000066400000000000000000000012031473701376500222360ustar00rootroot00000000000000from beanie.odm.utils.pydantic import IS_PYDANTIC_V2 from tests.odm.models import DocumentWithRootModelAsAField if IS_PYDANTIC_V2: class TestRootModels: async def 
test_insert(self): doc = DocumentWithRootModelAsAField(pets=["dog", "cat", "fish"]) await doc.insert() new_doc = await DocumentWithRootModelAsAField.get(doc.id) assert new_doc.pets.root == ["dog", "cat", "fish"] collection = DocumentWithRootModelAsAField.get_motor_collection() raw_doc = await collection.find_one({"_id": doc.id}) assert raw_doc["pets"] == ["dog", "cat", "fish"] python-beanie-1.29.0/tests/odm/test_state_management.py000066400000000000000000000356071473701376500232430ustar00rootroot00000000000000import pytest from bson import ObjectId from beanie import PydanticObjectId, WriteRules from beanie.exceptions import StateManagementIsTurnedOff, StateNotSaved from beanie.odm.utils.parsing import parse_obj from beanie.odm.utils.pydantic import IS_PYDANTIC_V2, parse_model from tests.odm.models import ( DocumentWithTurnedOffStateManagement, DocumentWithTurnedOnReplaceObjects, DocumentWithTurnedOnSavePrevious, DocumentWithTurnedOnStateManagement, DocumentWithTurnedOnStateManagementWithCustomId, HouseWithRevision, InternalDoc, LockWithRevision, StateAndDecimalFieldModel, WindowWithRevision, ) @pytest.fixture def state(): if IS_PYDANTIC_V2: internal = InternalDoc().model_dump() else: internal = InternalDoc().dict() return { "num_1": 1, "num_2": 2, "_id": ObjectId(), "internal": internal, } @pytest.fixture def state_without_id(): if IS_PYDANTIC_V2: internal = InternalDoc().model_dump() else: internal = InternalDoc().dict() return { "num_1": 1, "num_2": 2, "internal": internal, } @pytest.fixture def doc_default(state): return parse_obj(DocumentWithTurnedOnStateManagement, state) @pytest.fixture def doc_replace(state): return parse_obj(DocumentWithTurnedOnReplaceObjects, state) @pytest.fixture def doc_previous(state): return parse_obj(DocumentWithTurnedOnSavePrevious, state) @pytest.fixture async def saved_doc_default(doc_default): await doc_default.insert() return doc_default @pytest.fixture async def saved_doc_previous(doc_previous): await doc_previous.insert() return doc_previous @pytest.fixture def windows_not_inserted(): return [ WindowWithRevision(x=10, y=10, lock=LockWithRevision(k=10)), WindowWithRevision(x=11, y=11, lock=LockWithRevision(k=11)), ] @pytest.fixture def house_not_inserted(windows_not_inserted): return HouseWithRevision(windows=windows_not_inserted) @pytest.fixture async def house(house_not_inserted): return await house_not_inserted.insert(link_rule=WriteRules.WRITE) class TestStateManagement: async def test_use_state_management_property(self): assert ( DocumentWithTurnedOnStateManagement.use_state_management() is True ) assert ( DocumentWithTurnedOffStateManagement.use_state_management() is False ) async def test_state_with_decimal_field( self, ): await StateAndDecimalFieldModel(amt=10.01).insert() await StateAndDecimalFieldModel.all().to_list() async def test_parse_object_with_saving_state(self): if IS_PYDANTIC_V2: internal = InternalDoc().model_dump() else: internal = InternalDoc().dict() obj = { "num_1": 1, "num_2": 2, "_id": ObjectId(), "internal": internal, } doc = parse_obj(DocumentWithTurnedOnStateManagement, obj) assert doc.get_saved_state() == obj assert doc.get_previous_saved_state() is None class TestSaveState: async def test_save_state(self): doc = DocumentWithTurnedOnStateManagement( num_1=1, num_2=2, internal=InternalDoc(num=1, string="s") ) assert doc.get_saved_state() is None assert doc.get_previous_saved_state() is None doc.id = PydanticObjectId() doc._save_state() assert doc.get_saved_state() == { "num_1": 1, "num_2": 2, "internal": {"num": 1, 
"string": "s", "lst": [1, 2, 3, 4, 5]}, "_id": doc.id, } assert doc.get_previous_saved_state() is None doc.num_1 = 2 doc.num_2 = 3 doc._save_state() assert doc.get_saved_state() == { "num_1": 2, "num_2": 3, "internal": {"num": 1, "string": "s", "lst": [1, 2, 3, 4, 5]}, "_id": doc.id, } assert doc.get_previous_saved_state() is None async def test_save_state_with_custom_id_type(self): doc = DocumentWithTurnedOnStateManagementWithCustomId( id=0, num_1=1, num_2=2, ) with pytest.raises(StateNotSaved): await doc.save_changes() doc.num_1 = 2 with pytest.raises(StateNotSaved): await doc.save_changes() async def test_save_state_with_previous(self): doc = DocumentWithTurnedOnSavePrevious( num_1=1, num_2=2, internal=InternalDoc(num=1, string="s") ) assert doc.get_saved_state() is None assert doc.get_previous_saved_state() is None doc.id = PydanticObjectId() doc._save_state() assert doc.get_saved_state() == { "num_1": 1, "num_2": 2, "internal": {"num": 1, "string": "s", "lst": [1, 2, 3, 4, 5]}, "_id": doc.id, } assert doc.get_previous_saved_state() is None doc.num_1 = 2 doc.num_2 = 3 doc._save_state() assert doc.get_saved_state() == { "num_1": 2, "num_2": 3, "internal": {"num": 1, "string": "s", "lst": [1, 2, 3, 4, 5]}, "_id": doc.id, } assert doc.get_previous_saved_state() == { "num_1": 1, "num_2": 2, "internal": {"num": 1, "string": "s", "lst": [1, 2, 3, 4, 5]}, "_id": doc.id, } class TestIsChanged: async def test_state_management_off(self): doc = DocumentWithTurnedOffStateManagement(num_1=1, num_2=2) with pytest.raises(StateManagementIsTurnedOff): doc.is_changed async def test_state_management_on_not_changed(self): doc = DocumentWithTurnedOnStateManagement( num_1=1, num_2=2, internal=InternalDoc() ) with pytest.raises(StateNotSaved): doc.is_changed async def test_state_management_on_changed(self, doc_default): assert doc_default.is_changed is False doc_default.num_1 = 10 assert doc_default.is_changed is True class TestHasChanged: async def test_state_management_off(self): doc = DocumentWithTurnedOffStateManagement(num_1=1, num_2=2) with pytest.raises(StateManagementIsTurnedOff): doc.has_changed async def test_state_management_on_not_changed(self): doc = DocumentWithTurnedOnStateManagement( num_1=1, num_2=2, internal=InternalDoc() ) with pytest.raises(StateNotSaved): doc.has_changed async def test_save_previous_on_not_changed(self): doc = DocumentWithTurnedOnSavePrevious( num_1=1, num_2=2, internal=InternalDoc() ) with pytest.raises(StateNotSaved): doc.has_changed async def test_save_previous_on_changed(self, doc_previous): assert doc_previous.has_changed is False doc_previous.num_1 = 10 doc_previous._save_state() assert doc_previous.has_changed is True class TestGetChanges: async def test_valid(self, doc_default): doc_default.internal.num = 1000 doc_default.internal.string = "new_value" doc_default.internal.lst.append(100) assert doc_default.get_changes() == { "internal.num": 1000, "internal.string": "new_value", "internal.lst": [1, 2, 3, 4, 5, 100], } doc_default._save_state() assert doc_default.get_changes() == {} async def test_whole(self, doc_default): doc_default.internal = {"num": 1000, "string": "new_value"} assert doc_default.get_changes() == { "internal.num": 1000, "internal.string": "new_value", } async def test_replace(self, doc_replace): doc_replace.internal.num = 1000 doc_replace.internal.string = "new_value" assert doc_replace.get_changes() == { "internal": { "num": 1000, "string": "new_value", "lst": [1, 2, 3, 4, 5], } } async def test_replace_whole(self, doc_replace): 
doc_replace.internal = {"num": 1000, "string": "new_value"} assert doc_replace.get_changes() == { "internal": { "num": 1000, "string": "new_value", } } class TestGetPreviousChanges: async def test_get_previous_changes(self, doc_previous): doc_previous.internal.num = 1000 doc_previous.internal.string = "new_value" doc_previous.internal.lst.append(100) assert doc_previous.get_previous_changes() == {} doc_previous._save_state() assert doc_previous.get_previous_changes() == { "internal.num": 1000, "internal.string": "new_value", "internal.lst": [1, 2, 3, 4, 5, 100], } class TestRollback: async def test_rollback(self, doc_default, state): doc_default.num_1 = 100 doc_default.rollback() assert doc_default.num_1 == state["num_1"] class TestQueries: async def test_save_changes(self, saved_doc_default): assert saved_doc_default.get_saved_state()["num_1"] == 1 assert saved_doc_default.get_previous_saved_state() is None saved_doc_default.num_1 = 10000 saved_doc_default.internal.change_private() assert ( saved_doc_default.internal.get_private() == "PRIVATE_CHANGED" ) await saved_doc_default.save_changes() assert saved_doc_default.get_saved_state()["num_1"] == 10000 assert saved_doc_default.get_previous_saved_state() is None assert ( saved_doc_default.internal.get_private() == "PRIVATE_CHANGED" ) new_doc = await DocumentWithTurnedOnStateManagement.get( saved_doc_default.id ) assert new_doc.num_1 == 10000 async def test_save_changes_previous(self, saved_doc_previous): assert saved_doc_previous.get_saved_state()["num_1"] == 1 assert saved_doc_previous.get_previous_saved_state()["num_1"] == 1 saved_doc_previous.num_1 = 10000 saved_doc_previous.internal.change_private() assert ( saved_doc_previous.internal.get_private() == "PRIVATE_CHANGED" ) await saved_doc_previous.save_changes() assert saved_doc_previous.get_saved_state()["num_1"] == 10000 assert saved_doc_previous.get_previous_saved_state()["num_1"] == 1 assert ( saved_doc_previous.internal.get_private() == "PRIVATE_CHANGED" ) new_doc = await DocumentWithTurnedOnSavePrevious.get( saved_doc_previous.id ) assert new_doc.num_1 == 10000 async def test_fetch_save_changes(self, house): data = await HouseWithRevision.all(fetch_links=True).to_list() house = data[0] window_0 = house.windows[0] window_0.x = 10000 window_0.lock.k = 10000 await window_0.save_changes() async def test_find_one(self, saved_doc_default, state): new_doc = await DocumentWithTurnedOnStateManagement.get( saved_doc_default.id ) assert new_doc.get_saved_state() == state assert new_doc.get_previous_saved_state() is None new_doc = await DocumentWithTurnedOnStateManagement.find_one( DocumentWithTurnedOnStateManagement.id == saved_doc_default.id ) assert new_doc.get_saved_state() == state assert new_doc.get_previous_saved_state() is None async def test_find_many(self): docs = [] for i in range(10): docs.append( DocumentWithTurnedOnStateManagement( num_1=i, num_2=i + 1, internal=InternalDoc() ) ) await DocumentWithTurnedOnStateManagement.insert_many(docs) found_docs = await DocumentWithTurnedOnStateManagement.find( DocumentWithTurnedOnStateManagement.num_1 > 4 ).to_list() for doc in found_docs: assert doc.get_saved_state() is not None assert doc.get_previous_saved_state() is None async def test_insert(self, state_without_id): doc = parse_model( DocumentWithTurnedOnStateManagement, state_without_id ) assert doc.get_saved_state() is None await doc.insert() new_state = doc.get_saved_state() assert new_state["_id"] is not None del new_state["_id"] assert new_state == state_without_id async def 
test_replace(self, saved_doc_default): saved_doc_default.num_1 = 100 await saved_doc_default.replace() assert saved_doc_default.get_saved_state()["num_1"] == 100 assert saved_doc_default.get_previous_saved_state() is None async def test_replace_save_previous(self, saved_doc_previous): saved_doc_previous.num_1 = 100 await saved_doc_previous.replace() assert saved_doc_previous.get_saved_state()["num_1"] == 100 assert saved_doc_previous.get_previous_saved_state()["num_1"] == 1 async def test_exclude_revision_id(self, saved_doc_previous): saved_doc_previous.num_1 = 100 await saved_doc_previous.replace() assert saved_doc_previous.get_saved_state()["num_1"] == 100 assert saved_doc_previous.get_previous_saved_state()["num_1"] == 1 assert ( saved_doc_previous.get_saved_state().get("revision_id") is None ) assert ( saved_doc_previous.get_saved_state().get( "previous_revision_id" ) is None ) assert ( saved_doc_previous.get_previous_saved_state().get( "revision_id" ) is None ) assert ( saved_doc_previous.get_previous_saved_state().get( "previous_revision_id" ) is None ) python-beanie-1.29.0/tests/odm/test_timesries_collection.py000066400000000000000000000024021473701376500241310ustar00rootroot00000000000000import pytest from beanie import init_beanie from beanie.exceptions import MongoDBVersionError from tests.odm.models import DocumentWithTimeseries async def test_timeseries_collection(db): build_info = await db.command({"buildInfo": 1}) mongo_version = build_info["version"] major_version = int(mongo_version.split(".")[0]) if major_version < 5: with pytest.raises(MongoDBVersionError): await init_beanie( database=db, document_models=[DocumentWithTimeseries] ) if major_version >= 5: await init_beanie( database=db, document_models=[DocumentWithTimeseries] ) info = await db.command( { "listCollections": 1, "filter": {"name": "DocumentWithTimeseries"}, } ) assert info["cursor"]["firstBatch"][0] == { "name": "DocumentWithTimeseries", "type": "timeseries", "options": { "expireAfterSeconds": 2, "timeseries": { "timeField": "ts", "granularity": "seconds", "bucketMaxSpanSeconds": 3600, }, }, "info": {"readOnly": False}, } python-beanie-1.29.0/tests/odm/test_typing_utils.py000066400000000000000000000026111473701376500224460ustar00rootroot00000000000000from typing import Optional, Union import pytest from pydantic import BaseModel from typing_extensions import Annotated from beanie import Document, Link from beanie.odm.fields import Indexed from beanie.odm.utils.pydantic import get_model_fields from beanie.odm.utils.typing import extract_id_class, get_index_attributes class Lock(Document): k: int class TestTyping: def test_extract_id_class(self): # Union assert extract_id_class(Union[str, int]) is str assert extract_id_class(Union[str, None]) is str assert extract_id_class(Union[str, None, int]) is str # Optional assert extract_id_class(Optional[str]) is str # Link assert extract_id_class(Link[Lock]) == Lock @pytest.mark.parametrize( "type,result", ( (str, None), (Indexed(str), (1, {})), (Indexed(str, "text", unique=True), ("text", {"unique": True})), (Annotated[str, Indexed()], (1, {})), ( Annotated[str, "other metadata", Indexed(unique=True)], (1, {"unique": True}), ), (Annotated[str, "other metadata"], None), ), ) def test_get_index_attributes(self, type, result): class Foo(BaseModel): bar: type field = get_model_fields(Foo)["bar"] assert get_index_attributes(field) == result python-beanie-1.29.0/tests/odm/test_views.py000066400000000000000000000016461473701376500210600ustar00rootroot00000000000000from 
tests.odm.views import ViewForTest, ViewForTestWithLink class TestViews: async def test_simple(self, documents): await documents(number=15) results = await ViewForTest.all().to_list() assert len(results) == 6 async def test_aggregate(self, documents): await documents(number=15) results = await ViewForTest.aggregate( [ {"$set": {"test_field": 1}}, {"$match": {"$expr": {"$lt": ["$number", 12]}}}, ] ).to_list() assert len(results) == 3 assert results[0]["test_field"] == 1 async def test_link(self, documents_with_links): await documents_with_links() results = await ViewForTestWithLink.all().to_list() for document in results: await document.fetch_all_links() for i, document in enumerate(results): assert document.link.test_int == i python-beanie-1.29.0/tests/odm/views.py000066400000000000000000000012561473701376500200160ustar00rootroot00000000000000from beanie.odm.fields import Link from beanie.odm.views import View from tests.odm.models import DocumentTestModel, DocumentTestModelWithLink class ViewForTest(View): number: int string: str class Settings: view_name = "test_view" source = DocumentTestModel pipeline = [ {"$match": {"$expr": {"$gt": ["$test_int", 8]}}}, {"$project": {"number": "$test_int", "string": "$test_str"}}, ] class ViewForTestWithLink(View): link: Link[DocumentTestModel] class Settings: view_name = "test_view_with_link" source = DocumentTestModelWithLink pipeline = [{"$set": {"link": "$test_link"}}] python-beanie-1.29.0/tests/test_beanie.py000066400000000000000000000005301473701376500203560ustar00rootroot00000000000000from pathlib import Path import toml from beanie import __version__ def parse_version_from_pyproject(): pyproject = Path(__file__).parent.parent / "pyproject.toml" toml_data = toml.loads(pyproject.read_text()) return toml_data["project"]["version"] def test_version(): assert __version__ == parse_version_from_pyproject() python-beanie-1.29.0/tests/typing/000077500000000000000000000000001473701376500170365ustar00rootroot00000000000000python-beanie-1.29.0/tests/typing/__init__.py000066400000000000000000000000001473701376500211350ustar00rootroot00000000000000python-beanie-1.29.0/tests/typing/aggregation.py000066400000000000000000000011131473701376500216730ustar00rootroot00000000000000from typing import Any, Dict, List from tests.typing.models import ProjectionTest, Test async def aggregate() -> List[Dict[str, Any]]: result = await Test.aggregate([]).to_list() result_2 = await Test.find().aggregate([]).to_list() return result or result_2 async def aggregate_with_projection() -> List[ProjectionTest]: result = ( await Test.find() .aggregate([], projection_model=ProjectionTest) .to_list() ) result_2 = await Test.aggregate( [], projection_model=ProjectionTest ).to_list() return result or result_2 python-beanie-1.29.0/tests/typing/decorators.py000066400000000000000000000045061473701376500215620ustar00rootroot00000000000000from typing import Any, Callable, Coroutine from typing_extensions import Protocol, TypeAlias, assert_type from beanie import Document from beanie.odm.actions import EventTypes, wrap_with_actions from beanie.odm.utils.self_validation import validate_self_before from beanie.odm.utils.state import ( previous_saved_state_needed, save_state_after, saved_state_needed, ) def sync_func(doc_self: Document, arg1: str, arg2: int, /) -> Document: """ Models `Document` sync method that expects self """ raise NotImplementedError SyncFunc: TypeAlias = Callable[[Document, str, int], Document] async def async_func(doc_self: Document, arg1: str, arg2: int, /) -> Document: 
""" Models `Document` async method that expects self """ raise NotImplementedError AsyncFunc: TypeAlias = Callable[ [Document, str, int], Coroutine[Any, Any, Document] ] def test_wrap_with_actions_preserves_signature() -> None: assert_type(async_func, AsyncFunc) assert_type(wrap_with_actions(EventTypes.SAVE)(async_func), AsyncFunc) def test_save_state_after_preserves_signature() -> None: assert_type(async_func, AsyncFunc) assert_type(save_state_after(async_func), AsyncFunc) def test_validate_self_before_preserves_signature() -> None: assert_type(async_func, AsyncFunc) assert_type(validate_self_before(async_func), AsyncFunc) def test_saved_state_needed_preserves_signature() -> None: assert_type(async_func, AsyncFunc) assert_type(saved_state_needed(async_func), AsyncFunc) assert_type(sync_func, SyncFunc) assert_type(saved_state_needed(sync_func), SyncFunc) def test_previous_saved_state_needed_preserves_signature() -> None: assert_type(async_func, AsyncFunc) assert_type(previous_saved_state_needed(async_func), AsyncFunc) assert_type(sync_func, SyncFunc) assert_type(previous_saved_state_needed(sync_func), SyncFunc) class ExpectsDocumentSelf(Protocol): def __call__(self, doc_self: Document, /) -> Any: ... def test_document_insert_expects_self() -> None: test_insert: ExpectsDocumentSelf = Document.insert # noqa: F841 def test_document_save_expects_self() -> None: test_insert: ExpectsDocumentSelf = Document.save # noqa: F841 def test_document_replace_expects_self() -> None: test_insert: ExpectsDocumentSelf = Document.replace # noqa: F841 python-beanie-1.29.0/tests/typing/find.py000066400000000000000000000016111473701376500203270ustar00rootroot00000000000000from typing import List, Optional from tests.typing.models import ProjectionTest, Test async def find_many() -> List[Test]: return await Test.find().to_list() async def find_many_with_projection() -> List[ProjectionTest]: return await Test.find().project(projection_model=ProjectionTest).to_list() async def find_many_generator() -> List[Test]: docs: List[Test] = [] async for doc in Test.find(): docs.append(doc) return docs async def find_many_generator_with_projection() -> List[ProjectionTest]: docs: List[ProjectionTest] = [] async for doc in Test.find().project(projection_model=ProjectionTest): docs.append(doc) return docs async def find_one() -> Optional[Test]: return await Test.find_one() async def find_one_with_projection() -> Optional[ProjectionTest]: return await Test.find_one().project(projection_model=ProjectionTest) python-beanie-1.29.0/tests/typing/models.py000066400000000000000000000002701473701376500206720ustar00rootroot00000000000000from pydantic import BaseModel from beanie import Document class Test(Document): foo: str bar: str baz: str class ProjectionTest(BaseModel): foo: str bar: int