pax_global_header00006660000000000000000000000064147666371460014536gustar00rootroot0000000000000052 comment=a81b5e931fccfd68a297f09682647f96f5c49003 python-advanced-alchemy-1.0.1/000077500000000000000000000000001476663714600162415ustar00rootroot00000000000000python-advanced-alchemy-1.0.1/.github/000077500000000000000000000000001476663714600176015ustar00rootroot00000000000000python-advanced-alchemy-1.0.1/.github/CODEOWNERS000066400000000000000000000007651476663714600212040ustar00rootroot00000000000000# Code owner settings for `litestar-org` # @maintainers should be assigned to all reviews. # Most specific assignment takes precedence though, so if you add a more specific thing than the `*` glob, you must also add @maintainers # For more info about code owners see https://docs.github.com/en/repositories/managing-your-repositorys-settings-and-features/customizing-your-repository/about-code-owners#codeowners-file-example # Global Assignment * @litestar-org/maintainers @litestar-org/members python-advanced-alchemy-1.0.1/.github/dependabot.yaml000066400000000000000000000001651476663714600225740ustar00rootroot00000000000000version: 2 updates: - package-ecosystem: "github-actions" directory: "/" schedule: interval: "daily" python-advanced-alchemy-1.0.1/.github/labeler.yml000066400000000000000000000074611476663714600217420ustar00rootroot00000000000000version: v1 labels: # -- types ------------------------------------------------------------------- - label: 'type/feat' sync: true matcher: title: '^feat(\([^)]+\))?!?:' - label: 'type/bug' sync: true matcher: title: '^fix(\([^)]+\))?!?:' - label: 'type/docs' sync: true matcher: title: '^docs(\([^)]+\))?:' - label: 'Breaking ๐Ÿ”จ' sync: true matcher: title: '^(feat|fix)(\([^)]+\))?!:' # -- distinct areas ---------------------------------------------------------- - label: 'area/docs' sync: true matcher: files: any: ['docs/*', 'docs/**/*', '**/*.rst', '**/*.md'] - label: 'area/unit-tests' sync: true matcher: files: any: 
['test/unit/*', 'test/unit/**/*', 'tests/*.py', 'tests/fixtures/**/*'] - label: 'area/integration-tests' sync: true matcher: files: any: ['test/integration/*', 'test/integration/**/*'] - label: 'area/example-apps' sync: true matcher: files: any: ['examples/**/*', 'examples/**/*'] - label: 'area/docs' sync: true matcher: files: any: ['docs/*', 'docs/**/*', '**/*.rst', '**/*.md'] - label: 'area/ci' sync: true matcher: files: any: ['.github/**/*', 'codecov.yml', 'pre-commit-config.yaml', 'sonar-project.properties', '*.yaml', '*.yml'] - label: 'area/dependencies' sync: true matcher: files: any: ['pyproject.toml', '*.lock'] - label: 'area/repositories' sync: true matcher: files: ['advanced_alchemy/repository/**/*'] - label: 'area/services' sync: true matcher: files: ['advanced_alchemy/service/**/*'] - label: 'area/base' sync: true matcher: files: ['advanced_alchemy/base.py'] - label: 'area/exceptions' sync: true matcher: files: ['advanced_alchemy/exceptions.py'] - label: 'area/filters' sync: true matcher: files: ['advanced_alchemy/filters.py'] - label: 'area/operations' sync: true matcher: files: ['advanced_alchemy/operations.py'] - label: 'area/mixins' sync: true matcher: files: ['advanced_alchemy/mixins/**/*'] - label: 'area/config' sync: true matcher: files: ['advanced_alchemy/config/**/*'] - label: 'area/alembic' sync: true matcher: files: ['advanced_alchemy/alembic/**/*'] - label: 'area/flask' sync: true matcher: files: ['advanced_alchemy/extensions/flask/**/*','advanced_alchemy/extensions/flask.py'] - label: 'area/sanic' sync: true matcher: files: ['advanced_alchemy/extensions/sanic/**/*','advanced_alchemy/extensions/sanic.py'] - label: 'area/fastapi' sync: true matcher: files: ['advanced_alchemy/extensions/starlette/**/*','advanced_alchemy/extensions/starlette.py'] - label: 'area/litestar' sync: true matcher: files: ['advanced_alchemy/extensions/litestar/**/*'] - label: 'area/private-api' sync: true matcher: files: any: ['advanced_alchemy/_*.py', 
'advanced_alchemy/*/_*.py', 'advanced_alchemy/_*/**/*.py'] - label: 'area/tools' sync: true matcher: files: ['tools/**/*'] # -- Size Based Labels ------------------------------------------------------- - label: 'size: small' sync: true matcher: files: count: gte: 1 lte: 10 - label: 'size: medium' sync: true matcher: files: count: gte: 10 lte: 25 - label: 'size: large' sync: true matcher: files: count: gte: 26 # -- Merge Checks -------------------------------------------------------------- checks: - context: 'No Merge check' description: "Disable merging when 'do not merge' label is set" labels: none: ['do not merge'] python-advanced-alchemy-1.0.1/.github/workflows/000077500000000000000000000000001476663714600216365ustar00rootroot00000000000000python-advanced-alchemy-1.0.1/.github/workflows/cd.yml000066400000000000000000000010741476663714600227510ustar00rootroot00000000000000name: Continuous Deployment on: push: tags: - "v*.*.*" jobs: generate-changelog: name: Generate changelog runs-on: ubuntu-22.04 outputs: release_body: ${{ steps.git-cliff.outputs.content }} steps: - name: Checkout uses: actions/checkout@v4 with: fetch-depth: 0 - name: Generate a changelog uses: orhun/git-cliff-action@main id: git-cliff with: config: pyproject.toml args: -vv --latest --strip header env: OUTPUT: docs/CHANGELOG.rst python-advanced-alchemy-1.0.1/.github/workflows/ci.yml000066400000000000000000000122721476663714600227600ustar00rootroot00000000000000name: Tests And Linting on: pull_request: push: branches: - main concurrency: group: test-${{ github.head_ref }} cancel-in-progress: true jobs: validate: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - name: Install uv uses: astral-sh/setup-uv@v5 - name: Set up Python run: uv python install 3.12 - name: Create virtual environment run: uv sync --all-extras --dev - name: Install Pre-Commit hooks run: uv run pre-commit install - name: Load cached Pre-Commit Dependencies id: cached-pre-commit-dependencies uses: actions/cache@v4 
with: path: ~/.cache/pre-commit/ key: pre-commit|${{ env.pythonLocation }}|${{ hashFiles('.pre-commit-config.yaml') }} - name: Execute Pre-Commit run: uv run pre-commit run --show-diff-on-failure --color=always --all-files mypy: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - name: Install uv uses: astral-sh/setup-uv@v5 - name: Set up Python run: uv python install 3.13 - name: Install dependencies run: uv sync --all-extras --dev - name: Run mypy run: uv run mypy pyright: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - name: Install uv uses: astral-sh/setup-uv@v5 - name: Set up Python run: uv python install 3.13 - name: Install dependencies run: uv sync --all-extras --dev - name: Run pyright run: uv run pyright # # TODO(cofin) # # AttributeError: 'SuiteRequirements' object has no attribute 'computed_reflects_normally' slotscheck: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - name: Install uv uses: astral-sh/setup-uv@v5 - name: Set up Python run: uv python install 3.13 - name: Install dependencies run: uv sync --all-extras --dev - name: Run slotscheck run: uv run slotscheck -m advanced_alchemy.config -m advanced_alchemy.repository -m advanced_alchemy.service -m advanced_alchemy.extensions -m advanced_alchemy.base -m advanced_alchemy.types -m advanced_alchemy.operations test: name: "test (${{ matrix.python-version }}" strategy: fail-fast: true matrix: python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"] uses: ./.github/workflows/test.yml with: coverage: ${{ matrix.python-version == '3.13' }} python-version: ${{ matrix.python-version }} sonar: needs: - test - validate if: github.event.pull_request.head.repo.fork == false && github.repository_owner == 'litestar-org' runs-on: ubuntu-latest steps: - name: Check out repository uses: actions/checkout@v4 with: fetch-depth: 0 # Shallow clones should be disabled for a better relevancy of analysis - name: Download Artifacts uses: actions/download-artifact@v4 with: name: coverage-xml - 
name: SonarCloud Scan uses: SonarSource/sonarqube-scan-action@v5 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }} codecov: needs: - test - validate runs-on: ubuntu-latest permissions: security-events: write steps: - uses: actions/checkout@v4 - uses: actions/setup-python@v5 with: python-version: "3.13" - name: Download Artifacts uses: actions/download-artifact@v4 with: name: coverage-xml path: coverage.xml merge-multiple: true # - name: Combine coverage files # run: | # python -Im pip install coverage covdefaults # python -Im coverage combine # python -Im coverage xml -i # - name: Fix coverage file name # run: sed -i "s/home\/runner\/work\/advanced-alchemy\/advanced-alchemy/github\/workspace/g" coverage.xml - name: Upload coverage reports to Codecov uses: codecov/codecov-action@v5 with: files: coverage.xml token: ${{ secrets.CODECOV_TOKEN }} slug: litestar-org/advanced-alchemy build-docs: needs: - validate if: github.event_name == 'pull_request' runs-on: ubuntu-latest steps: - name: Check out repository uses: actions/checkout@v4 - name: Install Microsoft ODBC run: sudo ACCEPT_EULA=Y apt-get install msodbcsql18 -y - name: Install uv uses: astral-sh/setup-uv@v5 - name: Set up Python run: uv python install 3.13 - name: Install dependencies run: uv sync --all-extras --dev - name: Build docs run: uv run make docs - name: Check docs links env: LITESTAR_DOCS_IGNORE_MISSING_EXAMPLE_OUTPUT: 1 run: uv run make docs-linkcheck - name: Save PR number run: | echo "${{ github.event.number }}" > .pr_number - name: Upload artifact uses: actions/upload-artifact@v4 with: name: docs-preview path: | docs/_build/html .pr_number include-hidden-files: true python-advanced-alchemy-1.0.1/.github/workflows/codeql.yml000066400000000000000000000103711476663714600236320ustar00rootroot00000000000000# For most projects, this workflow file will not need changing; you simply need # to commit it to your repository. 
# # You may wish to alter this file to override the set of languages analyzed, # or to provide custom queries or build logic. # # ******** NOTE ******** # We have attempted to detect the languages in your repository. Please check # the `language` matrix defined below to confirm you have the correct set of # supported CodeQL languages. # name: "CodeQL" on: push: branches: [ "main" ] pull_request: branches: [ "main" ] schedule: - cron: '43 0 * * 0' jobs: analyze: name: Analyze (${{ matrix.language }}) # Runner size impacts CodeQL analysis time. To learn more, please see: # - https://gh.io/recommended-hardware-resources-for-running-codeql # - https://gh.io/supported-runners-and-hardware-resources # - https://gh.io/using-larger-runners (GitHub.com only) # Consider using larger runners or machines with greater resources for possible analysis time improvements. runs-on: ${{ (matrix.language == 'swift' && 'macos-latest') || 'ubuntu-latest' }} timeout-minutes: ${{ (matrix.language == 'swift' && 120) || 360 }} permissions: # required for all workflows security-events: write # required to fetch internal or private CodeQL packs packages: read # only required for workflows in private repositories actions: read contents: read strategy: fail-fast: false matrix: include: - language: python build-mode: none # CodeQL supports the following values keywords for 'language': 'c-cpp', 'csharp', 'go', 'java-kotlin', 'javascript-typescript', 'python', 'ruby', 'swift' # Use `c-cpp` to analyze code written in C, C++ or both # Use 'java-kotlin' to analyze code written in Java, Kotlin or both # Use 'javascript-typescript' to analyze code written in JavaScript, TypeScript or both # To learn more about changing the languages that are analyzed or customizing the build mode for your analysis, # see https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/customizing-your-advanced-setup-for-code-scanning. 
# If you are analyzing a compiled language, you can modify the 'build-mode' for that language to customize how # your codebase is analyzed, see https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/codeql-code-scanning-for-compiled-languages steps: - name: Checkout repository uses: actions/checkout@v4 # Initializes the CodeQL tools for scanning. - name: Initialize CodeQL uses: github/codeql-action/init@v3 with: languages: ${{ matrix.language }} build-mode: ${{ matrix.build-mode }} # If you wish to specify custom queries, you can do so here or in a config file. # By default, queries listed here will override any specified in a config file. # Prefix the list here with "+" to use these queries and those in the config file. # For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs # queries: security-extended,security-and-quality # If the analyze step fails for one of the languages you are analyzing with # "We were unable to automatically build your code", modify the matrix above # to set the build mode to "manual" for that language. Then modify this step # to build your code. # โ„น๏ธ Command-line programs to run using the OS shell. 
# ๐Ÿ“š See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun - if: matrix.build-mode == 'manual' run: | echo 'If you are using a "manual" build mode for one or more of the' \ 'languages you are analyzing, replace this with the commands to build' \ 'your code, for example:' echo ' make bootstrap' echo ' make release' exit 1 - name: Perform CodeQL Analysis uses: github/codeql-action/analyze@v3 with: category: "/language:${{matrix.language}}" python-advanced-alchemy-1.0.1/.github/workflows/docs-preview.yml000066400000000000000000000044561476663714600250010ustar00rootroot00000000000000name: Deploy Documentation Preview on: workflow_run: workflows: [Tests And Linting] types: [completed] jobs: deploy: if: ${{ github.event.workflow_run.conclusion == 'success' && github.event.workflow_run.event == 'pull_request' }} runs-on: ubuntu-latest permissions: issues: write pull-requests: write steps: - name: Check out repository uses: actions/checkout@v4 - name: Download artifact uses: dawidd6/action-download-artifact@v9 with: workflow_conclusion: success run_id: ${{ github.event.workflow_run.id }} path: docs-preview name: docs-preview - name: Set PR number run: echo "PR_NUMBER=$(cat docs-preview/.pr_number)" >> $GITHUB_ENV - name: Deploy docs preview uses: JamesIves/github-pages-deploy-action@v4 with: folder: docs-preview/docs/_build/html token: ${{ secrets.DOCS_PREVIEW_DEPLOY_TOKEN }} repository-name: litestar-org/advanced-alchemy-docs-preview clean: false target-folder: ${{ env.PR_NUMBER }} branch: gh-pages - uses: actions/github-script@v7 env: PR_NUMBER: ${{ env.PR_NUMBER }} with: script: | const issue_number = process.env.PR_NUMBER const body = "Documentation preview will be available shortly at https://litestar-org.github.io/advanced-alchemy-docs-preview/" + issue_number const opts = github.rest.issues.listComments.endpoint.merge({ owner: context.repo.owner, repo: context.repo.repo, issue_number: issue_number, }); const 
comments = await github.paginate(opts) for (const comment of comments) { if (comment.user.id === 41898282 && comment.body === body) { await github.rest.issues.deleteComment({ owner: context.repo.owner, repo: context.repo.repo, comment_id: comment.id }) } } await github.rest.issues.createComment({ owner: context.repo.owner, repo: context.repo.repo, issue_number: issue_number, body: body, }) python-advanced-alchemy-1.0.1/.github/workflows/docs.yml000066400000000000000000000024641476663714600233170ustar00rootroot00000000000000name: Documentation Building on: release: types: [published] push: branches: - main # Allows you to run this workflow manually from the Actions tab workflow_dispatch: jobs: build: permissions: contents: write pages: write id-token: write runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - name: Install uv uses: astral-sh/setup-uv@v5 - name: Set up Python run: uv python install 3.13 - name: Install dependencies run: uv sync --all-extras --dev - name: Build Release Documentation run: uv run python tools/build_docs.py docs-build --version latest if: github.event_name == 'release' - name: Build Documentation run: uv run python tools/build_docs.py docs-build --version latest if: github.event_name != 'release' - name: Upload artifact uses: actions/upload-pages-artifact@v3 with: path: docs-build/ deploy: permissions: contents: write pages: write id-token: write environment: name: github-pages url: ${{ steps.deployment.outputs.page_url }} runs-on: ubuntu-latest needs: build steps: - name: Deploy to GitHub Pages id: deployment uses: actions/deploy-pages@v4 python-advanced-alchemy-1.0.1/.github/workflows/pr-labeler.yml000066400000000000000000000025531476663714600244130ustar00rootroot00000000000000name: "Pull Request Labeler" on: pull_request_target: jobs: apply-labels: permissions: contents: read pull-requests: write checks: write statuses: write runs-on: ubuntu-latest steps: - uses: fuxingloh/multi-labeler@v4 with: github-token: "${{ 
secrets.GITHUB_TOKEN }}" distinguish-pr-origin: needs: apply-labels if: ${{ always() }} permissions: pull-requests: write runs-on: ubuntu-latest steps: - uses: actions/github-script@v7 with: github-token: ${{secrets.GITHUB_TOKEN}} script: | const maintainers = [ 'JacobCoffee', 'provinzkraut', 'cofin','Alc-Alc', 'dependabot[bot]', 'all-contributors[bot]' ] if (maintainers.includes(context.payload.sender.login)) { github.rest.issues.addLabels({ issue_number: context.issue.number, owner: context.repo.owner, repo: context.repo.repo, labels: ['pr/internal'] }) } else { github.rest.issues.addLabels({ issue_number: context.issue.number, owner: context.repo.owner, repo: context.repo.repo, labels: ['pr/external', 'Triage Required :hospital:'] }) } python-advanced-alchemy-1.0.1/.github/workflows/pr-title.yml000066400000000000000000000005321476663714600241210ustar00rootroot00000000000000name: "Lint PR Title" on: pull_request_target: types: - opened - edited - synchronize permissions: pull-requests: read jobs: main: name: Validate PR title runs-on: ubuntu-latest steps: - uses: amannn/action-semantic-pull-request@v5 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} python-advanced-alchemy-1.0.1/.github/workflows/publish.yml000066400000000000000000000011701476663714600240260ustar00rootroot00000000000000name: Latest Release on: release: types: [published] workflow_dispatch: jobs: publish-release: runs-on: ubuntu-latest permissions: id-token: write environment: release steps: - name: Check out repository uses: actions/checkout@v4 - name: Install uv uses: astral-sh/setup-uv@v5 - name: Set up Python run: uv python install 3.13 - name: Install dependencies run: uv sync --all-extras - name: Build package run: uv build - name: Publish package distributions to PyPI uses: pypa/gh-action-pypi-publish@release/v1 python-advanced-alchemy-1.0.1/.github/workflows/test.yml000066400000000000000000000040101476663714600233330ustar00rootroot00000000000000name: Test on: workflow_call: inputs: 
python-version: required: true type: string coverage: required: false type: boolean default: false os: required: false type: string default: "ubuntu-latest" timeout: required: false type: number default: 60 jobs: test: runs-on: ${{ inputs.os }} timeout-minutes: ${{ inputs.timeout }} defaults: run: shell: bash steps: - name: Check out repository uses: actions/checkout@v4 - name: Install Microsoft ODBC Drivers run: sudo ACCEPT_EULA=Y apt-get install msodbcsql18 -y - name: Free additional space run: | sudo docker rmi $(docker image ls -aq) >/dev/null 2>&1 || true sudo rm -rf \ /usr/share/dotnet /usr/local/lib/android /opt/ghc \ /usr/local/share/powershell /usr/share/swift /usr/local/.ghcup \ /usr/lib/jvm || true sudo apt-get autoremove -y \ && sudo apt-get clean -y \ && sudo rm -rf /root/.cache \ && sudo rm -rf /var/apt/lists/* \ && sudo rm -rf /var/cache/apt/* \ && sudo apt-get purge -y --auto-remove -o APT::AutoRemove::RecommendsImportant=false - name: Install uv uses: astral-sh/setup-uv@v5 - name: Set up Python run: uv python install ${{ inputs.python-version }} - name: Install dependencies run: uv sync --all-extras --dev - name: Set PYTHONPATH run: echo "PYTHONPATH=$PWD" >> $GITHUB_ENV - name: Test if: ${{ !inputs.coverage }} run: uv run pytest --dist "loadgroup" -m "" tests -n 2 - name: Test with coverage if: ${{ inputs.coverage }} run: uv run pytest tests --dist "loadgroup" -m "" --cov=advanced_alchemy --cov-report=xml -n 2 - uses: actions/upload-artifact@v4 if: ${{ inputs.coverage }} with: name: coverage-xml path: coverage.xml python-advanced-alchemy-1.0.1/.gitignore000066400000000000000000000062551476663714600202410ustar00rootroot00000000000000# Byte-compiled / optimized / DLL files __pycache__/ *.py[cod] *$py.class # C extensions *.so # Distribution / packaging .Python .python-version build/ develop-eggs/ dist/ downloads/ eggs/ .eggs/ lib/ lib64/ parts/ sdist/ var/ wheels/ share/python-wheels/ *.egg-info/ .installed.cfg *.egg MANIFEST tmp/ # PyInstaller # 
Usually these files are written by a python script from a template # before PyInstaller builds the exe, so as to inject date/other infos into it. *.manifest *.spec # Installer logs pip-log.txt pip-delete-this-directory.txt # Unit test / coverage reports htmlcov/ .tox/ .nox/ .coverage .coverage.* .cache nosetests.xml coverage.xml *.cover *.py,cover .hypothesis/ .pytest_cache/ cover/ test.sqlite # Translations *.mo *.pot # Django stuff: *.log local_settings.py db.sqlite3 db.sqlite3-journal # Flask stuff: instance/ .webassets-cache # Scrapy stuff: .scrapy # Sphinx documentation docs/_build/ docs-build/ # PyBuilder .pybuilder/ target/ # Jupyter Notebook .ipynb_checkpoints # IPython profile_default/ ipython_config.py # pyenv # For a library or package, you might want to ignore these files since the code is # intended to run in multiple environments; otherwise, check them in: # .python-version # pipenv # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. # However, in case of collaboration, if having platform-specific dependencies or dependencies # having no cross-platform support, pipenv may install dependencies that don't work, or not # install all needed dependencies. #Pipfile.lock # poetry # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. # This is especially recommended for binary packages to ensure reproducibility, and is more # commonly ignored for libraries. # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control #poetry.lock # pdm # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. #pdm.lock # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it # in version control. # https://pdm.fming.dev/#use-with-ide .pdm.toml .pdm-python .pdm-build/ # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow and github.com/pdm-project/pdm __pypackages__/ # Celery stuff celerybeat-schedule celerybeat.pid # SageMath parsed files *.sage.py # Environments .env .venv env/ venv/ ENV/ env.bak/ venv.bak/ # Spyder project settings .spyderproject .spyproject # Rope project settings .ropeproject # mkdocs documentation /site # mypy .mypy_cache/ .dmypy.json dmypy.json # Pyre type checker .pyre/ # pytype static type analyzer .pytype/ # Cython debug symbols cython_debug/ # PyCharm # JetBrains specific template is maintained in a separate JetBrains.gitignore that can # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore # and can be added to the global gitignore or merged into this file. For a more nuclear # option (not recommended) you can uncomment the following to ignore the entire idea folder. #.idea/ /.idea # vscode .vscode/ # generated changelog /docs/changelog.md .cursorrules .cursorignore .zed python-advanced-alchemy-1.0.1/.pre-commit-config.yaml000066400000000000000000000025471476663714600225320ustar00rootroot00000000000000default_language_version: python: "3" repos: - repo: https://github.com/compilerla/conventional-pre-commit rev: v4.0.0 hooks: - id: conventional-pre-commit stages: [commit-msg] - repo: https://github.com/pre-commit/pre-commit-hooks rev: v5.0.0 hooks: - id: check-ast - id: check-case-conflict - id: check-toml - id: debug-statements - id: end-of-file-fixer - id: mixed-line-ending - id: trailing-whitespace - repo: https://github.com/provinzkraut/unasyncd rev: "v0.8.1" hooks: - id: unasyncd additional_dependencies: ["ruff"] - repo: https://github.com/charliermarsh/ruff-pre-commit rev: "v0.11.0" hooks: # Run the linter. - id: ruff types_or: [ python, pyi ] args: [ --fix ] # Run the formatter. 
- id: ruff-format types_or: [ python, pyi ] - repo: https://github.com/codespell-project/codespell rev: v2.4.1 hooks: - id: codespell exclude: "uv.lock|examples/us_state_lookup.json" additional_dependencies: - tomli - repo: https://github.com/sphinx-contrib/sphinx-lint rev: "v1.0.0" hooks: - id: sphinx-lint - repo: local hooks: - id: pypi-readme name: pypi-readme language: python entry: python tools/pypi_readme.py types: [markdown] python-advanced-alchemy-1.0.1/.sourcery.yaml000066400000000000000000000011561476663714600210610ustar00rootroot00000000000000ignore: - .tox/ - .venv/ - dist/ - docs/_build/ - docs/_static/ - node_modules/ - vendor/ - venv/ rule_settings: enable: [default] disable: [dont-import-test-modules] rule_types: - refactoring - suggestion - comment python_version: "3.8" rules: [] metrics: quality_threshold: 25.0 github: ignore_labels: - sourcery-ignore - docs labels: - build-ignore request_review: origin: owner forked: author sourcery_branch: sourcery/{base_branch} clone_detection: min_lines: 3 min_duplicates: 2 identical_clones_only: false proxy: no_ssl_verify: false python-advanced-alchemy-1.0.1/CONTRIBUTING.rst000066400000000000000000000074351476663714600207130ustar00rootroot00000000000000Contribution guide ================== Setting up the environment -------------------------- 1. Run ``make install-uv`` to install `uv `_ if not already installed 1. Run ``make install`` to install all dependencies and pre-commit hooks Code contributions ------------------ Workflow ++++++++ 1. `Fork `_ the `Advanced Alchemy repository `_ 2. Clone your fork locally with git 3. `Set up the environment <#setting-up-the-environment>`_ 4. Make your changes 5. Run ``make lint`` to run linters and formatters. This step is optional and will be executed automatically by git before you make a commit, but you may want to run it manually in order to apply fixes automatically by git before you make a commit, but you may want to run it manually in order to apply fixes 6. 
Commit your changes to git 7. Push the changes to your fork 8. Open a `pull request `_. Give the pull request a descriptive title indicating what it changes. If it has a corresponding open issue, the issue number should be included in the title as well. For example a pull request that fixes issue ``bug: Increased stack size making it impossible to find needle #100`` could be titled ``fix(#100): Make needles easier to find by applying fire to haystack`` .. tip:: Pull requests and commits all need to follow the `Conventional Commit format `_ .. note:: To run the integration tests locally, you will need the `ODBC Driver for SQL Server `_, one option is using `unixODBC `_. Guidelines for writing code ---------------------------- - All code should be fully `typed `_. This is enforced via `mypy `_. - All code should be tested. This is enforced via `pytest `_. - All code should be properly formatted. This is enforced via `Ruff `_. Writing and running tests +++++++++++++++++++++++++ .. todo:: Write this section Project documentation --------------------- The documentation is located in the ``/docs`` directory and is `ReST `_ and `Sphinx `_. If you're unfamiliar with any of those, `ReStructuredText primer `_ and `Sphinx quickstart `_ are recommended reads. Running the docs locally ++++++++++++++++++++++++ To run or build the docs locally, you need to first install the required dependencies: ``make install`` Then you can serve the documentation with ``make docs-serve``, or build them with ``make docs``. Creating a new release ---------------------- 1. Increment the version in `pyproject.toml `_. .. note:: The version should follow `semantic versioning `_ and `PEP 440 `_. 2. `Draft a new release `_ on GitHub * Use ``vMAJOR.MINOR.PATCH`` (e.g. ``v1.2.3``) as both the tag and release title * Fill in the release description. You can use the "Generate release notes" function to get a draft for this 3. Commit your changes and push to ``main`` 4. Publish the release 5. 
Go to `Actions `_ and approve the release workflow 6. Check that the workflow runs successfully python-advanced-alchemy-1.0.1/LICENSE000066400000000000000000000020661476663714600172520ustar00rootroot00000000000000MIT License Copyright (c) 2024 Litestar Organization Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
python-advanced-alchemy-1.0.1/Makefile000066400000000000000000000215411476663714600177040ustar00rootroot00000000000000SHELL := /bin/bash # ============================================================================= # Configuration and Environment Variables # ============================================================================= .DEFAULT_GOAL:=help .ONESHELL: .EXPORT_ALL_VARIABLES: MAKEFLAGS += --no-print-directory # ----------------------------------------------------------------------------- # Display Formatting and Colors # ----------------------------------------------------------------------------- BLUE := $(shell printf "\033[1;34m") GREEN := $(shell printf "\033[1;32m") RED := $(shell printf "\033[1;31m") YELLOW := $(shell printf "\033[1;33m") NC := $(shell printf "\033[0m") INFO := $(shell printf "$(BLUE)โ„น$(NC)") OK := $(shell printf "$(GREEN)โœ“$(NC)") WARN := $(shell printf "$(YELLOW)โš $(NC)") ERROR := $(shell printf "$(RED)โœ–$(NC)") # ============================================================================= # Help and Documentation # ============================================================================= .PHONY: help help: ## Display this help text for Makefile @awk 'BEGIN {FS = ":.*##"; printf "\nUsage:\n make \033[36m\033[0m\n"} /^[a-zA-Z0-9_-]+:.*?##/ { printf " \033[36m%-15s\033[0m %s\n", $$1, $$2 } /^##@/ { printf "\n\033[1m%s\033[0m\n", substr($$0, 5) } ' $(MAKEFILE_LIST) # ============================================================================= # Installation and Environment Setup # ============================================================================= .PHONY: install-uv install-uv: ## Install latest version of uv @echo "${INFO} Installing uv..." 
@curl -LsSf https://astral.sh/uv/install.sh | sh >/dev/null 2>&1 @uv tool install nodeenv >/dev/null 2>&1 @echo "${OK} UV installed successfully" .PHONY: install install: destroy clean ## Install the project, dependencies, and pre-commit @echo "${INFO} Starting fresh installation..." @uv python pin 3.9 >/dev/null 2>&1 @uv venv >/dev/null 2>&1 @uv sync --all-extras --dev @echo "${OK} Installation complete! ๐ŸŽ‰" .PHONY: destroy destroy: ## Destroy the virtual environment @echo "${INFO} Destroying virtual environment... ๐Ÿ—‘๏ธ" @rm -rf .venv @echo "${OK} Virtual environment destroyed ๐Ÿ—‘๏ธ" # ============================================================================= # Dependency Management # ============================================================================= .PHONY: upgrade upgrade: ## Upgrade all dependencies to latest stable versions @echo "${INFO} Updating all dependencies... ๐Ÿ”„" @uv lock --upgrade @echo "${OK} Dependencies updated ๐Ÿ”„" @uv run pre-commit autoupdate @echo "${OK} Updated Pre-commit hooks ๐Ÿ”„" .PHONY: lock lock: ## Rebuild lockfiles from scratch @echo "${INFO} Rebuilding lockfiles... ๐Ÿ”„" @uv lock --upgrade >/dev/null 2>&1 @echo "${OK} Lockfiles updated" # ============================================================================= # Build and Release # ============================================================================= .PHONY: build build: ## Build the package @echo "${INFO} Building package... ๐Ÿ“ฆ" @uv build >/dev/null 2>&1 @echo "${OK} Package build complete" .PHONY: release release: ## Bump version and create release tag @echo "${INFO} Preparing for release... 
๐Ÿ“ฆ" @make docs @make clean @make build @uv run bump-my-version bump $(bump) @uv lock --upgrade-package advanced-alchemy @echo "${OK} Release complete ๐ŸŽ‰" # ============================================================================= # Cleaning and Maintenance # ============================================================================= .PHONY: clean clean: ## Cleanup temporary build artifacts @echo "${INFO} Cleaning working directory... ๐Ÿงน" @rm -rf .pytest_cache .ruff_cache .hypothesis build/ dist/ .eggs/ .coverage coverage.xml coverage.json htmlcov/ .pytest_cache tests/.pytest_cache tests/**/.pytest_cache .mypy_cache .unasyncd_cache/ .auto_pytabs_cache node_modules >/dev/null 2>&1 @find . -name '*.egg-info' -exec rm -rf {} + >/dev/null 2>&1 @find . -type f -name '*.egg' -exec rm -f {} + >/dev/null 2>&1 @find . -name '*.pyc' -exec rm -f {} + >/dev/null 2>&1 @find . -name '*.pyo' -exec rm -f {} + >/dev/null 2>&1 @find . -name '*~' -exec rm -f {} + >/dev/null 2>&1 @find . -name '__pycache__' -exec rm -rf {} + >/dev/null 2>&1 @find . -name '.ipynb_checkpoints' -exec rm -rf {} + >/dev/null 2>&1 @echo "${OK} Working directory cleaned" $(MAKE) docs-clean # ============================================================================= # Testing and Quality Checks # ============================================================================= .PHONY: test test: ## Run the tests @echo "${INFO} Running test cases... ๐Ÿงช" @uv run pytest -n 2 --quiet @echo "${OK} Tests passed โœจ" .PHONY: coverage coverage: ## Run tests with coverage report @echo "${INFO} Running tests with coverage... 
๐Ÿ“Š" @uv run pytest --cov -n auto --quiet @uv run coverage html >/dev/null 2>&1 @uv run coverage xml >/dev/null 2>&1 @echo "${OK} Coverage report generated โœจ" # ----------------------------------------------------------------------------- # Type Checking # ----------------------------------------------------------------------------- .PHONY: mypy mypy: ## Run mypy @echo "${INFO} Running mypy... ๐Ÿ”" @uv run dmypy run @echo "${OK} Mypy checks passed โœจ" .PHONY: mypy-nocache mypy-nocache: ## Run Mypy without cache @echo "${INFO} Running mypy without cache... ๐Ÿ”" @uv run mypy @echo "${OK} Mypy checks passed โœจ" .PHONY: pyright pyright: ## Run pyright @echo "${INFO} Running pyright... ๐Ÿ”" @uv run pyright @echo "${OK} Pyright checks passed โœจ" .PHONY: type-check type-check: mypy pyright ## Run all type checking # ----------------------------------------------------------------------------- # Linting and Formatting # ----------------------------------------------------------------------------- .PHONY: pre-commit pre-commit: ## Run pre-commit hooks @echo "${INFO} Running pre-commit checks... ๐Ÿ”Ž" @NODE_OPTIONS="--no-deprecation --disable-warning=ExperimentalWarning" uv run pre-commit run --color=always --all-files @echo "${OK} Pre-commit checks passed โœจ" .PHONY: slotscheck slotscheck: ## Run slotscheck @echo "${INFO} Running slots check... ๐Ÿ”" @uv run slotscheck @echo "${OK} Slots check passed โœจ" .PHONY: fix fix: ## Run code formatters @echo "${INFO} Running code formatters... 
๐Ÿ”ง" @uv run ruff check --fix --unsafe-fixes @echo "${OK} Code formatting complete โœจ" .PHONY: lint lint: pre-commit type-check slotscheck ## Run all linting checks .PHONY: check-all check-all: lint test coverage ## Run all checks (lint, test, coverage) # ============================================================================= # Documentation # ============================================================================= .PHONY: docs-clean docs-clean: ## Clean documentation build @echo "${INFO} Cleaning documentation build assets... ๐Ÿงน" @rm -rf docs/_build >/dev/null 2>&1 @echo "${OK} Documentation assets cleaned" .PHONY: docs-serve docs-serve: ## Serve documentation locally @echo "${INFO} Starting documentation server... ๐Ÿ“š" @uv run sphinx-autobuild docs docs/_build/ -j auto --watch advanced_alchemy --watch docs --watch tests --watch CONTRIBUTING.rst --open-browser .PHONY: docs docs: docs-clean ## Build documentation @echo "${INFO} Building documentation... ๐Ÿ“" @uv run sphinx-build -M html docs docs/_build/ -E -a -j auto -W --keep-going @echo "${OK} Documentation built successfully" .PHONY: docs-linkcheck docs-linkcheck: ## Check documentation links @echo "${INFO} Checking documentation links... ๐Ÿ”—" @uv run sphinx-build -b linkcheck ./docs ./docs/_build -D linkcheck_ignore='http://.*','https://.*' @echo "${OK} Link check complete" .PHONY: docs-linkcheck-full docs-linkcheck-full: ## Run full documentation link check @echo "${INFO} Running full link check... ๐Ÿ”—" @uv run sphinx-build -b linkcheck ./docs ./docs/_build -D linkcheck_anchors=0 @echo "${OK} Full link check complete" python-advanced-alchemy-1.0.1/README.md000066400000000000000000000452401476663714600175250ustar00rootroot00000000000000

Litestar Logo - Light Litestar Logo - Dark

| Project | | Status | |-----------|:----|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | CI/CD | | [![Latest Release](https://github.com/litestar-org/advanced-alchemy/actions/workflows/publish.yml/badge.svg)](https://github.com/litestar-org/advanced-alchemy/actions/workflows/publish.yml) [![ci](https://github.com/litestar-org/advanced-alchemy/actions/workflows/ci.yml/badge.svg)](https://github.com/litestar-org/advanced-alchemy/actions/workflows/ci.yml) [![Documentation Building](https://github.com/litestar-org/advanced-alchemy/actions/workflows/docs.yml/badge.svg?branch=main)](https://github.com/litestar-org/advanced-alchemy/actions/workflows/docs.yml) | | Quality | | [![Coverage](https://codecov.io/github/litestar-org/advanced-alchemy/graph/badge.svg?token=vKez4Pycrc)](https://codecov.io/github/litestar-org/advanced-alchemy) [![Quality Gate 
Status](https://sonarcloud.io/api/project_badges/measure?project=litestar-org_advanced-alchemy&metric=alert_status)](https://sonarcloud.io/summary/new_code?id=litestar-org_advanced-alchemy) [![Maintainability Rating](https://sonarcloud.io/api/project_badges/measure?project=litestar-org_advanced-alchemy&metric=sqale_rating)](https://sonarcloud.io/summary/new_code?id=litestar-org_advanced-alchemy) [![Reliability Rating](https://sonarcloud.io/api/project_badges/measure?project=litestar-org_advanced-alchemy&metric=reliability_rating)](https://sonarcloud.io/summary/new_code?id=litestar-org_advanced-alchemy) [![Security Rating](https://sonarcloud.io/api/project_badges/measure?project=litestar-org_advanced-alchemy&metric=security_rating)](https://sonarcloud.io/summary/new_code?id=litestar-org_advanced-alchemy) | | Package | | [![PyPI - Version](https://img.shields.io/pypi/v/advanced-alchemy?labelColor=202235&color=edb641&logo=python&logoColor=edb641)](https://badge.fury.io/py/advanced-alchemy) ![PyPI - Support Python Versions](https://img.shields.io/pypi/pyversions/advanced-alchemy?labelColor=202235&color=edb641&logo=python&logoColor=edb641) ![Advanced Alchemy PyPI - Downloads](https://img.shields.io/pypi/dm/advanced-alchemy?logo=python&label=package%20downloads&labelColor=202235&color=edb641&logoColor=edb641) | | Community | | [![Discord](https://img.shields.io/discord/919193495116337154?labelColor=202235&color=edb641&label=chat%20on%20discord&logo=discord&logoColor=edb641)](https://discord.gg/litestar) [![Matrix](https://img.shields.io/badge/chat%20on%20Matrix-bridged-202235?labelColor=202235&color=edb641&logo=matrix&logoColor=edb641)](https://matrix.to/#/#litestar:matrix.org) | | Meta | | [![Litestar Project](https://img.shields.io/badge/Litestar%20Org-%E2%AD%90%20Advanced%20Alchemy-202235.svg?logo=python&labelColor=202235&color=edb641&logoColor=edb641)](https://github.com/litestar-org/advanced-alchemy) [![types - 
Mypy](https://img.shields.io/badge/types-Mypy-202235.svg?logo=python&labelColor=202235&color=edb641&logoColor=edb641)](https://github.com/python/mypy) [![License - MIT](https://img.shields.io/badge/license-MIT-202235.svg?logo=python&labelColor=202235&color=edb641&logoColor=edb641)](https://spdx.org/licenses/) [![linting - Ruff](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/charliermarsh/ruff/main/assets/badge/v2.json&labelColor=202235)](https://github.com/astral-sh/ruff) |
# Advanced Alchemy Check out the [project documentation][project-docs] ๐Ÿ“š for more information. ## About A carefully crafted, thoroughly tested, optimized companion library for SQLAlchemy, offering: - Sync and async repositories, featuring common CRUD and highly optimized bulk operations - Integration with major web frameworks including Litestar, Starlette, FastAPI, Sanic - Custom-built alembic configuration and CLI with optional framework integration - Utility base classes with audit columns, primary keys and utility functions - Optimized JSON types including a custom JSON type for Oracle - Integrated support for UUID6 and UUID7 using [`uuid-utils`](https://github.com/aminalaee/uuid-utils) (install with the `uuid` extra) - Integrated support for Nano ID using [`fastnanoid`](https://github.com/oliverlambson/fastnanoid) (install with the `nanoid` extra) - Pre-configured base classes with audit columns UUID or Big Integer primary keys and a [sentinel column](https://docs.sqlalchemy.org/en/20/core/connections.html#configuring-sentinel-columns). - Synchronous and asynchronous repositories featuring: - Common CRUD operations for SQLAlchemy models - Bulk inserts, updates, upserts, and deletes with dialect-specific enhancements - Integrated counts, pagination, sorting, filtering with `LIKE`, `IN`, and dates before and/or after. 
- Tested support for multiple database backends including: - SQLite via [aiosqlite](https://aiosqlite.omnilib.dev/en/stable/) or [sqlite](https://docs.python.org/3/library/sqlite3.html) - Postgres via [asyncpg](https://magicstack.github.io/asyncpg/current/) or [psycopg3 (async or sync)](https://www.psycopg.org/psycopg3/) - MySQL via [asyncmy](https://github.com/long2ice/asyncmy) - Oracle via [oracledb (async or sync)](https://oracle.github.io/python-oracledb/) (tested on 18c and 23c) - Google Spanner via [spanner-sqlalchemy](https://github.com/googleapis/python-spanner-sqlalchemy/) - DuckDB via [duckdb_engine](https://github.com/Mause/duckdb_engine) - Microsoft SQL Server via [pyodbc](https://github.com/mkleehammer/pyodbc) or [aioodbc](https://github.com/aio-libs/aioodbc) - CockroachDB via [sqlalchemy-cockroachdb (async or sync)](https://github.com/cockroachdb/sqlalchemy-cockroachdb) - ...and much more ## Usage ### Installation ```shell pip install advanced-alchemy ``` > [!IMPORTANT]\ > Check out [the installation guide][install-guide] in our official documentation! ### Repositories Advanced Alchemy includes a set of asynchronous and synchronous repository classes for easy CRUD operations on your SQLAlchemy models.
Click to expand the example ```python from advanced_alchemy import base, repository, config from sqlalchemy import create_engine from sqlalchemy.orm import Mapped, sessionmaker class User(base.UUIDBase): # you can optionally override the generated table name by manually setting it. __tablename__ = "user_account" # type: ignore[assignment] email: Mapped[str] name: Mapped[str] class UserRepository(repository.SQLAlchemySyncRepository[User]): """User repository.""" model_type = User db = config.SQLAlchemySyncConfig(connection_string="duckdb:///:memory:", session_config=config.SyncSessionConfig(expire_on_commit=False)) # Initializes the database. with db.get_engine().begin() as conn: User.metadata.create_all(conn) with db.get_session() as db_session: repo = UserRepository(session=db_session) # 1) Create multiple users with `add_many` bulk_users = [ {"email": 'cody@litestar.dev', 'name': 'Cody'}, {"email": 'janek@litestar.dev', 'name': 'Janek'}, {"email": 'peter@litestar.dev', 'name': 'Peter'}, {"email": 'jacob@litestar.dev', 'name': 'Jacob'} ] objs = repo.add_many([User(**raw_user) for raw_user in bulk_users]) db_session.commit() print(f"Created {len(objs)} new objects.") # 2) Select paginated data and total row count. Pass additional filters as kwargs created_objs, total_objs = repo.list_and_count(LimitOffset(limit=10, offset=0), name="Cody") print(f"Selected {len(created_objs)} records out of a total of {total_objs}.") # 3) Let's remove the batch of records selected. deleted_objs = repo.delete_many([new_obj.id for new_obj in created_objs]) print(f"Removed {len(deleted_objs)} records out of a total of {total_objs}.") # 4) Let's count the remaining rows remaining_count = repo.count() print(f"Found {remaining_count} remaining records after delete.") ```
For a full standalone example, see the sample [here][standalone-example] ### Services Advanced Alchemy includes an additional service class to make working with a repository easier. This class is designed to accept data as a dictionary or SQLAlchemy model, and it will handle the type conversions for you.
Here's the same example from above but using a service to create the data: ```python from advanced_alchemy import base, repository, filters, service, config from sqlalchemy import create_engine from sqlalchemy.orm import Mapped, sessionmaker class User(base.UUIDBase): # you can optionally override the generated table name by manually setting it. __tablename__ = "user_account" # type: ignore[assignment] email: Mapped[str] name: Mapped[str] class UserService(service.SQLAlchemySyncRepositoryService[User]): """User repository.""" class Repo(repository.SQLAlchemySyncRepository[User]): """User repository.""" model_type = User repository_type = Repo db = config.SQLAlchemySyncConfig(connection_string="duckdb:///:memory:", session_config=config.SyncSessionConfig(expire_on_commit=False)) # Initializes the database. with db.get_engine().begin() as conn: User.metadata.create_all(conn) with db.get_session() as db_session: service = UserService(session=db_session) # 1) Create multiple users with `add_many` objs = service.create_many([ {"email": 'cody@litestar.dev', 'name': 'Cody'}, {"email": 'janek@litestar.dev', 'name': 'Janek'}, {"email": 'peter@litestar.dev', 'name': 'Peter'}, {"email": 'jacob@litestar.dev', 'name': 'Jacob'} ]) print(objs) print(f"Created {len(objs)} new objects.") # 2) Select paginated data and total row count. Pass additional filters as kwargs created_objs, total_objs = service.list_and_count(LimitOffset(limit=10, offset=0), name="Cody") print(f"Selected {len(created_objs)} records out of a total of {total_objs}.") # 3) Let's remove the batch of records selected. deleted_objs = service.delete_many([new_obj.id for new_obj in created_objs]) print(f"Removed {len(deleted_objs)} records out of a total of {total_objs}.") # 4) Let's count the remaining rows remaining_count = service.count() print(f"Found {remaining_count} remaining records after delete.") ```
### Web Frameworks Advanced Alchemy works with nearly all Python web frameworks. Several helpers for popular libraries are included, and additional PRs to support others are welcomed. #### Litestar Advanced Alchemy is the official SQLAlchemy integration for Litestar. In addition to installing with `pip install advanced-alchemy`, it can also be installed as a Litestar extra with `pip install litestar[sqlalchemy]`.
Litestar Example ```python from litestar import Litestar from litestar.plugins.sqlalchemy import SQLAlchemyPlugin, SQLAlchemyAsyncConfig # alternately... # from advanced_alchemy.extensions.litestar import SQLAlchemyAsyncConfig, SQLAlchemyPlugin alchemy = SQLAlchemyPlugin( config=SQLAlchemyAsyncConfig(connection_string="sqlite+aiosqlite:///test.sqlite"), ) app = Litestar(plugins=[alchemy]) ```
For a full Litestar example, check [here][litestar-example] #### Flask
Flask Example ```python from flask import Flask from advanced_alchemy.extensions.flask import AdvancedAlchemy, SQLAlchemySyncConfig app = Flask(__name__) alchemy = AdvancedAlchemy( config=SQLAlchemySyncConfig(connection_string="duckdb:///:memory:"), app=app, ) ```
For a full Flask example, see [here][flask-example] #### FastAPI
FastAPI Example ```python from advanced_alchemy.extensions.fastapi import AdvancedAlchemy, SQLAlchemyAsyncConfig from fastapi import FastAPI app = FastAPI() alchemy = AdvancedAlchemy( config=SQLAlchemyAsyncConfig(connection_string="sqlite+aiosqlite:///test.sqlite"), app=app, ) ```
For a full FastAPI example with optional CLI integration, see [here][fastapi-example] #### Starlette
Pre-built Example Apps ```python from advanced_alchemy.extensions.starlette import AdvancedAlchemy, SQLAlchemyAsyncConfig from starlette.applications import Starlette app = Starlette() alchemy = AdvancedAlchemy( config=SQLAlchemyAsyncConfig(connection_string="sqlite+aiosqlite:///test.sqlite"), app=app, ) ```
#### Sanic
Pre-built Example Apps ```python from sanic import Sanic from sanic_ext import Extend from advanced_alchemy.extensions.sanic import AdvancedAlchemy, SQLAlchemyAsyncConfig app = Sanic("AlchemySanicApp") alchemy = AdvancedAlchemy( sqlalchemy_config=SQLAlchemyAsyncConfig(connection_string="sqlite+aiosqlite:///test.sqlite"), ) Extend.register(alchemy) ```
## Contributing All [Litestar Organization][litestar-org] projects will always be a community-centered, available for contributions of any size. Before contributing, please review the [contribution guide][contributing]. If you have any questions, reach out to us on [Discord][discord], our org-wide [GitHub discussions][litestar-discussions] page, or the [project-specific GitHub discussions page][project-discussions].

Litestar Logo - Light
An official Litestar Organization Project

[litestar-org]: https://github.com/litestar-org [contributing]: https://docs.advanced-alchemy.litestar.dev/latest/contribution-guide.html [discord]: https://discord.gg/litestar [litestar-discussions]: https://github.com/orgs/litestar-org/discussions [project-discussions]: https://github.com/litestar-org/advanced-alchemy/discussions [project-docs]: https://docs.advanced-alchemy.litestar.dev [install-guide]: https://docs.advanced-alchemy.litestar.dev/latest/#installation [fastapi-example]: https://github.com/litestar-org/advanced-alchemy/blob/main/examples/fastapi_service.py [flask-example]: https://github.com/litestar-org/advanced-alchemy/blob/main/examples/flask/flask_services.py [litestar-example]: https://github.com/litestar-org/advanced-alchemy/blob/main/examples/litestar.py [standalone-example]: https://github.com/litestar-org/advanced-alchemy/blob/main/examples/standalone.py python-advanced-alchemy-1.0.1/advanced_alchemy/000077500000000000000000000000001476663714600215105ustar00rootroot00000000000000python-advanced-alchemy-1.0.1/advanced_alchemy/__init__.py000066400000000000000000000005701476663714600236230ustar00rootroot00000000000000from advanced_alchemy import ( alembic, base, cli, config, exceptions, extensions, filters, mixins, operations, service, types, utils, ) __all__ = ( "alembic", "base", "cli", "config", "exceptions", "extensions", "filters", "mixins", "operations", "service", "types", "utils", ) python-advanced-alchemy-1.0.1/advanced_alchemy/__main__.py000066400000000000000000000003661476663714600236070ustar00rootroot00000000000000from advanced_alchemy.cli import add_migration_commands as build_cli_interface def run_cli() -> None: # pragma: no cover """Advanced Alchemy CLI""" build_cli_interface()() if __name__ == "__main__": # pragma: no cover run_cli() python-advanced-alchemy-1.0.1/advanced_alchemy/__metadata__.py000066400000000000000000000010671476663714600244420ustar00rootroot00000000000000"""Metadata for the Project.""" from 
importlib.metadata import PackageNotFoundError, metadata, version # pragma: no cover __all__ = ("__project__", "__version__") # pragma: no cover try: # pragma: no cover __version__ = version("advanced_alchemy") """Version of the project.""" __project__ = metadata("advanced_alchemy")["Name"] """Name of the project.""" except PackageNotFoundError: # pragma: no cover __version__ = "0.0.1" __project__ = "Advanced Alchemy" finally: # pragma: no cover del version, PackageNotFoundError, metadata python-advanced-alchemy-1.0.1/advanced_alchemy/_listeners.py000066400000000000000000000013141476663714600242300ustar00rootroot00000000000000"""Application ORM configuration.""" import datetime from typing import TYPE_CHECKING, Any if TYPE_CHECKING: from sqlalchemy.orm import Session def touch_updated_timestamp(session: "Session", *_: Any) -> None: """Set timestamp on update. Called from SQLAlchemy's :meth:`before_flush ` event to bump the ``updated`` timestamp on modified instances. Args: session: The sync :class:`Session ` instance that underlies the async session. 
""" for instance in session.dirty: if hasattr(instance, "updated_at"): instance.updated_at = datetime.datetime.now(datetime.timezone.utc) python-advanced-alchemy-1.0.1/advanced_alchemy/_serialization.py000066400000000000000000000050141476663714600250760ustar00rootroot00000000000000import datetime import enum from typing import Any from typing_extensions import runtime_checkable try: from pydantic import BaseModel # type: ignore # noqa: PGH003 PYDANTIC_INSTALLED = True except ImportError: from typing import ClassVar, Protocol @runtime_checkable class BaseModel(Protocol): # type: ignore[no-redef] """Placeholder Implementation""" model_fields: ClassVar[dict[str, Any]] def model_dump_json(self, *args: Any, **kwargs: Any) -> str: """Placeholder""" return "" PYDANTIC_INSTALLED = False # pyright: ignore[reportConstantRedefinition] def _type_to_string(value: Any) -> str: # pragma: no cover if isinstance(value, datetime.datetime): return convert_datetime_to_gmt_iso(value) if isinstance(value, datetime.date): return convert_date_to_iso(value) if isinstance(value, enum.Enum): return str(value.value) if PYDANTIC_INSTALLED and isinstance(value, BaseModel): return value.model_dump_json() try: val = str(value) except Exception as exc: raise TypeError from exc return val try: from msgspec.json import Decoder, Encoder encoder, decoder = Encoder(enc_hook=_type_to_string), Decoder() decode_json = decoder.decode def encode_json(data: Any) -> str: # pragma: no cover return encoder.encode(data).decode("utf-8") except ImportError: try: from orjson import OPT_NAIVE_UTC, OPT_SERIALIZE_NUMPY, OPT_SERIALIZE_UUID from orjson import dumps as _encode_json from orjson import loads as decode_json # type: ignore[no-redef,assignment] def encode_json(data: Any) -> str: # pragma: no cover return _encode_json( data, default=_type_to_string, option=OPT_SERIALIZE_NUMPY | OPT_NAIVE_UTC | OPT_SERIALIZE_UUID ).decode("utf-8") # type: ignore[no-any-return] except ImportError: from json import dumps as 
encode_json # type: ignore[assignment] # noqa: F401 from json import loads as decode_json # type: ignore[assignment] # noqa: F401 def convert_datetime_to_gmt_iso(dt: datetime.datetime) -> str: # pragma: no cover """Handle datetime serialization for nested timestamps.""" if not dt.tzinfo: dt = dt.replace(tzinfo=datetime.timezone.utc) return dt.isoformat().replace("+00:00", "Z") def convert_date_to_iso(dt: datetime.date) -> str: # pragma: no cover """Handle datetime serialization for nested timestamps.""" return dt.isoformat() python-advanced-alchemy-1.0.1/advanced_alchemy/alembic/000077500000000000000000000000001476663714600231045ustar00rootroot00000000000000python-advanced-alchemy-1.0.1/advanced_alchemy/alembic/__init__.py000066400000000000000000000000001476663714600252030ustar00rootroot00000000000000python-advanced-alchemy-1.0.1/advanced_alchemy/alembic/commands.py000066400000000000000000000323231476663714600252620ustar00rootroot00000000000000import sys from typing import TYPE_CHECKING, Any, Optional, TextIO, Union from advanced_alchemy.config.asyncio import SQLAlchemyAsyncConfig from alembic import command as migration_command from alembic.config import Config as _AlembicCommandConfig from alembic.ddl.impl import DefaultImpl if TYPE_CHECKING: import os from argparse import Namespace from collections.abc import Mapping from pathlib import Path from sqlalchemy import Engine from sqlalchemy.ext.asyncio import AsyncEngine from advanced_alchemy.config.sync import SQLAlchemySyncConfig from alembic.runtime.environment import ProcessRevisionDirectiveFn from alembic.script.base import Script class AlembicSpannerImpl(DefaultImpl): """Alembic implementation for Spanner.""" __dialect__ = "spanner+spanner" class AlembicDuckDBImpl(DefaultImpl): """Alembic implementation for DuckDB.""" __dialect__ = "duckdb" class AlembicCommandConfig(_AlembicCommandConfig): def __init__( self, engine: "Union[Engine, AsyncEngine]", version_table_name: str, bind_key: "Optional[str]" = None, 
file_: "Union[str, os.PathLike[str], None]" = None, ini_section: str = "alembic", output_buffer: "Optional[TextIO]" = None, stdout: "TextIO" = sys.stdout, cmd_opts: "Optional[Namespace]" = None, config_args: "Optional[Mapping[str, Any]]" = None, attributes: "Optional[dict[str, Any]]" = None, template_directory: "Optional[Path]" = None, version_table_schema: "Optional[str]" = None, render_as_batch: bool = True, compare_type: bool = False, user_module_prefix: "Optional[str]" = "sa.", ) -> None: """Initialize the AlembicCommandConfig. Args: engine (sqlalchemy.engine.Engine | sqlalchemy.ext.asyncio.AsyncEngine): The SQLAlchemy engine instance. version_table_name (str): The name of the version table. bind_key (str | None): The bind key for the metadata. file_ (str | os.PathLike[str] | None): The file path for the alembic configuration. ini_section (str): The ini section name. output_buffer (typing.TextIO | None): The output buffer for alembic commands. stdout (typing.TextIO): The standard output stream. cmd_opts (argparse.Namespace | None): Command line options. config_args (typing.Mapping[str, typing.Any] | None): Additional configuration arguments. attributes (dict[str, typing.Any] | None): Additional attributes for the configuration. template_directory (pathlib.Path | None): The directory for alembic templates. version_table_schema (str | None): The schema for the version table. render_as_batch (bool): Whether to render migrations as batch. compare_type (bool): Whether to compare types during migrations. user_module_prefix (str | None): The prefix for user modules. 
""" self.template_directory = template_directory self.bind_key = bind_key self.version_table_name = version_table_name self.version_table_pk = engine.dialect.name != "spanner+spanner" self.version_table_schema = version_table_schema self.render_as_batch = render_as_batch self.user_module_prefix = user_module_prefix self.compare_type = compare_type self.engine = engine self.db_url = engine.url.render_as_string(hide_password=False) if config_args is None: config_args = {} super().__init__(file_, ini_section, output_buffer, stdout, cmd_opts, config_args, attributes) def get_template_directory(self) -> str: """Return the directory where Alembic setup templates are found. This method is used by the alembic ``init`` and ``list_templates`` commands. """ if self.template_directory is not None: return str(self.template_directory) return super().get_template_directory() class AlembicCommands: def __init__(self, sqlalchemy_config: "Union[SQLAlchemyAsyncConfig, SQLAlchemySyncConfig]") -> None: """Initialize the AlembicCommands. Args: sqlalchemy_config (SQLAlchemyAsyncConfig | SQLAlchemySyncConfig): The SQLAlchemy configuration. """ self.sqlalchemy_config = sqlalchemy_config self.config = self._get_alembic_command_config() def upgrade( self, revision: str = "head", sql: bool = False, tag: "Optional[str]" = None, ) -> None: """Upgrade the database to a specified revision. Args: revision (str): The target revision to upgrade to. sql (bool): If True, generate SQL script instead of applying changes. tag (str | None): An optional tag to apply to the migration. """ return migration_command.upgrade(config=self.config, revision=revision, tag=tag, sql=sql) def downgrade( self, revision: str = "head", sql: bool = False, tag: "Optional[str]" = None, ) -> None: """Downgrade the database to a specified revision. Args: revision (str): The target revision to downgrade to. sql (bool): If True, generate SQL script instead of applying changes. 
tag (str | None): An optional tag to apply to the migration. """ return migration_command.downgrade(config=self.config, revision=revision, tag=tag, sql=sql) def check(self) -> None: """Check for pending upgrade operations. This method checks if there are any pending upgrade operations that need to be applied to the database. """ return migration_command.check(config=self.config) def current(self, verbose: bool = False) -> None: """Display the current revision of the database. Args: verbose (bool): If True, display detailed information. """ return migration_command.current(self.config, verbose=verbose) def edit(self, revision: str) -> None: """Edit the revision script using the system editor. Args: revision (str): The revision identifier to edit. """ return migration_command.edit(config=self.config, rev=revision) def ensure_version(self, sql: bool = False) -> None: """Ensure the alembic version table exists. Args: sql (bool): If True, generate SQL script instead of applying changes. """ return migration_command.ensure_version(config=self.config, sql=sql) def heads(self, verbose: bool = False, resolve_dependencies: bool = False) -> None: """Show current available heads in the script directory. Args: verbose (bool): If True, display detailed information. resolve_dependencies (bool): If True, resolve dependencies between heads. """ return migration_command.heads(config=self.config, verbose=verbose, resolve_dependencies=resolve_dependencies) def history( self, rev_range: "Optional[str]" = None, verbose: bool = False, indicate_current: bool = False, ) -> None: """List changeset scripts in chronological order. Args: rev_range (str | None): The revision range to display. verbose (bool): If True, display detailed information. indicate_current (bool): If True, indicate the current revision. 
""" return migration_command.history( config=self.config, rev_range=rev_range, verbose=verbose, indicate_current=indicate_current, ) def merge( self, revisions: str, message: "Optional[str]" = None, branch_label: "Optional[str]" = None, rev_id: "Optional[str]" = None, ) -> "Union[Script, None]": """Merge two revisions together. Args: revisions (str): The revisions to merge. message (str | None): The commit message for the merge. branch_label (str | None): The branch label for the merge. rev_id (str | None): The revision ID for the merge. Returns: Script | None: The resulting script from the merge. """ return migration_command.merge( config=self.config, revisions=revisions, message=message, branch_label=branch_label, rev_id=rev_id, ) def revision( self, message: "Optional[str]" = None, autogenerate: bool = False, sql: bool = False, head: str = "head", splice: bool = False, branch_label: "Optional[str]" = None, version_path: "Optional[str]" = None, rev_id: "Optional[str]" = None, depends_on: "Optional[str]" = None, process_revision_directives: "Optional[ProcessRevisionDirectiveFn]" = None, ) -> "Union[Script, list[Optional[Script]], None]": """Create a new revision file. Args: message (str | None): The commit message for the revision. autogenerate (bool): If True, autogenerate the revision script. sql (bool): If True, generate SQL script instead of applying changes. head (str): The head revision to base the new revision on. splice (bool): If True, create a splice revision. branch_label (str | None): The branch label for the revision. version_path (str | None): The path for the version file. rev_id (str | None): The revision ID for the new revision. depends_on (str | None): The revisions this revision depends on. process_revision_directives (ProcessRevisionDirectiveFn | None): A function to process revision directives. Returns: Script | List[Script | None] | None: The resulting script(s) from the revision. 
""" return migration_command.revision( config=self.config, message=message, autogenerate=autogenerate, sql=sql, head=head, splice=splice, branch_label=branch_label, version_path=version_path, rev_id=rev_id, depends_on=depends_on, process_revision_directives=process_revision_directives, ) def show( self, rev: Any, ) -> None: """Show the revision(s) denoted by the given symbol. Args: rev (Any): The revision symbol to display. """ return migration_command.show(config=self.config, rev=rev) def init( self, directory: str, package: bool = False, multidb: bool = False, ) -> None: """Initialize a new scripts directory. Args: directory (str): The directory to initialize. package (bool): If True, create a package. multidb (bool): If True, initialize for multiple databases. """ template = "sync" if isinstance(self.sqlalchemy_config, SQLAlchemyAsyncConfig): template = "asyncio" if multidb: template = f"{template}-multidb" msg = "Multi database Alembic configurations are not currently supported." raise NotImplementedError(msg) return migration_command.init( config=self.config, directory=directory, template=template, package=package, ) def list_templates(self) -> None: """List available templates. This method lists all available templates for alembic initialization. """ return migration_command.list_templates(config=self.config) def stamp( self, revision: str, sql: bool = False, tag: "Optional[str]" = None, purge: bool = False, ) -> None: """Stamp the revision table with the given revision. Args: revision (str): The revision to stamp. sql (bool): If True, generate SQL script instead of applying changes. tag (str | None): An optional tag to apply to the migration. purge (bool): If True, purge the revision history. """ return migration_command.stamp(config=self.config, revision=revision, sql=sql, tag=tag, purge=purge) def _get_alembic_command_config(self) -> "AlembicCommandConfig": """Get the Alembic command configuration. 
Returns: AlembicCommandConfig: The configuration for Alembic commands. """ kwargs: dict[str, Any] = {} if self.sqlalchemy_config.alembic_config.script_config: kwargs["file_"] = self.sqlalchemy_config.alembic_config.script_config if self.sqlalchemy_config.alembic_config.template_path: kwargs["template_directory"] = self.sqlalchemy_config.alembic_config.template_path kwargs.update( { "engine": self.sqlalchemy_config.get_engine(), "version_table_name": self.sqlalchemy_config.alembic_config.version_table_name, }, ) self.config = AlembicCommandConfig(**kwargs) self.config.set_main_option("script_location", self.sqlalchemy_config.alembic_config.script_location) return self.config python-advanced-alchemy-1.0.1/advanced_alchemy/alembic/templates/000077500000000000000000000000001476663714600251025ustar00rootroot00000000000000python-advanced-alchemy-1.0.1/advanced_alchemy/alembic/templates/__init__.py000066400000000000000000000000001476663714600272010ustar00rootroot00000000000000python-advanced-alchemy-1.0.1/advanced_alchemy/alembic/templates/asyncio/000077500000000000000000000000001476663714600265475ustar00rootroot00000000000000python-advanced-alchemy-1.0.1/advanced_alchemy/alembic/templates/asyncio/__init__.py000066400000000000000000000000001476663714600306460ustar00rootroot00000000000000python-advanced-alchemy-1.0.1/advanced_alchemy/alembic/templates/asyncio/alembic.ini.mako000066400000000000000000000050011476663714600315660ustar00rootroot00000000000000# Advanced Alchemy Alembic Asyncio Config [alembic] prepend_sys_path = src:. # path to migration scripts script_location = migrations # template used to generate migration files file_template = %%(year)d-%%(month).2d-%%(day).2d_%%(slug)s_%%(rev)s # This is not required to be set when running through `advanced_alchemy` # sqlalchemy.url = driver://user:pass@localhost/dbname # timezone to use when rendering the date # within the migration file as well as the filename. 
# string value is passed to dateutil.tz.gettz() # leave blank for localtime # timezone = UTC # max length of characters to apply to the # "slug" field truncate_slug_length = 40 # set to 'true' to run the environment during # the 'revision' command, regardless of autogenerate # revision_environment = false # set to 'true' to allow .pyc and .pyo files without # a source .py file to be detected as revisions in the # versions/ directory # sourceless = false # version location specification; this defaults # to alembic/versions. When using multiple version # directories, initial revisions must be specified with --version-path # version_locations = %(here)s/bar %(here)s/bat alembic/versions # version path separator; As mentioned above, this is the character used to split # version_locations. The default within new alembic.ini files is "os", which uses os.pathsep. # If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas. # Valid values for version_path_separator are: # # version_path_separator = : # version_path_separator = ; # version_path_separator = space version_path_separator = os # Use os.pathsep. Default configuration used for new projects. # set to 'true' to search source files recursively # in each "version_locations" directory # new in Alembic version 1.10 # recursive_version_locations = false # the output encoding used when revision files # are written from script.py.mako output_encoding = utf-8 # [post_write_hooks] # This section defines scripts or Python functions that are run # on newly generated revision scripts. 
See the documentation for further # detail and examples # format using "black" - use the console_scripts runner, # against the "black" entrypoint # hooks = black # black.type = console_scripts # black.entrypoint = black # black.options = -l 79 REVISION_SCRIPT_FILENAME # lint with attempts to fix using "ruff" - use the exec runner, execute a binary # hooks = ruff # ruff.type = exec # ruff.executable = %(here)s/.venv/bin/ruff # ruff.options = --fix REVISION_SCRIPT_FILENAME python-advanced-alchemy-1.0.1/advanced_alchemy/alembic/templates/asyncio/env.py000066400000000000000000000107521476663714600277160ustar00rootroot00000000000000import asyncio from typing import TYPE_CHECKING, cast from sqlalchemy import Column, pool from sqlalchemy.ext.asyncio import AsyncEngine, async_engine_from_config from advanced_alchemy.base import metadata_registry from alembic import context from alembic.autogenerate import rewriter from alembic.operations import ops if TYPE_CHECKING: from sqlalchemy.engine import Connection from advanced_alchemy.alembic.commands import AlembicCommandConfig from alembic.runtime.environment import EnvironmentContext __all__ = ("do_run_migrations", "run_migrations_offline", "run_migrations_online") # this is the Alembic Config object, which provides # access to the values within the .ini file in use. 
config: "AlembicCommandConfig" = context.config # type: ignore # noqa: PGH003 writer = rewriter.Rewriter() @writer.rewrites(ops.CreateTableOp) def order_columns( context: "EnvironmentContext", # noqa: ARG001 revision: tuple[str, ...], # noqa: ARG001 op: ops.CreateTableOp, ) -> ops.CreateTableOp: """Orders ID first and the audit columns at the end.""" special_names = {"id": -100, "sa_orm_sentinel": 3001, "created_at": 3002, "updated_at": 3003} cols_by_key = [ # pyright: ignore[reportUnknownVariableType] ( special_names.get(col.key, index) if isinstance(col, Column) else 2000, col.copy(), # type: ignore[attr-defined] ) for index, col in enumerate(op.columns) ] columns = [col for _, col in sorted(cols_by_key, key=lambda entry: entry[0])] # pyright: ignore[reportUnknownVariableType,reportUnknownArgumentType,reportUnknownLambdaType] return ops.CreateTableOp( op.table_name, columns, # pyright: ignore[reportUnknownArgumentType] schema=op.schema, # TODO: Remove when https://github.com/sqlalchemy/alembic/issues/1193 is fixed # noqa: FIX002 _namespace_metadata=op._namespace_metadata, # noqa: SLF001 # pyright: ignore[reportPrivateUsage] **op.kw, ) def run_migrations_offline() -> None: """Run migrations in 'offline' mode. This configures the context with just a URL and not an Engine, though an Engine is acceptable here as well. By skipping the Engine creation we don't even need a DBAPI to be available. Calls to context.execute() here emit the given string to the script output. 
""" context.configure( url=config.db_url, target_metadata=metadata_registry.get(config.bind_key), literal_binds=True, dialect_opts={"paramstyle": "named"}, compare_type=config.compare_type, version_table=config.version_table_name, version_table_pk=config.version_table_pk, user_module_prefix=config.user_module_prefix, render_as_batch=config.render_as_batch, process_revision_directives=writer, ) with context.begin_transaction(): context.run_migrations() def do_run_migrations(connection: "Connection") -> None: """Run migrations.""" context.configure( connection=connection, target_metadata=metadata_registry.get(config.bind_key), compare_type=config.compare_type, version_table=config.version_table_name, version_table_pk=config.version_table_pk, user_module_prefix=config.user_module_prefix, render_as_batch=config.render_as_batch, process_revision_directives=writer, ) with context.begin_transaction(): context.run_migrations() async def run_migrations_online() -> None: """Run migrations in 'online' mode. In this scenario we need to create an Engine and associate a connection with the context. """ configuration = config.get_section(config.config_ini_section) or {} configuration["sqlalchemy.url"] = config.db_url connectable = cast( "AsyncEngine", config.engine or async_engine_from_config( configuration, prefix="sqlalchemy.", poolclass=pool.NullPool, future=True, ), ) if connectable is None: # pyright: ignore[reportUnnecessaryComparison] msg = "Could not get engine from config. Please ensure your `alembic.ini` according to the official Alembic documentation." 
raise RuntimeError( msg, ) async with connectable.connect() as connection: await connection.run_sync(do_run_migrations) await connectable.dispose() if context.is_offline_mode(): run_migrations_offline() else: asyncio.run(run_migrations_online()) python-advanced-alchemy-1.0.1/advanced_alchemy/alembic/templates/asyncio/script.py.mako000066400000000000000000000033641476663714600313610ustar00rootroot00000000000000# type: ignore """${message} Revision ID: ${up_revision} Revises: ${down_revision | comma,n} Create Date: ${create_date} """ import warnings from typing import TYPE_CHECKING import sqlalchemy as sa from alembic import op from advanced_alchemy.types import EncryptedString, EncryptedText, GUID, ORA_JSONB, DateTimeUTC from sqlalchemy import Text # noqa: F401 ${imports if imports else ""} if TYPE_CHECKING: from collections.abc import Sequence __all__ = ["downgrade", "upgrade", "schema_upgrades", "schema_downgrades", "data_upgrades", "data_downgrades"] sa.GUID = GUID sa.DateTimeUTC = DateTimeUTC sa.ORA_JSONB = ORA_JSONB sa.EncryptedString = EncryptedString sa.EncryptedText = EncryptedText # revision identifiers, used by Alembic. 
revision = ${repr(up_revision)} down_revision = ${repr(down_revision)} branch_labels = ${repr(branch_labels)} depends_on = ${repr(depends_on)} def upgrade() -> None: with warnings.catch_warnings(): warnings.filterwarnings("ignore", category=UserWarning) with op.get_context().autocommit_block(): schema_upgrades() data_upgrades() def downgrade() -> None: with warnings.catch_warnings(): warnings.filterwarnings("ignore", category=UserWarning) with op.get_context().autocommit_block(): data_downgrades() schema_downgrades() def schema_upgrades() -> None: """schema upgrade migrations go here.""" ${upgrades if upgrades else "pass"} def schema_downgrades() -> None: """schema downgrade migrations go here.""" ${downgrades if downgrades else "pass"} def data_upgrades() -> None: """Add any optional data upgrade migrations here!""" def data_downgrades() -> None: """Add any optional data downgrade migrations here!""" python-advanced-alchemy-1.0.1/advanced_alchemy/alembic/templates/sync/000077500000000000000000000000001476663714600260565ustar00rootroot00000000000000python-advanced-alchemy-1.0.1/advanced_alchemy/alembic/templates/sync/__init__.py000066400000000000000000000000001476663714600301550ustar00rootroot00000000000000python-advanced-alchemy-1.0.1/advanced_alchemy/alembic/templates/sync/alembic.ini.mako000066400000000000000000000050021476663714600310760ustar00rootroot00000000000000# Advanced Alchemy Alembic Sync Config [alembic] prepend_sys_path = src:. # path to migration scripts script_location = migrations # template used to generate migration files file_template = %%(year)d-%%(month).2d-%%(day).2d_%%(slug)s_%%(rev)s # This is not required to be set when running through the `advanced_alchemy` # sqlalchemy.url = driver://user:pass@localhost/dbname # timezone to use when rendering the date # within the migration file as well as the filename. 
# string value is passed to dateutil.tz.gettz() # leave blank for localtime # timezone = UTC # max length of characters to apply to the # "slug" field truncate_slug_length = 40 # set to 'true' to run the environment during # the 'revision' command, regardless of autogenerate # revision_environment = false # set to 'true' to allow .pyc and .pyo files without # a source .py file to be detected as revisions in the # versions/ directory # sourceless = false # version location specification; this defaults # to alembic/versions. When using multiple version # directories, initial revisions must be specified with --version-path # version_locations = %(here)s/bar %(here)s/bat alembic/versions # version path separator; As mentioned above, this is the character used to split # version_locations. The default within new alembic.ini files is "os", which uses os.pathsep. # If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas. # Valid values for version_path_separator are: # # version_path_separator = : # version_path_separator = ; # version_path_separator = space version_path_separator = os # Use os.pathsep. Default configuration used for new projects. # set to 'true' to search source files recursively # in each "version_locations" directory # new in Alembic version 1.10 # recursive_version_locations = false # the output encoding used when revision files # are written from script.py.mako output_encoding = utf-8 # [post_write_hooks] # This section defines scripts or Python functions that are run # on newly generated revision scripts. 
See the documentation for further # detail and examples # format using "black" - use the console_scripts runner, # against the "black" entrypoint # hooks = black # black.type = console_scripts # black.entrypoint = black # black.options = -l 79 REVISION_SCRIPT_FILENAME # lint with attempts to fix using "ruff" - use the exec runner, execute a binary # hooks = ruff # ruff.type = exec # ruff.executable = %(here)s/.venv/bin/ruff # ruff.options = --fix REVISION_SCRIPT_FILENAME python-advanced-alchemy-1.0.1/advanced_alchemy/alembic/templates/sync/env.py000066400000000000000000000106151476663714600272230ustar00rootroot00000000000000from typing import TYPE_CHECKING, cast from sqlalchemy import Column, Engine, engine_from_config, pool from advanced_alchemy.base import metadata_registry from alembic import context from alembic.autogenerate import rewriter from alembic.operations import ops if TYPE_CHECKING: from sqlalchemy.engine import Connection from advanced_alchemy.alembic.commands import AlembicCommandConfig from alembic.runtime.environment import EnvironmentContext __all__ = ["do_run_migrations", "run_migrations_offline", "run_migrations_online"] # this is the Alembic Config object, which provides # access to the values within the .ini file in use. 
config: "AlembicCommandConfig" = context.config # type: ignore # noqa: PGH003 writer = rewriter.Rewriter() @writer.rewrites(ops.CreateTableOp) def order_columns( context: "EnvironmentContext", # noqa: ARG001 revision: tuple[str, ...], # noqa: ARG001 op: ops.CreateTableOp, ) -> ops.CreateTableOp: """Orders ID first and the audit columns at the end.""" special_names = {"id": -100, "sa_orm_sentinel": 3001, "created_at": 3002, "updated_at": 3003} cols_by_key = [ # pyright: ignore[reportUnknownVariableType] ( special_names.get(col.key, index) if isinstance(col, Column) else 2000, col.copy(), # type: ignore[attr-defined] ) for index, col in enumerate(op.columns) ] columns = [col for _, col in sorted(cols_by_key, key=lambda entry: entry[0])] # pyright: ignore[reportUnknownVariableType,reportUnknownArgumentType,reportUnknownLambdaType] return ops.CreateTableOp( op.table_name, columns, # pyright: ignore[reportUnknownArgumentType] schema=op.schema, # TODO: Remove when https://github.com/sqlalchemy/alembic/issues/1193 is fixed # noqa: FIX002 _namespace_metadata=op._namespace_metadata, # noqa: SLF001 # noqa: SLF001 # pyright: ignore[reportPrivateUsage] **op.kw, ) def run_migrations_offline() -> None: """Run migrations in 'offline' mode. This configures the context with just a URL and not an Engine, though an Engine is acceptable here as well. By skipping the Engine creation we don't even need a DBAPI to be available. Calls to context.execute() here emit the given string to the script output. 
""" context.configure( url=config.db_url, target_metadata=metadata_registry.get(config.bind_key), literal_binds=True, dialect_opts={"paramstyle": "named"}, compare_type=config.compare_type, version_table=config.version_table_name, version_table_pk=config.version_table_pk, user_module_prefix=config.user_module_prefix, render_as_batch=config.render_as_batch, process_revision_directives=writer, ) with context.begin_transaction(): context.run_migrations() def do_run_migrations(connection: "Connection") -> None: """Run migrations.""" context.configure( connection=connection, target_metadata=metadata_registry.get(config.bind_key), compare_type=config.compare_type, version_table=config.version_table_name, version_table_pk=config.version_table_pk, user_module_prefix=config.user_module_prefix, render_as_batch=config.render_as_batch, process_revision_directives=writer, ) with context.begin_transaction(): context.run_migrations() def run_migrations_online() -> None: """Run migrations in 'online' mode. In this scenario we need to create an Engine and associate a connection with the context. """ configuration = config.get_section(config.config_ini_section) or {} configuration["sqlalchemy.url"] = config.db_url connectable = cast( "Engine", config.engine or engine_from_config( configuration, prefix="sqlalchemy.", poolclass=pool.NullPool, future=True, ), ) if connectable is None: # pyright: ignore[reportUnnecessaryComparison] msg = "Could not get engine from config. Please ensure your `alembic.ini` according to the official Alembic documentation." 
raise RuntimeError( msg, ) with connectable.connect() as connection: do_run_migrations(connection=connection) connectable.dispose() if context.is_offline_mode(): run_migrations_offline() else: run_migrations_online() python-advanced-alchemy-1.0.1/advanced_alchemy/alembic/templates/sync/script.py.mako000066400000000000000000000034771476663714600306750ustar00rootroot00000000000000# type: ignore """${message} Revision ID: ${up_revision} Revises: ${down_revision | comma,n} Create Date: ${create_date} """ import warnings from typing import TYPE_CHECKING import sqlalchemy as sa from alembic import op from advanced_alchemy.types import EncryptedString, EncryptedText, GUID, ORA_JSONB, DateTimeUTC from sqlalchemy import Text # noqa: F401 ${imports if imports else ""} if TYPE_CHECKING: from collections.abc import Sequence __all__ = ["downgrade", "upgrade", "schema_upgrades", "schema_downgrades", "data_upgrades", "data_downgrades"] sa.GUID = GUID sa.DateTimeUTC = DateTimeUTC sa.ORA_JSONB = ORA_JSONB sa.EncryptedString = EncryptedString sa.EncryptedText = EncryptedText # revision identifiers, used by Alembic. 
revision: str = ${repr(up_revision)} down_revision: str | None = ${repr(down_revision)} branch_labels: str | Sequence[str] | None = ${repr(branch_labels)} depends_on: str | Sequence[str] | None = ${repr(depends_on)} def upgrade() -> None: with warnings.catch_warnings(): warnings.filterwarnings("ignore", category=UserWarning) with op.get_context().autocommit_block(): schema_upgrades() data_upgrades() def downgrade() -> None: with warnings.catch_warnings(): warnings.filterwarnings("ignore", category=UserWarning) with op.get_context().autocommit_block(): data_downgrades() schema_downgrades() def schema_upgrades() -> None: """schema upgrade migrations go here.""" ${upgrades if upgrades else "pass"} def schema_downgrades() -> None: """schema downgrade migrations go here.""" ${downgrades if downgrades else "pass"} def data_upgrades() -> None: """Add any optional data upgrade migrations here!""" def data_downgrades() -> None: """Add any optional data downgrade migrations here!""" python-advanced-alchemy-1.0.1/advanced_alchemy/alembic/utils.py000066400000000000000000000101741476663714600246210ustar00rootroot00000000000000from contextlib import AbstractAsyncContextManager, AbstractContextManager from pathlib import Path from typing import TYPE_CHECKING, Union from litestar.cli._utils import console from sqlalchemy import Engine, MetaData, Table from typing_extensions import TypeIs if TYPE_CHECKING: from sqlalchemy.ext.asyncio import AsyncEngine, AsyncSession from sqlalchemy.orm import DeclarativeBase, Session __all__ = ("drop_all", "dump_tables") async def drop_all(engine: "Union[AsyncEngine, Engine]", version_table_name: str, metadata: MetaData) -> None: def _is_sync(engine: "Union[Engine, AsyncEngine]") -> "TypeIs[Engine]": return isinstance(engine, Engine) def _drop_tables_sync(engine: Engine) -> None: console.rule("[bold red]Connecting to database backend.") with engine.begin() as db: console.rule("[bold red]Dropping the db", align="left") metadata.drop_all(db) 
console.rule("[bold red]Dropping the version table", align="left") Table(version_table_name, metadata).drop(db, checkfirst=True) console.rule("[bold yellow]Successfully dropped all objects", align="left") async def _drop_tables_async(engine: "AsyncEngine") -> None: console.rule("[bold red]Connecting to database backend.", align="left") async with engine.begin() as db: console.rule("[bold red]Dropping the db", align="left") await db.run_sync(metadata.drop_all) console.rule("[bold red]Dropping the version table", align="left") await db.run_sync(Table(version_table_name, metadata).drop, checkfirst=True) console.rule("[bold yellow]Successfully dropped all objects", align="left") if _is_sync(engine): return _drop_tables_sync(engine) return await _drop_tables_async(engine) async def dump_tables( dump_dir: Path, session: "Union[AbstractContextManager[Session], AbstractAsyncContextManager[AsyncSession]]", models: "list[type[DeclarativeBase]]", ) -> None: from types import new_class from advanced_alchemy._serialization import encode_json def _is_sync( session: "Union[AbstractAsyncContextManager[AsyncSession], AbstractContextManager[Session]]", ) -> "TypeIs[AbstractContextManager[Session]]": return isinstance(session, AbstractContextManager) def _dump_table_sync(session: "AbstractContextManager[Session]") -> None: from advanced_alchemy.repository import SQLAlchemySyncRepository with session as _session: for model in models: json_path = dump_dir / f"{model.__tablename__}.json" console.rule( f"[yellow bold]Dumping table '{json_path.stem}' to '{json_path}'", style="yellow", align="left", ) repo = new_class( "repo", (SQLAlchemySyncRepository,), exec_body=lambda ns, model=model: ns.setdefault("model_type", model), # type: ignore[misc] ) json_path.write_text(encode_json([row.to_dict() for row in repo(session=_session).list()])) async def _dump_table_async(session: "AbstractAsyncContextManager[AsyncSession]") -> None: from advanced_alchemy.repository import 
SQLAlchemyAsyncRepository async with session as _session: for model in models: json_path = dump_dir / f"{model.__tablename__}.json" console.rule( f"[yellow bold]Dumping table '{json_path.stem}' to '{json_path}'", style="yellow", align="left", ) repo = new_class( "repo", (SQLAlchemyAsyncRepository,), exec_body=lambda ns, model=model: ns.setdefault("model_type", model), # type: ignore[misc] ) json_path.write_text(encode_json([row.to_dict() for row in await repo(session=_session).list()])) dump_dir.mkdir(exist_ok=True) if _is_sync(session): return _dump_table_sync(session) return await _dump_table_async(session) python-advanced-alchemy-1.0.1/advanced_alchemy/base.py000066400000000000000000000362761476663714600230120ustar00rootroot00000000000000# ruff: noqa: TC004 """Common base classes for SQLAlchemy declarative models.""" import contextlib import datetime import re from collections.abc import Iterator from typing import TYPE_CHECKING, Any, Optional, Protocol, Union, cast, runtime_checkable from uuid import UUID from sqlalchemy import Date, MetaData, String from sqlalchemy.ext.asyncio import AsyncAttrs from sqlalchemy.orm import ( DeclarativeBase, Mapper, declared_attr, ) from sqlalchemy.orm import ( registry as SQLAlchemyRegistry, # noqa: N812 ) from sqlalchemy.orm.decl_base import _TableArgsType as TableArgsType # pyright: ignore[reportPrivateUsage] from sqlalchemy.types import TypeEngine from typing_extensions import Self, TypeVar from advanced_alchemy.mixins import ( AuditColumns, BigIntPrimaryKey, NanoIDPrimaryKey, UUIDPrimaryKey, UUIDv6PrimaryKey, UUIDv7PrimaryKey, ) from advanced_alchemy.types import GUID, DateTimeUTC, JsonB from advanced_alchemy.utils.dataclass import DataclassProtocol if TYPE_CHECKING: from sqlalchemy.sql import FromClause from sqlalchemy.sql.schema import ( _NamingSchemaParameter as NamingSchemaParameter, # pyright: ignore[reportPrivateUsage] ) __all__ = ( "AdvancedDeclarativeBase", "BasicAttributes", "BigIntAuditBase", "BigIntBase", 
"BigIntBaseT", "CommonTableAttributes", "ModelProtocol", "NanoIDAuditBase", "NanoIDBase", "NanoIDBaseT", "SQLQuery", "TableArgsType", "UUIDAuditBase", "UUIDBase", "UUIDBaseT", "UUIDv6AuditBase", "UUIDv6Base", "UUIDv6BaseT", "UUIDv7AuditBase", "UUIDv7Base", "UUIDv7BaseT", "convention", "create_registry", "merge_table_arguments", "metadata_registry", "orm_registry", "table_name_regexp", ) UUIDBaseT = TypeVar("UUIDBaseT", bound="UUIDBase") """Type variable for :class:`UUIDBase`.""" BigIntBaseT = TypeVar("BigIntBaseT", bound="BigIntBase") """Type variable for :class:`BigIntBase`.""" UUIDv6BaseT = TypeVar("UUIDv6BaseT", bound="UUIDv6Base") """Type variable for :class:`UUIDv6Base`.""" UUIDv7BaseT = TypeVar("UUIDv7BaseT", bound="UUIDv7Base") """Type variable for :class:`UUIDv7Base`.""" NanoIDBaseT = TypeVar("NanoIDBaseT", bound="NanoIDBase") """Type variable for :class:`NanoIDBase`.""" convention: "NamingSchemaParameter" = { "ix": "ix_%(column_0_label)s", "uq": "uq_%(table_name)s_%(column_0_name)s", "ck": "ck_%(table_name)s_%(constraint_name)s", "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s", "pk": "pk_%(table_name)s", } """Templates for automated constraint name generation.""" table_name_regexp = re.compile("((?<=[a-z0-9])[A-Z]|(?!^)[A-Z](?=[a-z]))") """Regular expression for table name""" def merge_table_arguments(cls: type[DeclarativeBase], table_args: Optional[TableArgsType] = None) -> TableArgsType: """Merge Table Arguments. This function helps merge table arguments when using mixins that include their own table args, making it easier to append additional information such as comments or constraints to the model. Args: cls (type[:class:`sqlalchemy.orm.DeclarativeBase`]): The model that will get the table args. table_args (:class:`TableArgsType`, optional): Additional information to add to table_args. Returns: :class:`TableArgsType`: Merged table arguments. 
""" args: list[Any] = [] kwargs: dict[str, Any] = {} mixin_table_args = (getattr(super(base_cls, cls), "__table_args__", None) for base_cls in cls.__bases__) # pyright: ignore[reportUnknownParameter,reportUnknownArgumentType,reportArgumentType] for arg_to_merge in (*mixin_table_args, table_args): if arg_to_merge: if isinstance(arg_to_merge, tuple): last_positional_arg = arg_to_merge[-1] # pyright: ignore[reportUnknownVariableType] args.extend(arg_to_merge[:-1]) # pyright: ignore[reportUnknownArgumentType] if isinstance(last_positional_arg, dict): kwargs.update(last_positional_arg) # pyright: ignore[reportUnknownArgumentType] else: args.append(last_positional_arg) else: kwargs.update(arg_to_merge) if args: if kwargs: return (*args, kwargs) return tuple(args) return kwargs @runtime_checkable class ModelProtocol(Protocol): """The base SQLAlchemy model protocol. Attributes: __table__ (:class:`sqlalchemy.sql.FromClause`): The table associated with the model. __mapper__ (:class:`sqlalchemy.orm.Mapper`): The mapper for the model. __name__ (str): The name of the model. """ if TYPE_CHECKING: __table__: FromClause __mapper__: Mapper[Any] __name__: str def to_dict(self, exclude: Optional[set[str]] = None) -> dict[str, Any]: """Convert model to dictionary. Returns: Dict[str, Any]: A dict representation of the model """ ... class BasicAttributes: """Basic attributes for SQLAlchemy tables and queries. Provides a method to convert the model to a dictionary representation. Methods: to_dict: Converts the model to a dictionary, excluding specified fields. :no-index: """ if TYPE_CHECKING: __name__: str __table__: FromClause __mapper__: Mapper[Any] def to_dict(self, exclude: Optional[set[str]] = None) -> dict[str, Any]: """Convert model to dictionary. 
Returns: Dict[str, Any]: A dict representation of the model """ exclude = {"sa_orm_sentinel", "_sentinel"}.union(self._sa_instance_state.unloaded).union(exclude or []) # type: ignore[attr-defined] return { field: getattr(self, field) for field in self.__mapper__.columns.keys() # noqa: SIM118 if field not in exclude } class CommonTableAttributes(BasicAttributes): """Common attributes for SQLAlchemy tables. Inherits from :class:`BasicAttributes` and provides a mechanism to infer table names from class names. Attributes: __tablename__ (str): The inferred table name. """ if TYPE_CHECKING: __tablename__: str else: @declared_attr.directive def __tablename__(cls) -> str: """Infer table name from class name.""" return table_name_regexp.sub(r"_\1", cls.__name__).lower() def create_registry( custom_annotation_map: Optional[dict[Any, Union[type[TypeEngine[Any]], TypeEngine[Any]]]] = None, ) -> SQLAlchemyRegistry: """Create a new SQLAlchemy registry. Args: custom_annotation_map (dict, optional): Custom type annotations to use for the registry. Returns: :class:`sqlalchemy.orm.registry`: A new SQLAlchemy registry with the specified type annotations. 
""" import uuid as core_uuid meta = MetaData(naming_convention=convention) type_annotation_map: dict[Any, Union[type[TypeEngine[Any]], TypeEngine[Any]]] = { UUID: GUID, core_uuid.UUID: GUID, datetime.datetime: DateTimeUTC, datetime.date: Date, dict: JsonB, dict[str, Any]: JsonB, dict[str, str]: JsonB, DataclassProtocol: JsonB, } with contextlib.suppress(ImportError): from pydantic import AnyHttpUrl, AnyUrl, EmailStr, IPvAnyAddress, IPvAnyInterface, IPvAnyNetwork, Json type_annotation_map.update( { EmailStr: String, AnyUrl: String, AnyHttpUrl: String, Json: JsonB, IPvAnyAddress: String, IPvAnyInterface: String, IPvAnyNetwork: String, } ) with contextlib.suppress(ImportError): from msgspec import Struct type_annotation_map[Struct] = JsonB if custom_annotation_map is not None: type_annotation_map.update(custom_annotation_map) return SQLAlchemyRegistry(metadata=meta, type_annotation_map=type_annotation_map) orm_registry = create_registry() class MetadataRegistry: """A registry for metadata. Provides methods to get and set metadata for different bind keys. Methods: get: Retrieves the metadata for a given bind key. set: Sets the metadata for a given bind key. 
""" _instance: Optional["MetadataRegistry"] = None _registry: dict[Union[str, None], MetaData] = {None: orm_registry.metadata} def __new__(cls) -> Self: if cls._instance is None: cls._instance = super().__new__(cls) return cast("Self", cls._instance) def get(self, bind_key: Optional[str] = None) -> MetaData: """Get the metadata for the given bind key.""" return self._registry.setdefault(bind_key, MetaData(naming_convention=convention)) def set(self, bind_key: Optional[str], metadata: MetaData) -> None: """Set the metadata for the given bind key.""" self._registry[bind_key] = metadata def __iter__(self) -> Iterator[Union[str, None]]: return iter(self._registry) def __getitem__(self, bind_key: Union[str, None]) -> MetaData: return self._registry[bind_key] def __setitem__(self, bind_key: Union[str, None], metadata: MetaData) -> None: self._registry[bind_key] = metadata def __contains__(self, bind_key: Union[str, None]) -> bool: return bind_key in self._registry metadata_registry = MetadataRegistry() class AdvancedDeclarativeBase(DeclarativeBase): """A subclass of declarative base that allows for overriding of the registry. Inherits from :class:`sqlalchemy.orm.DeclarativeBase`. Attributes: registry (:class:`sqlalchemy.orm.registry`): The registry for the declarative base. __metadata_registry__ (:class:`~advanced_alchemy.base.MetadataRegistry`): The metadata registry. __bind_key__ (Optional[:class:`str`]): The bind key for the metadata. 
""" registry = orm_registry __abstract__ = True __metadata_registry__: MetadataRegistry = MetadataRegistry() __bind_key__: Optional[str] = None def __init_subclass__(cls, **kwargs: Any) -> None: bind_key = getattr(cls, "__bind_key__", None) if bind_key is not None: cls.metadata = cls.__metadata_registry__.get(bind_key) elif None not in cls.__metadata_registry__ and getattr(cls, "metadata", None) is not None: cls.__metadata_registry__[None] = cls.metadata super().__init_subclass__(**kwargs) class UUIDBase(UUIDPrimaryKey, CommonTableAttributes, AdvancedDeclarativeBase, AsyncAttrs): """Base for all SQLAlchemy declarative models with UUID v4 primary keys. .. seealso:: :class:`CommonTableAttributes` :class:`advanced_alchemy.mixins.UUIDPrimaryKey` :class:`AdvancedDeclarativeBase` :class:`AsyncAttrs` """ __abstract__ = True class UUIDAuditBase(CommonTableAttributes, UUIDPrimaryKey, AuditColumns, AdvancedDeclarativeBase, AsyncAttrs): """Base for declarative models with UUID v4 primary keys and audit columns. .. seealso:: :class:`CommonTableAttributes` :class:`advanced_alchemy.mixins.UUIDPrimaryKey` :class:`advanced_alchemy.mixins.AuditColumns` :class:`AdvancedDeclarativeBase` :class:`AsyncAttrs` """ __abstract__ = True class UUIDv6Base(UUIDv6PrimaryKey, CommonTableAttributes, AdvancedDeclarativeBase, AsyncAttrs): """Base for all SQLAlchemy declarative models with UUID v6 primary keys. .. seealso:: :class:`advanced_alchemy.mixins.UUIDv6PrimaryKey` :class:`CommonTableAttributes` :class:`AdvancedDeclarativeBase` :class:`AsyncAttrs` """ __abstract__ = True class UUIDv6AuditBase(CommonTableAttributes, UUIDv6PrimaryKey, AuditColumns, AdvancedDeclarativeBase, AsyncAttrs): """Base for declarative models with UUID v6 primary keys and audit columns. .. 
seealso:: :class:`CommonTableAttributes` :class:`advanced_alchemy.mixins.UUIDv6PrimaryKey` :class:`advanced_alchemy.mixins.AuditColumns` :class:`AdvancedDeclarativeBase` :class:`AsyncAttrs` """ __abstract__ = True class UUIDv7Base(UUIDv7PrimaryKey, CommonTableAttributes, AdvancedDeclarativeBase, AsyncAttrs): """Base for all SQLAlchemy declarative models with UUID v7 primary keys. .. seealso:: :class:`advanced_alchemy.mixins.UUIDv7PrimaryKey` :class:`CommonTableAttributes` :class:`AdvancedDeclarativeBase` :class:`AsyncAttrs` """ __abstract__ = True class UUIDv7AuditBase(CommonTableAttributes, UUIDv7PrimaryKey, AuditColumns, AdvancedDeclarativeBase, AsyncAttrs): """Base for declarative models with UUID v7 primary keys and audit columns. .. seealso:: :class:`CommonTableAttributes` :class:`advanced_alchemy.mixins.UUIDv7PrimaryKey` :class:`advanced_alchemy.mixins.AuditColumns` :class:`AdvancedDeclarativeBase` :class:`AsyncAttrs` """ __abstract__ = True class NanoIDBase(NanoIDPrimaryKey, CommonTableAttributes, AdvancedDeclarativeBase, AsyncAttrs): """Base for all SQLAlchemy declarative models with Nano ID primary keys. .. seealso:: :class:`advanced_alchemy.mixins.NanoIDPrimaryKey` :class:`CommonTableAttributes` :class:`AdvancedDeclarativeBase` :class:`AsyncAttrs` """ __abstract__ = True class NanoIDAuditBase(CommonTableAttributes, NanoIDPrimaryKey, AuditColumns, AdvancedDeclarativeBase, AsyncAttrs): """Base for declarative models with Nano ID primary keys and audit columns. .. seealso:: :class:`CommonTableAttributes` :class:`advanced_alchemy.mixins.NanoIDPrimaryKey` :class:`advanced_alchemy.mixins.AuditColumns` :class:`AdvancedDeclarativeBase` :class:`AsyncAttrs` """ __abstract__ = True class BigIntBase(BigIntPrimaryKey, CommonTableAttributes, AdvancedDeclarativeBase, AsyncAttrs): """Base for all SQLAlchemy declarative models with BigInt primary keys. .. 
seealso:: :class:`advanced_alchemy.mixins.BigIntPrimaryKey` :class:`CommonTableAttributes` :class:`AdvancedDeclarativeBase` :class:`AsyncAttrs` """ __abstract__ = True class BigIntAuditBase(CommonTableAttributes, BigIntPrimaryKey, AuditColumns, AdvancedDeclarativeBase, AsyncAttrs): """Base for declarative models with BigInt primary keys and audit columns. .. seealso:: :class:`CommonTableAttributes` :class:`advanced_alchemy.mixins.BigIntPrimaryKey` :class:`advanced_alchemy.mixins.AuditColumns` :class:`AdvancedDeclarativeBase` :class:`AsyncAttrs` """ __abstract__ = True class DefaultBase(CommonTableAttributes, AdvancedDeclarativeBase, AsyncAttrs): """Base for all SQLAlchemy declarative models. No primary key is added. .. seealso:: :class:`CommonTableAttributes` :class:`AdvancedDeclarativeBase` :class:`AsyncAttrs` """ __abstract__ = True class SQLQuery(BasicAttributes, AdvancedDeclarativeBase, AsyncAttrs): """Base for all SQLAlchemy custom mapped objects. .. seealso:: :class:`BasicAttributes` :class:`AdvancedDeclarativeBase` :class:`AsyncAttrs` """ __abstract__ = True __allow_unmapped__ = True python-advanced-alchemy-1.0.1/advanced_alchemy/cli.py000066400000000000000000000376211476663714600226420ustar00rootroot00000000000000from collections.abc import Sequence from pathlib import Path from typing import TYPE_CHECKING, Optional, Union, cast if TYPE_CHECKING: from click import Group from advanced_alchemy.config import SQLAlchemyAsyncConfig, SQLAlchemySyncConfig from alembic.migration import MigrationContext from alembic.operations.ops import MigrationScript, UpgradeOps __all__ = ("add_migration_commands", "get_alchemy_group") def get_alchemy_group() -> "Group": """Get the Advanced Alchemy CLI group.""" from advanced_alchemy.exceptions import MissingDependencyError try: import rich_click as click except ImportError: try: import click # type: ignore[no-redef] except ImportError as e: raise MissingDependencyError(package="click", install_package="cli") from e 
@click.group(name="alchemy") @click.option( "--config", help="Dotted path to SQLAlchemy config(s) (e.g. 'myapp.config.alchemy_configs')", required=True, type=str, ) @click.pass_context def alchemy_group(ctx: "click.Context", config: str) -> None: """Advanced Alchemy CLI commands.""" from rich import get_console from advanced_alchemy.utils import module_loader console = get_console() ctx.ensure_object(dict) try: config_instance = module_loader.import_string(config) if isinstance(config_instance, Sequence): ctx.obj["configs"] = config_instance else: ctx.obj["configs"] = [config_instance] except ImportError as e: console.print(f"[red]Error loading config: {e}[/]") ctx.exit(1) return alchemy_group def add_migration_commands(database_group: Optional["Group"] = None) -> "Group": # noqa: C901, PLR0915 """Add migration commands to the database group.""" from advanced_alchemy.exceptions import MissingDependencyError try: import rich_click as click except ImportError: try: import click # type: ignore[no-redef] except ImportError as e: raise MissingDependencyError(package="click", install_package="cli") from e from rich import get_console console = get_console() if database_group is None: database_group = get_alchemy_group() bind_key_option = click.option( "--bind-key", help="Specify which SQLAlchemy config to use by bind key", type=str, default=None, ) verbose_option = click.option( "--verbose", help="Enable verbose output.", type=bool, default=False, is_flag=True, ) no_prompt_option = click.option( "--no-prompt", help="Do not prompt for confirmation before executing the command.", type=bool, default=False, required=False, show_default=True, is_flag=True, ) def get_config_by_bind_key( ctx: "click.Context", bind_key: Optional[str] ) -> "Union[SQLAlchemyAsyncConfig, SQLAlchemySyncConfig]": """Get the SQLAlchemy config for the specified bind key.""" configs = ctx.obj["configs"] if bind_key is None: return cast("Union[SQLAlchemyAsyncConfig, SQLAlchemySyncConfig]", configs[0]) 
for config in configs: if config.bind_key == bind_key: return cast("Union[SQLAlchemyAsyncConfig, SQLAlchemySyncConfig]", config) console.print(f"[red]No config found for bind key: {bind_key}[/]") ctx.exit(1) # noqa: RET503 @database_group.command( name="show-current-revision", help="Shows the current revision for the database.", ) @bind_key_option @verbose_option def show_database_revision(bind_key: Optional[str], verbose: bool) -> None: # pyright: ignore[reportUnusedFunction] """Show current database revision.""" from advanced_alchemy.alembic.commands import AlembicCommands ctx = click.get_current_context() console.rule("[yellow]Listing current revision[/]", align="left") sqlalchemy_config = get_config_by_bind_key(ctx, bind_key) alembic_commands = AlembicCommands(sqlalchemy_config=sqlalchemy_config) alembic_commands.current(verbose=verbose) @database_group.command( name="downgrade", help="Downgrade database to a specific revision.", ) @bind_key_option @click.option("--sql", type=bool, help="Generate SQL output for offline migrations.", default=False, is_flag=True) @click.option( "--tag", help="an arbitrary 'tag' that can be intercepted by custom env.py scripts via the .EnvironmentContext.get_tag_argument method.", type=str, default=None, ) @no_prompt_option @click.argument( "revision", type=str, default="-1", ) def downgrade_database( # pyright: ignore[reportUnusedFunction] bind_key: Optional[str], revision: str, sql: bool, tag: Optional[str], no_prompt: bool ) -> None: """Downgrade the database to the latest revision.""" from rich.prompt import Confirm from advanced_alchemy.alembic.commands import AlembicCommands ctx = click.get_current_context() console.rule("[yellow]Starting database downgrade process[/]", align="left") input_confirmed = ( True if no_prompt else Confirm.ask(f"Are you sure you want to downgrade the database to the `{revision}` revision?") ) if input_confirmed: sqlalchemy_config = get_config_by_bind_key(ctx, bind_key) alembic_commands = 
AlembicCommands(sqlalchemy_config=sqlalchemy_config) alembic_commands.downgrade(revision=revision, sql=sql, tag=tag) @database_group.command( name="upgrade", help="Upgrade database to a specific revision.", ) @bind_key_option @click.option("--sql", type=bool, help="Generate SQL output for offline migrations.", default=False, is_flag=True) @click.option( "--tag", help="an arbitrary 'tag' that can be intercepted by custom env.py scripts via the .EnvironmentContext.get_tag_argument method.", type=str, default=None, ) @no_prompt_option @click.argument( "revision", type=str, default="head", ) def upgrade_database( # pyright: ignore[reportUnusedFunction] bind_key: Optional[str], revision: str, sql: bool, tag: Optional[str], no_prompt: bool ) -> None: """Upgrade the database to the latest revision.""" from rich.prompt import Confirm from advanced_alchemy.alembic.commands import AlembicCommands ctx = click.get_current_context() console.rule("[yellow]Starting database upgrade process[/]", align="left") input_confirmed = ( True if no_prompt else Confirm.ask(f"[bold]Are you sure you want migrate the database to the `{revision}` revision?[/]") ) if input_confirmed: sqlalchemy_config = get_config_by_bind_key(ctx, bind_key) alembic_commands = AlembicCommands(sqlalchemy_config=sqlalchemy_config) alembic_commands.upgrade(revision=revision, sql=sql, tag=tag) @database_group.command( name="init", help="Initialize migrations for the project.", ) @bind_key_option @click.argument( "directory", default=None, required=False, ) @click.option("--multidb", is_flag=True, default=False, help="Support multiple databases") @click.option("--package", is_flag=True, default=True, help="Create `__init__.py` for created folder") @no_prompt_option def init_alembic( # pyright: ignore[reportUnusedFunction] bind_key: Optional[str], directory: Optional[str], multidb: bool, package: bool, no_prompt: bool ) -> None: """Initialize the database migrations.""" from rich.prompt import Confirm from 
advanced_alchemy.alembic.commands import AlembicCommands ctx = click.get_current_context() console.rule("[yellow]Initializing database migrations.", align="left") input_confirmed = ( True if no_prompt else Confirm.ask("[bold]Are you sure you want initialize migrations for the project?[/]") ) if input_confirmed: configs = [get_config_by_bind_key(ctx, bind_key)] if bind_key is not None else ctx.obj["configs"] for config in configs: directory = config.alembic_config.script_location if directory is None else directory alembic_commands = AlembicCommands(sqlalchemy_config=config) alembic_commands.init(directory=cast("str", directory), multidb=multidb, package=package) @database_group.command( name="make-migrations", help="Create a new migration revision.", ) @bind_key_option @click.option("-m", "--message", default=None, help="Revision message") @click.option( "--autogenerate/--no-autogenerate", default=True, help="Automatically populate revision with detected changes" ) @click.option("--sql", is_flag=True, default=False, help="Export to `.sql` instead of writing to the database.") @click.option("--head", default="head", help="Specify head revision to use as base for new revision.") @click.option( "--splice", is_flag=True, default=False, help='Allow a non-head revision as the "head" to splice onto' ) @click.option("--branch-label", default=None, help="Specify a branch label to apply to the new revision") @click.option("--version-path", default=None, help="Specify specific path from config for version file") @click.option("--rev-id", default=None, help="Specify a ID to use for revision.") @no_prompt_option def create_revision( # pyright: ignore[reportUnusedFunction] bind_key: Optional[str], message: Optional[str], autogenerate: bool, sql: bool, head: str, splice: bool, branch_label: Optional[str], version_path: Optional[str], rev_id: Optional[str], no_prompt: bool, ) -> None: """Create a new database revision.""" from rich.prompt import Prompt from 
advanced_alchemy.alembic.commands import AlembicCommands def process_revision_directives( context: "MigrationContext", # noqa: ARG001 revision: tuple[str], # noqa: ARG001 directives: list["MigrationScript"], ) -> None: """Handle revision directives.""" if autogenerate and cast("UpgradeOps", directives[0].upgrade_ops).is_empty(): console.rule( "[magenta]The generation of a migration file is being skipped because it would result in an empty file.", style="magenta", align="left", ) console.rule( "[magenta]More information can be found here. https://alembic.sqlalchemy.org/en/latest/autogenerate.html#what-does-autogenerate-detect-and-what-does-it-not-detect", style="magenta", align="left", ) console.rule( "[magenta]If you intend to create an empty migration file, use the --no-autogenerate option.", style="magenta", align="left", ) directives.clear() ctx = click.get_current_context() console.rule("[yellow]Starting database upgrade process[/]", align="left") if message is None: message = "autogenerated" if no_prompt else Prompt.ask("Please enter a message describing this revision") sqlalchemy_config = get_config_by_bind_key(ctx, bind_key) alembic_commands = AlembicCommands(sqlalchemy_config=sqlalchemy_config) alembic_commands.revision( message=message, autogenerate=autogenerate, sql=sql, head=head, splice=splice, branch_label=branch_label, version_path=version_path, rev_id=rev_id, process_revision_directives=process_revision_directives, # type: ignore[arg-type] ) @database_group.command(name="drop-all", help="Drop all tables from the database.") @bind_key_option @no_prompt_option def drop_all(bind_key: Optional[str], no_prompt: bool) -> None: # pyright: ignore[reportUnusedFunction] """Drop all tables from the database.""" from anyio import run from rich.prompt import Confirm from advanced_alchemy.alembic.utils import drop_all from advanced_alchemy.base import metadata_registry ctx = click.get_current_context() console.rule("[yellow]Dropping all tables from the 
database[/]", align="left") input_confirmed = no_prompt or Confirm.ask( "[bold red]Are you sure you want to drop all tables from the database?" ) async def _drop_all( configs: "Sequence[Union[SQLAlchemyAsyncConfig, SQLAlchemySyncConfig]]", ) -> None: for config in configs: engine = config.get_engine() await drop_all(engine, config.alembic_config.version_table_name, metadata_registry.get(config.bind_key)) if input_confirmed: configs = [get_config_by_bind_key(ctx, bind_key)] if bind_key is not None else ctx.obj["configs"] run(_drop_all, configs) @database_group.command(name="dump-data", help="Dump specified tables from the database to JSON files.") @bind_key_option @click.option( "--table", "table_names", help="Name of the table to dump. Multiple tables can be specified. Use '*' to dump all tables.", type=str, required=True, multiple=True, ) @click.option( "--dir", "dump_dir", help="Directory to save the JSON files. Defaults to WORKDIR/fixtures", type=click.Path(path_type=Path), default=Path.cwd() / "fixtures", required=False, ) def dump_table_data(bind_key: Optional[str], table_names: tuple[str, ...], dump_dir: Path) -> None: # pyright: ignore[reportUnusedFunction] """Dump table data to JSON files.""" from anyio import run from rich.prompt import Confirm from advanced_alchemy.alembic.utils import dump_tables from advanced_alchemy.base import metadata_registry, orm_registry ctx = click.get_current_context() all_tables = "*" in table_names if all_tables and not Confirm.ask( "[yellow bold]You have specified '*'. 
Are you sure you want to dump all tables from the database?", ): return console.rule("[red bold]No data was dumped.", style="red", align="left") async def _dump_tables() -> None: configs = [get_config_by_bind_key(ctx, bind_key)] if bind_key is not None else ctx.obj["configs"] for config in configs: target_tables = set(metadata_registry.get(config.bind_key).tables) if not all_tables: for table_name in set(table_names) - target_tables: console.rule( f"[red bold]Skipping table '{table_name}' because it is not available in the default registry", style="red", align="left", ) target_tables.intersection_update(table_names) else: console.rule("[yellow bold]Dumping all tables", style="yellow", align="left") models = [ mapper.class_ for mapper in orm_registry.mappers if mapper.class_.__table__.name in target_tables ] await dump_tables(dump_dir, config.get_session(), models) console.rule("[green bold]Data dump complete", align="left") return run(_dump_tables) return database_group python-advanced-alchemy-1.0.1/advanced_alchemy/config/000077500000000000000000000000001476663714600227555ustar00rootroot00000000000000python-advanced-alchemy-1.0.1/advanced_alchemy/config/__init__.py000066400000000000000000000017571476663714600251000ustar00rootroot00000000000000from advanced_alchemy.config.asyncio import AlembicAsyncConfig, AsyncSessionConfig, SQLAlchemyAsyncConfig from advanced_alchemy.config.common import ( ConnectionT, EngineT, GenericAlembicConfig, GenericSessionConfig, GenericSQLAlchemyConfig, SessionMakerT, SessionT, ) from advanced_alchemy.config.engine import EngineConfig from advanced_alchemy.config.sync import AlembicSyncConfig, SQLAlchemySyncConfig, SyncSessionConfig from advanced_alchemy.config.types import CommitStrategy, TypeDecodersSequence, TypeEncodersMap __all__ = ( "AlembicAsyncConfig", "AlembicSyncConfig", "AsyncSessionConfig", "CommitStrategy", "ConnectionT", "EngineConfig", "EngineT", "GenericAlembicConfig", "GenericSQLAlchemyConfig", "GenericSessionConfig", 
"SQLAlchemyAsyncConfig", "SQLAlchemySyncConfig", "SessionMakerT", "SessionT", "SyncSessionConfig", "TypeDecodersSequence", "TypeEncodersMap", ) python-advanced-alchemy-1.0.1/advanced_alchemy/config/asyncio.py000066400000000000000000000063541476663714600250040ustar00rootroot00000000000000from collections.abc import AsyncGenerator from contextlib import asynccontextmanager from dataclasses import dataclass, field from typing import TYPE_CHECKING, Callable, Union from sqlalchemy.ext.asyncio import AsyncConnection, AsyncEngine, AsyncSession, async_sessionmaker, create_async_engine from advanced_alchemy.config.common import ( GenericAlembicConfig, GenericSessionConfig, GenericSQLAlchemyConfig, ) from advanced_alchemy.utils.dataclass import Empty if TYPE_CHECKING: from typing import Callable from sqlalchemy.orm import Session from advanced_alchemy.utils.dataclass import EmptyType __all__ = ( "AlembicAsyncConfig", "AsyncSessionConfig", "SQLAlchemyAsyncConfig", ) @dataclass class AsyncSessionConfig(GenericSessionConfig[AsyncConnection, AsyncEngine, AsyncSession]): """SQLAlchemy async session config.""" sync_session_class: "Union[type[Session], None, EmptyType]" = Empty """A :class:`Session ` subclass or other callable which will be used to construct the :class:`Session ` which will be proxied. This parameter may be used to provide custom :class:`Session ` subclasses. Defaults to the :attr:`AsyncSession.sync_session_class ` class-level attribute.""" @dataclass class AlembicAsyncConfig(GenericAlembicConfig): """Configuration for an Async Alembic's Config class. .. seealso:: https://alembic.sqlalchemy.org/en/latest/api/config.html """ @dataclass class SQLAlchemyAsyncConfig(GenericSQLAlchemyConfig[AsyncEngine, AsyncSession, async_sessionmaker[AsyncSession]]): """Async SQLAlchemy Configuration. Note: The alembic configuration options are documented in the Alembic documentation. 
""" create_engine_callable: "Callable[[str], AsyncEngine]" = create_async_engine """Callable that creates an :class:`AsyncEngine ` instance or instance of its subclass. """ session_config: AsyncSessionConfig = field(default_factory=AsyncSessionConfig) # pyright: ignore[reportIncompatibleVariableOverride] """Configuration options for the :class:`async_sessionmaker`.""" session_maker_class: "type[async_sessionmaker[AsyncSession]]" = async_sessionmaker # pyright: ignore[reportIncompatibleVariableOverride] """Sessionmaker class to use.""" alembic_config: "AlembicAsyncConfig" = field(default_factory=AlembicAsyncConfig) """Configuration for the SQLAlchemy Alembic migrations. The configuration options are documented in the Alembic documentation. """ def __hash__(self) -> int: return super().__hash__() def __eq__(self, other: object) -> bool: return super().__eq__(other) @asynccontextmanager async def get_session( self, ) -> AsyncGenerator[AsyncSession, None]: """Get a session from the session maker. Returns: AsyncGenerator[AsyncSession, None]: An async context manager that yields an AsyncSession. 
""" session_maker = self.create_session_maker() async with session_maker() as session: yield session python-advanced-alchemy-1.0.1/advanced_alchemy/config/common.py000066400000000000000000000325011476663714600246200ustar00rootroot00000000000000from dataclasses import dataclass, field from pathlib import Path from typing import TYPE_CHECKING, Any, Callable, ClassVar, Generic, Optional, Union, cast from typing_extensions import TypeVar from advanced_alchemy.base import metadata_registry from advanced_alchemy.config.engine import EngineConfig from advanced_alchemy.exceptions import ImproperConfigurationError from advanced_alchemy.utils.dataclass import Empty, simple_asdict if TYPE_CHECKING: from sqlalchemy import Connection, Engine, MetaData from sqlalchemy.ext.asyncio import AsyncConnection, AsyncEngine, AsyncSession, async_sessionmaker from sqlalchemy.orm import Mapper, Query, Session, sessionmaker from sqlalchemy.orm.session import JoinTransactionMode from sqlalchemy.sql import TableClause from advanced_alchemy.utils.dataclass import EmptyType __all__ = ( "ALEMBIC_TEMPLATE_PATH", "ConnectionT", "EngineT", "GenericAlembicConfig", "GenericSQLAlchemyConfig", "GenericSessionConfig", "SessionMakerT", "SessionT", ) ALEMBIC_TEMPLATE_PATH = f"{Path(__file__).parent.parent}/alembic/templates" """Path to the Alembic templates.""" ConnectionT = TypeVar("ConnectionT", bound="Union[Connection, AsyncConnection]") """Type variable for SQLAlchemy connection types. .. seealso:: :class:`sqlalchemy.Connection` :class:`sqlalchemy.ext.asyncio.AsyncConnection` """ EngineT = TypeVar("EngineT", bound="Union[Engine, AsyncEngine]") """Type variable for a SQLAlchemy engine. .. seealso:: :class:`sqlalchemy.Engine` :class:`sqlalchemy.ext.asyncio.AsyncEngine` """ SessionT = TypeVar("SessionT", bound="Union[Session, AsyncSession]") """Type variable for a SQLAlchemy session. .. 
seealso:: :class:`sqlalchemy.Session` :class:`sqlalchemy.ext.asyncio.AsyncSession` """ SessionMakerT = TypeVar("SessionMakerT", bound="Union[sessionmaker[Session], async_sessionmaker[AsyncSession]]") """Type variable for a SQLAlchemy sessionmaker. .. seealso:: :class:`sqlalchemy.orm.sessionmaker` :class:`sqlalchemy.ext.asyncio.async_sessionmaker` """ @dataclass class GenericSessionConfig(Generic[ConnectionT, EngineT, SessionT]): """SQLAlchemy async session config. Types: ConnectionT: :class:`sqlalchemy.Connection` | :class:`sqlalchemy.ext.asyncio.AsyncConnection` EngineT: :class:`sqlalchemy.Engine` | :class:`sqlalchemy.ext.asyncio.AsyncEngine` SessionT: :class:`sqlalchemy.Session` | :class:`sqlalchemy.ext.asyncio.AsyncSession` """ autobegin: "Union[bool, EmptyType]" = Empty """Automatically start transactions when database access is requested by an operation. Bool or :class:`Empty ` """ autoflush: "Union[bool, EmptyType]" = Empty """When ``True``, all query operations will issue a flush call to this :class:`Session ` before proceeding""" bind: "Union[EngineT, ConnectionT, None, EmptyType]" = Empty """The :class:`Engine ` or :class:`Connection ` that new :class:`Session ` objects will be bound to.""" binds: "Union[dict[Union[type[Any], Mapper[Any], TableClause, str], Union[EngineT, ConnectionT]], None, EmptyType]" = Empty """A dictionary which may specify any number of :class:`Engine ` or :class:`Connection ` objects as the source of connectivity for SQL operations on a per-entity basis. The keys of the dictionary consist of any series of mapped classes, arbitrary Python classes that are bases for mapped classes, :class:`Table ` objects and :class:`Mapper ` objects. 
The values of the dictionary are then instances of :class:`Engine ` or less commonly :class:`Connection ` objects.""" class_: "Union[type[SessionT], EmptyType]" = Empty """Class to use in order to create new :class:`Session ` objects.""" expire_on_commit: "Union[bool, EmptyType]" = Empty """If ``True``, all instances will be expired after each commit.""" info: "Union[dict[str, Any], None, EmptyType]" = Empty """Optional dictionary of information that will be available via the :attr:`Session.info `""" join_transaction_mode: "Union[JoinTransactionMode, EmptyType]" = Empty """Describes the transactional behavior to take when a given bind is a Connection that has already begun a transaction outside the scope of this Session; in other words the :attr:`Connection.in_transaction() ` method returns True.""" query_cls: "Union[type[Query], None, EmptyType]" = Empty # pyright: ignore[reportMissingTypeArgument] """Class which should be used to create new Query objects, as returned by the :attr:`Session.query() ` method.""" twophase: "Union[bool, EmptyType]" = Empty """When ``True``, all transactions will be started as a โ€œtwo phaseโ€ transaction, i.e. using the โ€œtwo phaseโ€ semantics of the database in use along with an XID. During a :attr:`commit() `, after :attr:`flush() ` has been issued for all attached databases, the :attr:`TwoPhaseTransaction.prepare() ` method on each database`s :class:`TwoPhaseTransaction ` will be called. This allows each database to roll back the entire transaction, before each transaction is committed.""" @dataclass class GenericSQLAlchemyConfig(Generic[EngineT, SessionT, SessionMakerT]): """Common SQLAlchemy Configuration. 
Types: EngineT: :class:`sqlalchemy.Engine` or :class:`sqlalchemy.ext.asyncio.AsyncEngine` SessionT: :class:`sqlalchemy.Session` or :class:`sqlalchemy.ext.asyncio.AsyncSession` SessionMakerT: :class:`sqlalchemy.orm.sessionmaker` or :class:`sqlalchemy.ext.asyncio.async_sessionmaker` """ create_engine_callable: "Callable[[str], EngineT]" """Callable that creates an :class:`AsyncEngine ` instance or instance of its subclass. """ session_config: "GenericSessionConfig[Any, Any, Any]" """Configuration options for either the :class:`async_sessionmaker ` or :class:`sessionmaker `. """ session_maker_class: "type[Union[sessionmaker[Session], async_sessionmaker[AsyncSession]]]" """Sessionmaker class to use. .. seealso:: :class:`sqlalchemy.orm.sessionmaker` :class:`sqlalchemy.ext.asyncio.async_sessionmaker` """ connection_string: "Union[str, None]" = field(default=None) """Database connection string in one of the formats supported by SQLAlchemy. Notes: - For async connections, the connection string must include the correct async prefix. e.g. ``'postgresql+asyncpg://...'`` instead of ``'postgresql://'``, and for sync connections its the opposite. """ engine_config: "EngineConfig" = field(default_factory=EngineConfig) """Configuration for the SQLAlchemy engine. The configuration options are documented in the SQLAlchemy documentation. """ session_maker: "Union[Callable[[], SessionT], None]" = None """Callable that returns a session. If provided, the plugin will use this rather than instantiate a sessionmaker. """ engine_instance: "Union[EngineT, None]" = None """Optional engine to use. If set, the plugin will use the provided instance rather than instantiate an engine. """ create_all: bool = False """If true, all models are automatically created on engine creation.""" metadata: "Union[MetaData, None]" = None """Optional metadata to use. 
If set, the plugin will use the provided instance rather than the default metadata.""" enable_touch_updated_timestamp_listener: bool = True """Enable Created/Updated Timestamp event listener. This is a listener that will update ``created_at`` and ``updated_at`` columns on record modification. Disable if you plan to bring your own update mechanism for these columns""" bind_key: "Union[str, None]" = None """Bind key to register a metadata to a specific engine configuration.""" _SESSION_SCOPE_KEY_REGISTRY: "ClassVar[set[str]]" = field(init=False, default=cast("set[str]", set())) """Internal counter for ensuring unique identification of session scope keys in the class.""" _ENGINE_APP_STATE_KEY_REGISTRY: "ClassVar[set[str]]" = field(init=False, default=cast("set[str]", set())) """Internal counter for ensuring unique identification of engine app state keys in the class.""" _SESSIONMAKER_APP_STATE_KEY_REGISTRY: "ClassVar[set[str]]" = field(init=False, default=cast("set[str]", set())) """Internal counter for ensuring unique identification of sessionmaker state keys in the class.""" def __post_init__(self) -> None: if self.connection_string is not None and self.engine_instance is not None: msg = "Only one of 'connection_string' or 'engine_instance' can be provided." 
raise ImproperConfigurationError(msg) if self.metadata is None: self.metadata = metadata_registry.get(self.bind_key) else: metadata_registry.set(self.bind_key, self.metadata) if self.enable_touch_updated_timestamp_listener: from sqlalchemy import event from sqlalchemy.orm import Session from advanced_alchemy._listeners import touch_updated_timestamp event.listen(Session, "before_flush", touch_updated_timestamp) def __hash__(self) -> int: # pragma: no cover return hash( ( self.__class__.__qualname__, self.connection_string, self.engine_config.__class__.__qualname__, self.bind_key, ) ) def __eq__(self, other: object) -> bool: return self.__hash__() == other.__hash__() @property def engine_config_dict(self) -> dict[str, Any]: """Return the engine configuration as a dict. Returns: A string keyed dict of config kwargs for the SQLAlchemy :func:`sqlalchemy.get_engine` function. """ return simple_asdict(self.engine_config, exclude_empty=True) @property def session_config_dict(self) -> dict[str, Any]: """Return the session configuration as a dict. Returns: A string keyed dict of config kwargs for the SQLAlchemy :class:`sqlalchemy.orm.sessionmaker` class. """ return simple_asdict(self.session_config, exclude_empty=True) def get_engine(self) -> EngineT: """Return an engine. If none exists yet, create one. Returns: :class:`sqlalchemy.Engine` or :class:`sqlalchemy.ext.asyncio.AsyncEngine` instance used by the plugin. """ if self.engine_instance: return self.engine_instance if self.connection_string is None: msg = "One of 'connection_string' or 'engine_instance' must be provided." 
raise ImproperConfigurationError(msg) engine_config = self.engine_config_dict try: return self.create_engine_callable(self.connection_string, **engine_config) except TypeError: # likely due to a dialect that doesn't support json type del engine_config["json_deserializer"] del engine_config["json_serializer"] return self.create_engine_callable(self.connection_string, **engine_config) def create_session_maker(self) -> "Callable[[], SessionT]": # pragma: no cover """Get a session maker. If none exists yet, create one. Returns: :class:`sqlalchemy.orm.sessionmaker` or :class:`sqlalchemy.ext.asyncio.async_sessionmaker` factory used by the plugin. """ if self.session_maker: return self.session_maker session_kws = self.session_config_dict if session_kws.get("bind") is None: session_kws["bind"] = self.get_engine() return cast("Callable[[], SessionT]", self.session_maker_class(**session_kws)) @dataclass class GenericAlembicConfig: """Configuration for Alembic's :class:`Config `. For details see: https://alembic.sqlalchemy.org/en/latest/api/config.html """ script_config: str = "alembic.ini" """A path to the Alembic configuration file such as ``alembic.ini``. If left unset, the default configuration will be used. """ version_table_name: str = "alembic_versions" """Configure the name of the table used to hold the applied alembic revisions. Defaults to ``alembic_versions``. """ version_table_schema: "Optional[str]" = None """Configure the schema to use for the alembic revisions revisions. If unset, it defaults to connection's default schema.""" script_location: str = "migrations" """A path to save generated migrations. """ user_module_prefix: "Optional[str]" = "sa." 
"""User module prefix.""" render_as_batch: bool = True """Render as batch.""" compare_type: bool = False """Compare type.""" template_path: str = ALEMBIC_TEMPLATE_PATH """Template path.""" python-advanced-alchemy-1.0.1/advanced_alchemy/config/engine.py000066400000000000000000000261311476663714600245770ustar00rootroot00000000000000from dataclasses import dataclass from typing import TYPE_CHECKING, Callable, Literal, Union from advanced_alchemy._serialization import decode_json, encode_json from advanced_alchemy.utils.dataclass import Empty if TYPE_CHECKING: from collections.abc import Mapping from typing import Any from sqlalchemy.engine.interfaces import IsolationLevel from sqlalchemy.pool import Pool from typing_extensions import TypeAlias from advanced_alchemy.utils.dataclass import EmptyType _EchoFlagType: "TypeAlias" = 'Union[None, bool, Literal["debug"]]' _ParamStyle = Literal["qmark", "numeric", "named", "format", "pyformat", "numeric_dollar"] __all__ = ("EngineConfig",) @dataclass class EngineConfig: """Configuration for SQLAlchemy's Engine. This class provides configuration options for SQLAlchemy engine creation. See: https://docs.sqlalchemy.org/en/20/core/engines.html """ connect_args: "Union[dict[Any, Any], EmptyType]" = Empty """A dictionary of arguments which will be passed directly to the DBAPI's ``connect()`` method as keyword arguments. """ echo: "Union[_EchoFlagType, EmptyType]" = Empty """If ``True``, the Engine will log all statements as well as a ``repr()`` of their parameter lists to the default log handler, which defaults to ``sys.stdout`` for output. If set to the string "debug", result rows will be printed to the standard output as well. The echo attribute of Engine can be modified at any time to turn logging on and off; direct control of logging is also available using the standard Python logging module. 
""" echo_pool: "Union[_EchoFlagType, EmptyType]" = Empty """If ``True``, the connection pool will log informational output such as when connections are invalidated as well as when connections are recycled to the default log handler, which defaults to sys.stdout for output. If set to the string "debug", the logging will include pool checkouts and checkins. Direct control of logging is also available using the standard Python logging module.""" enable_from_linting: "Union[bool, EmptyType]" = Empty """Defaults to True. Will emit a warning if a given SELECT statement is found to have un-linked FROM elements which would cause a cartesian product.""" execution_options: "Union[Mapping[str, Any], EmptyType]" = Empty """Dictionary execution options which will be applied to all connections. See :attr:`Connection.execution_options() ` for details.""" hide_parameters: "Union[bool, EmptyType]" = Empty """Boolean, when set to ``True``, SQL statement parameters will not be displayed in INFO logging nor will they be formatted into the string representation of :class:`StatementError ` objects.""" insertmanyvalues_page_size: "Union[int, EmptyType]" = Empty """Number of rows to format into an INSERT statement when the statement uses โ€œinsertmanyvaluesโ€ mode, which is a paged form of bulk insert that is used for many backends when using executemany execution typically in conjunction with RETURNING. Defaults to 1000, but may also be subject to dialect-specific limiting factors which may override this value on a per-statement basis.""" isolation_level: "Union[IsolationLevel, EmptyType]" = Empty """Optional string name of an isolation level which will be set on all new connections unconditionally. 
Isolation levels are typically some subset of the string names "SERIALIZABLE", "REPEATABLE READ", "READ COMMITTED", "READ UNCOMMITTED" and "AUTOCOMMIT" based on backend.""" json_deserializer: "Callable[[str], Any]" = decode_json """For dialects that support the :class:`JSON ` datatype, this is a Python callable that will convert a JSON string to a Python object. By default, this is set to Litestar's :attr:`decode_json() <.serialization.decode_json>` function.""" json_serializer: "Callable[[Any], str]" = encode_json """For dialects that support the JSON datatype, this is a Python callable that will render a given object as JSON. By default, Litestar's :attr:`encode_json() <.serialization.encode_json>` is used.""" label_length: "Union[int, None, EmptyType]" = Empty """Optional integer value which limits the size of dynamically generated column labels to that many characters. If less than 6, labels are generated as โ€œ_(counter)โ€. If ``None``, the value of ``dialect.max_identifier_length``, which may be affected via the :attr:`get_engine.max_identifier_length parameter `, is used instead. The value of :attr:`get_engine.label_length ` may not be larger than that of :attr:`get_engine.max_identifier_length `.""" logging_name: "Union[str, EmptyType]" = Empty """String identifier which will be used within the โ€œnameโ€ field of logging records generated within the โ€œsqlalchemy.engineโ€ logger. Defaults to a hexstring of the object`s id.""" max_identifier_length: "Union[int, None, EmptyType]" = Empty """Override the max_identifier_length determined by the dialect. if ``None`` or ``0``, has no effect. This is the database`s configured maximum number of characters that may be used in a SQL identifier such as a table name, column name, or label name. 
All dialects determine this value automatically, however in the case of a new database version for which this value has changed but SQLAlchemy`s dialect has not been adjusted, the value may be passed here.""" max_overflow: "Union[int, EmptyType]" = Empty """The number of connections to allow in connection pool โ€œoverflowโ€, that is connections that can be opened above and beyond the pool_size setting, which defaults to five. This is only used with :class:`QueuePool `.""" module: "Union[Any, None, EmptyType]" = Empty """Reference to a Python module object (the module itself, not its string name). Specifies an alternate DBAPI module to be used by the engine`s dialect. Each sub-dialect references a specific DBAPI which will be imported before first connect. This parameter causes the import to be bypassed, and the given module to be used instead. Can be used for testing of DBAPIs as well as to inject โ€œmockโ€ DBAPI implementations into the :class:`Engine `.""" paramstyle: "Union[_ParamStyle, None, EmptyType]" = Empty """The paramstyle to use when rendering bound parameters. This style defaults to the one recommended by the DBAPI itself, which is retrieved from the ``.paramstyle`` attribute of the DBAPI. However, most DBAPIs accept more than one paramstyle, and in particular it may be desirable to change a โ€œnamedโ€ paramstyle into a โ€œpositionalโ€ one, or vice versa. When this attribute is passed, it should be one of the values "qmark", "numeric", "named", "format" or "pyformat", and should correspond to a parameter style known to be supported by the DBAPI in use.""" pool: "Union[Pool, None, EmptyType]" = Empty """An already-constructed instance of :class:`Pool `, such as a :class:`QueuePool ` instance. If non-None, this pool will be used directly as the underlying connection pool for the engine, bypassing whatever connection parameters are present in the URL argument. 
For information on constructing connection pools manually, see `Connection Pooling `_.""" poolclass: "Union[type[Pool], None, EmptyType]" = Empty """A :class:`Pool ` subclass, which will be used to create a connection pool instance using the connection parameters given in the URL. Note this differs from pool in that you don`t actually instantiate the pool in this case, you just indicate what type of pool to be used.""" pool_logging_name: "Union[str, EmptyType]" = Empty """String identifier which will be used within the โ€œnameโ€ field of logging records generated within the โ€œsqlalchemy.poolโ€ logger. Defaults to a hexstring of the object`s id.""" pool_pre_ping: "Union[bool, EmptyType]" = Empty """If True will enable the connection pool โ€œpre-pingโ€ feature that tests connections for liveness upon each checkout.""" pool_size: "Union[int, EmptyType]" = Empty """The number of connections to keep open inside the connection pool. This used with :class:`QueuePool ` as well as :class:`SingletonThreadPool `. With :class:`QueuePool `, a pool_size setting of ``0`` indicates no limit; to disable pooling, set ``poolclass`` to :class:`NullPool ` instead.""" pool_recycle: "Union[int, EmptyType]" = Empty """This setting causes the pool to recycle connections after the given number of seconds has passed. It defaults to ``-1``, or no timeout. For example, setting to ``3600`` means connections will be recycled after one hour. 
Note that MySQL in particular will disconnect automatically if no activity is detected on a connection for eight hours (although this is configurable with the MySQLDB connection itself and the server configuration as well).""" pool_reset_on_return: 'Union[Literal["rollback", "commit"], EmptyType]' = Empty """Set the :attr:`Pool.reset_on_return ` object, which can be set to the values ``"rollback"``, ``"commit"``, or ``None``.""" pool_timeout: "Union[int, EmptyType]" = Empty """Number of seconds to wait before giving up on getting a connection from the pool. This is only used with :class:`QueuePool `. This can be a float but is subject to the limitations of Python time functions which may not be reliable in the tens of milliseconds.""" pool_use_lifo: "Union[bool, EmptyType]" = Empty """Use LIFO (last-in-first-out) when retrieving connections from :class:`QueuePool ` instead of FIFO (first-in-first-out). Using LIFO, a server-side timeout scheme can reduce the number of connections used during non-peak periods of use. When planning for server-side timeouts, ensure that a recycle or pre-ping strategy is in use to gracefully handle stale connections.""" plugins: "Union[list[str], EmptyType]" = Empty """String list of plugin names to load. See :class:`CreateEnginePlugin ` for background.""" query_cache_size: "Union[int, EmptyType]" = Empty """Size of the cache used to cache the SQL string form of queries. Set to zero to disable caching. See :attr:`query_cache_size ` for more info. 
""" use_insertmanyvalues: "Union[bool, EmptyType]" = Empty """``True`` by default, use the โ€œinsertmanyvaluesโ€ execution style for INSERT..RETURNING statements by default.""" python-advanced-alchemy-1.0.1/advanced_alchemy/config/sync.py000066400000000000000000000051561476663714600243120ustar00rootroot00000000000000"""Sync SQLAlchemy configuration module.""" from contextlib import contextmanager from dataclasses import dataclass, field from typing import TYPE_CHECKING from sqlalchemy import Connection, Engine, create_engine from sqlalchemy.orm import Session, sessionmaker from advanced_alchemy.config.common import GenericAlembicConfig, GenericSessionConfig, GenericSQLAlchemyConfig if TYPE_CHECKING: from collections.abc import Generator from typing import Callable __all__ = ( "AlembicSyncConfig", "SQLAlchemySyncConfig", "SyncSessionConfig", ) @dataclass class SyncSessionConfig(GenericSessionConfig[Connection, Engine, Session]): """Configuration for synchronous SQLAlchemy sessions.""" @dataclass class AlembicSyncConfig(GenericAlembicConfig): """Configuration for Alembic's synchronous migrations. For details see: https://alembic.sqlalchemy.org/en/latest/api/config.html """ @dataclass class SQLAlchemySyncConfig(GenericSQLAlchemyConfig[Engine, Session, sessionmaker[Session]]): """Synchronous SQLAlchemy Configuration. Note: The alembic configuration options are documented in the Alembic documentation. 
""" create_engine_callable: "Callable[[str], Engine]" = create_engine """Callable that creates an :class:`Engine ` instance or instance of its subclass.""" session_config: SyncSessionConfig = field(default_factory=SyncSessionConfig) # pyright: ignore[reportIncompatibleVariableOverride] """Configuration options for the :class:`sessionmaker`.""" session_maker_class: type[sessionmaker[Session]] = sessionmaker # pyright: ignore[reportIncompatibleVariableOverride] """Sessionmaker class to use.""" alembic_config: AlembicSyncConfig = field(default_factory=AlembicSyncConfig) """Configuration for the SQLAlchemy Alembic migrations. The configuration options are documented in the Alembic documentation. """ def __hash__(self) -> int: return super().__hash__() def __eq__(self, other: object) -> bool: return super().__eq__(other) @contextmanager def get_session(self) -> "Generator[Session, None, None]": """Get a session context manager. Yields: Generator[sqlalchemy.orm.Session, None, None]: A context manager yielding an active SQLAlchemy Session. Examples: Using the session context manager: >>> with config.get_session() as session: ... session.execute(...) """ session_maker = self.create_session_maker() with session_maker() as session: yield session python-advanced-alchemy-1.0.1/advanced_alchemy/config/types.py000066400000000000000000000014751476663714600245020ustar00rootroot00000000000000"""Type aliases and constants used in the package config.""" from collections.abc import Mapping, Sequence from typing import Any, Callable, Literal from typing_extensions import TypeAlias TypeEncodersMap: TypeAlias = Mapping[Any, Callable[[Any], Any]] """Type alias for a mapping of type encoders. Maps types to their encoder functions. """ TypeDecodersSequence: TypeAlias = Sequence[tuple[Callable[[Any], bool], Callable[[Any, Any], Any]]] """Type alias for a sequence of type decoders. Each tuple contains a type check predicate and its corresponding decoder function. 
""" CommitStrategy: TypeAlias = Literal["always", "match_status"] """Commit strategy for SQLAlchemy sessions. Values: always: Always commit the session after operations match_status: Only commit if the HTTP status code indicates success """ python-advanced-alchemy-1.0.1/advanced_alchemy/exceptions.py000066400000000000000000000272411476663714600242510ustar00rootroot00000000000000import re from collections.abc import Generator from contextlib import contextmanager from typing import Any, Callable, Optional, TypedDict, Union, cast from sqlalchemy.exc import IntegrityError as SQLAlchemyIntegrityError from sqlalchemy.exc import InvalidRequestError as SQLAlchemyInvalidRequestError from sqlalchemy.exc import MultipleResultsFound, SQLAlchemyError, StatementError __all__ = ( "AdvancedAlchemyError", "DuplicateKeyError", "ErrorMessages", "ForeignKeyError", "ImproperConfigurationError", "IntegrityError", "MissingDependencyError", "MultipleResultsFoundError", "NotFoundError", "RepositoryError", "SerializationError", "wrap_sqlalchemy_exception", ) DUPLICATE_KEY_REGEXES = { "postgresql": [ re.compile( r"^.*duplicate\s+key.*\"(?P[^\"]+)\"\s*\n.*Key\s+\((?P.*)\)=\((?P.*)\)\s+already\s+exists.*$", ), re.compile(r"^.*duplicate\s+key.*\"(?P[^\"]+)\"\s*\n.*$"), ], "sqlite": [ re.compile(r"^.*columns?(?P[^)]+)(is|are)\s+not\s+unique$"), re.compile(r"^.*UNIQUE\s+constraint\s+failed:\s+(?P.+)$"), re.compile(r"^.*PRIMARY\s+KEY\s+must\s+be\s+unique.*$"), ], "mysql": [ re.compile(r"^.*\b1062\b.*Duplicate entry '(?P.*)' for key '(?P[^']+)'.*$"), re.compile(r"^.*\b1062\b.*Duplicate entry \\'(?P.*)\\' for key \\'(?P.+)\\'.*$"), ], "oracle": [], "spanner+spanner": [], "duckdb": [], "mssql": [], "bigquery": [], "cockroach": [], } FOREIGN_KEY_REGEXES = { "postgresql": [ re.compile( r".*on table \"(?P[^\"]+)\" violates " r"foreign key constraint \"(?P[^\"]+)\".*\n" r"DETAIL: Key \((?P.+)\)=\(.+\) " r"is (not present in|still referenced from) table " r"\"(?P[^\"]+)\".", ), ], "sqlite": [ 
re.compile(r"(?i).*foreign key constraint failed"), ], "mysql": [ re.compile( r".*Cannot (add|delete) or update a (child|parent) row: " r'a foreign key constraint fails \([`"].+[`"]\.[`"](?P
.+)[`"], ' r'CONSTRAINT [`"](?P.+)[`"] FOREIGN KEY ' r'\([`"](?P.+)[`"]\) REFERENCES [`"](?P.+)[`"] ', ), ], "oracle": [], "spanner+spanner": [], "duckdb": [], "mssql": [], "bigquery": [], "cockroach": [], } CHECK_CONSTRAINT_REGEXES = { "postgresql": [ re.compile(r".*new row for relation \"(?P
.+)\" violates check constraint (?P.+)"), ], "sqlite": [], "mysql": [], "oracle": [], "spanner+spanner": [], "duckdb": [], "mssql": [], "bigquery": [], "cockroach": [], } class AdvancedAlchemyError(Exception): """Base exception class from which all Advanced Alchemy exceptions inherit.""" detail: str def __init__(self, *args: Any, detail: str = "") -> None: """Initialize ``AdvancedAlchemyException``. Args: *args: args are converted to :class:`str` before passing to :class:`Exception` detail: detail of the exception. """ str_args = [str(arg) for arg in args if arg] if not detail: if str_args: detail, *str_args = str_args elif hasattr(self, "detail"): detail = self.detail self.detail = detail super().__init__(*str_args) def __repr__(self) -> str: if self.detail: return f"{self.__class__.__name__} - {self.detail}" return self.__class__.__name__ def __str__(self) -> str: return " ".join((*self.args, self.detail)).strip() class MissingDependencyError(AdvancedAlchemyError, ImportError): """Missing optional dependency. This exception is raised when a module depends on a dependency that has not been installed. Args: package: Name of the missing package. install_package: Optional alternative package name to install. """ def __init__(self, package: str, install_package: Optional[str] = None) -> None: super().__init__( f"Package {package!r} is not installed but required. You can install it by running " f"'pip install advanced_alchemy[{install_package or package}]' to install advanced_alchemy with the required extra " f"or 'pip install {install_package or package}' to install the package separately", ) class ImproperConfigurationError(AdvancedAlchemyError): """Improper Configuration error. This exception is raised when there is an issue with the configuration of a module. Args: *args: Variable length argument list passed to parent class. detail: Detailed error message. """ class SerializationError(AdvancedAlchemyError): """Encoding or decoding error. 
This exception is raised when serialization or deserialization of an object fails. Args: *args: Variable length argument list passed to parent class. detail: Detailed error message. """ class RepositoryError(AdvancedAlchemyError): """Base repository exception type. Args: *args: Variable length argument list passed to parent class. detail: Detailed error message. """ class IntegrityError(RepositoryError): """Data integrity error. Args: *args: Variable length argument list passed to parent class. detail: Detailed error message. """ class DuplicateKeyError(IntegrityError): """Duplicate key error. Args: *args: Variable length argument list passed to parent class. detail: Detailed error message. """ class ForeignKeyError(IntegrityError): """Foreign key error. Args: *args: Variable length argument list passed to parent class. detail: Detailed error message. """ class NotFoundError(RepositoryError): """Not found error. This exception is raised when a requested resource is not found. Args: *args: Variable length argument list passed to parent class. detail: Detailed error message. """ class MultipleResultsFoundError(RepositoryError): """Multiple results found error. This exception is raised when a single result was expected but multiple were found. Args: *args: Variable length argument list passed to parent class. detail: Detailed error message. """ class InvalidRequestError(RepositoryError): """Invalid request error. This exception is raised when SQLAlchemy is unable to complete the request due to a runtime error Args: *args: Variable length argument list passed to parent class. detail: Detailed error message. 
""" class ErrorMessages(TypedDict, total=False): duplicate_key: Union[str, Callable[[Exception], str]] integrity: Union[str, Callable[[Exception], str]] foreign_key: Union[str, Callable[[Exception], str]] multiple_rows: Union[str, Callable[[Exception], str]] check_constraint: Union[str, Callable[[Exception], str]] other: Union[str, Callable[[Exception], str]] not_found: Union[str, Callable[[Exception], str]] def _get_error_message(error_messages: ErrorMessages, key: str, exc: Exception) -> str: template: Union[str, Callable[[Exception], str]] = error_messages.get(key, f"{key} error: {exc}") # type: ignore[assignment] if callable(template): # pyright: ignore[reportUnknownArgumentType] template = template(exc) # pyright: ignore[reportUnknownVariableType] return template # pyright: ignore[reportUnknownVariableType] @contextmanager def wrap_sqlalchemy_exception( # noqa: C901, PLR0915 error_messages: Optional[ErrorMessages] = None, dialect_name: Optional[str] = None, wrap_exceptions: bool = True, ) -> Generator[None, None, None]: """Do something within context to raise a ``RepositoryError`` chained from an original ``SQLAlchemyError``. Args: error_messages: Error messages to use for the exception. dialect_name: The name of the dialect to use for the exception. wrap_exceptions: Wrap SQLAlchemy exceptions in a ``RepositoryError``. When set to ``False``, the original exception will be raised. >>> try: ... with wrap_sqlalchemy_exception(): ... raise SQLAlchemyError("Original Exception") ... except RepositoryError as exc: ... print( ... f"caught repository exception from {type(exc.__context__)}" ... 
) caught repository exception from """ try: yield except NotFoundError as exc: if wrap_exceptions is False: raise if error_messages is not None: msg = _get_error_message(error_messages=error_messages, key="not_found", exc=exc) else: msg = "No rows matched the specified data" raise NotFoundError(detail=msg) from exc except MultipleResultsFound as exc: if wrap_exceptions is False: raise if error_messages is not None: msg = _get_error_message(error_messages=error_messages, key="multiple_rows", exc=exc) else: msg = "Multiple rows matched the specified data" raise MultipleResultsFoundError(detail=msg) from exc except SQLAlchemyIntegrityError as exc: if wrap_exceptions is False: raise if error_messages is not None and dialect_name is not None: _keys_to_regex = { "duplicate_key": (DUPLICATE_KEY_REGEXES.get(dialect_name, []), DuplicateKeyError), "check_constraint": (CHECK_CONSTRAINT_REGEXES.get(dialect_name, []), IntegrityError), "foreign_key": (FOREIGN_KEY_REGEXES.get(dialect_name, []), ForeignKeyError), } detail = " - ".join(str(exc_arg) for exc_arg in exc.orig.args) if exc.orig.args else "" # type: ignore[union-attr] # pyright: ignore[reportArgumentType,reportOptionalMemberAccess] for key, (regexes, exception) in _keys_to_regex.items(): for regex in regexes: if (match := regex.findall(detail)) and match[0]: raise exception( detail=_get_error_message(error_messages=error_messages, key=key, exc=exc), ) from exc raise IntegrityError( detail=_get_error_message(error_messages=error_messages, key="integrity", exc=exc), ) from exc raise IntegrityError(detail=f"An integrity error occurred: {exc}") from exc except SQLAlchemyInvalidRequestError as exc: if wrap_exceptions is False: raise raise InvalidRequestError(detail="An invalid request was made.") from exc except StatementError as exc: if wrap_exceptions is False: raise raise IntegrityError( detail=cast("str", getattr(exc.orig, "detail", "There was an issue processing the statement.")) ) from exc except SQLAlchemyError as exc: 
if wrap_exceptions is False: raise if error_messages is not None: msg = _get_error_message(error_messages=error_messages, key="other", exc=exc) else: msg = f"An exception occurred: {exc}" raise RepositoryError(detail=msg) from exc except AttributeError as exc: if wrap_exceptions is False: raise if error_messages is not None: msg = _get_error_message(error_messages=error_messages, key="other", exc=exc) else: msg = f"An attribute error occurred during processing: {exc}" raise RepositoryError(detail=msg) from exc python-advanced-alchemy-1.0.1/advanced_alchemy/extensions/000077500000000000000000000000001476663714600237075ustar00rootroot00000000000000python-advanced-alchemy-1.0.1/advanced_alchemy/extensions/__init__.py000066400000000000000000000000001476663714600260060ustar00rootroot00000000000000python-advanced-alchemy-1.0.1/advanced_alchemy/extensions/fastapi/000077500000000000000000000000001476663714600253365ustar00rootroot00000000000000python-advanced-alchemy-1.0.1/advanced_alchemy/extensions/fastapi/__init__.py000066400000000000000000000022541476663714600274520ustar00rootroot00000000000000"""FastAPI extension for Advanced Alchemy. This module provides FastAPI integration for Advanced Alchemy, including session management, database migrations, and service utilities. 
""" from advanced_alchemy import base, exceptions, filters, mixins, operations, repository, service, types, utils from advanced_alchemy.alembic.commands import AlembicCommands from advanced_alchemy.config import AlembicAsyncConfig, AlembicSyncConfig, AsyncSessionConfig, SyncSessionConfig from advanced_alchemy.extensions.fastapi.cli import get_database_migration_plugin from advanced_alchemy.extensions.fastapi.config import EngineConfig, SQLAlchemyAsyncConfig, SQLAlchemySyncConfig from advanced_alchemy.extensions.fastapi.extension import AdvancedAlchemy, assign_cli_group __all__ = ( "AdvancedAlchemy", "AlembicAsyncConfig", "AlembicCommands", "AlembicSyncConfig", "AsyncSessionConfig", "EngineConfig", "SQLAlchemyAsyncConfig", "SQLAlchemySyncConfig", "SyncSessionConfig", "assign_cli_group", "base", "exceptions", "filters", "get_database_migration_plugin", "mixins", "operations", "repository", "service", "types", "utils", ) python-advanced-alchemy-1.0.1/advanced_alchemy/extensions/fastapi/cli.py000066400000000000000000000024351476663714600264630ustar00rootroot00000000000000from typing import TYPE_CHECKING, Optional, cast try: import rich_click as click except ImportError: import click # type: ignore[no-redef] from advanced_alchemy.cli import add_migration_commands if TYPE_CHECKING: from fastapi import FastAPI from advanced_alchemy.extensions.fastapi.extension import AdvancedAlchemy def get_database_migration_plugin(app: "FastAPI") -> "AdvancedAlchemy": # pragma: no cover """Retrieve the Advanced Alchemy extension from a FastAPI application instance.""" from advanced_alchemy.exceptions import ImproperConfigurationError extension = cast("Optional[AdvancedAlchemy]", getattr(app.state, "advanced_alchemy", None)) if extension is None: msg = "Failed to initialize database CLI. The Advanced Alchemy extension is not properly configured." 
raise ImproperConfigurationError(msg) return extension def register_database_commands(app: "FastAPI") -> click.Group: # pragma: no cover @click.group(name="database") @click.pass_context def database_group(ctx: click.Context) -> None: """Manage SQLAlchemy database components.""" ctx.ensure_object(dict) ctx.obj["configs"] = get_database_migration_plugin(app).config add_migration_commands(database_group) return database_group python-advanced-alchemy-1.0.1/advanced_alchemy/extensions/fastapi/config.py000066400000000000000000000003101476663714600271470ustar00rootroot00000000000000from advanced_alchemy.extensions.starlette import EngineConfig, SQLAlchemyAsyncConfig, SQLAlchemySyncConfig __all__ = ( "EngineConfig", "SQLAlchemyAsyncConfig", "SQLAlchemySyncConfig", ) python-advanced-alchemy-1.0.1/advanced_alchemy/extensions/fastapi/extension.py000066400000000000000000000027431476663714600277320ustar00rootroot00000000000000from typing import TYPE_CHECKING, Optional, Union from advanced_alchemy.extensions.fastapi.cli import register_database_commands from advanced_alchemy.extensions.starlette import AdvancedAlchemy as StarletteAdvancedAlchemy if TYPE_CHECKING: from collections.abc import Sequence from fastapi import FastAPI from advanced_alchemy.extensions.fastapi.config import SQLAlchemyAsyncConfig, SQLAlchemySyncConfig __all__ = ("AdvancedAlchemy",) def assign_cli_group(app: "FastAPI") -> None: # pragma: no cover try: from fastapi_cli.cli import app as fastapi_cli_app # pyright: ignore[reportUnknownVariableType] from typer.main import get_group except ImportError: print("FastAPI CLI is not installed. Skipping CLI registration.") # noqa: T201 return click_app = get_group(fastapi_cli_app) # pyright: ignore[reportUnknownArgumentType] click_app.add_command(register_database_commands(app)) class AdvancedAlchemy(StarletteAdvancedAlchemy): """AdvancedAlchemy integration for FastAPI applications. 
This class manages SQLAlchemy sessions and engine lifecycle within a FastAPI application. It provides middleware for handling transactions based on commit strategies. """ def __init__( self, config: "Union[SQLAlchemyAsyncConfig, SQLAlchemySyncConfig, Sequence[Union[SQLAlchemyAsyncConfig, SQLAlchemySyncConfig]]]", app: "Optional[FastAPI]" = None, ) -> None: super().__init__(config, app) python-advanced-alchemy-1.0.1/advanced_alchemy/extensions/flask/000077500000000000000000000000001476663714600250075ustar00rootroot00000000000000python-advanced-alchemy-1.0.1/advanced_alchemy/extensions/flask/__init__.py000066400000000000000000000023261476663714600271230ustar00rootroot00000000000000"""Flask extension for Advanced Alchemy. This module provides Flask integration for Advanced Alchemy, including session management, database migrations, and service utilities. """ from advanced_alchemy import base, exceptions, filters, mixins, operations, repository, service, types, utils from advanced_alchemy.alembic.commands import AlembicCommands from advanced_alchemy.config import AlembicAsyncConfig, AlembicSyncConfig, AsyncSessionConfig, SyncSessionConfig from advanced_alchemy.extensions.flask.cli import get_database_migration_plugin from advanced_alchemy.extensions.flask.config import EngineConfig, SQLAlchemyAsyncConfig, SQLAlchemySyncConfig from advanced_alchemy.extensions.flask.extension import AdvancedAlchemy from advanced_alchemy.extensions.flask.utils import FlaskServiceMixin __all__ = ( "AdvancedAlchemy", "AlembicAsyncConfig", "AlembicCommands", "AlembicSyncConfig", "AsyncSessionConfig", "EngineConfig", "FlaskServiceMixin", "SQLAlchemyAsyncConfig", "SQLAlchemySyncConfig", "SyncSessionConfig", "base", "exceptions", "filters", "get_database_migration_plugin", "mixins", "operations", "repository", "service", "types", "utils", ) 
python-advanced-alchemy-1.0.1/advanced_alchemy/extensions/flask/cli.py000066400000000000000000000032451476663714600261340ustar00rootroot00000000000000"""Command-line interface utilities for Flask integration. This module provides CLI commands for database management in Flask applications. """ from contextlib import suppress from typing import TYPE_CHECKING, cast from flask.cli import with_appcontext from advanced_alchemy.cli import add_migration_commands try: import rich_click as click except ImportError: import click # type: ignore[no-redef] if TYPE_CHECKING: from flask import Flask from advanced_alchemy.extensions.flask.extension import AdvancedAlchemy def get_database_migration_plugin(app: "Flask") -> "AdvancedAlchemy": """Retrieve the Advanced Alchemy extension from the Flask application. Args: app: The :class:`flask.Flask` application instance. Returns: :class:`AdvancedAlchemy`: The Advanced Alchemy extension instance. Raises: :exc:`advanced_alchemy.exceptions.ImproperConfigurationError`: If the extension is not found. """ from advanced_alchemy.exceptions import ImproperConfigurationError with suppress(KeyError): return cast("AdvancedAlchemy", app.extensions["advanced_alchemy"]) msg = "Failed to initialize database migrations. The Advanced Alchemy extension is not properly configured." raise ImproperConfigurationError(msg) @click.group(name="database") @with_appcontext def database_group() -> None: """Manage SQLAlchemy database components. This command group provides database management commands like migrations. """ ctx = click.get_current_context() app = ctx.obj.load_app() ctx.obj = {"app": app, "configs": get_database_migration_plugin(app).config} add_migration_commands(database_group) python-advanced-alchemy-1.0.1/advanced_alchemy/extensions/flask/config.py000066400000000000000000000246011476663714600266310ustar00rootroot00000000000000"""Configuration classes for Flask integration. 
This module provides configuration classes for integrating SQLAlchemy with Flask applications, including both synchronous and asynchronous database configurations. """ from dataclasses import dataclass from typing import TYPE_CHECKING, Any, Callable, Optional, TypeVar, Union, cast from click import echo from flask import g, has_request_context from sqlalchemy.exc import OperationalError from typing_extensions import Literal from advanced_alchemy._serialization import decode_json, encode_json from advanced_alchemy.base import metadata_registry from advanced_alchemy.config import EngineConfig as _EngineConfig from advanced_alchemy.config.asyncio import SQLAlchemyAsyncConfig as _SQLAlchemyAsyncConfig from advanced_alchemy.config.sync import SQLAlchemySyncConfig as _SQLAlchemySyncConfig from advanced_alchemy.exceptions import ImproperConfigurationError from advanced_alchemy.service import schema_dump if TYPE_CHECKING: from flask import Flask, Response from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.orm import Session from advanced_alchemy.utils.portals import Portal __all__ = ("EngineConfig", "SQLAlchemyAsyncConfig", "SQLAlchemySyncConfig") ConfigT = TypeVar("ConfigT", bound="Union[SQLAlchemySyncConfig, SQLAlchemyAsyncConfig]") def serializer(value: "Any") -> str: """Serialize JSON field values. Calls the `:func:schema_dump` function to convert the value to a built-in before encoding. Args: value: Any JSON serializable value. Returns: str: JSON string representation of the value. """ return encode_json(schema_dump(value)) @dataclass class EngineConfig(_EngineConfig): """Configuration for SQLAlchemy's Engine. This class extends the base EngineConfig with Flask-specific JSON serialization options. For details see: https://docs.sqlalchemy.org/en/20/core/engines.html Attributes: json_deserializer: Callable for converting JSON strings to Python objects. json_serializer: Callable for converting Python objects to JSON strings. 
""" json_deserializer: "Callable[[str], Any]" = decode_json """For dialects that support the :class:`~sqlalchemy.types.JSON` datatype, this is a Python callable that will convert a JSON string to a Python object.""" json_serializer: "Callable[[Any], str]" = serializer """For dialects that support the JSON datatype, this is a Python callable that will render a given object as JSON.""" @dataclass class SQLAlchemySyncConfig(_SQLAlchemySyncConfig): """Flask-specific synchronous SQLAlchemy configuration. Attributes: app: The Flask application instance. commit_mode: The commit mode to use for database sessions. """ app: "Optional[Flask]" = None """The Flask application instance.""" commit_mode: Literal["manual", "autocommit", "autocommit_include_redirect"] = "manual" """The commit mode to use for database sessions.""" def create_session_maker(self) -> "Callable[[], Session]": """Get a session maker. If none exists yet, create one. Returns: Callable[[], Session]: Session factory used by the plugin. """ if self.session_maker: return self.session_maker session_kws = self.session_config_dict if self.engine_instance is None: self.engine_instance = self.get_engine() if session_kws.get("bind") is None: session_kws["bind"] = self.engine_instance self.session_maker = self.session_maker_class(**session_kws) return self.session_maker def init_app(self, app: "Flask", portal: "Optional[Portal]" = None) -> None: """Initialize the Flask application with this configuration. Args: app: The Flask application instance. portal: The portal to use for thread-safe communication. Unused in synchronous configurations. """ self.app = app self.bind_key = self.bind_key or "default" if self.create_all: self.create_all_metadata() if self.commit_mode != "manual": self._setup_session_handling(app) def _setup_session_handling(self, app: "Flask") -> None: """Set up the session handling for the Flask application. Args: app: The Flask application instance. 
""" @app.after_request def handle_db_session(response: "Response") -> "Response": # pyright: ignore[reportUnusedFunction] """Commit the session if the response meets the commit criteria.""" if not has_request_context(): return response db_session = cast("Optional[Session]", g.pop(f"advanced_alchemy_session_{self.bind_key}", None)) if db_session is not None: if (self.commit_mode == "autocommit" and 200 <= response.status_code < 300) or ( # noqa: PLR2004 self.commit_mode == "autocommit_include_redirect" and 200 <= response.status_code < 400 # noqa: PLR2004 ): db_session.commit() db_session.close() return response def close_engines(self, portal: "Portal") -> None: """Close the engines. Args: portal: The portal to use for thread-safe communication. """ if self.engine_instance is not None: self.engine_instance.dispose() def create_all_metadata(self) -> None: # pragma: no cover """Create all metadata tables in the database.""" if self.engine_instance is None: self.engine_instance = self.get_engine() with self.engine_instance.begin() as conn: try: metadata_registry.get(None if self.bind_key == "default" else self.bind_key).create_all(conn) except OperationalError as exc: echo(f" * Could not create target metadata. Reason: {exc}") else: echo(" * Created target metadata.") @dataclass class SQLAlchemyAsyncConfig(_SQLAlchemyAsyncConfig): """Flask-specific asynchronous SQLAlchemy configuration. Attributes: app: The Flask application instance. commit_mode: The commit mode to use for database sessions. """ app: "Optional[Flask]" = None """The Flask application instance.""" commit_mode: Literal["manual", "autocommit", "autocommit_include_redirect"] = "manual" """The commit mode to use for database sessions.""" def create_session_maker(self) -> "Callable[[], AsyncSession]": """Get a session maker. If none exists yet, create one. Returns: Callable[[], AsyncSession]: Session factory used by the plugin. 
""" if self.session_maker: return self.session_maker session_kws = self.session_config_dict if self.engine_instance is None: self.engine_instance = self.get_engine() if session_kws.get("bind") is None: session_kws["bind"] = self.engine_instance self.session_maker = self.session_maker_class(**session_kws) return self.session_maker def init_app(self, app: "Flask", portal: "Optional[Portal]" = None) -> None: """Initialize the Flask application with this configuration. Args: app: The Flask application instance. portal: The portal to use for thread-safe communication. Raises: ImproperConfigurationError: If portal is not provided for async configuration. """ self.app = app self.bind_key = self.bind_key or "default" if portal is None: msg = "Portal is required for asynchronous configurations" raise ImproperConfigurationError(msg) if self.create_all: _ = portal.call(self.create_all_metadata) self._setup_session_handling(app, portal) def _setup_session_handling(self, app: "Flask", portal: "Portal") -> None: """Set up the session handling for the Flask application. Args: app: The Flask application instance. portal: The portal to use for thread-safe communication. 
""" @app.after_request def handle_db_session(response: "Response") -> "Response": # pyright: ignore[reportUnusedFunction] """Commit the session if the response meets the commit criteria.""" if not has_request_context(): return response db_session = cast("Optional[AsyncSession]", g.pop(f"advanced_alchemy_session_{self.bind_key}", None)) if db_session is not None: p = getattr(db_session, "_session_portal", None) or portal if (self.commit_mode == "autocommit" and 200 <= response.status_code < 300) or ( # noqa: PLR2004 self.commit_mode == "autocommit_include_redirect" and 200 <= response.status_code < 400 # noqa: PLR2004 ): _ = p.call(db_session.commit) _ = p.call(db_session.close) return response @app.teardown_appcontext def close_db_session(_: "Optional[BaseException]" = None) -> None: # pyright: ignore[reportUnusedFunction] """Close the session at the end of the request.""" db_session = cast("Optional[AsyncSession]", g.pop(f"advanced_alchemy_session_{self.bind_key}", None)) if db_session is not None: p = getattr(db_session, "_session_portal", None) or portal _ = p.call(db_session.close) def close_engines(self, portal: "Portal") -> None: """Close the engines. Args: portal: The portal to use for thread-safe communication. """ if self.engine_instance is not None: _ = portal.call(self.engine_instance.dispose) async def create_all_metadata(self) -> None: # pragma: no cover """Create all metadata tables in the database.""" if self.engine_instance is None: self.engine_instance = self.get_engine() async with self.engine_instance.begin() as conn: try: await conn.run_sync( metadata_registry.get(None if self.bind_key == "default" else self.bind_key).create_all ) await conn.commit() except OperationalError as exc: echo(f" * Could not create target metadata. 
Reason: {exc}") else: echo(" * Created target metadata.") python-advanced-alchemy-1.0.1/advanced_alchemy/extensions/flask/extension.py000066400000000000000000000163031476663714600274000ustar00rootroot00000000000000# ruff: noqa: SLF001, ARG001 """Flask extension for Advanced Alchemy.""" from collections.abc import Generator, Sequence from contextlib import contextmanager, suppress from typing import TYPE_CHECKING, Callable, Optional, Union, cast from flask import g from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.orm import Session from advanced_alchemy.exceptions import ImproperConfigurationError from advanced_alchemy.extensions.flask.cli import database_group from advanced_alchemy.extensions.flask.config import SQLAlchemyAsyncConfig, SQLAlchemySyncConfig from advanced_alchemy.utils.portals import Portal, PortalProvider if TYPE_CHECKING: from flask import Flask class AdvancedAlchemy: """Flask extension for Advanced Alchemy.""" __slots__ = ( "_config", "_has_async_config", "_session_makers", "portal_provider", ) def __init__( self, config: "Union[SQLAlchemySyncConfig, SQLAlchemyAsyncConfig, Sequence[Union[SQLAlchemySyncConfig, SQLAlchemyAsyncConfig]]]", app: "Optional[Flask]" = None, *, portal_provider: "Optional[PortalProvider]" = None, ) -> None: """Initialize the extension.""" self.portal_provider = portal_provider if portal_provider is not None else PortalProvider() self._config = config if isinstance(config, Sequence) else [config] self._has_async_config = any(isinstance(c, SQLAlchemyAsyncConfig) for c in self.config) self._session_makers: dict[str, Callable[..., Union[AsyncSession, Session]]] = {} if app is not None: self.init_app(app) @property def portal(self) -> "Portal": """Get the portal.""" return self.portal_provider.portal @property def config(self) -> "Sequence[Union[SQLAlchemyAsyncConfig, SQLAlchemySyncConfig]]": """Get the SQLAlchemy configuration(s).""" return self._config @property def is_async_enabled(self) -> bool: """Return True 
if any of the database configs are async.""" return self._has_async_config def init_app(self, app: "Flask") -> None: """Initialize the Flask application. Args: app: The Flask application to initialize. Raises: ImproperConfigurationError: If the extension is already registered on the Flask application. """ if "advanced_alchemy" in app.extensions: msg = "Advanced Alchemy extension is already registered on this Flask application." raise ImproperConfigurationError(msg) if self._has_async_config: self.portal_provider.start() # Create tables for async configs for cfg in self._config: if isinstance(cfg, SQLAlchemyAsyncConfig): self.portal_provider.portal.call(cfg.create_all_metadata) # Register shutdown handler for the portal @app.teardown_appcontext def shutdown_portal(exception: "Optional[BaseException]" = None) -> None: # pyright: ignore[reportUnusedFunction] """Stop the portal when the application shuts down.""" if not app.debug: # Don't stop portal in debug mode with suppress(Exception): self.portal_provider.stop() # Initialize each config with the app for config in self.config: config.init_app(app, self.portal_provider.portal) bind_key = config.bind_key if config.bind_key is not None else "default" session_maker = config.create_session_maker() self._session_makers[bind_key] = session_maker # Register session cleanup only app.teardown_appcontext(self._teardown_appcontext) app.extensions["advanced_alchemy"] = self app.cli.add_command(database_group) def _teardown_appcontext(self, exception: "Optional[BaseException]" = None) -> None: """Clean up resources when the application context ends.""" for key in list(g): if key.startswith("advanced_alchemy_session_"): session = getattr(g, key) if isinstance(session, AsyncSession): # Close async sessions through the portal with suppress(ImproperConfigurationError): self.portal_provider.portal.call(session.close) else: session.close() delattr(g, key) def get_session(self, bind_key: str = "default") -> "Union[AsyncSession, 
Session]": """Get a new session from the configured session factory. Args: bind_key: The bind key to use for the session. Returns: A new session from the configured session factory. Raises: ImproperConfigurationError: If no session maker is found for the bind key. """ if bind_key == "default" and len(self.config) == 1: bind_key = self.config[0].bind_key if self.config[0].bind_key is not None else "default" session_key = f"advanced_alchemy_session_{bind_key}" if hasattr(g, session_key): return cast("Union[AsyncSession, Session]", getattr(g, session_key)) session_maker = self._session_makers.get(bind_key) if session_maker is None: msg = f'No session maker found for bind key "{bind_key}"' raise ImproperConfigurationError(msg) session = session_maker() if self._has_async_config: # Ensure portal is started if not self.portal_provider.is_running: self.portal_provider.start() setattr(session, "_session_portal", self.portal_provider.portal) setattr(g, session_key, session) return session def get_async_session(self, bind_key: str = "default") -> AsyncSession: """Get an async session from the configured session factory.""" session = self.get_session(bind_key) if not isinstance(session, AsyncSession): msg = f"Expected async session for bind key {bind_key}, but got {type(session)}" raise ImproperConfigurationError(msg) return session def get_sync_session(self, bind_key: str = "default") -> Session: """Get a sync session from the configured session factory.""" session = self.get_session(bind_key) if not isinstance(session, Session): msg = f"Expected sync session for bind key {bind_key}, but got {type(session)}" raise ImproperConfigurationError(msg) return session @contextmanager def with_session( # pragma: no cover (more on this later) self, bind_key: str = "default" ) -> "Generator[Union[AsyncSession, Session], None, None]": """Provide a transactional scope around a series of operations. Args: bind_key: The bind key to use for the session. Yields: A session. 
""" session = self.get_session(bind_key) try: yield session finally: if isinstance(session, AsyncSession): with suppress(ImproperConfigurationError): self.portal_provider.portal.call(session.close) else: session.close() python-advanced-alchemy-1.0.1/advanced_alchemy/extensions/flask/utils.py000066400000000000000000000022451476663714600265240ustar00rootroot00000000000000"""Flask-specific service classes. This module provides Flask-specific service mixins and utilities for integrating with the Advanced Alchemy service layer. """ from typing import Any from flask import Response, current_app from advanced_alchemy.extensions.flask.config import serializer class FlaskServiceMixin: """Flask service mixin. This mixin provides Flask-specific functionality for services. """ def jsonify( self, data: Any, *args: Any, status_code: int = 200, **kwargs: Any, ) -> Response: """Convert data to a Flask JSON response. Args: data: Data to serialize to JSON. *args: Additional positional arguments passed to Flask's response class. status_code: HTTP status code for the response. Defaults to 200. **kwargs: Additional keyword arguments passed to Flask's response class. Returns: :class:`flask.Response`: A Flask response with JSON content type. 
""" return current_app.response_class( serializer(data), status=status_code, mimetype="application/json", ) python-advanced-alchemy-1.0.1/advanced_alchemy/extensions/litestar/000077500000000000000000000000001476663714600255365ustar00rootroot00000000000000python-advanced-alchemy-1.0.1/advanced_alchemy/extensions/litestar/__init__.py000066400000000000000000000053021476663714600276470ustar00rootroot00000000000000from advanced_alchemy import base, exceptions, filters, mixins, operations, repository, service, types, utils from advanced_alchemy.alembic.commands import AlembicCommands from advanced_alchemy.config import AlembicAsyncConfig, AlembicSyncConfig, AsyncSessionConfig, SyncSessionConfig from advanced_alchemy.extensions.litestar import providers from advanced_alchemy.extensions.litestar.cli import get_database_migration_plugin from advanced_alchemy.extensions.litestar.dto import SQLAlchemyDTO, SQLAlchemyDTOConfig from advanced_alchemy.extensions.litestar.plugins import ( EngineConfig, SQLAlchemyAsyncConfig, SQLAlchemyInitPlugin, SQLAlchemyPlugin, SQLAlchemySerializationPlugin, SQLAlchemySyncConfig, ) from advanced_alchemy.extensions.litestar.plugins.init.config.asyncio import ( autocommit_before_send_handler as async_autocommit_before_send_handler, ) from advanced_alchemy.extensions.litestar.plugins.init.config.asyncio import ( autocommit_handler_maker as async_autocommit_handler_maker, ) from advanced_alchemy.extensions.litestar.plugins.init.config.asyncio import ( default_before_send_handler as async_default_before_send_handler, ) from advanced_alchemy.extensions.litestar.plugins.init.config.asyncio import ( default_handler_maker as async_default_handler_maker, ) from advanced_alchemy.extensions.litestar.plugins.init.config.sync import ( autocommit_before_send_handler as sync_autocommit_before_send_handler, ) from advanced_alchemy.extensions.litestar.plugins.init.config.sync import ( autocommit_handler_maker as sync_autocommit_handler_maker, ) from 
advanced_alchemy.extensions.litestar.plugins.init.config.sync import ( default_before_send_handler as sync_default_before_send_handler, ) from advanced_alchemy.extensions.litestar.plugins.init.config.sync import ( default_handler_maker as sync_default_handler_maker, ) __all__ = ( "AlembicAsyncConfig", "AlembicCommands", "AlembicSyncConfig", "AsyncSessionConfig", "EngineConfig", "SQLAlchemyAsyncConfig", "SQLAlchemyDTO", "SQLAlchemyDTOConfig", "SQLAlchemyInitPlugin", "SQLAlchemyPlugin", "SQLAlchemySerializationPlugin", "SQLAlchemySyncConfig", "SyncSessionConfig", "async_autocommit_before_send_handler", "async_autocommit_handler_maker", "async_default_before_send_handler", "async_default_handler_maker", "base", "exceptions", "filters", "get_database_migration_plugin", "mixins", "operations", "providers", "repository", "service", "sync_autocommit_before_send_handler", "sync_autocommit_handler_maker", "sync_default_before_send_handler", "sync_default_handler_maker", "types", "utils", ) python-advanced-alchemy-1.0.1/advanced_alchemy/extensions/litestar/_utils.py000066400000000000000000000036371476663714600274200ustar00rootroot00000000000000from typing import TYPE_CHECKING, Any if TYPE_CHECKING: from litestar.types import Scope __all__ = ( "delete_aa_scope_state", "get_aa_scope_state", "set_aa_scope_state", ) _SCOPE_NAMESPACE = "_aa_connection_state" def get_aa_scope_state(scope: "Scope", key: str, default: Any = None, pop: bool = False) -> Any: """Get an internal value from connection scope state. Note: If called with a default value, this method behaves like to `dict.set_default()`, both setting the key in the namespace to the default value, and returning it. If called without a default value, the method behaves like `dict.get()`, returning ``None`` if the key does not exist. Args: scope: The connection scope. key: Key to get from internal namespace in scope state. default: Default value to return. 
pop: Boolean flag dictating whether the value should be deleted from the state. Returns: Value mapped to ``key`` in internal connection scope namespace. """ namespace = scope.setdefault(_SCOPE_NAMESPACE, {}) # type: ignore[misc] return namespace.pop(key, default) if pop else namespace.get(key, default) # pyright: ignore[reportUnknownVariableType,reportUnknownMemberType] def set_aa_scope_state(scope: "Scope", key: str, value: Any) -> None: """Set an internal value in connection scope state. Args: scope: The connection scope. key: Key to set under internal namespace in scope state. value: Value for key. """ scope.setdefault(_SCOPE_NAMESPACE, {})[key] = value # type: ignore[misc] def delete_aa_scope_state(scope: "Scope", key: str) -> None: """Delete an internal value from connection scope state. Args: scope: The connection scope. key: Key to set under internal namespace in scope state. """ del scope.setdefault(_SCOPE_NAMESPACE, {})[key] # type: ignore[misc] python-advanced-alchemy-1.0.1/advanced_alchemy/extensions/litestar/cli.py000066400000000000000000000023601476663714600266600ustar00rootroot00000000000000from contextlib import suppress from typing import TYPE_CHECKING from litestar.cli._utils import LitestarGroup from advanced_alchemy.cli import add_migration_commands try: import rich_click as click except ImportError: import click # type: ignore[no-redef] if TYPE_CHECKING: from litestar import Litestar from advanced_alchemy.extensions.litestar.plugins import SQLAlchemyInitPlugin def get_database_migration_plugin(app: "Litestar") -> "SQLAlchemyInitPlugin": """Retrieve a database migration plugin from the Litestar application's plugins.""" from advanced_alchemy.exceptions import ImproperConfigurationError from advanced_alchemy.extensions.litestar.plugins import SQLAlchemyInitPlugin with suppress(KeyError): return app.plugins.get(SQLAlchemyInitPlugin) msg = "Failed to initialize database migrations. 
The required plugin (SQLAlchemyPlugin or SQLAlchemyInitPlugin) is missing." raise ImproperConfigurationError(msg) @click.group(cls=LitestarGroup, name="database") def database_group(ctx: "click.Context") -> None: """Manage SQLAlchemy database components.""" ctx.obj = {"app": ctx.obj, "configs": get_database_migration_plugin(ctx.obj.app).config} add_migration_commands(database_group) python-advanced-alchemy-1.0.1/advanced_alchemy/extensions/litestar/dto.py000066400000000000000000000460671476663714600267130ustar00rootroot00000000000000from collections.abc import Collection, Generator from collections.abc import Set as AbstractSet from dataclasses import asdict, dataclass, field, replace from functools import singledispatchmethod from typing import ( Any, ClassVar, Generic, Literal, Optional, Union, ) from litestar.dto.base_dto import AbstractDTO from litestar.dto.config import DTOConfig from litestar.dto.data_structures import DTOFieldDefinition from litestar.dto.field import DTO_FIELD_META_KEY, DTOField, Mark from litestar.types.empty import Empty from litestar.typing import FieldDefinition from litestar.utils.signature import ParsedSignature from sqlalchemy import Column, inspect, orm, sql from sqlalchemy.ext.associationproxy import AssociationProxy, AssociationProxyExtensionType from sqlalchemy.ext.hybrid import HybridExtensionType, hybrid_property from sqlalchemy.orm import ( ColumnProperty, CompositeProperty, DeclarativeBase, DynamicMapped, InspectionAttr, InstrumentedAttribute, Mapped, MappedColumn, NotExtension, QueryableAttribute, Relationship, RelationshipDirection, RelationshipProperty, WriteOnlyMapped, ) from sqlalchemy.sql.expression import ColumnClause, Label from typing_extensions import TypeAlias, TypeVar from advanced_alchemy.exceptions import ImproperConfigurationError __all__ = ("SQLAlchemyDTO",) T = TypeVar("T", bound="Union[DeclarativeBase, Collection[DeclarativeBase]]") ElementType: TypeAlias = Union[ "Column[Any]", "RelationshipProperty[Any]", 
"CompositeProperty[Any]", "ColumnClause[Any]", "Label[Any]" ] SQLA_NS = {**vars(orm), **vars(sql)} @dataclass(frozen=True) class SQLAlchemyDTOConfig(DTOConfig): """Additional controls for the generated SQLAlchemy DTO.""" exclude: AbstractSet[Union[str, InstrumentedAttribute[Any]]] = field(default_factory=set) # type: ignore[assignment] # pyright: ignore[reportIncompatibleVariableOverride] """Explicitly exclude fields from the generated DTO. If exclude is specified, all fields not specified in exclude will be included by default. Notes: - The field names are dot-separated paths to nested fields, e.g. ``"address.street"`` will exclude the ``"street"`` field from a nested ``"address"`` model. - 'exclude' mutually exclusive with 'include' - specifying both values will raise an ``ImproperlyConfiguredException``. """ include: AbstractSet[Union[str, InstrumentedAttribute[Any]]] = field(default_factory=set) # type: ignore[assignment] # pyright: ignore[reportIncompatibleVariableOverride] """Explicitly include fields in the generated DTO. If include is specified, all fields not specified in include will be excluded by default. Notes: - The field names are dot-separated paths to nested fields, e.g. ``"address.street"`` will include the ``"street"`` field from a nested ``"address"`` model. - 'include' mutually exclusive with 'exclude' - specifying both values will raise an ``ImproperlyConfiguredException``. """ rename_fields: dict[Union[str, InstrumentedAttribute[Any]], str] = field(default_factory=dict) # type: ignore[assignment] # pyright: ignore[reportIncompatibleVariableOverride] """Mapping of field names, to new name.""" include_implicit_fields: Union[bool, Literal["hybrid-only"]] = True """Fields that are implicitly mapped are included. Turning this off will lead to exclude all fields not using ``Mapped`` annotation, When setting this to ``hybrid-only``, all implicitly mapped fields are excluded with the exception for hybrid properties. 
""" def __post_init__(self) -> None: super().__post_init__() object.__setattr__( self, "exclude", {f.key if isinstance(f, InstrumentedAttribute) else f for f in self.exclude} ) object.__setattr__( self, "include", {f.key if isinstance(f, InstrumentedAttribute) else f for f in self.include} ) object.__setattr__( self, "rename_fields", {f.key if isinstance(f, InstrumentedAttribute) else f: v for f, v in self.rename_fields.items()}, ) class SQLAlchemyDTO(AbstractDTO[T], Generic[T]): """Support for domain modelling with SQLAlchemy.""" config: ClassVar[SQLAlchemyDTOConfig] @staticmethod def _ensure_sqla_dto_config(config: Union[DTOConfig, SQLAlchemyDTOConfig]) -> SQLAlchemyDTOConfig: if not isinstance(config, SQLAlchemyDTOConfig): return SQLAlchemyDTOConfig(**asdict(config)) return config def __init_subclass__(cls, **kwargs: Any) -> None: super().__init_subclass__(**kwargs) if hasattr(cls, "config"): cls.config = cls._ensure_sqla_dto_config(cls.config) # pyright: ignore[reportIncompatibleVariableOverride] @singledispatchmethod @classmethod def handle_orm_descriptor( cls, extension_type: Union[NotExtension, AssociationProxyExtensionType, HybridExtensionType], orm_descriptor: InspectionAttr, key: str, model_type_hints: dict[str, FieldDefinition], model_name: str, ) -> list[DTOFieldDefinition]: msg = f"Unsupported extension type: {extension_type}" raise NotImplementedError(msg) @handle_orm_descriptor.register(NotExtension) @classmethod def _( cls, extension_type: NotExtension, key: str, orm_descriptor: InspectionAttr, model_type_hints: dict[str, FieldDefinition], model_name: str, ) -> list[DTOFieldDefinition]: if not isinstance(orm_descriptor, QueryableAttribute): # pragma: no cover msg = f"Unexpected descriptor type for '{extension_type}': '{orm_descriptor}'" raise NotImplementedError(msg) elem: ElementType if isinstance( orm_descriptor.property, # pyright: ignore[reportUnknownMemberType] ColumnProperty, # pragma: no cover ): if not isinstance( 
orm_descriptor.property.expression, # pyright: ignore[reportUnknownMemberType] (Column, ColumnClause, Label), ): msg = f"Expected 'Column', got: '{orm_descriptor.property.expression}, {type(orm_descriptor.property.expression)}'" # pyright: ignore[reportUnknownMemberType] raise NotImplementedError(msg) elem = orm_descriptor.property.expression # pyright: ignore[reportUnknownMemberType,reportUnknownVariableType] elif isinstance(orm_descriptor.property, (RelationshipProperty, CompositeProperty)): # pyright: ignore[reportUnknownMemberType,reportUnknownVariableType] elem = orm_descriptor.property # pyright: ignore[reportUnknownMemberType,reportUnknownVariableType] else: # pragma: no cover msg = f"Unhandled property type: '{orm_descriptor.property}'" # pyright: ignore[reportUnknownMemberType] raise NotImplementedError(msg) default, default_factory = _detect_defaults(elem) try: if (field_definition := model_type_hints[key]).origin in { Mapped, WriteOnlyMapped, DynamicMapped, Relationship, }: (field_definition,) = field_definition.inner_types else: # pragma: no cover msg = f"Expected 'Mapped' origin, got: '{field_definition.origin}'" raise NotImplementedError(msg) except KeyError: field_definition = parse_type_from_element(elem, orm_descriptor) # pyright: ignore[reportUnknownArgumentType] dto_field = elem.info.get(DTO_FIELD_META_KEY) if hasattr(elem, "info") else None # pyright: ignore[reportArgumentMemberType] if dto_field is None and isinstance(orm_descriptor, InstrumentedAttribute) and hasattr(orm_descriptor, "info"): # pyright: ignore[reportUnknownArgumentType] dto_field = orm_descriptor.info.get(DTO_FIELD_META_KEY) # pyright: ignore[reportArgumentMemberType] if dto_field is None: dto_field = DTOField() return [ DTOFieldDefinition.from_field_definition( field_definition=replace( field_definition, name=key, default=default, ), default_factory=default_factory, dto_field=dto_field, model_name=model_name, ), ] @handle_orm_descriptor.register(AssociationProxyExtensionType) 
@classmethod def _( cls, extension_type: AssociationProxyExtensionType, key: str, orm_descriptor: InspectionAttr, model_type_hints: dict[str, FieldDefinition], model_name: str, ) -> list[DTOFieldDefinition]: if not isinstance(orm_descriptor, AssociationProxy): # pragma: no cover msg = f"Unexpected descriptor type '{orm_descriptor}' for '{extension_type}'" raise NotImplementedError(msg) if (field_definition := model_type_hints[key]).origin is AssociationProxy: (field_definition,) = field_definition.inner_types else: # pragma: no cover msg = f"Expected 'AssociationProxy' origin, got: '{field_definition.origin}'" raise NotImplementedError(msg) return [ DTOFieldDefinition.from_field_definition( field_definition=replace( field_definition, name=key, default=Empty, ), default_factory=None, dto_field=orm_descriptor.info.get(DTO_FIELD_META_KEY, DTOField(mark=Mark.READ_ONLY)), model_name=model_name, ), ] @handle_orm_descriptor.register(HybridExtensionType) @classmethod def _( cls, extension_type: HybridExtensionType, key: str, orm_descriptor: InspectionAttr, model_type_hints: dict[str, FieldDefinition], model_name: str, ) -> list[DTOFieldDefinition]: if not isinstance(orm_descriptor, hybrid_property): msg = f"Unexpected descriptor type '{orm_descriptor}' for '{extension_type}'" raise NotImplementedError(msg) getter_sig = ParsedSignature.from_fn(orm_descriptor.fget, {}) # pyright: ignore[reportUnknownArgumentType,reportUnknownMemberType,reportAttributeAccessIssue] field_defs = [ DTOFieldDefinition.from_field_definition( field_definition=replace( getter_sig.return_type, name=orm_descriptor.__name__, default=Empty, ), default_factory=None, dto_field=orm_descriptor.info.get(DTO_FIELD_META_KEY, DTOField(mark=Mark.READ_ONLY)), model_name=model_name, ), ] if orm_descriptor.fset is not None: # pyright: ignore[reportUnknownMemberType] setter_sig = ParsedSignature.from_fn(orm_descriptor.fset, {}) # pyright: ignore[reportUnknownArgumentType,reportUnknownMemberType] field_defs.append( 
DTOFieldDefinition.from_field_definition( field_definition=replace( next(iter(setter_sig.parameters.values())), name=orm_descriptor.__name__, default=Empty, ), default_factory=None, dto_field=orm_descriptor.info.get(DTO_FIELD_META_KEY, DTOField(mark=Mark.WRITE_ONLY)), model_name=model_name, ), ) return field_defs @classmethod def generate_field_definitions(cls, model_type: type[DeclarativeBase]) -> Generator[DTOFieldDefinition, None, None]: """Generate DTO field definitions from a SQLAlchemy model. Args: model_type (typing.Type[sqlalchemy.orm.DeclarativeBase]): The SQLAlchemy model type to generate field definitions from. Yields: collections.abc.Generator[litestar.dto.data_structures.DTOFieldDefinition, None, None]: A generator yielding DTO field definitions. Raises: RuntimeError: If the mapper cannot be found for the model type. NotImplementedError: If an unsupported property or extension type is encountered. ImproperConfigurationError: If a type cannot be parsed from an element. """ if (mapper := inspect(model_type)) is None: # pragma: no cover # pyright: ignore[reportUnnecessaryComparison] msg = "Unexpected `None` value for mapper." # type: ignore[unreachable] raise RuntimeError(msg) # includes SQLAlchemy names and other mapped class names in the forward reference resolution namespace namespace = {**SQLA_NS, **{m.class_.__name__: m.class_ for m in mapper.registry.mappers if m is not mapper}} model_type_hints = cls.get_model_type_hints(model_type, namespace=namespace) model_name = model_type.__name__ include_implicit_fields = cls.config.include_implicit_fields # the same hybrid property descriptor can be included in `all_orm_descriptors` multiple times, once # for each method name it is bound to. We only need to see it once, so track views of it here. 
seen_hybrid_descriptors: set[hybrid_property] = set() # pyright: ignore[reportUnknownVariableType,reportMissingTypeArgument] skipped_descriptors: set[str] = set() for composite_property in mapper.composites: # pragma: no cover for attr in composite_property.attrs: if isinstance(attr, (MappedColumn, Column)): skipped_descriptors.add(attr.name) elif isinstance(attr, str): skipped_descriptors.add(attr) for key, orm_descriptor in mapper.all_orm_descriptors.items(): if is_hybrid_property := isinstance(orm_descriptor, hybrid_property): if orm_descriptor in seen_hybrid_descriptors: continue seen_hybrid_descriptors.add(orm_descriptor) # pyright: ignore[reportUnknownMemberType] if key in skipped_descriptors: continue should_skip_descriptor = False dto_field: Optional[DTOField] = None if hasattr(orm_descriptor, "property"): # pyright: ignore[reportUnknownArgumentType] dto_field = orm_descriptor.property.info.get(DTO_FIELD_META_KEY) # pyright: ignore # noqa: PGH003 # Case 1 is_field_marked_not_private = dto_field and dto_field.mark is not Mark.PRIVATE # pyright: ignore[reportUnknownVariableType,reportUnknownMemberType] # Case 2 should_exclude_anything_implicit = not include_implicit_fields and key not in model_type_hints # Case 3 should_exclude_non_hybrid_only = ( not is_hybrid_property and include_implicit_fields == "hybrid-only" and key not in model_type_hints ) # Descriptor is marked with with either Mark.READ_ONLY or Mark.WRITE_ONLY (see Case 1): # - always include it regardless of anything else. 
# Descriptor is not marked: # - It's implicit BUT config excludes anything implicit (see Case 2): exclude # - It's implicit AND not hybrid BUT config includes hybrid-only implicit descriptors (Case 3): exclude should_skip_descriptor = not is_field_marked_not_private and ( should_exclude_anything_implicit or should_exclude_non_hybrid_only ) if should_skip_descriptor: continue yield from cls.handle_orm_descriptor( orm_descriptor.extension_type, key, orm_descriptor, model_type_hints, model_name, ) @classmethod def detect_nested_field(cls, field_definition: FieldDefinition) -> bool: return field_definition.is_subclass_of(DeclarativeBase) def _detect_defaults(elem: ElementType) -> tuple[Any, Any]: default: Any = Empty default_factory: Any = None # pyright:ignore # noqa: PGH003 if sqla_default := getattr(elem, "default", None): if sqla_default.is_scalar: default = sqla_default.arg elif sqla_default.is_callable: def default_factory(d: Any = sqla_default) -> Any: return d.arg({}) elif sqla_default.is_sequence or sqla_default.is_sentinel: # SQLAlchemy sequences represent server side defaults # so we cannot infer a reasonable default value for # them on the client side pass else: msg = "Unexpected default type" raise ValueError(msg) elif (isinstance(elem, RelationshipProperty) and detect_nullable_relationship(elem)) or getattr( elem, "nullable", False ): default = None return default, default_factory def parse_type_from_element(elem: ElementType, orm_descriptor: InspectionAttr) -> FieldDefinition: # noqa: PLR0911 """Parses a type from a SQLAlchemy element. Args: elem: The SQLAlchemy element to parse. orm_descriptor: The attribute `elem` was extracted from. Returns: FieldDefinition: The parsed type. Raises: ImproperlyConfiguredException: If the type cannot be parsed. 
""" if isinstance(elem, Column): if elem.nullable: return FieldDefinition.from_annotation(Optional[elem.type.python_type]) return FieldDefinition.from_annotation(elem.type.python_type) if isinstance(elem, RelationshipProperty): if elem.direction in (RelationshipDirection.ONETOMANY, RelationshipDirection.MANYTOMANY): collection_type = FieldDefinition.from_annotation(elem.collection_class or list) # pyright: ignore[reportUnknownMemberType] return FieldDefinition.from_annotation(collection_type.safe_generic_origin[elem.mapper.class_]) if detect_nullable_relationship(elem): return FieldDefinition.from_annotation(Optional[elem.mapper.class_]) return FieldDefinition.from_annotation(elem.mapper.class_) if isinstance(elem, CompositeProperty): return FieldDefinition.from_annotation(elem.composite_class) if isinstance(orm_descriptor, InstrumentedAttribute): return FieldDefinition.from_annotation(orm_descriptor.type.python_type) msg = f"Unable to parse type from element '{elem}'. Consider adding a type hint." raise ImproperConfigurationError( msg, ) def detect_nullable_relationship(elem: RelationshipProperty[Any]) -> bool: """Detects if a relationship is nullable. This attempts to decide if we should allow a ``None`` default value for a relationship by looking at the foreign key fields. If all foreign key fields are nullable, then we allow a ``None`` default value. Args: elem: The relationship to check. Returns: bool: ``True`` if the relationship is nullable, ``False`` otherwise. 
""" return elem.direction == RelationshipDirection.MANYTOONE and all(c.nullable for c in elem.local_columns) python-advanced-alchemy-1.0.1/advanced_alchemy/extensions/litestar/exception_handler.py000066400000000000000000000033051476663714600316040ustar00rootroot00000000000000from typing import TYPE_CHECKING, Any from litestar.connection import Request from litestar.connection.base import AuthT, StateT, UserT from litestar.exceptions import ( ClientException, HTTPException, InternalServerException, NotFoundException, ) from litestar.exceptions.responses import ( create_debug_response, # pyright: ignore[reportUnknownVariableType] create_exception_response, # pyright: ignore[reportUnknownVariableType] ) from litestar.response import Response from litestar.status_codes import ( HTTP_409_CONFLICT, ) from advanced_alchemy.exceptions import ( DuplicateKeyError, ForeignKeyError, IntegrityError, NotFoundError, RepositoryError, ) if TYPE_CHECKING: from litestar.connection import Request from litestar.connection.base import AuthT, StateT, UserT from litestar.response import Response class ConflictError(ClientException): """Request conflict with the current state of the target resource.""" status_code: int = HTTP_409_CONFLICT def exception_to_http_response(request: "Request[UserT, AuthT, StateT]", exc: "RepositoryError") -> "Response[Any]": """Handler for all exceptions subclassed from HTTPException.""" if isinstance(exc, NotFoundError): http_exc: type[HTTPException] = NotFoundException elif isinstance(exc, (DuplicateKeyError, IntegrityError, ForeignKeyError)): http_exc = ConflictError else: http_exc = InternalServerException if request.app.debug: return create_debug_response(request, exc) # pyright: ignore[reportUnknownVariableType] return create_exception_response(request, http_exc(detail=str(exc.detail))) # pyright: ignore[reportUnknownVariableType] 
python-advanced-alchemy-1.0.1/advanced_alchemy/extensions/litestar/plugins/000077500000000000000000000000001476663714600272175ustar00rootroot00000000000000python-advanced-alchemy-1.0.1/advanced_alchemy/extensions/litestar/plugins/__init__.py000066400000000000000000000033061476663714600313320ustar00rootroot00000000000000from collections.abc import Sequence from typing import Union from litestar.config.app import AppConfig from litestar.plugins import InitPluginProtocol from advanced_alchemy.extensions.litestar.plugins import _slots_base from advanced_alchemy.extensions.litestar.plugins.init import ( EngineConfig, SQLAlchemyAsyncConfig, SQLAlchemyInitPlugin, SQLAlchemySyncConfig, ) from advanced_alchemy.extensions.litestar.plugins.serialization import SQLAlchemySerializationPlugin class SQLAlchemyPlugin(InitPluginProtocol, _slots_base.SlotsBase): """A plugin that provides SQLAlchemy integration.""" def __init__( self, config: Union[ SQLAlchemyAsyncConfig, SQLAlchemySyncConfig, Sequence[Union[SQLAlchemyAsyncConfig, SQLAlchemySyncConfig]] ], ) -> None: """Initialize ``SQLAlchemyPlugin``. Args: config: configure DB connection and hook handlers and dependencies. """ self._config = config if isinstance(config, Sequence) else [config] @property def config( self, ) -> Sequence[Union[SQLAlchemyAsyncConfig, SQLAlchemySyncConfig]]: return self._config def on_app_init(self, app_config: AppConfig) -> AppConfig: """Configure application for use with SQLAlchemy. Args: app_config: The :class:`AppConfig <.config.app.AppConfig>` instance. 
""" app_config.plugins.extend([SQLAlchemyInitPlugin(config=self._config), SQLAlchemySerializationPlugin()]) return app_config __all__ = ( "EngineConfig", "SQLAlchemyAsyncConfig", "SQLAlchemyInitPlugin", "SQLAlchemyPlugin", "SQLAlchemySerializationPlugin", "SQLAlchemySyncConfig", ) python-advanced-alchemy-1.0.1/advanced_alchemy/extensions/litestar/plugins/_slots_base.py000066400000000000000000000004341476663714600320670ustar00rootroot00000000000000"""Base class that aggregates slots for all SQLAlchemy plugins. See: https://stackoverflow.com/questions/53060607/python-3-6-5-multiple-bases-have-instance-lay-out-conflict-when-multi-inherit """ class SlotsBase: __slots__ = ( "_config", "_type_dto_map", ) python-advanced-alchemy-1.0.1/advanced_alchemy/extensions/litestar/plugins/init/000077500000000000000000000000001476663714600301625ustar00rootroot00000000000000python-advanced-alchemy-1.0.1/advanced_alchemy/extensions/litestar/plugins/init/__init__.py000066400000000000000000000005421476663714600322740ustar00rootroot00000000000000from advanced_alchemy.extensions.litestar.plugins.init.config import ( EngineConfig, SQLAlchemyAsyncConfig, SQLAlchemySyncConfig, ) from advanced_alchemy.extensions.litestar.plugins.init.plugin import SQLAlchemyInitPlugin __all__ = ( "EngineConfig", "SQLAlchemyAsyncConfig", "SQLAlchemyInitPlugin", "SQLAlchemySyncConfig", ) python-advanced-alchemy-1.0.1/advanced_alchemy/extensions/litestar/plugins/init/config/000077500000000000000000000000001476663714600314275ustar00rootroot00000000000000python-advanced-alchemy-1.0.1/advanced_alchemy/extensions/litestar/plugins/init/config/__init__.py000066400000000000000000000005671476663714600335500ustar00rootroot00000000000000from advanced_alchemy.extensions.litestar.plugins.init.config.asyncio import SQLAlchemyAsyncConfig from advanced_alchemy.extensions.litestar.plugins.init.config.engine import EngineConfig from advanced_alchemy.extensions.litestar.plugins.init.config.sync import SQLAlchemySyncConfig __all__ 
= ( "EngineConfig", "SQLAlchemyAsyncConfig", "SQLAlchemySyncConfig", ) python-advanced-alchemy-1.0.1/advanced_alchemy/extensions/litestar/plugins/init/config/asyncio.py000066400000000000000000000262271476663714600334570ustar00rootroot00000000000000from collections.abc import AsyncGenerator, Coroutine from contextlib import asynccontextmanager from dataclasses import dataclass, field from typing import TYPE_CHECKING, Any, Callable, Literal, Optional, Union, cast from litestar.cli._utils import console from litestar.constants import HTTP_RESPONSE_START from sqlalchemy.exc import OperationalError from sqlalchemy.ext.asyncio import AsyncEngine, AsyncSession from advanced_alchemy.base import metadata_registry from advanced_alchemy.config.asyncio import SQLAlchemyAsyncConfig as _SQLAlchemyAsyncConfig from advanced_alchemy.extensions.litestar._utils import ( delete_aa_scope_state, get_aa_scope_state, set_aa_scope_state, ) from advanced_alchemy.extensions.litestar.plugins.init.config.common import ( SESSION_SCOPE_KEY, SESSION_TERMINUS_ASGI_EVENTS, ) from advanced_alchemy.extensions.litestar.plugins.init.config.engine import EngineConfig if TYPE_CHECKING: from collections.abc import AsyncGenerator, Coroutine from litestar import Litestar from litestar.datastructures.state import State from litestar.types import BeforeMessageSendHookHandler, Message, Scope # noinspection PyUnresolvedReferences __all__ = ( "SQLAlchemyAsyncConfig", "autocommit_before_send_handler", "autocommit_handler_maker", "default_before_send_handler", "default_handler_maker", ) def default_handler_maker( session_scope_key: str = SESSION_SCOPE_KEY, ) -> "Callable[[Message, Scope], Coroutine[Any, Any, None]]": """Set up the handler to issue a transaction commit or rollback based on specified status codes Args: session_scope_key: The key to use within the application state Returns: The handler callable """ async def handler(message: "Message", scope: "Scope") -> None: """Handle commit/rollback, closing and 
cleaning up sessions before sending. Args: message: ASGI-``Message`` scope: An ASGI-``Scope`` Returns: None """ session = cast("Optional[AsyncSession]", get_aa_scope_state(scope, session_scope_key)) if session and message["type"] in SESSION_TERMINUS_ASGI_EVENTS: await session.close() delete_aa_scope_state(scope, session_scope_key) return handler default_before_send_handler = default_handler_maker() def autocommit_handler_maker( commit_on_redirect: bool = False, extra_commit_statuses: Optional[set[int]] = None, extra_rollback_statuses: Optional[set[int]] = None, session_scope_key: str = SESSION_SCOPE_KEY, ) -> "Callable[[Message, Scope], Coroutine[Any, Any, None]]": """Set up the handler to issue a transaction commit or rollback based on specified status codes Args: commit_on_redirect: Issue a commit when the response status is a redirect (``3XX``) extra_commit_statuses: A set of additional status codes that trigger a commit extra_rollback_statuses: A set of additional status codes that trigger a rollback session_scope_key: The key to use within the application state Returns: The handler callable """ if extra_commit_statuses is None: extra_commit_statuses = set() if extra_rollback_statuses is None: extra_rollback_statuses = set() if len(extra_commit_statuses & extra_rollback_statuses) > 0: msg = "Extra rollback statuses and commit statuses must not share any status codes" raise ValueError(msg) commit_range = range(200, 400 if commit_on_redirect else 300) async def handler(message: "Message", scope: "Scope") -> None: """Handle commit/rollback, closing and cleaning up sessions before sending. 
Args: message: ASGI-``litestar.types.Message`` scope: An ASGI-``litestar.types.Scope`` Returns: None """ session = cast("Optional[AsyncSession]", get_aa_scope_state(scope, session_scope_key)) try: if session is not None and message["type"] == HTTP_RESPONSE_START: if (message["status"] in commit_range or message["status"] in extra_commit_statuses) and message[ "status" ] not in extra_rollback_statuses: await session.commit() else: await session.rollback() finally: if session and message["type"] in SESSION_TERMINUS_ASGI_EVENTS: await session.close() delete_aa_scope_state(scope, session_scope_key) return handler autocommit_before_send_handler = autocommit_handler_maker() @dataclass class SQLAlchemyAsyncConfig(_SQLAlchemyAsyncConfig): """Litestar Async SQLAlchemy Configuration.""" before_send_handler: Optional[ Union["BeforeMessageSendHookHandler", Literal["autocommit", "autocommit_include_redirects"]] ] = None """Handler to call before the ASGI message is sent. The handler should handle closing the session stored in the ASGI scope, if it's still open, and committing and uncommitted data. """ engine_dependency_key: str = "db_engine" """Key to use for the dependency injection of database engines.""" session_dependency_key: str = "db_session" """Key to use for the dependency injection of database sessions.""" engine_app_state_key: str = "db_engine" """Key under which to store the SQLAlchemy engine in the application :class:`State ` instance. """ session_maker_app_state_key: str = "session_maker_class" """Key under which to store the SQLAlchemy :class:`sessionmaker ` in the application :class:`State ` instance. """ session_scope_key: str = SESSION_SCOPE_KEY """Key under which to store the SQLAlchemy scope in the application.""" engine_config: EngineConfig = field(default_factory=EngineConfig) # pyright: ignore[reportIncompatibleVariableOverride] """Configuration for the SQLAlchemy engine. The configuration options are documented in the SQLAlchemy documentation. 
""" set_default_exception_handler: bool = True """Sets the default exception handler on application start.""" def _ensure_unique(self, registry_name: str, key: str, new_key: Optional[str] = None, _iter: int = 0) -> str: new_key = new_key if new_key is not None else key if new_key in getattr(self.__class__, registry_name, {}): _iter += 1 new_key = self._ensure_unique(registry_name, key, f"{key}_{_iter}", _iter) return new_key def __post_init__(self) -> None: self.session_scope_key = self._ensure_unique("_SESSION_SCOPE_KEY_REGISTRY", self.session_scope_key) self.engine_app_state_key = self._ensure_unique("_ENGINE_APP_STATE_KEY_REGISTRY", self.engine_app_state_key) self.session_maker_app_state_key = self._ensure_unique( "_SESSIONMAKER_APP_STATE_KEY_REGISTRY", self.session_maker_app_state_key, ) self.__class__._SESSION_SCOPE_KEY_REGISTRY.add(self.session_scope_key) # noqa: SLF001 self.__class__._ENGINE_APP_STATE_KEY_REGISTRY.add(self.engine_app_state_key) # noqa: SLF001 self.__class__._SESSIONMAKER_APP_STATE_KEY_REGISTRY.add(self.session_maker_app_state_key) # noqa: SLF001 if self.before_send_handler is None: self.before_send_handler = default_handler_maker(session_scope_key=self.session_scope_key) if self.before_send_handler == "autocommit": self.before_send_handler = autocommit_handler_maker(session_scope_key=self.session_scope_key) if self.before_send_handler == "autocommit_include_redirects": self.before_send_handler = autocommit_handler_maker( session_scope_key=self.session_scope_key, commit_on_redirect=True, ) super().__post_init__() def create_session_maker(self) -> "Callable[[], AsyncSession]": """Get a session maker. If none exists yet, create one. Returns: Session factory used by the plugin. 
""" if self.session_maker: return self.session_maker session_kws = self.session_config_dict if session_kws.get("bind") is None: session_kws["bind"] = self.get_engine() return self.session_maker_class(**session_kws) # pyright: ignore[reportUnknownVariableType,reportUnknownMemberType] @asynccontextmanager async def lifespan( self, app: "Litestar", ) -> "AsyncGenerator[None, None]": deps = self.create_app_state_items() app.state.update(deps) try: if self.create_all: await self.create_all_metadata(app) yield finally: if self.engine_dependency_key in deps: engine = deps[self.engine_dependency_key] if hasattr(engine, "dispose"): await cast("AsyncEngine", engine).dispose() def provide_engine(self, state: "State") -> "AsyncEngine": """Create an engine instance. Args: state: The ``Litestar.state`` instance. Returns: An engine instance. """ return cast("AsyncEngine", state.get(self.engine_app_state_key)) def provide_session(self, state: "State", scope: "Scope") -> "AsyncSession": """Create a session instance. Args: state: The ``Litestar.state`` instance. scope: The current connection's scope. Returns: A session instance. """ session = cast("Optional[AsyncSession]", get_aa_scope_state(scope, self.session_scope_key)) if session is None: session_maker = cast("Callable[[], AsyncSession]", state[self.session_maker_app_state_key]) session = session_maker() set_aa_scope_state(scope, self.session_scope_key, session) return session @property def signature_namespace(self) -> dict[str, Any]: """Return the plugin's signature namespace. Returns: A string keyed dict of names to be added to the namespace for signature forward reference resolution. 
""" return {"AsyncEngine": AsyncEngine, "AsyncSession": AsyncSession} async def create_all_metadata(self, app: "Litestar") -> None: """Create all metadata Args: app (Litestar): The ``Litestar`` instance """ async with self.get_engine().begin() as conn: try: await conn.run_sync(metadata_registry.get(self.bind_key).create_all) except OperationalError as exc: console.print(f"[bold red] * Could not create target metadata. Reason: {exc}") def create_app_state_items(self) -> dict[str, Any]: """Key/value pairs to be stored in application state.""" return { self.engine_app_state_key: self.get_engine(), self.session_maker_app_state_key: self.create_session_maker(), } def update_app_state(self, app: "Litestar") -> None: """Set the app state with engine and session. Args: app: The ``Litestar`` instance. """ app.state.update(self.create_app_state_items()) python-advanced-alchemy-1.0.1/advanced_alchemy/extensions/litestar/plugins/init/config/common.py000066400000000000000000000005211476663714600332670ustar00rootroot00000000000000from litestar.constants import HTTP_DISCONNECT, HTTP_RESPONSE_START, WEBSOCKET_CLOSE, WEBSOCKET_DISCONNECT SESSION_SCOPE_KEY = "_sqlalchemy_db_session" """Session scope key.""" SESSION_TERMINUS_ASGI_EVENTS = {HTTP_RESPONSE_START, HTTP_DISCONNECT, WEBSOCKET_DISCONNECT, WEBSOCKET_CLOSE} """ASGI events that terminate a session scope.""" python-advanced-alchemy-1.0.1/advanced_alchemy/extensions/litestar/plugins/init/config/engine.py000066400000000000000000000022301476663714600332430ustar00rootroot00000000000000from dataclasses import dataclass from typing import Any, Callable from litestar.serialization import decode_json, encode_json from advanced_alchemy.config import EngineConfig as _EngineConfig __all__ = ("EngineConfig",) def serializer(value: Any) -> str: """Serialize JSON field values. Args: value: Any json serializable value. Returns: JSON string. 
""" return encode_json(value).decode("utf-8") @dataclass class EngineConfig(_EngineConfig): """Configuration for SQLAlchemy's :class:`Engine `. For details see: https://docs.sqlalchemy.org/en/20/core/engines.html """ json_deserializer: Callable[[str], Any] = decode_json """For dialects that support the :class:`JSON ` datatype, this is a Python callable that will convert a JSON string to a Python object. By default, this is set to Litestar's decode_json function.""" json_serializer: Callable[[Any], str] = serializer """For dialects that support the JSON datatype, this is a Python callable that will render a given object as JSON. By default, Litestar's encode_json function is used.""" python-advanced-alchemy-1.0.1/advanced_alchemy/extensions/litestar/plugins/init/config/sync.py000066400000000000000000000254101476663714600327570ustar00rootroot00000000000000from contextlib import asynccontextmanager from dataclasses import dataclass, field from typing import TYPE_CHECKING, Any, Callable, Literal, Optional, Union, cast from litestar.cli._utils import console from litestar.constants import HTTP_RESPONSE_START from sqlalchemy import Engine from sqlalchemy.exc import OperationalError from sqlalchemy.orm import Session from advanced_alchemy.base import metadata_registry from advanced_alchemy.config.sync import SQLAlchemySyncConfig as _SQLAlchemySyncConfig from advanced_alchemy.extensions.litestar._utils import ( delete_aa_scope_state, get_aa_scope_state, set_aa_scope_state, ) from advanced_alchemy.extensions.litestar.plugins.init.config.common import ( SESSION_SCOPE_KEY, SESSION_TERMINUS_ASGI_EVENTS, ) from advanced_alchemy.extensions.litestar.plugins.init.config.engine import EngineConfig if TYPE_CHECKING: from collections.abc import AsyncGenerator from litestar import Litestar from litestar.datastructures.state import State from litestar.types import BeforeMessageSendHookHandler, Message, Scope __all__ = ( "SQLAlchemySyncConfig", "autocommit_before_send_handler", 
"autocommit_handler_maker", "default_before_send_handler", "default_handler_maker", ) def default_handler_maker( session_scope_key: str = SESSION_SCOPE_KEY, ) -> "Callable[[Message, Scope], None]": """Set up the handler to issue a transaction commit or rollback based on specified status codes Args: session_scope_key: The key to use within the application state Returns: The handler callable """ def handler(message: "Message", scope: "Scope") -> None: """Handle commit/rollback, closing and cleaning up sessions before sending. Args: message: ASGI-``Message`` scope: An ASGI-``Scope`` Returns: None """ session = cast("Optional[Session]", get_aa_scope_state(scope, session_scope_key)) if session and message["type"] in SESSION_TERMINUS_ASGI_EVENTS: session.close() delete_aa_scope_state(scope, session_scope_key) return handler default_before_send_handler = default_handler_maker() def autocommit_handler_maker( commit_on_redirect: bool = False, extra_commit_statuses: "Optional[set[int]]" = None, extra_rollback_statuses: "Optional[set[int]]" = None, session_scope_key: str = SESSION_SCOPE_KEY, ) -> "Callable[[Message, Scope], None]": """Set up the handler to issue a transaction commit or rollback based on specified status codes Args: commit_on_redirect: Issue a commit when the response status is a redirect (``3XX``) extra_commit_statuses: A set of additional status codes that trigger a commit extra_rollback_statuses: A set of additional status codes that trigger a rollback session_scope_key: The key to use within the application state Returns: The handler callable """ if extra_commit_statuses is None: extra_commit_statuses = set() if extra_rollback_statuses is None: extra_rollback_statuses = set() if len(extra_commit_statuses & extra_rollback_statuses) > 0: msg = "Extra rollback statuses and commit statuses must not share any status codes" raise ValueError(msg) commit_range = range(200, 400 if commit_on_redirect else 300) def handler(message: "Message", scope: "Scope") -> None: 
"""Handle commit/rollback, closing and cleaning up sessions before sending. Args: message: ASGI-``Message`` scope: An ASGI-``Scope`` Returns: None """ session = cast("Optional[Session]", get_aa_scope_state(scope, session_scope_key)) try: if session is not None and message["type"] == HTTP_RESPONSE_START: if (message["status"] in commit_range or message["status"] in extra_commit_statuses) and message[ "status" ] not in extra_rollback_statuses: session.commit() else: session.rollback() finally: if session and message["type"] in SESSION_TERMINUS_ASGI_EVENTS: session.close() delete_aa_scope_state(scope, session_scope_key) return handler autocommit_before_send_handler = autocommit_handler_maker() @dataclass class SQLAlchemySyncConfig(_SQLAlchemySyncConfig): """Litestar Sync SQLAlchemy Configuration.""" before_send_handler: Optional[ Union["BeforeMessageSendHookHandler", Literal["autocommit", "autocommit_include_redirects"]] ] = None """Handler to call before the ASGI message is sent. The handler should handle closing the session stored in the ASGI scope, if it's still open, and committing and uncommitted data. """ engine_dependency_key: str = "db_engine" """Key to use for the dependency injection of database engines.""" session_dependency_key: str = "db_session" """Key to use for the dependency injection of database sessions.""" engine_app_state_key: str = "db_engine" """Key under which to store the SQLAlchemy engine in the application :class:`State <.datastructures.State>` instance. """ session_maker_app_state_key: str = "session_maker_class" """Key under which to store the SQLAlchemy :class:`sessionmaker ` in the application :class:`State <.datastructures.State>` instance. """ session_scope_key: str = SESSION_SCOPE_KEY """Key under which to store the SQLAlchemy scope in the application.""" engine_config: EngineConfig = field(default_factory=EngineConfig) # pyright: ignore[reportIncompatibleVariableOverride] """Configuration for the SQLAlchemy engine. 
The configuration options are documented in the SQLAlchemy documentation. """ set_default_exception_handler: bool = True """Sets the default exception handler on application start.""" def _ensure_unique(self, registry_name: str, key: str, new_key: Optional[str] = None, _iter: int = 0) -> str: new_key = new_key if new_key is not None else key if new_key in getattr(self.__class__, registry_name, {}): _iter += 1 new_key = self._ensure_unique(registry_name, key, f"{key}_{_iter}", _iter) return new_key def __post_init__(self) -> None: self.session_scope_key = self._ensure_unique("_SESSION_SCOPE_KEY_REGISTRY", self.session_scope_key) self.engine_app_state_key = self._ensure_unique("_ENGINE_APP_STATE_KEY_REGISTRY", self.engine_app_state_key) self.session_maker_app_state_key = self._ensure_unique( "_SESSIONMAKER_APP_STATE_KEY_REGISTRY", self.session_maker_app_state_key, ) self.__class__._SESSION_SCOPE_KEY_REGISTRY.add(self.session_scope_key) # noqa: SLF001 self.__class__._ENGINE_APP_STATE_KEY_REGISTRY.add(self.engine_app_state_key) # noqa: SLF001 self.__class__._SESSIONMAKER_APP_STATE_KEY_REGISTRY.add(self.session_maker_app_state_key) # noqa: SLF001 if self.before_send_handler is None: self.before_send_handler = default_handler_maker(session_scope_key=self.session_scope_key) if self.before_send_handler == "autocommit": self.before_send_handler = autocommit_handler_maker(session_scope_key=self.session_scope_key) if self.before_send_handler == "autocommit_include_redirects": self.before_send_handler = autocommit_handler_maker( session_scope_key=self.session_scope_key, commit_on_redirect=True, ) super().__post_init__() def create_session_maker(self) -> "Callable[[], Session]": """Get a session maker. If none exists yet, create one. Returns: Session factory used by the plugin. 
""" if self.session_maker: return self.session_maker session_kws = self.session_config_dict if session_kws.get("bind") is None: session_kws["bind"] = self.get_engine() return self.session_maker_class(**session_kws) @asynccontextmanager async def lifespan( self, app: "Litestar", ) -> "AsyncGenerator[None, None]": deps = self.create_app_state_items() app.state.update(deps) try: if self.create_all: self.create_all_metadata(app) yield finally: if self.engine_dependency_key in deps: engine = deps[self.engine_dependency_key] if hasattr(engine, "dispose"): cast("Engine", engine).dispose() def provide_engine(self, state: "State") -> "Engine": """Create an engine instance. Args: state: The ``Litestar.state`` instance. Returns: An engine instance. """ return cast("Engine", state.get(self.engine_app_state_key)) def provide_session(self, state: "State", scope: "Scope") -> "Session": """Create a session instance. Args: state: The ``Litestar.state`` instance. scope: The current connection's scope. Returns: A session instance. """ session = cast("Optional[Session]", get_aa_scope_state(scope, self.session_scope_key)) if session is None: session_maker = cast("Callable[[], Session]", state[self.session_maker_app_state_key]) session = session_maker() set_aa_scope_state(scope, self.session_scope_key, session) return session @property def signature_namespace(self) -> "dict[str, Any]": """Return the plugin's signature namespace. Returns: A string keyed dict of names to be added to the namespace for signature forward reference resolution. """ return {"Engine": Engine, "Session": Session} def create_all_metadata(self, app: "Litestar") -> None: """Create all metadata Args: app (Litestar): The ``Litestar`` instance """ with self.get_engine().begin() as conn: try: metadata_registry.get(self.bind_key).create_all(bind=conn) except OperationalError as exc: console.print(f"[bold red] * Could not create target metadata. 
Reason: {exc}") def create_app_state_items(self) -> "dict[str, Any]": """Key/value pairs to be stored in application state.""" return { self.engine_app_state_key: self.get_engine(), self.session_maker_app_state_key: self.create_session_maker(), } def update_app_state(self, app: "Litestar") -> None: """Set the app state with engine and session. Args: app: The ``Litestar`` instance. """ app.state.update(self.create_app_state_items()) python-advanced-alchemy-1.0.1/advanced_alchemy/extensions/litestar/plugins/init/plugin.py000066400000000000000000000141741476663714600320410ustar00rootroot00000000000000import contextlib from collections.abc import Sequence from typing import TYPE_CHECKING, Any, Union, cast from litestar.di import Provide from litestar.dto import DTOData from litestar.params import Dependency, Parameter from litestar.plugins import CLIPlugin, InitPluginProtocol from sqlalchemy.ext.asyncio import AsyncSession, async_scoped_session from sqlalchemy.orm import Session, scoped_session from advanced_alchemy.exceptions import ImproperConfigurationError, RepositoryError from advanced_alchemy.extensions.litestar.exception_handler import exception_to_http_response from advanced_alchemy.extensions.litestar.plugins import _slots_base from advanced_alchemy.filters import ( BeforeAfter, CollectionFilter, FilterTypes, LimitOffset, NotInCollectionFilter, NotInSearchFilter, OnBeforeAfter, OrderBy, SearchFilter, ) from advanced_alchemy.service import ModelDictListT, ModelDictT, ModelDTOT, ModelOrRowMappingT, ModelT, OffsetPagination if TYPE_CHECKING: from click import Group from litestar.config.app import AppConfig from litestar.types import BeforeMessageSendHookHandler from advanced_alchemy.extensions.litestar.plugins.init.config import SQLAlchemyAsyncConfig, SQLAlchemySyncConfig __all__ = ("SQLAlchemyInitPlugin",) signature_namespace_values: dict[str, Any] = { "BeforeAfter": BeforeAfter, "OnBeforeAfter": OnBeforeAfter, "CollectionFilter": CollectionFilter, 
"LimitOffset": LimitOffset, "OrderBy": OrderBy, "SearchFilter": SearchFilter, "NotInCollectionFilter": NotInCollectionFilter, "NotInSearchFilter": NotInSearchFilter, "FilterTypes": FilterTypes, "OffsetPagination": OffsetPagination, "Parameter": Parameter, "Dependency": Dependency, "DTOData": DTOData, "Sequence": Sequence, "ModelT": ModelT, "ModelDictT": ModelDictT, "ModelDTOT": ModelDTOT, "ModelDictListT": ModelDictListT, "ModelOrRowMappingT": ModelOrRowMappingT, "Session": Session, "scoped_session": scoped_session, "AsyncSession": AsyncSession, "async_scoped_session": async_scoped_session, } class SQLAlchemyInitPlugin(InitPluginProtocol, CLIPlugin, _slots_base.SlotsBase): """SQLAlchemy application lifecycle configuration.""" def __init__( self, config: Union[ "SQLAlchemyAsyncConfig", "SQLAlchemySyncConfig", "Sequence[Union[SQLAlchemyAsyncConfig, SQLAlchemySyncConfig]]", ], ) -> None: """Initialize ``SQLAlchemyPlugin``. Args: config: configure DB connection and hook handlers and dependencies. """ self._config = config @property def config(self) -> "Sequence[Union[SQLAlchemyAsyncConfig, SQLAlchemySyncConfig]]": return self._config if isinstance(self._config, Sequence) else [self._config] def on_cli_init(self, cli: "Group") -> None: from advanced_alchemy.extensions.litestar.cli import database_group cli.add_command(database_group) def _validate_config(self) -> None: configs = self._config if isinstance(self._config, Sequence) else [self._config] scope_keys = {config.session_scope_key for config in configs} engine_keys = {config.engine_dependency_key for config in configs} session_keys = {config.session_dependency_key for config in configs} if len(configs) > 1 and any(len(i) != len(configs) for i in (scope_keys, engine_keys, session_keys)): raise ImproperConfigurationError( detail="When using multiple configurations, please ensure the `session_dependency_key` and `engine_dependency_key` settings are unique across all configs. 
Additionally, iF you are using a custom `before_send` handler, ensure `session_scope_key` is unique.", ) def on_app_init(self, app_config: "AppConfig") -> "AppConfig": """Configure application for use with SQLAlchemy. Args: app_config: The :class:`AppConfig <.config.app.AppConfig>` instance. """ self._validate_config() with contextlib.suppress(ImportError): from asyncpg.pgproto import pgproto # pyright: ignore[reportMissingImports] signature_namespace_values.update({"pgproto.UUID": pgproto.UUID}) app_config.type_encoders = {pgproto.UUID: str, **(app_config.type_encoders or {})} with contextlib.suppress(ImportError): import uuid_utils # pyright: ignore[reportMissingImports] signature_namespace_values.update({"uuid_utils.UUID": uuid_utils.UUID}) # pyright: ignore[reportUnknownMemberType] app_config.type_encoders = {uuid_utils.UUID: str, **(app_config.type_encoders or {})} # pyright: ignore[reportUnknownMemberType] app_config.type_decoders = [ (lambda x: x is uuid_utils.UUID, lambda t, v: t(str(v))), # pyright: ignore[reportUnknownMemberType] *(app_config.type_decoders or []), ] configure_exception_handler = False for config in self.config: if config.set_default_exception_handler: configure_exception_handler = True signature_namespace_values.update(config.signature_namespace) app_config.lifespan.append(config.lifespan) # pyright: ignore[reportUnknownMemberType] app_config.dependencies.update( { config.engine_dependency_key: Provide(config.provide_engine, sync_to_thread=False), config.session_dependency_key: Provide(config.provide_session, sync_to_thread=False), }, ) app_config.before_send.append(cast("BeforeMessageSendHookHandler", config.before_send_handler)) app_config.signature_namespace.update(signature_namespace_values) if configure_exception_handler and not any( isinstance(exc, int) or issubclass(exc, RepositoryError) for exc in app_config.exception_handlers # pyright: ignore[reportUnknownMemberType] ): app_config.exception_handlers.update({RepositoryError: 
exception_to_http_response}) # pyright: ignore[reportUnknownMemberType] return app_config python-advanced-alchemy-1.0.1/advanced_alchemy/extensions/litestar/plugins/serialization.py000066400000000000000000000026461476663714600324560ustar00rootroot00000000000000from typing import Any from litestar.plugins import SerializationPlugin from litestar.typing import FieldDefinition from sqlalchemy.orm import DeclarativeBase from advanced_alchemy.extensions.litestar.dto import SQLAlchemyDTO from advanced_alchemy.extensions.litestar.plugins import _slots_base class SQLAlchemySerializationPlugin(SerializationPlugin, _slots_base.SlotsBase): def __init__(self) -> None: self._type_dto_map: dict[type[DeclarativeBase], type[SQLAlchemyDTO[Any]]] = {} def supports_type(self, field_definition: FieldDefinition) -> bool: return ( field_definition.is_collection and field_definition.has_inner_subclass_of(DeclarativeBase) ) or field_definition.is_subclass_of(DeclarativeBase) def create_dto_for_type(self, field_definition: FieldDefinition) -> type[SQLAlchemyDTO[Any]]: # assumes that the type is a container of SQLAlchemy models or a single SQLAlchemy model annotation = next( ( inner_type.annotation for inner_type in field_definition.inner_types if inner_type.is_subclass_of(DeclarativeBase) ), field_definition.annotation, ) if annotation in self._type_dto_map: return self._type_dto_map[annotation] self._type_dto_map[annotation] = dto_type = SQLAlchemyDTO[annotation] # type:ignore[valid-type] return dto_type python-advanced-alchemy-1.0.1/advanced_alchemy/extensions/litestar/providers.py000066400000000000000000000472271476663714600301410ustar00rootroot00000000000000# ruff: noqa: B008, PGH003 """Application dependency providers generators. This module contains functions to create dependency providers for services and filters. You should not have modify this module very often and should only be invoked under normal usage. 
""" import datetime import inspect from collections.abc import AsyncGenerator, Callable, Generator from typing import ( TYPE_CHECKING, Any, Literal, Optional, TypedDict, TypeVar, Union, cast, overload, ) from uuid import UUID from litestar.di import Provide from litestar.params import Dependency, Parameter from typing_extensions import NotRequired from advanced_alchemy.filters import ( BeforeAfter, CollectionFilter, FilterTypes, LimitOffset, OrderBy, SearchFilter, ) from advanced_alchemy.service import ( Empty, EmptyType, ErrorMessages, LoadSpec, ModelT, SQLAlchemyAsyncRepositoryService, SQLAlchemySyncRepositoryService, ) if TYPE_CHECKING: from sqlalchemy import Select from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.orm import Session from advanced_alchemy.config import SQLAlchemyAsyncConfig, SQLAlchemySyncConfig DTorNone = Optional[datetime.datetime] StringOrNone = Optional[str] UuidOrNone = Optional[UUID] IntOrNone = Optional[int] BooleanOrNone = Optional[bool] SortOrder = Literal["asc", "desc"] SortOrderOrNone = Optional[SortOrder] AsyncServiceT_co = TypeVar("AsyncServiceT_co", bound=SQLAlchemyAsyncRepositoryService[Any], covariant=True) SyncServiceT_co = TypeVar("SyncServiceT_co", bound=SQLAlchemySyncRepositoryService[Any], covariant=True) class DependencyDefaults: FILTERS_DEPENDENCY_KEY: str = "filters" """Key for the filters dependency.""" CREATED_FILTER_DEPENDENCY_KEY: str = "created_filter" """Key for the created filter dependency.""" ID_FILTER_DEPENDENCY_KEY: str = "id_filter" """Key for the id filter dependency.""" LIMIT_OFFSET_DEPENDENCY_KEY: str = "limit_offset" """Key for the limit offset dependency.""" UPDATED_FILTER_DEPENDENCY_KEY: str = "updated_filter" """Key for the updated filter dependency.""" ORDER_BY_DEPENDENCY_KEY: str = "order_by" """Key for the order by dependency.""" SEARCH_FILTER_DEPENDENCY_KEY: str = "search_filter" """Key for the search filter dependency.""" DEFAULT_PAGINATION_SIZE: int = 20 """Default pagination 
size.""" DEPENDENCY_DEFAULTS = DependencyDefaults() class FilterConfig(TypedDict): """Configuration for generating dynamic filters.""" id_filter: NotRequired[type[Union[UUID, int]]] """Indicates that the id filter should be enabled. When set, the type specified will be used for the :class:`CollectionFilter`.""" id_field: NotRequired[str] """The field on the model that stored the primary key or identifier.""" sort_field: NotRequired[str] """The default field to use for the sort filter.""" sort_order: NotRequired[SortOrder] """The default order to use for the sort filter.""" pagination_type: NotRequired[Literal["limit_offset"]] """When set, pagination is enabled based on the type specified.""" pagination_size: NotRequired[int] """The size of the pagination.""" search: NotRequired[str] """When set, search is enabled for the specified fields.""" search_ignore_case: NotRequired[bool] """When set, search is case insensitive by default.""" created_at: NotRequired[bool] """When set, created_at filter is enabled.""" updated_at: NotRequired[bool] """When set, updated_at filter is enabled.""" class SingletonMeta(type): """Metaclass for singleton pattern.""" _instances: dict[type, Any] = {} def __call__(cls, *args: Any, **kwargs: Any) -> Any: if cls not in cls._instances: # pyright: ignore[reportUnnecessaryContains] cls._instances[cls] = super().__call__(*args, **kwargs) return cls._instances[cls] class DependencyCache(metaclass=SingletonMeta): """Simple dependency cache for the application. 
This is used to help memoize dependencies that are generated dynamically.""" def __init__(self) -> None: self.dependencies: dict[Union[int, str], dict[str, Provide]] = {} def add_dependencies(self, key: Union[int, str], dependencies: dict[str, Provide]) -> None: self.dependencies[key] = dependencies def get_dependencies(self, key: Union[int, str]) -> Optional[dict[str, Provide]]: return self.dependencies.get(key) dep_cache = DependencyCache() @overload def create_service_provider( service_class: type["AsyncServiceT_co"], /, statement: "Optional[Select[tuple[ModelT]]]" = None, config: "Optional[SQLAlchemyAsyncConfig]" = None, error_messages: "Optional[Union[ErrorMessages, EmptyType]]" = Empty, load: "Optional[LoadSpec]" = None, execution_options: "Optional[dict[str, Any]]" = None, uniquify: Optional[bool] = None, count_with_window_function: Optional[bool] = None, ) -> Callable[..., AsyncGenerator[AsyncServiceT_co, None]]: ... @overload def create_service_provider( service_class: type["SyncServiceT_co"], /, statement: "Optional[Select[tuple[ModelT]]]" = None, config: "Optional[SQLAlchemySyncConfig]" = None, error_messages: "Optional[Union[ErrorMessages, EmptyType]]" = Empty, load: "Optional[LoadSpec]" = None, execution_options: "Optional[dict[str, Any]]" = None, uniquify: Optional[bool] = None, count_with_window_function: Optional[bool] = None, ) -> Callable[..., Generator[SyncServiceT_co, None, None]]: ... 
def create_service_provider( service_class: type[Union["AsyncServiceT_co", "SyncServiceT_co"]], /, statement: "Optional[Select[tuple[ModelT]]]" = None, config: "Optional[Union[SQLAlchemyAsyncConfig, SQLAlchemySyncConfig]]" = None, error_messages: "Optional[Union[ErrorMessages, EmptyType]]" = Empty, load: "Optional[LoadSpec]" = None, execution_options: "Optional[dict[str, Any]]" = None, uniquify: Optional[bool] = None, count_with_window_function: Optional[bool] = None, ) -> Callable[..., Union["AsyncGenerator[AsyncServiceT_co, None]", "Generator[SyncServiceT_co,None, None]"]]: """Create a dependency provider for a service.""" if issubclass(service_class, SQLAlchemyAsyncRepositoryService) or service_class is SQLAlchemyAsyncRepositoryService: # type: ignore[comparison-overlap] async def provide_async_service( db_session: "Optional[AsyncSession]" = None, ) -> "AsyncGenerator[AsyncServiceT_co, None]": # type: ignore[union-attr,unused-ignore] async with service_class.new( # type: ignore[union-attr,unused-ignore] session=db_session, # type: ignore[arg-type, unused-ignore] statement=statement, config=cast("Optional[SQLAlchemyAsyncConfig]", config), # type: ignore[arg-type] error_messages=error_messages, load=load, execution_options=execution_options, uniquify=uniquify, count_with_window_function=count_with_window_function, ) as service: yield service return provide_async_service def provide_sync_service( db_session: "Optional[Session]" = None, ) -> "Generator[SyncServiceT_co, None, None]": with service_class.new( session=db_session, # type: ignore[arg-type, unused-ignore] statement=statement, config=cast("Optional[SQLAlchemySyncConfig]", config), error_messages=error_messages, load=load, execution_options=execution_options, uniquify=uniquify, count_with_window_function=count_with_window_function, ) as service: yield service return provide_sync_service def create_service_dependencies( service_class: type[Union["AsyncServiceT_co", "SyncServiceT_co"]], /, key: str, statement: 
"Optional[Select[tuple[ModelT]]]" = None, config: "Optional[Union[SQLAlchemyAsyncConfig, SQLAlchemySyncConfig]]" = None, error_messages: "Optional[Union[ErrorMessages, EmptyType]]" = Empty, load: "Optional[LoadSpec]" = None, execution_options: "Optional[dict[str, Any]]" = None, filters: "Optional[FilterConfig]" = None, uniquify: Optional[bool] = None, count_with_window_function: Optional[bool] = None, dep_defaults: "DependencyDefaults" = DEPENDENCY_DEFAULTS, ) -> dict[str, Provide]: """Create a dependency provider for the combined filter function. Args: key: The key to use for the dependency provider. service_class: The service class to create a dependency provider for. statement: The statement to use for the service. config: The configuration to use for the service. error_messages: The error messages to use for the service. load: The load spec to use for the service. execution_options: The execution options to use for the service. filters: The filter configuration to use for the service. uniquify: Whether to uniquify the service. count_with_window_function: Whether to count with a window function. dep_defaults: The dependency defaults to use for the service. Returns: A dictionary of dependency providers for the service. 
""" if issubclass(service_class, SQLAlchemyAsyncRepositoryService) or service_class is SQLAlchemyAsyncRepositoryService: # type: ignore[comparison-overlap] svc = create_service_provider( # type: ignore[type-var,misc,unused-ignore] service_class, statement, cast("Optional[SQLAlchemyAsyncConfig]", config), error_messages, load, execution_options, uniquify, count_with_window_function, ) deps = {key: Provide(svc)} else: svc = create_service_provider( # type: ignore[assignment] service_class, statement, cast("Optional[SQLAlchemySyncConfig]", config), error_messages, load, execution_options, uniquify, count_with_window_function, ) deps = {key: Provide(svc, sync_to_thread=False)} if filters: deps.update(create_filter_dependencies(filters, dep_defaults)) return deps def create_filter_dependencies( config: FilterConfig, dep_defaults: DependencyDefaults = DEPENDENCY_DEFAULTS ) -> dict[str, Provide]: """Create a dependency provider for the combined filter function. Args: config: FilterConfig instance with desired settings. dep_defaults: Dependency defaults to use for the filter dependencies Returns: A dependency provider function for the combined filter function. """ cache_key = sum(map(hash, config.items())) deps = dep_cache.get_dependencies(cache_key) if deps is not None: return deps deps = _create_statement_filters(config, dep_defaults) dep_cache.add_dependencies(cache_key, deps) return deps def _create_statement_filters( config: FilterConfig, dep_defaults: DependencyDefaults = DEPENDENCY_DEFAULTS ) -> dict[str, Provide]: """Create filter dependencies based on configuration. 
Args: config (FilterConfig): Configuration dictionary specifying which filters to enable dep_defaults (DependencyDefaults): Dependency defaults to use for the filter dependencies Returns: dict[str, Provide]: Dictionary of filter provider functions """ filters: dict[str, Provide] = {} if config.get("id_filter", False): def provide_id_filter( # pyright: ignore[reportUnknownParameterType] ids: Optional[list[str]] = Parameter(query="ids", default=None, required=False), ) -> CollectionFilter: # pyright: ignore[reportMissingTypeArgument] return CollectionFilter(field_name=config.get("id_field", "id"), values=ids) filters[dep_defaults.ID_FILTER_DEPENDENCY_KEY] = Provide(provide_id_filter, sync_to_thread=False) # pyright: ignore[reportUnknownArgumentType] if config.get("created_at", False): def provide_created_filter( before: DTorNone = Parameter(query="createdBefore", default=None, required=False), after: DTorNone = Parameter(query="createdAfter", default=None, required=False), ) -> BeforeAfter: return BeforeAfter("created_at", before, after) filters[dep_defaults.CREATED_FILTER_DEPENDENCY_KEY] = Provide(provide_created_filter, sync_to_thread=False) if config.get("updated_at", False): def provide_updated_filter( before: DTorNone = Parameter(query="updatedBefore", default=None, required=False), after: DTorNone = Parameter(query="updatedAfter", default=None, required=False), ) -> BeforeAfter: return BeforeAfter("updated_at", before, after) filters[dep_defaults.UPDATED_FILTER_DEPENDENCY_KEY] = Provide(provide_updated_filter, sync_to_thread=False) if config.get("pagination_type") == "limit_offset": def provide_limit_offset_pagination( current_page: int = Parameter(ge=1, query="currentPage", default=1, required=False), page_size: int = Parameter( query="pageSize", ge=1, default=config.get("pagination_size", dep_defaults.DEFAULT_PAGINATION_SIZE), required=False, ), ) -> LimitOffset: return LimitOffset(page_size, page_size * (current_page - 1)) 
filters[dep_defaults.LIMIT_OFFSET_DEPENDENCY_KEY] = Provide( provide_limit_offset_pagination, sync_to_thread=False ) if search_fields := config.get("search"): def provide_search_filter( search_string: StringOrNone = Parameter( title="Field to search", query="searchString", default=None, required=False, ), ignore_case: BooleanOrNone = Parameter( title="Search should be case sensitive", query="searchIgnoreCase", default=config.get("search_ignore_case", False), required=False, ), ) -> SearchFilter: return SearchFilter( field_name=set(search_fields.split(",")), value=search_string, # type: ignore[arg-type] ignore_case=ignore_case or False, ) filters[dep_defaults.SEARCH_FILTER_DEPENDENCY_KEY] = Provide(provide_search_filter, sync_to_thread=False) if sort_field := config.get("sort_field"): def provide_order_by( field_name: StringOrNone = Parameter( title="Order by field", query="orderBy", default=sort_field, required=False, ), sort_order: SortOrderOrNone = Parameter( title="Field to search", query="sortOrder", default=config.get("sort_order", "desc"), required=False, ), ) -> OrderBy: return OrderBy(field_name=field_name, sort_order=sort_order) # type: ignore[arg-type] filters[dep_defaults.ORDER_BY_DEPENDENCY_KEY] = Provide(provide_order_by, sync_to_thread=False) if filters: filters[dep_defaults.FILTERS_DEPENDENCY_KEY] = Provide( _create_filter_aggregate_function(config), sync_to_thread=False ) return filters def _create_filter_aggregate_function(config: FilterConfig) -> Callable[..., list[FilterTypes]]: """Create a filter function based on the provided configuration. Args: config: The filter configuration. Returns: A function that returns a list of filters based on the configuration. 
""" parameters: dict[str, inspect.Parameter] = {} annotations: dict[str, Any] = {} # Build parameters based on config if cls := config.get("id_filter"): parameters["id_filter"] = inspect.Parameter( name="id_filter", kind=inspect.Parameter.POSITIONAL_OR_KEYWORD, default=Dependency(skip_validation=True), annotation=CollectionFilter[cls], # type: ignore[valid-type] ) annotations["id_filter"] = CollectionFilter[cls] # type: ignore[valid-type] if config.get("created_at"): parameters["created_filter"] = inspect.Parameter( name="created_filter", kind=inspect.Parameter.POSITIONAL_OR_KEYWORD, default=Dependency(skip_validation=True), annotation=BeforeAfter, ) annotations["created_filter"] = BeforeAfter if config.get("updated_at"): parameters["updated_filter"] = inspect.Parameter( name="updated_filter", kind=inspect.Parameter.POSITIONAL_OR_KEYWORD, default=Dependency(skip_validation=True), annotation=BeforeAfter, ) annotations["updated_filter"] = BeforeAfter if config.get("search"): parameters["search_filter"] = inspect.Parameter( name="search_filter", kind=inspect.Parameter.POSITIONAL_OR_KEYWORD, default=Dependency(skip_validation=True), annotation=SearchFilter, ) annotations["search_filter"] = SearchFilter if config.get("pagination_type") == "limit_offset": parameters["limit_offset"] = inspect.Parameter( name="limit_offset", kind=inspect.Parameter.POSITIONAL_OR_KEYWORD, default=Dependency(skip_validation=True), annotation=LimitOffset, ) annotations["limit_offset"] = LimitOffset if config.get("sort_field"): parameters["order_by"] = inspect.Parameter( name="order_by", kind=inspect.Parameter.POSITIONAL_OR_KEYWORD, default=Dependency(skip_validation=True), annotation=OrderBy, ) annotations["order_by"] = OrderBy def provide_filters(**kwargs: FilterTypes) -> list[FilterTypes]: """Provide filter dependencies based on configuration. Args: **kwargs: Filter parameters dynamically provided based on configuration. Returns: list[FilterTypes]: List of configured filters. 
""" filters: list[FilterTypes] = [] if id_filter := kwargs.get("id_filter"): filters.append(id_filter) if created_filter := kwargs.get("created_filter"): filters.append(created_filter) if limit_offset := kwargs.get("limit_offset"): filters.append(limit_offset) if updated_filter := kwargs.get("updated_filter"): filters.append(updated_filter) if ( (search_filter := cast("Optional[SearchFilter]", kwargs.get("search_filter"))) and search_filter is not None # pyright: ignore[reportUnnecessaryComparison] and search_filter.field_name is not None # pyright: ignore[reportUnnecessaryComparison] and search_filter.value is not None # pyright: ignore[reportUnnecessaryComparison] ): filters.append(search_filter) if ( (order_by := cast("Optional[OrderBy]", kwargs.get("order_by"))) and order_by is not None # pyright: ignore[reportUnnecessaryComparison] and order_by.field_name is not None # pyright: ignore[reportUnnecessaryComparison] ): filters.append(order_by) return filters # Set both signature and annotations provide_filters.__signature__ = inspect.Signature( # type: ignore parameters=list(parameters.values()), return_annotation=list[FilterTypes], ) provide_filters.__annotations__ = annotations provide_filters.__annotations__["return"] = list[FilterTypes] return provide_filters python-advanced-alchemy-1.0.1/advanced_alchemy/extensions/sanic/000077500000000000000000000000001476663714600250045ustar00rootroot00000000000000python-advanced-alchemy-1.0.1/advanced_alchemy/extensions/sanic/__init__.py000066400000000000000000000015411476663714600271160ustar00rootroot00000000000000from advanced_alchemy import base, exceptions, filters, mixins, operations, repository, service, types, utils from advanced_alchemy.alembic.commands import AlembicCommands from advanced_alchemy.config import ( AlembicAsyncConfig, AlembicSyncConfig, AsyncSessionConfig, SyncSessionConfig, ) from advanced_alchemy.extensions.sanic.config import EngineConfig, SQLAlchemyAsyncConfig, SQLAlchemySyncConfig from 
advanced_alchemy.extensions.sanic.extension import AdvancedAlchemy __all__ = ( "AdvancedAlchemy", "AlembicAsyncConfig", "AlembicCommands", "AlembicSyncConfig", "AsyncSessionConfig", "EngineConfig", "SQLAlchemyAsyncConfig", "SQLAlchemySyncConfig", "SyncSessionConfig", "base", "exceptions", "filters", "mixins", "operations", "repository", "service", "types", "utils", ) python-advanced-alchemy-1.0.1/advanced_alchemy/extensions/sanic/config.py000066400000000000000000000550421476663714600266310ustar00rootroot00000000000000"""Configuration classes for Sanic integration. This module provides configuration classes for integrating SQLAlchemy with Sanic applications, including both synchronous and asynchronous database configurations. """ import asyncio import contextlib from dataclasses import dataclass, field from typing import Any, Callable, Optional, cast from click import echo from sanic import HTTPResponse, Request, Sanic from sqlalchemy.exc import OperationalError from advanced_alchemy.exceptions import ImproperConfigurationError try: from sanic_ext import Extend SANIC_INSTALLED = True except ModuleNotFoundError: # pragma: no cover SANIC_INSTALLED = False # pyright: ignore[reportConstantRedefinition] Extend = type("Extend", (), {}) # type: ignore # noqa: PGH003 from sqlalchemy.ext.asyncio import AsyncEngine, AsyncSession, async_sessionmaker from sqlalchemy.orm import Session, sessionmaker from typing_extensions import Literal from advanced_alchemy._serialization import decode_json, encode_json from advanced_alchemy.base import metadata_registry from advanced_alchemy.config import EngineConfig as _EngineConfig from advanced_alchemy.config.asyncio import SQLAlchemyAsyncConfig as _SQLAlchemyAsyncConfig from advanced_alchemy.config.sync import SQLAlchemySyncConfig as _SQLAlchemySyncConfig from advanced_alchemy.service import schema_dump def _make_unique_context_key(app: "Sanic[Any, Any]", key: str) -> str: # pragma: no cover """Generates a unique context key for the Sanic 
application. Ensures that the key does not already exist in the application's state. Args: app (sanic.Sanic): The Sanic application instance. key (str): The base key name. Returns: str: A unique key name. """ i = 0 while True: if not hasattr(app.ctx, key): return key key = f"{key}_{i}" i += i def serializer(value: Any) -> str: """Serialize JSON field values. Args: value: Any JSON serializable value. Returns: str: JSON string representation of the value. """ return encode_json(schema_dump(value)) @dataclass class EngineConfig(_EngineConfig): """Configuration for SQLAlchemy's Engine. This class extends the base EngineConfig with Sanic-specific JSON serialization options. For details see: https://docs.sqlalchemy.org/en/20/core/engines.html Attributes: json_deserializer: Callable for converting JSON strings to Python objects. json_serializer: Callable for converting Python objects to JSON strings. """ json_deserializer: Callable[[str], Any] = decode_json """For dialects that support the :class:`~sqlalchemy.types.JSON` datatype, this is a Python callable that will convert a JSON string to a Python object. But default, this uses the built-in serializers.""" json_serializer: Callable[[Any], str] = serializer """For dialects that support the JSON datatype, this is a Python callable that will render a given object as JSON. 
By default, By default, the built-in serializer is used.""" @dataclass class SQLAlchemyAsyncConfig(_SQLAlchemyAsyncConfig): """SQLAlchemy Async config for Sanic.""" _app: "Optional[Sanic[Any, Any]]" = None """The Sanic application instance.""" commit_mode: Literal["manual", "autocommit", "autocommit_include_redirect"] = "manual" """The commit mode to use for database sessions.""" engine_key: str = "db_engine" """Key to use for the dependency injection of database engines.""" session_key: str = "db_session" """Key to use for the dependency injection of database sessions.""" session_maker_key: str = "session_maker_class" """Key under which to store the SQLAlchemy :class:`sessionmaker ` in the application state instance. """ engine_config: EngineConfig = field(default_factory=EngineConfig) # pyright: ignore[reportIncompatibleVariableOverride] """Configuration for the SQLAlchemy engine. The configuration options are documented in the SQLAlchemy documentation. """ async def create_all_metadata(self) -> None: # pragma: no cover """Create all metadata tables in the database.""" if self.engine_instance is None: self.engine_instance = self.get_engine() async with self.engine_instance.begin() as conn: try: await conn.run_sync( metadata_registry.get(None if self.bind_key == "default" else self.bind_key).create_all ) await conn.commit() except OperationalError as exc: echo(f" * Could not create target metadata. Reason: {exc}") else: echo(" * Created target metadata.") @property def app(self) -> "Sanic[Any, Any]": """The Sanic application instance.""" if self._app is None: msg = "The Sanic application instance is not set." raise ImproperConfigurationError(msg) return self._app def init_app(self, app: "Sanic[Any, Any]", bootstrap: "Extend") -> None: # pyright: ignore[reportUnknownParameterType,reportInvalidTypeForm] """Initialize the Sanic application with this configuration. Args: app: The Sanic application instance. bootstrap: The Sanic extension bootstrap. 
""" self._app = app self.bind_key = self.bind_key or "default" _ = self.create_session_maker() self.session_key = _make_unique_context_key(app, f"advanced_alchemy_async_session_{self.session_key}") self.engine_key = _make_unique_context_key(app, f"advanced_alchemy_async_engine_{self.engine_key}") self.session_maker_key = _make_unique_context_key( app, f"advanced_alchemy_async_session_maker_{self.session_maker_key}" ) self.startup(bootstrap) # pyright: ignore[reportUnknownMemberType,reportUnknownArgumentType] def startup(self, bootstrap: "Extend") -> None: # pyright: ignore[reportUnknownParameterType,reportInvalidTypeForm] """Initialize the Sanic application with this configuration. Args: bootstrap: The Sanic extension bootstrap. """ @self.app.before_server_start # pyright: ignore[reportUnknownMemberType] async def on_startup(_: Any) -> None: # pyright: ignore[reportUnusedFunction] setattr(self.app.ctx, self.engine_key, self.get_engine()) # pyright: ignore[reportUnknownMemberType,reportUnknownArgumentType,reportOptionalMemberAccess] setattr(self.app.ctx, self.session_maker_key, self.create_session_maker()) # pyright: ignore[reportUnknownMemberType,reportUnknownArgumentType,reportOptionalMemberAccess] bootstrap.add_dependency( # pyright: ignore[reportUnknownMemberType] AsyncEngine, self.get_engine_from_request, ) bootstrap.add_dependency( # pyright: ignore[reportUnknownMemberType] async_sessionmaker[AsyncSession], self.get_sessionmaker_from_request, ) bootstrap.add_dependency( # pyright: ignore[reportUnknownMemberType] AsyncSession, self.get_session_from_request, ) await self.on_startup() @self.app.after_server_stop # pyright: ignore[reportUnknownMemberType] async def on_shutdown(_: Any) -> None: # pyright: ignore[reportUnusedFunction] if self.engine_instance is not None: await self.engine_instance.dispose() if hasattr(self.app.ctx, self.engine_key): # pyright: ignore[reportUnknownMemberType,reportUnknownArgumentType,reportOptionalMemberAccess] delattr(self.app.ctx, 
self.engine_key) # pyright: ignore[reportUnknownMemberType,reportUnknownArgumentType,reportOptionalMemberAccess] if hasattr(self.app.ctx, self.session_maker_key): # pyright: ignore[reportUnknownMemberType,reportUnknownArgumentType,reportOptionalMemberAccess] delattr(self.app.ctx, self.session_maker_key) # pyright: ignore[reportUnknownMemberType,reportUnknownArgumentType,reportOptionalMemberAccess] @self.app.middleware("request") # pyright: ignore[reportUnknownMemberType] async def on_request(request: Request) -> None: # pyright: ignore[reportUnusedFunction] session = cast("Optional[AsyncSession]", getattr(request.ctx, self.session_key, None)) if session is None: setattr(request.ctx, self.session_key, self.get_session()) @self.app.middleware("response") # type: ignore[arg-type] async def on_response(request: Request, response: HTTPResponse) -> None: # pyright: ignore[reportUnusedFunction] session = cast("Optional[AsyncSession]", getattr(request.ctx, self.session_key, None)) if session is not None: await self.session_handler(session=session, request=request, response=response) async def on_startup(self) -> None: """Initialize the Sanic application with this configuration.""" if self.create_all: await self.create_all_metadata() def create_session_maker(self) -> Callable[[], "AsyncSession"]: """Get a session maker. If none exists yet, create one. Returns: Callable[[], Session]: Session factory used by the plugin. """ if self.session_maker: return self.session_maker session_kws = self.session_config_dict if self.engine_instance is None: self.engine_instance = self.get_engine() if session_kws.get("bind") is None: session_kws["bind"] = self.engine_instance self.session_maker = self.session_maker_class(**session_kws) return self.session_maker async def session_handler( self, session: "AsyncSession", request: "Request", response: "HTTPResponse" ) -> None: # pragma: no cover """Handles the session after a request is processed. 
Applies the commit strategy and ensures the session is closed. Args: session (sqlalchemy.ext.asyncio.AsyncSession): The database session. request (sanic.Request): The incoming HTTP request. response (sanic.HTTPResponse): The outgoing HTTP response. Returns: None """ try: if (self.commit_mode == "autocommit" and 200 <= response.status < 300) or ( # noqa: PLR2004 self.commit_mode == "autocommit_include_redirect" and 200 <= response.status < 400 # noqa: PLR2004 ): await session.commit() else: await session.rollback() finally: await session.close() with contextlib.suppress(AttributeError, KeyError): delattr(request.ctx, self.session_key) def get_engine_from_request(self, request: "Request") -> AsyncEngine: """Retrieve the engine from the request context. Args: request (sanic.Request): The incoming request. Returns: AsyncEngine: The SQLAlchemy engine. """ return cast("AsyncEngine", getattr(request.app.ctx, self.engine_key, self.get_engine())) # pragma: no cover def get_sessionmaker_from_request(self, request: "Request") -> async_sessionmaker[AsyncSession]: """Retrieve the session maker from the request context. Args: request (sanic.Request): The incoming request. Returns: SessionMakerT: The session maker. """ return cast( "async_sessionmaker[AsyncSession]", getattr(request.app.ctx, self.session_maker_key, None) ) # pragma: no cover def get_session_from_request(self, request: Request) -> AsyncSession: """Retrieve the session from the request context. Args: request (sanic.Request): The incoming request. Returns: SessionT: The session associated with the request. """ return cast("AsyncSession", getattr(request.ctx, self.session_key, None)) # pragma: no cover async def close_engine(self) -> None: # pragma: no cover """Close the engine.""" if self.engine_instance is not None: await self.engine_instance.dispose() async def on_shutdown(self) -> None: # pragma: no cover """Handles the shutdown event by disposing of the SQLAlchemy engine. 
Ensures that all connections are properly closed during application shutdown. Returns: None """ await self.close_engine() if hasattr(self.app.ctx, self.engine_key): # pyright: ignore[reportUnknownMemberType,reportUnknownArgumentType,reportOptionalMemberAccess] delattr(self.app.ctx, self.engine_key) # pyright: ignore[reportUnknownMemberType,reportUnknownArgumentType,reportOptionalMemberAccess] if hasattr(self.app.ctx, self.session_maker_key): # pyright: ignore[reportUnknownMemberType,reportUnknownArgumentType,reportOptionalMemberAccess] delattr(self.app.ctx, self.session_maker_key) # pyright: ignore[reportUnknownMemberType,reportUnknownArgumentType,reportOptionalMemberAccess] @dataclass class SQLAlchemySyncConfig(_SQLAlchemySyncConfig): """SQLAlchemy Sync config for Starlette.""" _app: "Optional[Sanic[Any, Any]]" = None """The Sanic application instance.""" commit_mode: Literal["manual", "autocommit", "autocommit_include_redirect"] = "manual" """The commit mode to use for database sessions.""" engine_key: str = "db_engine" """Key to use for the dependency injection of database engines.""" session_key: str = "db_session" """Key to use for the dependency injection of database sessions.""" session_maker_key: str = "session_maker_class" """Key under which to store the SQLAlchemy :class:`sessionmaker ` in the application state instance. """ engine_config: EngineConfig = field(default_factory=EngineConfig) # pyright: ignore[reportIncompatibleVariableOverride] """Configuration for the SQLAlchemy engine. The configuration options are documented in the SQLAlchemy documentation. """ @property def app(self) -> "Sanic[Any, Any]": """The Sanic application instance.""" if self._app is None: msg = "The Sanic application instance is not set." 
raise ImproperConfigurationError(msg) return self._app async def create_all_metadata(self) -> None: # pragma: no cover """Create all metadata tables in the database.""" if self.engine_instance is None: self.engine_instance = self.get_engine() with self.engine_instance.begin() as conn: try: loop = asyncio.get_event_loop() await loop.run_in_executor( None, metadata_registry.get(None if self.bind_key == "default" else self.bind_key).create_all, conn ) except OperationalError as exc: echo(f" * Could not create target metadata. Reason: {exc}") def init_app(self, app: "Sanic[Any, Any]", bootstrap: "Extend") -> None: # pyright: ignore[reportUnknownParameterType,reportInvalidTypeForm] """Initialize the Sanic application with this configuration. Args: app: The Sanic application instance. bootstrap: The Sanic extension bootstrap. """ self._app = app self.bind_key = self.bind_key or "default" _ = self.create_session_maker() self.session_key = _make_unique_context_key(app, f"advanced_alchemy_sync_session_{self.session_key}") self.engine_key = _make_unique_context_key(app, f"advanced_alchemy_sync_engine_{self.engine_key}") self.session_maker_key = _make_unique_context_key( app, f"advanced_alchemy_sync_session_maker_{self.session_maker_key}" ) self.startup(bootstrap) # pyright: ignore[reportUnknownMemberType,reportUnknownArgumentType] def startup(self, bootstrap: "Extend") -> None: # pyright: ignore[reportUnknownParameterType,reportInvalidTypeForm] """Initialize the Sanic application with this configuration. Args: bootstrap: The Sanic extension bootstrap. 
""" @self.app.before_server_start # pyright: ignore[reportUnknownMemberType] async def on_startup(_: Any) -> None: # pyright: ignore[reportUnusedFunction] setattr(self.app.ctx, self.engine_key, self.get_engine()) # pyright: ignore[reportUnknownMemberType,reportUnknownArgumentType,reportOptionalMemberAccess] setattr(self.app.ctx, self.session_maker_key, self.create_session_maker()) # pyright: ignore[reportUnknownMemberType,reportUnknownArgumentType,reportOptionalMemberAccess] bootstrap.add_dependency( # pyright: ignore[reportUnknownMemberType] AsyncEngine, self.get_engine_from_request, ) bootstrap.add_dependency( # pyright: ignore[reportUnknownMemberType] sessionmaker[Session], self.get_sessionmaker_from_request, ) bootstrap.add_dependency( # pyright: ignore[reportUnknownMemberType] AsyncSession, self.get_session_from_request, ) await self.on_startup() @self.app.after_server_stop # pyright: ignore[reportUnknownMemberType] async def on_shutdown(_: Any) -> None: # pyright: ignore[reportUnusedFunction] await self.on_shutdown() @self.app.middleware("request") # pyright: ignore[reportUnknownMemberType] async def on_request(request: Request) -> None: # pyright: ignore[reportUnusedFunction] session = cast("Optional[Session]", getattr(request.ctx, self.session_key, None)) if session is None: setattr(request.ctx, self.session_key, self.get_session()) @self.app.middleware("response") # type: ignore[arg-type] async def on_response(request: Request, response: HTTPResponse) -> None: # pyright: ignore[reportUnusedFunction] session = cast("Optional[Session]", getattr(request.ctx, self.session_key, None)) if session is not None: await self.session_handler(session=session, request=request, response=response) async def on_startup(self) -> None: """Initialize the Sanic application with this configuration.""" if self.create_all: await self.create_all_metadata() def create_session_maker(self) -> Callable[[], "Session"]: """Get a session maker. If none exists yet, create one. 
Returns: Callable[[], Session]: Session factory used by the plugin. """ if self.session_maker: return self.session_maker session_kws = self.session_config_dict if self.engine_instance is None: self.engine_instance = self.get_engine() if session_kws.get("bind") is None: session_kws["bind"] = self.engine_instance self.session_maker = self.session_maker_class(**session_kws) return self.session_maker async def session_handler( self, session: "Session", request: "Request", response: "HTTPResponse" ) -> None: # pragma: no cover """Handles the session after a request is processed. Applies the commit strategy and ensures the session is closed. Args: session (sqlalchemy.orm.Session): The database session. request (sanic.Request): The incoming HTTP request. response (sanic.HTTPResponse): The outgoing HTTP response. Returns: None """ loop = asyncio.get_event_loop() try: if (self.commit_mode == "autocommit" and 200 <= response.status < 300) or ( # noqa: PLR2004 self.commit_mode == "autocommit_include_redirect" and 200 <= response.status < 400 # noqa: PLR2004 ): await loop.run_in_executor(None, session.commit) else: await loop.run_in_executor(None, session.rollback) finally: await loop.run_in_executor(None, session.close) with contextlib.suppress(AttributeError, KeyError): delattr(request.ctx, self.session_key) def get_engine_from_request(self, request: Request) -> "AsyncEngine": """Retrieve the engine from the request context. Args: request (sanic.Request): The incoming request. Returns: AsyncEngine: The SQLAlchemy engine. """ return cast("AsyncEngine", getattr(request.app.ctx, self.engine_key, self.get_engine())) # pragma: no cover def get_sessionmaker_from_request(self, request: Request) -> sessionmaker[Session]: """Retrieve the session maker from the request context. Args: request (sanic.Request): The incoming request. Returns: SessionMakerT: The session maker. 
""" return cast("sessionmaker[Session]", getattr(request.app.ctx, self.session_maker_key, None)) # pragma: no cover def get_session_from_request(self, request: Request) -> "Session": """Retrieve the session from the request context. Args: request (sanic.Request): The incoming request. Returns: SessionT: The session associated with the request. """ return cast("Session", getattr(request.ctx, self.session_key, None)) # pragma: no cover async def close_engine(self) -> None: # pragma: no cover """Close the engine.""" if self.engine_instance is not None: loop = asyncio.get_event_loop() await loop.run_in_executor(None, self.engine_instance.dispose) async def on_shutdown(self) -> None: # pragma: no cover """Handles the shutdown event by disposing of the SQLAlchemy engine. Ensures that all connections are properly closed during application shutdown. Returns: None """ await self.close_engine() if hasattr(self.app.ctx, self.engine_key): # pyright: ignore[reportUnknownMemberType,reportUnknownArgumentType,reportOptionalMemberAccess] delattr(self.app.ctx, self.engine_key) # pyright: ignore[reportUnknownMemberType,reportUnknownArgumentType,reportOptionalMemberAccess] if hasattr(self.app.ctx, self.session_maker_key): # pyright: ignore[reportUnknownMemberType,reportUnknownArgumentType,reportOptionalMemberAccess] delattr(self.app.ctx, self.session_maker_key) # pyright: ignore[reportUnknownMemberType,reportUnknownArgumentType,reportOptionalMemberAccess] python-advanced-alchemy-1.0.1/advanced_alchemy/extensions/sanic/extension.py000066400000000000000000000256311476663714600274010ustar00rootroot00000000000000from collections.abc import AsyncGenerator, Generator, Sequence from contextlib import asynccontextmanager, contextmanager from typing import TYPE_CHECKING, Any, Callable, Optional, Union, cast, overload from sanic import Request, Sanic from advanced_alchemy.exceptions import ImproperConfigurationError, MissingDependencyError from advanced_alchemy.extensions.sanic.config import 
SQLAlchemyAsyncConfig, SQLAlchemySyncConfig try: from sanic_ext import Extend from sanic_ext.extensions.base import Extension SANIC_INSTALLED = True except ModuleNotFoundError: # pragma: no cover SANIC_INSTALLED = False # pyright: ignore[reportConstantRedefinition] Extension = type("Extension", (), {}) # type: ignore # noqa: PGH003 Extend = type("Extend", (), {}) # type: ignore # noqa: PGH003 if TYPE_CHECKING: from sanic import Sanic from sqlalchemy import Engine from sqlalchemy.ext.asyncio import AsyncEngine, AsyncSession from sqlalchemy.orm import Session __all__ = ("AdvancedAlchemy",) class AdvancedAlchemy(Extension): # type: ignore[no-untyped-call] # pyright: ignore[reportGeneralTypeIssues,reportUntypedBaseClass] """Sanic extension for integrating Advanced Alchemy with SQLAlchemy. Args: config: One or more configurations for SQLAlchemy. app: The Sanic application instance. """ name = "AdvancedAlchemy" def __init__( self, *, sqlalchemy_config: Union[ "SQLAlchemyAsyncConfig", "SQLAlchemySyncConfig", Sequence[Union["SQLAlchemyAsyncConfig", "SQLAlchemySyncConfig"]], ], sanic_app: Optional["Sanic[Any, Any]"] = None, ) -> None: if not SANIC_INSTALLED: # pragma: no cover msg = "Could not locate either Sanic or Sanic Extensions. Both libraries must be installed to use Advanced Alchemy. 
Try: pip install sanic[ext]" raise MissingDependencyError(msg) self._config = sqlalchemy_config if isinstance(sqlalchemy_config, Sequence) else [sqlalchemy_config] self._mapped_configs: dict[str, Union[SQLAlchemyAsyncConfig, SQLAlchemySyncConfig]] = self.map_configs() self._app = sanic_app self._initialized = False if self._app is not None: self.register(self._app) def register(self, sanic_app: "Sanic[Any, Any]") -> None: """Initialize the extension with the given Sanic app.""" self._app = sanic_app Extend.register(self) # pyright: ignore[reportUnknownMemberType,reportAttributeAccessIssue] self._initialized = True @property def sanic_app(self) -> "Sanic[Any, Any]": """The Sanic app.""" if self._app is None: # pragma: no cover msg = "AdvancedAlchemy has not been initialized with a Sanic app." raise ImproperConfigurationError(msg) return self._app @property def sqlalchemy_config(self) -> Sequence[Union["SQLAlchemyAsyncConfig", "SQLAlchemySyncConfig"]]: """Current Advanced Alchemy configuration.""" return self._config def startup(self, bootstrap: "Extend") -> None: # pyright: ignore[reportUnknownParameterType,reportInvalidTypeForm] """Advanced Alchemy Sanic extension startup hook. Args: bootstrap (sanic_ext.Extend): The Sanic extension bootstrap. 
""" for config in self.sqlalchemy_config: config.init_app(self.sanic_app, bootstrap) # pyright: ignore[reportUnknownMemberType,reportUnknownArgumentType] def map_configs(self) -> dict[str, Union["SQLAlchemyAsyncConfig", "SQLAlchemySyncConfig"]]: """Maps the configs to the session bind keys.""" mapped_configs: dict[str, Union[SQLAlchemyAsyncConfig, SQLAlchemySyncConfig]] = {} for config in self.sqlalchemy_config: if config.bind_key is None: config.bind_key = "default" mapped_configs[config.bind_key] = config return mapped_configs def get_config(self, key: Optional[str] = None) -> Union["SQLAlchemyAsyncConfig", "SQLAlchemySyncConfig"]: """Get the config for the given key.""" if key is None: key = "default" if key == "default" and len(self.sqlalchemy_config) == 1: key = self.sqlalchemy_config[0].bind_key or "default" config = self._mapped_configs.get(key) if config is None: # pragma: no cover msg = f"Config with key {key} not found" raise ImproperConfigurationError(msg) return config def get_async_config(self, key: Optional[str] = None) -> "SQLAlchemyAsyncConfig": """Get the async config for the given key.""" config = self.get_config(key) if not isinstance(config, SQLAlchemyAsyncConfig): # pragma: no cover msg = "Expected an async config, but got a sync config" raise ImproperConfigurationError(msg) return config def get_sync_config(self, key: Optional[str] = None) -> "SQLAlchemySyncConfig": """Get the sync config for the given key.""" config = self.get_config(key) if not isinstance(config, SQLAlchemySyncConfig): # pragma: no cover msg = "Expected a sync config, but got an async config" raise ImproperConfigurationError(msg) return config @asynccontextmanager async def with_async_session( self, key: Optional[str] = None ) -> AsyncGenerator["AsyncSession", None]: # pragma: no cover """Context manager for getting an async session.""" config = self.get_async_config(key) async with config.get_session() as session: yield session @contextmanager def with_sync_session(self, 
key: Optional[str] = None) -> Generator["Session", None]: # pragma: no cover """Context manager for getting a sync session.""" config = self.get_sync_config(key) with config.get_session() as session: yield session @overload @staticmethod def _get_session_from_request(request: "Request", config: "SQLAlchemyAsyncConfig") -> "AsyncSession": ... @overload @staticmethod def _get_session_from_request(request: "Request", config: "SQLAlchemySyncConfig") -> "Session": ... @staticmethod def _get_session_from_request( request: "Request", config: Union["SQLAlchemyAsyncConfig", "SQLAlchemySyncConfig"], # pragma: no cover ) -> Union["Session", "AsyncSession"]: # pragma: no cover """Get the session for the given key.""" session = getattr(request.ctx, config.session_key, None) if session is None: setattr(request.ctx, config.session_key, config.get_session()) return cast("Union[Session, AsyncSession]", session) def get_session( self, request: "Request", key: Optional[str] = None ) -> Union["Session", "AsyncSession"]: # pragma: no cover """Get the session for the given key.""" config = self.get_config(key) return self._get_session_from_request(request, config) def get_async_session(self, request: "Request", key: Optional[str] = None) -> "AsyncSession": # pragma: no cover """Get the async session for the given key.""" config = self.get_async_config(key) return self._get_session_from_request(request, config) def get_sync_session(self, request: "Request", key: Optional[str] = None) -> "Session": # pragma: no cover """Get the sync session for the given key.""" config = self.get_sync_config(key) return self._get_session_from_request(request, config) def provide_session( self, key: Optional[str] = None ) -> Callable[["Request"], Union["Session", "AsyncSession"]]: # pragma: no cover """Get the session for the given key.""" config = self.get_config(key) def _get_session(request: "Request") -> Union["Session", "AsyncSession"]: return self._get_session_from_request(request, config) return 
_get_session def provide_async_session( self, key: Optional[str] = None ) -> Callable[["Request"], "AsyncSession"]: # pragma: no cover """Get the async session for the given key.""" config = self.get_async_config(key) def _get_session(request: Request) -> "AsyncSession": return self._get_session_from_request(request, config) return _get_session def provide_sync_session(self, key: Optional[str] = None) -> Callable[[Request], "Session"]: # pragma: no cover """Get the sync session for the given key.""" config = self.get_sync_config(key) def _get_session(request: Request) -> "Session": return self._get_session_from_request(request, config) return _get_session def get_engine(self, key: Optional[str] = None) -> Union["Engine", "AsyncEngine"]: # pragma: no cover """Get the engine for the given key.""" config = self.get_config(key) return config.get_engine() def get_async_engine(self, key: Optional[str] = None) -> "AsyncEngine": # pragma: no cover """Get the async engine for the given key.""" config = self.get_async_config(key) return config.get_engine() def get_sync_engine(self, key: Optional[str] = None) -> "Engine": # pragma: no cover """Get the sync engine for the given key.""" config = self.get_sync_config(key) return config.get_engine() def provide_engine( self, key: Optional[str] = None ) -> Callable[[], Union["Engine", "AsyncEngine"]]: # pragma: no cover """Get the engine for the given key.""" config = self.get_config(key) def _get_engine() -> Union["Engine", "AsyncEngine"]: return config.get_engine() return _get_engine def provide_async_engine(self, key: Optional[str] = None) -> Callable[[], "AsyncEngine"]: # pragma: no cover """Get the async engine for the given key.""" config = self.get_async_config(key) def _get_engine() -> "AsyncEngine": return config.get_engine() return _get_engine def provide_sync_engine(self, key: Optional[str] = None) -> Callable[[], "Engine"]: # pragma: no cover """Get the sync engine for the given key.""" config = 
self.get_sync_config(key) def _get_engine() -> "Engine": return config.get_engine() return _get_engine def add_session_dependency( self, session_type: type[Union["Session", "AsyncSession"]], key: Optional[str] = None ) -> None: """Add a session dependency to the Sanic app.""" self.sanic_app.ext.add_dependency(session_type, self.provide_session(key)) # pyright: ignore[reportUnknownMemberType] def add_engine_dependency( self, engine_type: type[Union["Engine", "AsyncEngine"]], key: Optional[str] = None ) -> None: """Add an engine dependency to the Sanic app.""" self.sanic_app.ext.add_dependency(engine_type, self.provide_engine(key)) # pyright: ignore[reportUnknownMemberType] python-advanced-alchemy-1.0.1/advanced_alchemy/extensions/starlette/000077500000000000000000000000001476663714600257165ustar00rootroot00000000000000python-advanced-alchemy-1.0.1/advanced_alchemy/extensions/starlette/__init__.py000066400000000000000000000017741476663714600300400ustar00rootroot00000000000000"""Starlette extension for Advanced Alchemy. This module provides Starlette integration for Advanced Alchemy, including session management and service utilities. 
""" from advanced_alchemy import base, exceptions, filters, mixins, operations, repository, service, types, utils from advanced_alchemy.alembic.commands import AlembicCommands from advanced_alchemy.config import AlembicAsyncConfig, AlembicSyncConfig, AsyncSessionConfig, SyncSessionConfig from advanced_alchemy.extensions.starlette.config import EngineConfig, SQLAlchemyAsyncConfig, SQLAlchemySyncConfig from advanced_alchemy.extensions.starlette.extension import AdvancedAlchemy __all__ = ( "AdvancedAlchemy", "AlembicAsyncConfig", "AlembicCommands", "AlembicSyncConfig", "AsyncSessionConfig", "EngineConfig", "SQLAlchemyAsyncConfig", "SQLAlchemySyncConfig", "SyncSessionConfig", "base", "exceptions", "filters", "mixins", "operations", "repository", "service", "types", "utils", ) python-advanced-alchemy-1.0.1/advanced_alchemy/extensions/starlette/config.py000066400000000000000000000361131476663714600275410ustar00rootroot00000000000000"""Configuration classes for Starlette integration. This module provides configuration classes for integrating SQLAlchemy with Starlette applications, including both synchronous and asynchronous database configurations. 
""" import contextlib from dataclasses import dataclass, field from typing import TYPE_CHECKING, Any, Callable, Optional, cast from click import echo from sqlalchemy.exc import OperationalError from starlette.concurrency import run_in_threadpool from starlette.middleware.base import BaseHTTPMiddleware from typing_extensions import Literal from advanced_alchemy._serialization import decode_json, encode_json from advanced_alchemy.base import metadata_registry from advanced_alchemy.config import EngineConfig as _EngineConfig from advanced_alchemy.config.asyncio import SQLAlchemyAsyncConfig as _SQLAlchemyAsyncConfig from advanced_alchemy.config.sync import SQLAlchemySyncConfig as _SQLAlchemySyncConfig from advanced_alchemy.service import schema_dump if TYPE_CHECKING: from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.orm import Session from starlette.applications import Starlette from starlette.middleware.base import RequestResponseEndpoint from starlette.requests import Request from starlette.responses import Response def _make_unique_state_key(app: "Starlette", key: str) -> str: # pragma: no cover """Generates a unique state key for the Starlette application. Ensures that the key does not already exist in the application's state. Args: app (starlette.applications.Starlette): The Starlette application instance. key (str): The base key name. Returns: str: A unique key name. """ i = 0 while True: if not hasattr(app.state, key): return key key = f"{key}_{i}" i += i def serializer(value: Any) -> str: """Serialize JSON field values. Args: value: Any JSON serializable value. Returns: str: JSON string representation of the value. """ return encode_json(schema_dump(value)) @dataclass class EngineConfig(_EngineConfig): """Configuration for SQLAlchemy's Engine. This class extends the base EngineConfig with Starlette-specific JSON serialization options. 
For details see: https://docs.sqlalchemy.org/en/20/core/engines.html Attributes: json_deserializer: Callable for converting JSON strings to Python objects. json_serializer: Callable for converting Python objects to JSON strings. """ json_deserializer: Callable[[str], Any] = decode_json """For dialects that support the :class:`~sqlalchemy.types.JSON` datatype, this is a Python callable that will convert a JSON string to a Python object. But default, this uses the built-in serializers.""" json_serializer: Callable[[Any], str] = serializer """For dialects that support the JSON datatype, this is a Python callable that will render a given object as JSON. By default, By default, the built-in serializer is used.""" @dataclass class SQLAlchemyAsyncConfig(_SQLAlchemyAsyncConfig): """SQLAlchemy Async config for Starlette.""" app: "Optional[Starlette]" = None """The Starlette application instance.""" commit_mode: Literal["manual", "autocommit", "autocommit_include_redirect"] = "manual" """The commit mode to use for database sessions.""" engine_key: str = "db_engine" """Key to use for the dependency injection of database engines.""" session_key: str = "db_session" """Key to use for the dependency injection of database sessions.""" session_maker_key: str = "session_maker_class" """Key under which to store the SQLAlchemy :class:`sessionmaker ` in the application state instance. """ engine_config: EngineConfig = field(default_factory=EngineConfig) # pyright: ignore[reportIncompatibleVariableOverride] """Configuration for the SQLAlchemy engine. The configuration options are documented in the SQLAlchemy documentation. 
""" async def create_all_metadata(self) -> None: # pragma: no cover """Create all metadata tables in the database.""" if self.engine_instance is None: self.engine_instance = self.get_engine() async with self.engine_instance.begin() as conn: try: await conn.run_sync( metadata_registry.get(None if self.bind_key == "default" else self.bind_key).create_all ) await conn.commit() except OperationalError as exc: echo(f" * Could not create target metadata. Reason: {exc}") else: echo(" * Created target metadata.") def init_app(self, app: "Starlette") -> None: """Initialize the Starlette application with this configuration. Args: app: The Starlette application instance. """ self.app = app self.bind_key = self.bind_key or "default" _ = self.create_session_maker() self.session_key = _make_unique_state_key(app, f"advanced_alchemy_async_session_{self.session_key}") self.engine_key = _make_unique_state_key(app, f"advanced_alchemy_async_engine_{self.engine_key}") self.session_maker_key = _make_unique_state_key( app, f"advanced_alchemy_async_session_maker_{self.session_maker_key}" ) app.add_middleware(BaseHTTPMiddleware, dispatch=self.middleware_dispatch) async def on_startup(self) -> None: """Initialize the Starlette application with this configuration.""" if self.create_all: await self.create_all_metadata() def create_session_maker(self) -> Callable[[], "AsyncSession"]: """Get a session maker. If none exists yet, create one. Returns: Callable[[], Session]: Session factory used by the plugin. 
""" if self.session_maker: return self.session_maker session_kws = self.session_config_dict if self.engine_instance is None: self.engine_instance = self.get_engine() if session_kws.get("bind") is None: session_kws["bind"] = self.engine_instance self.session_maker = self.session_maker_class(**session_kws) return self.session_maker async def session_handler( self, session: "AsyncSession", request: "Request", response: "Response" ) -> None: # pragma: no cover """Handles the session after a request is processed. Applies the commit strategy and ensures the session is closed. Args: session (sqlalchemy.ext.asyncio.AsyncSession): The database session. request (starlette.requests.Request): The incoming HTTP request. response (starlette.responses.Response): The outgoing HTTP response. Returns: None """ try: if (self.commit_mode == "autocommit" and 200 <= response.status_code < 300) or ( # noqa: PLR2004 self.commit_mode == "autocommit_include_redirect" and 200 <= response.status_code < 400 # noqa: PLR2004 ): await session.commit() else: await session.rollback() finally: await session.close() with contextlib.suppress(AttributeError, KeyError): delattr(request.state, self.session_key) async def middleware_dispatch( self, request: "Request", call_next: "RequestResponseEndpoint" ) -> "Response": # pragma: no cover """Middleware dispatch function to handle requests and responses. Processes the request, invokes the next middleware or route handler, and applies the session handler after the response is generated. Args: request (starlette.requests.Request): The incoming HTTP request. call_next (starlette.middleware.base.RequestResponseEndpoint): The next middleware or route handler. Returns: starlette.responses.Response: The HTTP response. 
""" response = await call_next(request) session = cast("Optional[AsyncSession]", getattr(request.state, self.session_key, None)) if session is not None: await self.session_handler(session=session, request=request, response=response) return response async def close_engine(self) -> None: # pragma: no cover """Close the engine.""" if self.engine_instance is not None: await self.engine_instance.dispose() async def on_shutdown(self) -> None: # pragma: no cover """Handles the shutdown event by disposing of the SQLAlchemy engine. Ensures that all connections are properly closed during application shutdown. Returns: None """ await self.close_engine() if self.app is not None: with contextlib.suppress(AttributeError, KeyError): delattr(self.app.state, self.engine_key) delattr(self.app.state, self.session_maker_key) delattr(self.app.state, self.session_key) @dataclass class SQLAlchemySyncConfig(_SQLAlchemySyncConfig): """SQLAlchemy Sync config for Starlette.""" app: "Optional[Starlette]" = None """The Starlette application instance.""" commit_mode: Literal["manual", "autocommit", "autocommit_include_redirect"] = "manual" """The commit mode to use for database sessions.""" engine_key: str = "db_engine" """Key to use for the dependency injection of database engines.""" session_key: str = "db_session" """Key to use for the dependency injection of database sessions.""" session_maker_key: str = "session_maker_class" """Key under which to store the SQLAlchemy :class:`sessionmaker ` in the application state instance. """ engine_config: EngineConfig = field(default_factory=EngineConfig) # pyright: ignore[reportIncompatibleVariableOverride] """Configuration for the SQLAlchemy engine. The configuration options are documented in the SQLAlchemy documentation. 
""" async def create_all_metadata(self) -> None: # pragma: no cover """Create all metadata tables in the database.""" if self.engine_instance is None: self.engine_instance = self.get_engine() with self.engine_instance.begin() as conn: try: await run_in_threadpool( metadata_registry.get(None if self.bind_key == "default" else self.bind_key).create_all, conn ) except OperationalError as exc: echo(f" * Could not create target metadata. Reason: {exc}") def init_app(self, app: "Starlette") -> None: """Initialize the Starlette application with this configuration. Args: app: The Starlette application instance. """ self.app = app self.bind_key = self.bind_key or "default" self.session_key = _make_unique_state_key(app, f"advanced_alchemy_sync_session_{self.session_key}") self.engine_key = _make_unique_state_key(app, f"advanced_alchemy_sync_engine_{self.engine_key}") self.session_maker_key = _make_unique_state_key( app, f"advanced_alchemy_sync_session_maker_{self.session_maker_key}" ) _ = self.create_session_maker() app.add_middleware(BaseHTTPMiddleware, dispatch=self.middleware_dispatch) async def on_startup(self) -> None: """Initialize the Starlette application with this configuration.""" if self.create_all: await self.create_all_metadata() def create_session_maker(self) -> Callable[[], "Session"]: """Get a session maker. If none exists yet, create one. Returns: Callable[[], Session]: Session factory used by the plugin. """ if self.session_maker: return self.session_maker session_kws = self.session_config_dict if self.engine_instance is None: self.engine_instance = self.get_engine() if session_kws.get("bind") is None: session_kws["bind"] = self.engine_instance self.session_maker = self.session_maker_class(**session_kws) return self.session_maker async def session_handler( self, session: "Session", request: "Request", response: "Response" ) -> None: # pragma: no cover """Handles the session after a request is processed. 
Applies the commit strategy and ensures the session is closed. Args: session (sqlalchemy.orm.Session | sqlalchemy.ext.asyncio.AsyncSession): The database session. request (starlette.requests.Request): The incoming HTTP request. response (starlette.responses.Response): The outgoing HTTP response. Returns: None """ try: if (self.commit_mode == "autocommit" and 200 <= response.status_code < 300) or ( # noqa: PLR2004 self.commit_mode == "autocommit_include_redirect" and 200 <= response.status_code < 400 # noqa: PLR2004 ): await run_in_threadpool(session.commit) else: await run_in_threadpool(session.rollback) finally: await run_in_threadpool(session.close) with contextlib.suppress(AttributeError, KeyError): delattr(request.state, self.session_key) async def middleware_dispatch( self, request: "Request", call_next: "RequestResponseEndpoint" ) -> "Response": # pragma: no cover """Middleware dispatch function to handle requests and responses. Processes the request, invokes the next middleware or route handler, and applies the session handler after the response is generated. Args: request (starlette.requests.Request): The incoming HTTP request. call_next (starlette.middleware.base.RequestResponseEndpoint): The next middleware or route handler. Returns: starlette.responses.Response: The HTTP response. """ response = await call_next(request) session = cast("Optional[Session]", getattr(request.state, self.session_key, None)) if session is not None: await self.session_handler(session=session, request=request, response=response) return response async def close_engine(self) -> None: # pragma: no cover """Close the engines.""" if self.engine_instance is not None: await run_in_threadpool(self.engine_instance.dispose) async def on_shutdown(self) -> None: # pragma: no cover """Handles the shutdown event by disposing of the SQLAlchemy engine. Ensures that all connections are properly closed during application shutdown. 
Returns: None """ await self.close_engine() if self.app is not None: with contextlib.suppress(AttributeError, KeyError): delattr(self.app.state, self.engine_key) delattr(self.app.state, self.session_maker_key) delattr(self.app.state, self.session_key) python-advanced-alchemy-1.0.1/advanced_alchemy/extensions/starlette/extension.py000066400000000000000000000266611476663714600303170ustar00rootroot00000000000000# ruff: noqa: ARG001 import contextlib from collections.abc import AsyncGenerator, Generator, Sequence from contextlib import asynccontextmanager, contextmanager from typing import ( TYPE_CHECKING, Any, Callable, Optional, Union, cast, overload, ) from starlette.requests import Request from advanced_alchemy.exceptions import ImproperConfigurationError from advanced_alchemy.extensions.starlette.config import SQLAlchemyAsyncConfig, SQLAlchemySyncConfig if TYPE_CHECKING: from sqlalchemy import Engine from sqlalchemy.ext.asyncio import AsyncEngine, AsyncSession from sqlalchemy.orm import Session from starlette.applications import Starlette class AdvancedAlchemy: """AdvancedAlchemy integration for Starlette applications. This class manages SQLAlchemy sessions and engine lifecycle within a Starlette application. It provides middleware for handling transactions based on commit strategies. Args: config (advanced_alchemy.config.asyncio.SQLAlchemyAsyncConfig | advanced_alchemy.config.sync.SQLAlchemySyncConfig): The SQLAlchemy configuration. app (starlette.applications.Starlette | None): The Starlette application instance. Defaults to None. 
""" def __init__( self, config: Union[ SQLAlchemyAsyncConfig, SQLAlchemySyncConfig, Sequence[Union[SQLAlchemyAsyncConfig, SQLAlchemySyncConfig]] ], app: Optional["Starlette"] = None, ) -> None: self._config = config if isinstance(config, Sequence) else [config] self._mapped_configs: dict[str, Union[SQLAlchemyAsyncConfig, SQLAlchemySyncConfig]] = self.map_configs() self._app = cast("Optional[Starlette]", None) if app is not None: self.init_app(app) @property def config(self) -> Sequence[Union[SQLAlchemyAsyncConfig, SQLAlchemySyncConfig]]: """Current Advanced Alchemy configuration.""" return self._config def init_app(self, app: "Starlette") -> None: """Initializes the Starlette application with SQLAlchemy engine and sessionmaker. Sets up middleware and shutdown handlers for managing the database engine. Args: app (starlette.applications.Starlette): The Starlette application instance. """ self._app = app unique_bind_keys = {config.bind_key for config in self.config} if len(unique_bind_keys) != len(self.config): # pragma: no cover msg = "Please ensure that each config has a unique name for the `bind_key` attribute. The default is `default` and can only be bound to a single engine." raise ImproperConfigurationError(msg) for config in self.config: config.init_app(app) app.state.advanced_alchemy = self original_lifespan = app.router.lifespan_context @asynccontextmanager async def wrapped_lifespan(app: "Starlette") -> AsyncGenerator[Any, None]: # pragma: no cover async with self.lifespan(app), original_lifespan(app) as state: yield state app.router.lifespan_context = wrapped_lifespan @asynccontextmanager async def lifespan(self, app: "Starlette") -> AsyncGenerator[Any, None]: # pragma: no cover """Context manager for lifespan events. Args: app: The starlette application. Yields: None """ await self.on_startup() try: yield finally: await self.on_shutdown() @property def app(self) -> "Starlette": # pragma: no cover """Returns the Starlette application instance. 
Raises: advanced_alchemy.exceptions.ImproperConfigurationError: If the application is not initialized. Returns: starlette.applications.Starlette: The Starlette application instance. """ if self._app is None: # pragma: no cover msg = "Application not initialized. Did you forget to call init_app?" raise ImproperConfigurationError(msg) return self._app async def on_startup(self) -> None: # pragma: no cover """Initializes the database.""" for config in self.config: await config.on_startup() async def on_shutdown(self) -> None: # pragma: no cover """Handles the shutdown event by disposing of the SQLAlchemy engine. Ensures that all connections are properly closed during application shutdown. Returns: None """ for config in self.config: await config.on_shutdown() with contextlib.suppress(AttributeError, KeyError): delattr(self.app.state, "advanced_alchemy") def map_configs(self) -> dict[str, Union[SQLAlchemyAsyncConfig, SQLAlchemySyncConfig]]: """Maps the configs to the session bind keys.""" mapped_configs: dict[str, Union[SQLAlchemyAsyncConfig, SQLAlchemySyncConfig]] = {} for config in self.config: if config.bind_key is None: config.bind_key = "default" mapped_configs[config.bind_key] = config return mapped_configs def get_config(self, key: Optional[str] = None) -> Union[SQLAlchemyAsyncConfig, SQLAlchemySyncConfig]: """Get the config for the given key.""" if key is None: key = "default" if key == "default" and len(self.config) == 1: key = self.config[0].bind_key or "default" config = self._mapped_configs.get(key) if config is None: # pragma: no cover msg = f"Config with key {key} not found" raise ImproperConfigurationError(msg) return config def get_async_config(self, key: Optional[str] = None) -> SQLAlchemyAsyncConfig: """Get the async config for the given key.""" config = self.get_config(key) if not isinstance(config, SQLAlchemyAsyncConfig): # pragma: no cover msg = "Expected an async config, but got a sync config" raise ImproperConfigurationError(msg) return config 
def get_sync_config(self, key: Optional[str] = None) -> SQLAlchemySyncConfig: """Get the sync config for the given key.""" config = self.get_config(key) if not isinstance(config, SQLAlchemySyncConfig): # pragma: no cover msg = "Expected a sync config, but got an async config" raise ImproperConfigurationError(msg) return config @asynccontextmanager async def with_async_session( self, key: Optional[str] = None ) -> AsyncGenerator["AsyncSession", None]: # pragma: no cover """Context manager for getting an async session.""" config = self.get_async_config(key) async with config.get_session() as session: yield session @contextmanager def with_sync_session(self, key: Optional[str] = None) -> Generator["Session", None]: # pragma: no cover """Context manager for getting a sync session.""" config = self.get_sync_config(key) with config.get_session() as session: yield session @overload @staticmethod def _get_session_from_request(request: Request, config: SQLAlchemyAsyncConfig) -> "AsyncSession": ... @overload @staticmethod def _get_session_from_request(request: Request, config: SQLAlchemySyncConfig) -> "Session": ... 
@staticmethod def _get_session_from_request( request: Request, config: Union[SQLAlchemyAsyncConfig, SQLAlchemySyncConfig], # pragma: no cover ) -> Union["Session", "AsyncSession"]: # pragma: no cover """Get the session for the given key.""" session = getattr(request.state, config.session_key, None) if session is None: session = config.create_session_maker()() setattr(request.state, config.session_key, session) return session def get_session( self, request: Request, key: Optional[str] = None ) -> Union["Session", "AsyncSession"]: # pragma: no cover """Get the session for the given key.""" config = self.get_config(key) return self._get_session_from_request(request, config) def get_async_session(self, request: Request, key: Optional[str] = None) -> "AsyncSession": # pragma: no cover """Get the async session for the given key.""" config = self.get_async_config(key) return self._get_session_from_request(request, config) def get_sync_session(self, request: Request, key: Optional[str] = None) -> "Session": # pragma: no cover """Get the sync session for the given key.""" config = self.get_sync_config(key) return self._get_session_from_request(request, config) def provide_session( self, key: Optional[str] = None ) -> Callable[[Request], Union["Session", "AsyncSession"]]: # pragma: no cover """Get the session for the given key.""" config = self.get_config(key) def _get_session(request: Request) -> Union["Session", "AsyncSession"]: return self._get_session_from_request(request, config) return _get_session def provide_async_session( self, key: Optional[str] = None ) -> Callable[[Request], "AsyncSession"]: # pragma: no cover """Get the async session for the given key.""" config = self.get_async_config(key) def _get_session(request: Request) -> "AsyncSession": return self._get_session_from_request(request, config) return _get_session def provide_sync_session(self, key: Optional[str] = None) -> Callable[[Request], "Session"]: # pragma: no cover """Get the sync session for the 
given key.""" config = self.get_sync_config(key) def _get_session(request: Request) -> "Session": return self._get_session_from_request(request, config) return _get_session def get_engine(self, key: Optional[str] = None) -> Union["Engine", "AsyncEngine"]: # pragma: no cover """Get the engine for the given key.""" config = self.get_config(key) return config.get_engine() def get_async_engine(self, key: Optional[str] = None) -> "AsyncEngine": # pragma: no cover """Get the async engine for the given key.""" config = self.get_async_config(key) return config.get_engine() def get_sync_engine(self, key: Optional[str] = None) -> "Engine": # pragma: no cover """Get the sync engine for the given key.""" config = self.get_sync_config(key) return config.get_engine() def provide_engine( self, key: Optional[str] = None ) -> Callable[[], Union["Engine", "AsyncEngine"]]: # pragma: no cover """Get the engine for the given key.""" config = self.get_config(key) def _get_engine() -> Union["Engine", "AsyncEngine"]: return config.get_engine() return _get_engine def provide_async_engine(self, key: Optional[str] = None) -> Callable[[], "AsyncEngine"]: # pragma: no cover """Get the async engine for the given key.""" config = self.get_async_config(key) def _get_engine() -> "AsyncEngine": return config.get_engine() return _get_engine def provide_sync_engine(self, key: Optional[str] = None) -> Callable[[], "Engine"]: # pragma: no cover """Get the sync engine for the given key.""" config = self.get_sync_config(key) def _get_engine() -> "Engine": return config.get_engine() return _get_engine python-advanced-alchemy-1.0.1/advanced_alchemy/filters.py000066400000000000000000000476511476663714600235470ustar00rootroot00000000000000"""SQLAlchemy filter constructs for advanced query operations. This module provides a comprehensive collection of filter datastructures designed to enhance SQLAlchemy query construction. It implements type-safe, reusable filter patterns for common database query operations. 
Features: Type-safe filter construction, datetime range filtering, collection-based filtering, pagination support, search operations, and customizable ordering. Example: Basic usage with a datetime filter:: import datetime from advanced_alchemy.filters import BeforeAfter filter = BeforeAfter( field_name="created_at", before=datetime.datetime.now(), after=datetime.datetime(2023, 1, 1), ) statement = filter.append_to_statement(select(Model), Model) Note: All filter classes implement the :class:`StatementFilter` ABC, ensuring consistent interface across different filter types. See Also: - :class:`sqlalchemy.sql.expression.Select`: Core SQLAlchemy select expression - :class:`sqlalchemy.orm.Query`: SQLAlchemy ORM query interface - :mod:`advanced_alchemy.base`: Base model definitions """ from abc import ABC, abstractmethod from dataclasses import dataclass from operator import attrgetter from typing import TYPE_CHECKING, Any, Callable, Generic, Literal, Optional, Union, cast from sqlalchemy import BinaryExpression, ColumnElement, Delete, Select, Update, and_, any_, or_, text from sqlalchemy.orm import InstrumentedAttribute from typing_extensions import TypeAlias, TypeVar if TYPE_CHECKING: import datetime from collections import abc from sqlalchemy.sql.dml import ReturningDelete, ReturningUpdate from advanced_alchemy import base __all__ = ( "BeforeAfter", "CollectionFilter", "FilterTypes", "InAnyFilter", "LimitOffset", "NotInCollectionFilter", "NotInSearchFilter", "OnBeforeAfter", "OrderBy", "PaginationFilter", "SearchFilter", "StatementFilter", "StatementFilterT", "StatementTypeT", ) T = TypeVar("T") ModelT = TypeVar("ModelT", bound="base.ModelProtocol") StatementFilterT = TypeVar("StatementFilterT", bound="StatementFilter") StatementTypeT = TypeVar( "StatementTypeT", bound="Union[ReturningDelete[tuple[Any]], ReturningUpdate[tuple[Any]], Select[tuple[Any]], Select[Any], Update, Delete]", ) FilterTypes: TypeAlias = "Union[BeforeAfter, OnBeforeAfter, CollectionFilter[Any], 
LimitOffset, OrderBy, SearchFilter, NotInCollectionFilter[Any], NotInSearchFilter]" """Aggregate type alias of the types supported for collection filtering.""" class StatementFilter(ABC): """Abstract base class for SQLAlchemy statement filters. This class defines the interface for all filter types in the system. Each filter implementation must provide a method to append its filtering logic to an existing SQLAlchemy statement. """ @abstractmethod def append_to_statement( self, statement: StatementTypeT, model: type[ModelT], *args: Any, **kwargs: Any ) -> StatementTypeT: """Append filter conditions to a SQLAlchemy statement. Args: statement: The SQLAlchemy statement to modify model: The SQLAlchemy model class *args: Additional positional arguments **kwargs: Additional keyword arguments Returns: StatementTypeT: Modified SQLAlchemy statement with filter conditions applied Raises: NotImplementedError: If the concrete class doesn't implement this method Note: This method must be implemented by all concrete filter classes. See Also: :meth:`sqlalchemy.sql.expression.Select.where`: SQLAlchemy where clause """ return statement @staticmethod def _get_instrumented_attr(model: Any, key: "Union[str, InstrumentedAttribute[Any]]") -> InstrumentedAttribute[Any]: """Get SQLAlchemy instrumented attribute from model. Args: model: SQLAlchemy model class or instance key: Attribute name or instrumented attribute Returns: InstrumentedAttribute[Any]: SQLAlchemy instrumented attribute See Also: :class:`sqlalchemy.orm.attributes.InstrumentedAttribute`: SQLAlchemy attribute """ if isinstance(key, str): return cast("InstrumentedAttribute[Any]", getattr(model, key)) return key @dataclass class BeforeAfter(StatementFilter): """DateTime range filter with exclusive bounds. This filter creates date/time range conditions using < and > operators, excluding the boundary values. If either `before` or `after` is None, that boundary condition is not applied. 
See Also: --------- :class:`OnBeforeAfter` : Inclusive datetime range filtering """ field_name: str """Name of the model attribute to filter on.""" before: "Optional[datetime.datetime]" """Filter results where field is earlier than this value.""" after: "Optional[datetime.datetime]" """Filter results where field is later than this value.""" def append_to_statement(self, statement: StatementTypeT, model: type[ModelT]) -> StatementTypeT: """Apply datetime range conditions to statement. Parameters ---------- statement : StatementTypeT The SQLAlchemy statement to modify model : type[ModelT] The SQLAlchemy model class Returns: -------- StatementTypeT Modified statement with datetime range conditions """ field = self._get_instrumented_attr(model, self.field_name) if self.before is not None: statement = cast("StatementTypeT", statement.where(field < self.before)) if self.after is not None: statement = cast("StatementTypeT", statement.where(field > self.after)) return statement @dataclass class OnBeforeAfter(StatementFilter): """DateTime range filter with inclusive bounds. This filter creates date/time range conditions using <= and >= operators, including the boundary values. If either `on_or_before` or `on_or_after` is None, that boundary condition is not applied. See Also: --------- :class:`BeforeAfter` : Exclusive datetime range filtering """ field_name: str """Name of the model attribute to filter on.""" on_or_before: "Optional[datetime.datetime]" """Filter results where field is on or earlier than this value.""" on_or_after: "Optional[datetime.datetime]" """Filter results where field is on or later than this value.""" def append_to_statement(self, statement: StatementTypeT, model: type[ModelT]) -> StatementTypeT: """Apply inclusive datetime range conditions to statement. 
Parameters ---------- statement : StatementTypeT The SQLAlchemy statement to modify model : type[ModelT] The SQLAlchemy model class Returns: -------- StatementTypeT Modified statement with inclusive datetime range conditions """ field = self._get_instrumented_attr(model, self.field_name) if self.on_or_before is not None: statement = cast("StatementTypeT", statement.where(field <= self.on_or_before)) if self.on_or_after is not None: statement = cast("StatementTypeT", statement.where(field >= self.on_or_after)) return statement class InAnyFilter(StatementFilter, ABC): """Base class for filters using IN or ANY operators. This abstract class provides common functionality for filters that check membership in a collection using either the SQL IN operator or the ANY operator. """ @dataclass class CollectionFilter(InAnyFilter, Generic[T]): """Data required to construct a WHERE ... IN (...) clause. This filter restricts records based on a field's presence in a collection of values. The filter supports both ``IN`` and ``ANY`` operators for collection membership testing. Use ``prefer_any=True`` in ``append_to_statement`` to use the ``ANY`` operator. """ field_name: str """Name of the model attribute to filter on.""" values: "Union[abc.Collection[T], None]" """Values for the ``IN`` clause. If this is None, no filter is applied. An empty list will force an empty result set (WHERE 1=-1)""" def append_to_statement( self, statement: StatementTypeT, model: type[ModelT], prefer_any: bool = False, ) -> StatementTypeT: """Apply a WHERE ... IN or WHERE ... ANY (...) clause to the statement. 
Parameters ---------- statement : StatementTypeT The SQLAlchemy statement to modify model : type[ModelT] The SQLAlchemy model class prefer_any : bool, optional If True, uses the SQLAlchemy :func:`any_` operator instead of :func:`in_` for the filter condition Returns: -------- StatementTypeT Modified statement with the appropriate IN conditions """ field = self._get_instrumented_attr(model, self.field_name) if self.values is None: return statement if not self.values: # Return empty result set by forcing a false condition return cast("StatementTypeT", statement.where(text("1=-1"))) if prefer_any: return cast("StatementTypeT", statement.where(any_(self.values) == field)) # type: ignore[arg-type] return cast("StatementTypeT", statement.where(field.in_(self.values))) @dataclass class NotInCollectionFilter(InAnyFilter, Generic[T]): """Data required to construct a WHERE ... NOT IN (...) clause. This filter restricts records based on a field's absence in a collection of values. The filter supports both ``NOT IN`` and ``!= ANY`` operators for collection exclusion. Use ``prefer_any=True`` in ``append_to_statement`` to use the ``ANY`` operator. Parameters ---------- field_name : str Name of the model attribute to filter on values : abc.Collection[T] | None Values for the ``NOT IN`` clause. If this is None or empty, the filter is not applied. """ field_name: str """Name of the model attribute to filter on.""" values: "Union[abc.Collection[T], None]" """Values for the ``NOT IN`` clause. If None or empty, no filter is applied.""" def append_to_statement( self, statement: StatementTypeT, model: type[ModelT], prefer_any: bool = False, ) -> StatementTypeT: """Apply a WHERE ... NOT IN or WHERE ... != ANY(...) clause to the statement. 
Parameters ---------- statement : StatementTypeT The SQLAlchemy statement to modify model : type[ModelT] The SQLAlchemy model class prefer_any : bool, optional If True, uses the SQLAlchemy :func:`any_` operator instead of :func:`notin_` for the filter condition Returns: -------- StatementTypeT Modified statement with the appropriate NOT IN conditions """ field = self._get_instrumented_attr(model, self.field_name) if not self.values: # If None or empty, we do not modify the statement return statement if prefer_any: return cast("StatementTypeT", statement.where(any_(self.values) != field)) # type: ignore[arg-type] return cast("StatementTypeT", statement.where(field.notin_(self.values))) class PaginationFilter(StatementFilter, ABC): """Abstract base class for pagination filters. Subclasses should implement pagination logic, such as limit/offset or cursor-based pagination. """ @dataclass class LimitOffset(PaginationFilter): """Limit and offset pagination filter. Implements traditional pagination using SQL LIMIT and OFFSET clauses. Only applies to SELECT statements; other statement types are returned unmodified. Note: This filter only modifies SELECT statements. For other statement types (UPDATE, DELETE), the statement is returned unchanged. See Also: - :meth:`sqlalchemy.sql.expression.Select.limit`: SQLAlchemy LIMIT clause - :meth:`sqlalchemy.sql.expression.Select.offset`: SQLAlchemy OFFSET clause """ limit: int """Maximum number of rows to return.""" offset: int """Number of rows to skip before returning results.""" def append_to_statement(self, statement: StatementTypeT, model: type[ModelT]) -> StatementTypeT: """Apply LIMIT/OFFSET pagination to the statement. Args: statement: The SQLAlchemy statement to modify model: The SQLAlchemy model class Returns: StatementTypeT: Modified statement with limit and offset applied Note: Only modifies SELECT statements. Other statement types are returned as-is. 
See Also: :class:`sqlalchemy.sql.expression.Select`: SQLAlchemy SELECT statement """ if isinstance(statement, Select): return cast("StatementTypeT", statement.limit(self.limit).offset(self.offset)) return statement @dataclass class OrderBy(StatementFilter): """Order by a specific field. Appends an ORDER BY clause to SELECT statements, sorting records by the specified field in ascending or descending order. Note: This filter only modifies SELECT statements. For other statement types, the statement is returned unchanged. See Also: - :meth:`sqlalchemy.sql.expression.Select.order_by`: SQLAlchemy ORDER BY clause - :meth:`sqlalchemy.sql.expression.ColumnElement.asc`: Ascending order - :meth:`sqlalchemy.sql.expression.ColumnElement.desc`: Descending order """ field_name: str """Name of the model attribute to sort on.""" sort_order: Literal["asc", "desc"] = "asc" """Sort direction ("asc" or "desc").""" def append_to_statement(self, statement: StatementTypeT, model: type[ModelT]) -> StatementTypeT: """Append an ORDER BY clause to the statement. Args: statement: The SQLAlchemy statement to modify model: The SQLAlchemy model class Returns: StatementTypeT: Modified statement with an ORDER BY clause Note: Only modifies SELECT statements. Other statement types are returned as-is. See Also: :meth:`sqlalchemy.sql.expression.Select.order_by`: SQLAlchemy ORDER BY """ if not isinstance(statement, Select): return statement field = self._get_instrumented_attr(model, self.field_name) if self.sort_order == "desc": return cast("StatementTypeT", statement.order_by(field.desc())) return cast("StatementTypeT", statement.order_by(field.asc())) @dataclass class SearchFilter(StatementFilter): """Case-sensitive or case-insensitive substring matching filter. Implements text search using SQL LIKE or ILIKE operators. Can search across multiple fields using OR conditions. Note: The search pattern automatically adds wildcards before and after the search value, equivalent to SQL pattern '%value%'. 
See Also: - :class:`.NotInSearchFilter`: Opposite filter using NOT LIKE/ILIKE - :meth:`sqlalchemy.sql.expression.ColumnOperators.like`: Case-sensitive LIKE - :meth:`sqlalchemy.sql.expression.ColumnOperators.ilike`: Case-insensitive LIKE """ field_name: "Union[str, set[str]]" """Name or set of names of model attributes to search on.""" value: str """Text to match within the field(s).""" ignore_case: "Optional[bool]" = False """Whether to use case-insensitive matching.""" @property def _operator(self) -> "Callable[..., ColumnElement[bool]]": """Return the SQL operator for combining multiple search clauses. Returns: Callable[..., ColumnElement[bool]]: The `or_` operator for OR conditions See Also: :func:`sqlalchemy.sql.expression.or_`: SQLAlchemy OR operator """ return or_ @property def _func(self) -> "attrgetter[Callable[[str], BinaryExpression[bool]]]": """Return the appropriate LIKE or ILIKE operator as a function. Returns: attrgetter: Bound method for LIKE or ILIKE operations See Also: - :meth:`sqlalchemy.sql.expression.ColumnOperators.like`: LIKE operator - :meth:`sqlalchemy.sql.expression.ColumnOperators.ilike`: ILIKE operator """ return attrgetter("ilike" if self.ignore_case else "like") @property def normalized_field_names(self) -> set[str]: """Convert field_name to a set if it's a single string. Returns: set[str]: Set of field names to be searched """ return {self.field_name} if isinstance(self.field_name, str) else self.field_name def get_search_clauses(self, model: type[ModelT]) -> list["BinaryExpression[bool]"]: """Generate the LIKE/ILIKE clauses for all specified fields. 
Args: model: The SQLAlchemy model class Returns: list[BinaryExpression[bool]]: List of text matching expressions See Also: :class:`sqlalchemy.sql.expression.BinaryExpression`: SQLAlchemy expression """ search_clause: list[BinaryExpression[bool]] = [] for field_name in self.normalized_field_names: field = self._get_instrumented_attr(model, field_name) search_text = f"%{self.value}%" search_clause.append(self._func(field)(search_text)) return search_clause def append_to_statement(self, statement: StatementTypeT, model: type[ModelT]) -> StatementTypeT: """Append a LIKE/ILIKE clause to the statement. Args: statement: The SQLAlchemy statement to modify model: The SQLAlchemy model class Returns: StatementTypeT: Modified statement with text search clauses See Also: :meth:`sqlalchemy.sql.expression.Select.where`: SQLAlchemy WHERE clause """ where_clause = self._operator(*self.get_search_clauses(model)) return cast("StatementTypeT", statement.where(where_clause)) @dataclass class NotInSearchFilter(SearchFilter): """Filter for excluding records that match a substring. Implements negative text search using SQL NOT LIKE or NOT ILIKE operators. Can exclude across multiple fields using AND conditions. Args: field_name: Name or set of names of model attributes to search on value: Text to exclude from the field(s) ignore_case: If True, uses NOT ILIKE for case-insensitive matching Note: Uses AND for multiple fields, meaning records matching any field will be excluded. See Also: - :class:`.SearchFilter`: Opposite filter using LIKE/ILIKE - :meth:`sqlalchemy.sql.expression.ColumnOperators.notlike`: NOT LIKE operator - :meth:`sqlalchemy.sql.expression.ColumnOperators.notilike`: NOT ILIKE operator """ @property def _operator(self) -> Callable[..., ColumnElement[bool]]: """Return the SQL operator for combining multiple negated search clauses. 
Returns: Callable[..., ColumnElement[bool]]: The `and_` operator for AND conditions See Also: :func:`sqlalchemy.sql.expression.and_`: SQLAlchemy AND operator """ return and_ @property def _func(self) -> "attrgetter[Callable[[str], BinaryExpression[bool]]]": """Return the appropriate NOT LIKE or NOT ILIKE operator as a function. Returns: attrgetter: Bound method for NOT LIKE or NOT ILIKE operations See Also: - :meth:`sqlalchemy.sql.expression.ColumnOperators.notlike`: NOT LIKE - :meth:`sqlalchemy.sql.expression.ColumnOperators.notilike`: NOT ILIKE """ return attrgetter("not_ilike" if self.ignore_case else "not_like") python-advanced-alchemy-1.0.1/advanced_alchemy/mixins/000077500000000000000000000000001476663714600230175ustar00rootroot00000000000000python-advanced-alchemy-1.0.1/advanced_alchemy/mixins/__init__.py000066400000000000000000000011761476663714600251350ustar00rootroot00000000000000from advanced_alchemy.mixins.audit import AuditColumns from advanced_alchemy.mixins.bigint import BigIntPrimaryKey from advanced_alchemy.mixins.nanoid import NanoIDPrimaryKey from advanced_alchemy.mixins.sentinel import SentinelMixin from advanced_alchemy.mixins.slug import SlugKey from advanced_alchemy.mixins.unique import UniqueMixin from advanced_alchemy.mixins.uuid import UUIDPrimaryKey, UUIDv6PrimaryKey, UUIDv7PrimaryKey __all__ = ( "AuditColumns", "BigIntPrimaryKey", "NanoIDPrimaryKey", "SentinelMixin", "SlugKey", "UUIDPrimaryKey", "UUIDv6PrimaryKey", "UUIDv7PrimaryKey", "UniqueMixin", ) python-advanced-alchemy-1.0.1/advanced_alchemy/mixins/audit.py000066400000000000000000000017221476663714600245010ustar00rootroot00000000000000import datetime from sqlalchemy.orm import Mapped, declarative_mixin, mapped_column, validates from advanced_alchemy.types import DateTimeUTC @declarative_mixin class AuditColumns: """Created/Updated At Fields Mixin.""" created_at: Mapped[datetime.datetime] = mapped_column( DateTimeUTC(timezone=True), default=lambda: 
datetime.datetime.now(datetime.timezone.utc), ) """Date/time of instance creation.""" updated_at: Mapped[datetime.datetime] = mapped_column( DateTimeUTC(timezone=True), default=lambda: datetime.datetime.now(datetime.timezone.utc), onupdate=lambda: datetime.datetime.now(datetime.timezone.utc), ) """Date/time of instance last update.""" @validates("created_at", "updated_at") def validate_tz_info(self, _: str, value: datetime.datetime) -> datetime.datetime: if value.tzinfo is None: value = value.replace(tzinfo=datetime.timezone.utc) return value python-advanced-alchemy-1.0.1/advanced_alchemy/mixins/bigint.py000066400000000000000000000010371476663714600246460ustar00rootroot00000000000000from sqlalchemy import Sequence from sqlalchemy.orm import Mapped, declarative_mixin, declared_attr, mapped_column from advanced_alchemy.types import BigIntIdentity @declarative_mixin class BigIntPrimaryKey: """BigInt Primary Key Field Mixin.""" @declared_attr def id(cls) -> Mapped[int]: """BigInt Primary key column.""" return mapped_column( BigIntIdentity, Sequence(f"{cls.__tablename__}_id_seq", optional=False), # type: ignore[attr-defined] primary_key=True, ) python-advanced-alchemy-1.0.1/advanced_alchemy/mixins/nanoid.py000066400000000000000000000013031476663714600246360ustar00rootroot00000000000000from typing import TYPE_CHECKING from sqlalchemy.orm import Mapped, declarative_mixin, mapped_column from advanced_alchemy.mixins.sentinel import SentinelMixin from advanced_alchemy.types import NANOID_INSTALLED if NANOID_INSTALLED and not TYPE_CHECKING: from fastnanoid import ( # type: ignore[import-not-found,unused-ignore] # pyright: ignore[reportMissingImports] generate as nanoid, ) else: from uuid import uuid4 as nanoid # type: ignore[assignment,unused-ignore] @declarative_mixin class NanoIDPrimaryKey(SentinelMixin): """Nano ID Primary Key Field Mixin.""" id: Mapped[str] = mapped_column(default=nanoid, primary_key=True) """Nano ID Primary key column.""" 
python-advanced-alchemy-1.0.1/advanced_alchemy/mixins/sentinel.py000066400000000000000000000004711476663714600252140ustar00rootroot00000000000000from sqlalchemy.orm import Mapped, declarative_mixin, declared_attr, orm_insert_sentinel @declarative_mixin class SentinelMixin: """Mixin to add a sentinel column for SQLAlchemy models.""" @declared_attr def _sentinel(cls) -> Mapped[int]: return orm_insert_sentinel(name="sa_orm_sentinel") python-advanced-alchemy-1.0.1/advanced_alchemy/mixins/slug.py000066400000000000000000000026171476663714600243510ustar00rootroot00000000000000from typing import TYPE_CHECKING, Any from sqlalchemy import Index, String, UniqueConstraint from sqlalchemy.orm import Mapped, declarative_mixin, declared_attr, mapped_column if TYPE_CHECKING: from sqlalchemy.orm.decl_base import _TableArgsType as TableArgsType # pyright: ignore[reportPrivateUsage] @declarative_mixin class SlugKey: """Slug unique Field Model Mixin.""" @declared_attr def slug(cls) -> Mapped[str]: """Slug field.""" return mapped_column( String(length=100), nullable=False, ) @staticmethod def _create_unique_slug_index(*_: Any, **kwargs: Any) -> bool: return bool(kwargs["dialect"].name.startswith("spanner")) @staticmethod def _create_unique_slug_constraint(*_: Any, **kwargs: Any) -> bool: return not kwargs["dialect"].name.startswith("spanner") @declared_attr.directive @classmethod def __table_args__(cls) -> "TableArgsType": return ( UniqueConstraint( cls.slug, name=f"uq_{cls.__tablename__}_slug", # type: ignore[attr-defined] ).ddl_if(callable_=cls._create_unique_slug_constraint), Index( f"ix_{cls.__tablename__}_slug_unique", # type: ignore[attr-defined] cls.slug, unique=True, ).ddl_if(callable_=cls._create_unique_slug_index), ) python-advanced-alchemy-1.0.1/advanced_alchemy/mixins/unique.py000066400000000000000000000131321476663714600246770ustar00rootroot00000000000000from contextlib import contextmanager from typing import TYPE_CHECKING, Any, Optional, Union from sqlalchemy import 
ColumnElement, select from sqlalchemy.orm import declarative_mixin from typing_extensions import Self from advanced_alchemy.exceptions import wrap_sqlalchemy_exception if TYPE_CHECKING: from collections.abc import Hashable, Iterator from sqlalchemy import Select from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.ext.asyncio.scoping import async_scoped_session from sqlalchemy.orm import Session from sqlalchemy.orm.scoping import scoped_session __all__ = ("UniqueMixin",) @declarative_mixin class UniqueMixin: """Mixin for instantiating objects while ensuring uniqueness on some field(s). This is a slightly modified implementation derived from https://github.com/sqlalchemy/sqlalchemy/wiki/UniqueObject """ @classmethod @contextmanager def _prevent_autoflush( cls, session: "Union[AsyncSession, async_scoped_session[AsyncSession], Session, scoped_session[Session]]", ) -> "Iterator[None]": with session.no_autoflush, wrap_sqlalchemy_exception(): yield @classmethod def _check_uniqueness( cls, cache: "Optional[dict[tuple[type[Self], Hashable], Self]]", session: "Union[AsyncSession, async_scoped_session[AsyncSession], Session, scoped_session[Session]]", key: "tuple[type[Self], Hashable]", *args: Any, **kwargs: Any, ) -> "tuple[dict[tuple[type[Self], Hashable], Self], Select[tuple[Self]], Optional[Self]]": if cache is None: cache = {} setattr(session, "_unique_cache", cache) statement = select(cls).where(cls.unique_filter(*args, **kwargs)).limit(2) return cache, statement, cache.get(key) @classmethod async def as_unique_async( cls, session: "Union[AsyncSession, async_scoped_session[AsyncSession]]", *args: Any, **kwargs: Any, ) -> Self: """Instantiate and return a unique object within the provided session based on the given arguments. If an object with the same unique identifier already exists in the session, it is returned from the cache. 
Args: session (AsyncSession | async_scoped_session[AsyncSession]): SQLAlchemy async session *args (Any): Values used to instantiate the instance if no duplicate exists **kwargs (Any): Values used to instantiate the instance if no duplicate exists Returns: Self: The unique object instance. """ key = cls, cls.unique_hash(*args, **kwargs) cache, statement, obj = cls._check_uniqueness( getattr(session, "_unique_cache", None), session, key, *args, **kwargs, ) if obj: return obj with cls._prevent_autoflush(session): if (obj := (await session.execute(statement)).scalar_one_or_none()) is None: session.add(obj := cls(*args, **kwargs)) cache[key] = obj return obj @classmethod def as_unique_sync( cls, session: "Union[Session, scoped_session[Session]]", *args: Any, **kwargs: Any, ) -> Self: """Instantiate and return a unique object within the provided session based on the given arguments. If an object with the same unique identifier already exists in the session, it is returned from the cache. Args: session (Session | scoped_session[Session]): SQLAlchemy sync session *args (Any): Values used to instantiate the instance if no duplicate exists **kwargs (Any): Values used to instantiate the instance if no duplicate exists Returns: Self: The unique object instance. """ key = cls, cls.unique_hash(*args, **kwargs) cache, statement, obj = cls._check_uniqueness( getattr(session, "_unique_cache", None), session, key, *args, **kwargs, ) if obj: return obj with cls._prevent_autoflush(session): if (obj := session.execute(statement).scalar_one_or_none()) is None: session.add(obj := cls(*args, **kwargs)) cache[key] = obj return obj @classmethod def unique_hash(cls, *args: Any, **kwargs: Any) -> "Hashable": """Generate a unique key based on the provided arguments. This method should be implemented in the subclass. 
Args: *args (Any): Values passed to the alternate classmethod constructors **kwargs (Any): Values passed to the alternate classmethod constructors Raises: NotImplementedError: If not implemented in the subclass. Returns: Hashable: Any hashable object. """ msg = "Implement this in subclass" raise NotImplementedError(msg) @classmethod def unique_filter(cls, *args: Any, **kwargs: Any) -> "ColumnElement[bool]": """Generate a filter condition for ensuring uniqueness. This method should be implemented in the subclass. Args: *args (Any): Values passed to the alternate classmethod constructors **kwargs (Any): Values passed to the alternate classmethod constructors Raises: NotImplementedError: If not implemented in the subclass. Returns: ColumnElement[bool]: Filter condition to establish the uniqueness. """ msg = "Implement this in subclass" raise NotImplementedError(msg) python-advanced-alchemy-1.0.1/advanced_alchemy/mixins/uuid.py000066400000000000000000000024011476663714600243340ustar00rootroot00000000000000from typing import TYPE_CHECKING from uuid import UUID, uuid4 from sqlalchemy.orm import Mapped, declarative_mixin, mapped_column from advanced_alchemy.mixins.sentinel import SentinelMixin from advanced_alchemy.types import UUID_UTILS_INSTALLED if UUID_UTILS_INSTALLED and not TYPE_CHECKING: from uuid_utils.compat import ( # type: ignore[no-redef,unused-ignore] # pyright: ignore[reportMissingImports] uuid4, uuid6, uuid7, ) else: from uuid import uuid4 # type: ignore[no-redef,unused-ignore] uuid6 = uuid4 # type: ignore[assignment, unused-ignore] uuid7 = uuid4 # type: ignore[assignment, unused-ignore] @declarative_mixin class UUIDPrimaryKey(SentinelMixin): """UUID Primary Key Field Mixin.""" id: Mapped[UUID] = mapped_column(default=uuid4, primary_key=True) """UUID Primary key column.""" @declarative_mixin class UUIDv6PrimaryKey(SentinelMixin): """UUID v6 Primary Key Field Mixin.""" id: Mapped[UUID] = mapped_column(default=uuid6, primary_key=True) """UUID Primary key 
column.""" @declarative_mixin class UUIDv7PrimaryKey(SentinelMixin): """UUID v7 Primary Key Field Mixin.""" id: Mapped[UUID] = mapped_column(default=uuid7, primary_key=True) """UUID Primary key column.""" python-advanced-alchemy-1.0.1/advanced_alchemy/operations.py000066400000000000000000000024221476663714600242450ustar00rootroot00000000000000"""Advanced database operations for SQLAlchemy. This module provides high-performance database operations that extend beyond basic CRUD functionality. It implements specialized database operations optimized for bulk data handling and schema management. The operations module is designed to work seamlessly with SQLAlchemy Core and ORM, providing efficient implementations for common database operations patterns. Features -------- - Table merging and upsert operations - Dynamic table creation from SELECT statements - Bulk data import/export operations - Optimized copy operations for PostgreSQL - Transaction-safe batch operations Todo: ----- - Implement merge operations with customizable conflict resolution - Add CTAS (Create Table As Select) functionality - Implement bulk copy operations (COPY TO/FROM) for PostgreSQL - Add support for temporary table operations - Implement materialized view operations Notes: ------ This module is designed to be database-agnostic where possible, with specialized optimizations for specific database backends where appropriate. 
See Also: --------- - :mod:`sqlalchemy.sql.expression` : SQLAlchemy Core expression language - :mod:`sqlalchemy.orm` : SQLAlchemy ORM functionality - :mod:`advanced_alchemy.extensions` : Additional database extensions """ python-advanced-alchemy-1.0.1/advanced_alchemy/py.typed000066400000000000000000000000001476663714600231750ustar00rootroot00000000000000python-advanced-alchemy-1.0.1/advanced_alchemy/repository/000077500000000000000000000000001476663714600237275ustar00rootroot00000000000000python-advanced-alchemy-1.0.1/advanced_alchemy/repository/__init__.py000066400000000000000000000030251476663714600260400ustar00rootroot00000000000000from advanced_alchemy.exceptions import ErrorMessages from advanced_alchemy.repository._async import ( SQLAlchemyAsyncQueryRepository, SQLAlchemyAsyncRepository, SQLAlchemyAsyncRepositoryProtocol, SQLAlchemyAsyncSlugRepository, SQLAlchemyAsyncSlugRepositoryProtocol, ) from advanced_alchemy.repository._sync import ( SQLAlchemySyncQueryRepository, SQLAlchemySyncRepository, SQLAlchemySyncRepositoryProtocol, SQLAlchemySyncSlugRepository, SQLAlchemySyncSlugRepositoryProtocol, ) from advanced_alchemy.repository._util import ( DEFAULT_ERROR_MESSAGE_TEMPLATES, FilterableRepository, FilterableRepositoryProtocol, LoadSpec, get_instrumented_attr, model_from_dict, ) from advanced_alchemy.repository.typing import ModelOrRowMappingT, ModelT, OrderingPair from advanced_alchemy.utils.dataclass import Empty, EmptyType __all__ = ( "DEFAULT_ERROR_MESSAGE_TEMPLATES", "Empty", "EmptyType", "ErrorMessages", "FilterableRepository", "FilterableRepositoryProtocol", "LoadSpec", "ModelOrRowMappingT", "ModelT", "OrderingPair", "SQLAlchemyAsyncQueryRepository", "SQLAlchemyAsyncRepository", "SQLAlchemyAsyncRepositoryProtocol", "SQLAlchemyAsyncSlugRepository", "SQLAlchemyAsyncSlugRepositoryProtocol", "SQLAlchemySyncQueryRepository", "SQLAlchemySyncRepository", "SQLAlchemySyncRepositoryProtocol", "SQLAlchemySyncSlugRepository", 
"SQLAlchemySyncSlugRepositoryProtocol", "get_instrumented_attr", "model_from_dict", ) python-advanced-alchemy-1.0.1/advanced_alchemy/repository/_async.py000066400000000000000000003062731476663714600255700ustar00rootroot00000000000000import random import string from collections.abc import Iterable, Sequence from typing import ( TYPE_CHECKING, Any, Final, Literal, Optional, Protocol, Union, cast, runtime_checkable, ) from sqlalchemy import ( Delete, Result, Row, Select, TextClause, Update, any_, delete, over, select, text, update, ) from sqlalchemy import func as sql_func from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.ext.asyncio.scoping import async_scoped_session from sqlalchemy.orm import InstrumentedAttribute from sqlalchemy.orm.strategy_options import _AbstractLoad # pyright: ignore[reportPrivateUsage] from sqlalchemy.sql import ColumnElement from sqlalchemy.sql.dml import ReturningDelete, ReturningUpdate from advanced_alchemy.exceptions import ErrorMessages, NotFoundError, RepositoryError, wrap_sqlalchemy_exception from advanced_alchemy.filters import StatementFilter, StatementTypeT from advanced_alchemy.repository._util import ( DEFAULT_ERROR_MESSAGE_TEMPLATES, FilterableRepository, FilterableRepositoryProtocol, LoadSpec, get_abstract_loader_options, get_instrumented_attr, ) from advanced_alchemy.repository.typing import MISSING, ModelT, OrderingPair, T from advanced_alchemy.utils.dataclass import Empty, EmptyType from advanced_alchemy.utils.text import slugify if TYPE_CHECKING: from sqlalchemy.engine.interfaces import _CoreSingleExecuteParams # pyright: ignore[reportPrivateUsage] DEFAULT_INSERTMANYVALUES_MAX_PARAMETERS: Final = 950 POSTGRES_VERSION_SUPPORTING_MERGE: Final = 15 @runtime_checkable class SQLAlchemyAsyncRepositoryProtocol(FilterableRepositoryProtocol[ModelT], Protocol[ModelT]): """Base Protocol""" id_attribute: str match_fields: Optional[Union[list[str], str]] = None statement: Select[tuple[ModelT]] session: Union[AsyncSession, 
async_scoped_session[AsyncSession]] auto_expunge: bool auto_refresh: bool auto_commit: bool order_by: Optional[Union[list[OrderingPair], OrderingPair]] = None error_messages: Optional[ErrorMessages] = None wrap_exceptions: bool = True def __init__( self, *, statement: Optional[Select[tuple[ModelT]]] = None, session: Union[AsyncSession, async_scoped_session[AsyncSession]], auto_expunge: bool = False, auto_refresh: bool = True, auto_commit: bool = False, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, order_by: Optional[Union[list[OrderingPair], OrderingPair]] = None, error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty, wrap_exceptions: bool = True, **kwargs: Any, ) -> None: ... @classmethod def get_id_attribute_value( cls, item: Union[ModelT, type[ModelT]], id_attribute: Optional[Union[str, InstrumentedAttribute[Any]]] = None, ) -> Any: ... @classmethod def set_id_attribute_value( cls, item_id: Any, item: ModelT, id_attribute: Optional[Union[str, InstrumentedAttribute[Any]]] = None, ) -> ModelT: ... @staticmethod def check_not_found(item_or_none: Optional[ModelT]) -> ModelT: ... async def add( self, data: ModelT, *, auto_commit: Optional[bool] = None, auto_expunge: Optional[bool] = None, auto_refresh: Optional[bool] = None, error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty, ) -> ModelT: ... async def add_many( self, data: list[ModelT], *, auto_commit: Optional[bool] = None, auto_expunge: Optional[bool] = None, error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty, ) -> Sequence[ModelT]: ... async def delete( self, item_id: Any, *, auto_commit: Optional[bool] = None, auto_expunge: Optional[bool] = None, id_attribute: Optional[Union[str, InstrumentedAttribute[Any]]] = None, error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, ) -> ModelT: ... 
async def delete_many( self, item_ids: list[Any], *, auto_commit: Optional[bool] = None, auto_expunge: Optional[bool] = None, id_attribute: Optional[Union[str, InstrumentedAttribute[Any]]] = None, chunk_size: Optional[int] = None, error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, ) -> Sequence[ModelT]: ... async def delete_where( self, *filters: Union[StatementFilter, ColumnElement[bool]], auto_commit: Optional[bool] = None, auto_expunge: Optional[bool] = None, load: Optional[LoadSpec] = None, error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty, execution_options: Optional[dict[str, Any]] = None, sanity_check: bool = True, **kwargs: Any, ) -> Sequence[ModelT]: ... async def exists( self, *filters: Union[StatementFilter, ColumnElement[bool]], load: Optional[LoadSpec] = None, error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty, execution_options: Optional[dict[str, Any]] = None, **kwargs: Any, ) -> bool: ... async def get( self, item_id: Any, *, auto_expunge: Optional[bool] = None, statement: Optional[Select[tuple[ModelT]]] = None, id_attribute: Optional[Union[str, InstrumentedAttribute[Any]]] = None, error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, ) -> ModelT: ... async def get_one( self, *filters: Union[StatementFilter, ColumnElement[bool]], auto_expunge: Optional[bool] = None, statement: Optional[Select[tuple[ModelT]]] = None, error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, **kwargs: Any, ) -> ModelT: ... 
async def get_one_or_none( self, *filters: Union[StatementFilter, ColumnElement[bool]], auto_expunge: Optional[bool] = None, statement: Optional[Select[tuple[ModelT]]] = None, error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, **kwargs: Any, ) -> Optional[ModelT]: ... async def get_or_upsert( self, *filters: Union[StatementFilter, ColumnElement[bool]], match_fields: Optional[Union[list[str], str]] = None, upsert: bool = True, attribute_names: Optional[Iterable[str]] = None, with_for_update: Optional[bool] = None, auto_commit: Optional[bool] = None, auto_expunge: Optional[bool] = None, auto_refresh: Optional[bool] = None, error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, **kwargs: Any, ) -> tuple[ModelT, bool]: ... async def get_and_update( self, *filters: Union[StatementFilter, ColumnElement[bool]], match_fields: Optional[Union[list[str], str]] = None, attribute_names: Optional[Iterable[str]] = None, with_for_update: Optional[bool] = None, auto_commit: Optional[bool] = None, auto_expunge: Optional[bool] = None, auto_refresh: Optional[bool] = None, error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, **kwargs: Any, ) -> tuple[ModelT, bool]: ... async def count( self, *filters: Union[StatementFilter, ColumnElement[bool]], statement: Optional[Select[tuple[ModelT]]] = None, load: Optional[LoadSpec] = None, error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty, execution_options: Optional[dict[str, Any]] = None, **kwargs: Any, ) -> int: ... 
async def update( self, data: ModelT, *, attribute_names: Optional[Iterable[str]] = None, with_for_update: Optional[bool] = None, auto_commit: Optional[bool] = None, auto_expunge: Optional[bool] = None, auto_refresh: Optional[bool] = None, id_attribute: Optional[Union[str, InstrumentedAttribute[Any]]] = None, error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, ) -> ModelT: ... async def update_many( self, data: list[ModelT], *, auto_commit: Optional[bool] = None, auto_expunge: Optional[bool] = None, error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, ) -> list[ModelT]: ... def _get_update_many_statement( self, model_type: type[ModelT], supports_returning: bool, loader_options: Optional[list[_AbstractLoad]], execution_options: Optional[dict[str, Any]], ) -> Union[Update, ReturningUpdate[tuple[ModelT]]]: ... async def upsert( self, data: ModelT, *, attribute_names: Optional[Iterable[str]] = None, with_for_update: Optional[bool] = None, auto_expunge: Optional[bool] = None, auto_commit: Optional[bool] = None, auto_refresh: Optional[bool] = None, match_fields: Optional[Union[list[str], str]] = None, error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, ) -> ModelT: ... async def upsert_many( self, data: list[ModelT], *, auto_expunge: Optional[bool] = None, auto_commit: Optional[bool] = None, no_merge: bool = False, match_fields: Optional[Union[list[str], str]] = None, error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, ) -> list[ModelT]: ... 
async def list_and_count( self, *filters: Union[StatementFilter, ColumnElement[bool]], auto_expunge: Optional[bool] = None, statement: Optional[Select[tuple[ModelT]]] = None, count_with_window_function: Optional[bool] = None, error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, order_by: Optional[Union[list[OrderingPair], OrderingPair]] = None, **kwargs: Any, ) -> tuple[list[ModelT], int]: ... async def list( self, *filters: Union[StatementFilter, ColumnElement[bool]], auto_expunge: Optional[bool] = None, statement: Optional[Select[tuple[ModelT]]] = None, error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, order_by: Optional[Union[list[OrderingPair], OrderingPair]] = None, **kwargs: Any, ) -> list[ModelT]: ... @classmethod async def check_health(cls, session: Union[AsyncSession, async_scoped_session[AsyncSession]]) -> bool: ... @runtime_checkable class SQLAlchemyAsyncSlugRepositoryProtocol(SQLAlchemyAsyncRepositoryProtocol[ModelT], Protocol[ModelT]): """Protocol for SQLAlchemy repositories that support slug-based operations. Extends the base repository protocol to add slug-related functionality. Type Parameters: ModelT: The SQLAlchemy model type this repository handles. """ async def get_by_slug( self, slug: str, *, error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, **kwargs: Any, ) -> Optional[ModelT]: """Get a model instance by its slug. Args: slug: The slug value to search for. error_messages: Optional custom error message templates. load: Specification for eager loading of relationships. execution_options: Options for statement execution. **kwargs: Additional filtering criteria. Returns: ModelT | None: The found model instance or None if not found. """ ... 
async def get_available_slug( self, value_to_slugify: str, **kwargs: Any, ) -> str: """Generate a unique slug for a given value. Args: value_to_slugify: The string to convert to a slug. **kwargs: Additional parameters for slug generation. Returns: str: A unique slug derived from the input value. """ ... class SQLAlchemyAsyncRepository(SQLAlchemyAsyncRepositoryProtocol[ModelT], FilterableRepository[ModelT]): """Async SQLAlchemy repository implementation. Provides a complete implementation of async database operations using SQLAlchemy, including CRUD operations, filtering, and relationship loading. Type Parameters: ModelT: The SQLAlchemy model type this repository handles. .. seealso:: :class:`~advanced_alchemy.repository._util.FilterableRepository` """ id_attribute: str = "id" """Name of the unique identifier for the model.""" loader_options: Optional[LoadSpec] = None """Default loader options for the repository.""" error_messages: Optional[ErrorMessages] = None """Default error messages for the repository.""" wrap_exceptions: bool = True """Wrap SQLAlchemy exceptions in a ``RepositoryError``. When set to ``False``, the original exception will be raised.""" inherit_lazy_relationships: bool = True """Optionally ignore the default ``lazy`` configuration for model relationships. This is useful for when you want to replace instead of merge the model's loaded relationships with the ones specified in the ``load`` or ``default_loader_options`` configuration.""" merge_loader_options: bool = True """Merges the default loader options with the loader options specified in the ``load`` argument. 
This is useful for when you want to totally replace instead of merge the model's loaded relationships with the ones specified in the ``load`` or ``default_loader_options`` configuration.""" execution_options: Optional[dict[str, Any]] = None """Default execution options for the repository.""" match_fields: Optional[Union[list[str], str]] = None """List of dialects that prefer to use ``field.id = ANY(:1)`` instead of ``field.id IN (...)``.""" uniquify: bool = False """Optionally apply the ``unique()`` method to results before returning. This is useful for certain SQLAlchemy uses cases such as applying ``contains_eager`` to a query containing a one-to-many relationship """ count_with_window_function: bool = True """Use an analytical window function to count results. This allows the count to be performed in a single query. """ def __init__( self, *, statement: Optional[Select[tuple[ModelT]]] = None, session: Union[AsyncSession, async_scoped_session[AsyncSession]], auto_expunge: bool = False, auto_refresh: bool = True, auto_commit: bool = False, order_by: Optional[Union[list[OrderingPair], OrderingPair]] = None, error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, wrap_exceptions: bool = True, uniquify: Optional[bool] = None, count_with_window_function: Optional[bool] = None, **kwargs: Any, ) -> None: """Repository for SQLAlchemy models. Args: statement: To facilitate customization of the underlying select query. session: Session managing the unit-of-work for the operation. auto_expunge: Remove object from session before returning. auto_refresh: Refresh object from session before returning. auto_commit: Commit objects before returning. order_by: Set default order options for queries. 
load: Set default relationships to be loaded execution_options: Set default execution options error_messages: A set of custom error messages to use for operations wrap_exceptions: Wrap SQLAlchemy exceptions in a ``RepositoryError``. When set to ``False``, the original exception will be raised. uniquify: Optionally apply the ``unique()`` method to results before returning. count_with_window_function: When false, list and count will use two queries instead of an analytical window function. **kwargs: Additional arguments. """ self.auto_expunge = auto_expunge self.auto_refresh = auto_refresh self.auto_commit = auto_commit self.order_by = order_by self.session = session self.error_messages = self._get_error_messages( error_messages=error_messages, default_messages=self.error_messages ) self.wrap_exceptions = wrap_exceptions self.uniquify = self._get_uniquify(uniquify) self.count_with_window_function = ( count_with_window_function if count_with_window_function is not None else self.count_with_window_function ) self._default_loader_options, self._loader_options_have_wildcards = get_abstract_loader_options( loader_options=load if load is not None else self.loader_options, inherit_lazy_relationships=self.inherit_lazy_relationships, merge_with_default=self.merge_loader_options, ) execution_options = execution_options if execution_options is not None else self.execution_options self._default_execution_options = execution_options or {} self.statement = select(self.model_type) if statement is None else statement self._dialect = self.session.bind.dialect if self.session.bind is not None else self.session.get_bind().dialect self._prefer_any = any(self._dialect.name == engine_type for engine_type in self.prefer_any_dialects or ()) def _get_uniquify(self, uniquify: Optional[bool] = None) -> bool: return bool(uniquify) if uniquify is not None else self.uniquify @staticmethod def _get_error_messages( error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty, default_messages: 
Optional[Union[ErrorMessages, EmptyType]] = Empty, ) -> Optional[ErrorMessages]: if error_messages == Empty: error_messages = None if default_messages == Empty: default_messages = None messages = DEFAULT_ERROR_MESSAGE_TEMPLATES if default_messages and isinstance(default_messages, dict): messages.update(default_messages) if error_messages: messages.update(cast("ErrorMessages", error_messages)) return messages @classmethod def get_id_attribute_value( cls, item: Union[ModelT, type[ModelT]], id_attribute: Optional[Union[str, InstrumentedAttribute[Any]]] = None, ) -> Any: """Get value of attribute named as :attr:`id_attribute` on ``item``. Args: item: Anything that should have an attribute named as :attr:`id_attribute` value. id_attribute: Allows customization of the unique identifier to use for model fetching. Defaults to `None`, but can reference any surrogate or candidate key for the table. Returns: The value of attribute on ``item`` named as :attr:`id_attribute`. """ if isinstance(id_attribute, InstrumentedAttribute): id_attribute = id_attribute.key return getattr(item, id_attribute if id_attribute is not None else cls.id_attribute) @classmethod def set_id_attribute_value( cls, item_id: Any, item: ModelT, id_attribute: Optional[Union[str, InstrumentedAttribute[Any]]] = None, ) -> ModelT: """Return the ``item`` after the ID is set to the appropriate attribute. Args: item_id: Value of ID to be set on instance item: Anything that should have an attribute named as :attr:`id_attribute` value. id_attribute: Allows customization of the unique identifier to use for model fetching. Defaults to `None`, but can reference any surrogate or candidate key for the table. 
        Returns:
            Item with ``item_id`` set to :attr:`id_attribute`
        """
        if isinstance(id_attribute, InstrumentedAttribute):
            # Accept an ORM attribute directly; ``.key`` is its string name.
            id_attribute = id_attribute.key
        setattr(item, id_attribute if id_attribute is not None else cls.id_attribute, item_id)
        return item

    @staticmethod
    def check_not_found(item_or_none: Optional[ModelT]) -> ModelT:
        """Raise :exc:`advanced_alchemy.exceptions.NotFoundError` if ``item_or_none`` is ``None``.

        Args:
            item_or_none: Item (:class:`T `) to be tested for existence.

        Returns:
            The item, if it exists.

        Raises:
            NotFoundError: If ``item_or_none`` is ``None``.
        """
        if item_or_none is None:
            msg = "No item found when one was expected"
            raise NotFoundError(msg)
        return item_or_none

    def _get_execution_options(
        self,
        execution_options: Optional[dict[str, Any]] = None,
    ) -> dict[str, Any]:
        """Return the caller-supplied execution options, falling back to the defaults captured at init."""
        if execution_options is None:
            return self._default_execution_options
        return execution_options

    def _get_loader_options(
        self,
        loader_options: Optional[LoadSpec],
    ) -> Union[tuple[list[_AbstractLoad], bool], tuple[None, bool]]:
        """Resolve relationship loader options for a statement.

        Returns a ``(options, needs_unique)`` pair: the second element is True
        when wildcard loads or an explicit ``uniquify`` require ``unique()`` to
        be applied to the result (it is passed to ``_execute`` downstream).
        """
        if loader_options is None:
            # use the defaults set at initialization
            return self._default_loader_options, self._loader_options_have_wildcards or self.uniquify
        return get_abstract_loader_options(
            loader_options=loader_options,
            default_loader_options=self._default_loader_options,
            default_options_have_wildcards=self._loader_options_have_wildcards or self.uniquify,
            inherit_lazy_relationships=self.inherit_lazy_relationships,
            merge_with_default=self.merge_loader_options,
        )

    async def add(
        self,
        data: ModelT,
        *,
        auto_commit: Optional[bool] = None,
        auto_expunge: Optional[bool] = None,
        auto_refresh: Optional[bool] = None,
        error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty,
    ) -> ModelT:
        """Add ``data`` to the collection.

        Args:
            data: Instance to be added to the collection.
            auto_expunge: Remove object from session before returning.
            auto_refresh: Refresh object from session before returning.
            auto_commit: Commit objects before returning.
            error_messages: An optional dictionary of templates to use
                for friendlier error messages to clients

        Returns:
            The added instance.
        """
        error_messages = self._get_error_messages(
            error_messages=error_messages,
            default_messages=self.error_messages,
        )
        with wrap_sqlalchemy_exception(error_messages=error_messages, dialect_name=self._dialect.name):
            # Attach, persist, then optionally refresh/detach per the flags.
            instance = await self._attach_to_session(data)
            await self._flush_or_commit(auto_commit=auto_commit)
            await self._refresh(instance, auto_refresh=auto_refresh)
            self._expunge(instance, auto_expunge=auto_expunge)
            return instance

    async def add_many(
        self,
        data: list[ModelT],
        *,
        auto_commit: Optional[bool] = None,
        auto_expunge: Optional[bool] = None,
        error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty,
    ) -> Sequence[ModelT]:
        """Add many `data` to the collection.

        Args:
            data: list of Instances to be added to the collection.
            auto_expunge: Remove object from session before returning.
            auto_commit: Commit objects before returning.
            error_messages: An optional dictionary of templates to use
                for friendlier error messages to clients

        Returns:
            The added instances.
        """
        error_messages = self._get_error_messages(
            error_messages=error_messages,
            default_messages=self.error_messages,
        )
        with wrap_sqlalchemy_exception(error_messages=error_messages, dialect_name=self._dialect.name):
            # add_all only stages the objects; the flush/commit below issues
            # the INSERTs in one batch.
            self.session.add_all(data)
            await self._flush_or_commit(auto_commit=auto_commit)
            for datum in data:
                self._expunge(datum, auto_expunge=auto_expunge)
            return data

    async def delete(
        self,
        item_id: Any,
        *,
        auto_commit: Optional[bool] = None,
        auto_expunge: Optional[bool] = None,
        id_attribute: Optional[Union[str, InstrumentedAttribute[Any]]] = None,
        error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty,
        load: Optional[LoadSpec] = None,
        execution_options: Optional[dict[str, Any]] = None,
        uniquify: Optional[bool] = None,
    ) -> ModelT:
        """Delete instance identified by ``item_id``.

        Args:
            item_id: Identifier of instance to be deleted.
auto_expunge: Remove object from session before returning. auto_commit: Commit objects before returning. id_attribute: Allows customization of the unique identifier to use for model fetching. Defaults to `id`, but can reference any surrogate or candidate key for the table. error_messages: An optional dictionary of templates to use for friendlier error messages to clients load: Set default relationships to be loaded execution_options: Set default execution options uniquify: Optionally apply the ``unique()`` method to results before returning. Returns: The deleted instance. Raises: NotFoundError: If no instance found identified by ``item_id``. """ self.uniquify = self._get_uniquify(uniquify) error_messages = self._get_error_messages( error_messages=error_messages, default_messages=self.error_messages, ) with wrap_sqlalchemy_exception(error_messages=error_messages, dialect_name=self._dialect.name): execution_options = self._get_execution_options(execution_options) instance = await self.get( item_id, id_attribute=id_attribute, load=load, execution_options=execution_options, ) await self.session.delete(instance) await self._flush_or_commit(auto_commit=auto_commit) self._expunge(instance, auto_expunge=auto_expunge) return instance async def delete_many( self, item_ids: list[Any], *, auto_commit: Optional[bool] = None, auto_expunge: Optional[bool] = None, id_attribute: Optional[Union[str, InstrumentedAttribute[Any]]] = None, chunk_size: Optional[int] = None, error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, ) -> Sequence[ModelT]: """Delete instance identified by `item_id`. Args: item_ids: Identifier of instance to be deleted. auto_expunge: Remove object from session before returning. auto_commit: Commit objects before returning. id_attribute: Allows customization of the unique identifier to use for model fetching. 
Defaults to `id`, but can reference any surrogate or candidate key for the table. chunk_size: Allows customization of the ``insertmanyvalues_max_parameters`` setting for the driver. Defaults to `950` if left unset. error_messages: An optional dictionary of templates to use for friendlier error messages to clients load: Set default relationships to be loaded execution_options: Set default execution options uniquify: Optionally apply the ``unique()`` method to results before returning. Returns: The deleted instances. """ self.uniquify = self._get_uniquify(uniquify) error_messages = self._get_error_messages( error_messages=error_messages, default_messages=self.error_messages, ) with wrap_sqlalchemy_exception(error_messages=error_messages, dialect_name=self._dialect.name): execution_options = self._get_execution_options(execution_options) loader_options, _loader_options_have_wildcard = self._get_loader_options(load) id_attribute = get_instrumented_attr( self.model_type, id_attribute if id_attribute is not None else self.id_attribute, ) instances: list[ModelT] = [] if self._prefer_any: chunk_size = len(item_ids) + 1 chunk_size = self._get_insertmanyvalues_max_parameters(chunk_size) for idx in range(0, len(item_ids), chunk_size): chunk = item_ids[idx : min(idx + chunk_size, len(item_ids))] if self._dialect.delete_executemany_returning: instances.extend( await self.session.scalars( self._get_delete_many_statement( statement_type="delete", model_type=self.model_type, id_attribute=id_attribute, id_chunk=chunk, supports_returning=self._dialect.delete_executemany_returning, loader_options=loader_options, execution_options=execution_options, ), ), ) else: instances.extend( await self.session.scalars( self._get_delete_many_statement( statement_type="select", model_type=self.model_type, id_attribute=id_attribute, id_chunk=chunk, supports_returning=self._dialect.delete_executemany_returning, loader_options=loader_options, execution_options=execution_options, ), ), ) await 
self.session.execute( self._get_delete_many_statement( statement_type="delete", model_type=self.model_type, id_attribute=id_attribute, id_chunk=chunk, supports_returning=self._dialect.delete_executemany_returning, loader_options=loader_options, execution_options=execution_options, ), ) await self._flush_or_commit(auto_commit=auto_commit) for instance in instances: self._expunge(instance, auto_expunge=auto_expunge) return instances def _get_insertmanyvalues_max_parameters(self, chunk_size: Optional[int] = None) -> int: return chunk_size if chunk_size is not None else DEFAULT_INSERTMANYVALUES_MAX_PARAMETERS async def delete_where( self, *filters: Union[StatementFilter, ColumnElement[bool]], auto_commit: Optional[bool] = None, auto_expunge: Optional[bool] = None, error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty, sanity_check: bool = True, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, **kwargs: Any, ) -> Sequence[ModelT]: """Delete instances specified by referenced kwargs and filters. Args: *filters: Types for specific filtering operations. auto_expunge: Remove object from session before returning. auto_commit: Commit objects before returning. error_messages: An optional dictionary of templates to use for friendlier error messages to clients sanity_check: When true, the length of selected instances is compared to the deleted row count load: Set default relationships to be loaded execution_options: Set default execution options uniquify: Optionally apply the ``unique()`` method to results before returning. **kwargs: Arguments to apply to a delete Returns: The deleted instances. 
""" self.uniquify = self._get_uniquify(uniquify) error_messages = self._get_error_messages( error_messages=error_messages, default_messages=self.error_messages, ) with wrap_sqlalchemy_exception(error_messages=error_messages, dialect_name=self._dialect.name): execution_options = self._get_execution_options(execution_options) loader_options, _loader_options_have_wildcard = self._get_loader_options(load) model_type = self.model_type statement = self._get_base_stmt( statement=delete(model_type), loader_options=loader_options, execution_options=execution_options, ) statement = self._filter_select_by_kwargs(statement=statement, kwargs=kwargs) statement = self._apply_filters(*filters, statement=statement, apply_pagination=False) instances: list[ModelT] = [] if self._dialect.delete_executemany_returning: instances.extend(await self.session.scalars(statement.returning(model_type))) else: instances.extend( await self.list( *filters, load=load, execution_options=execution_options, auto_expunge=auto_expunge, **kwargs, ), ) result = await self.session.execute(statement) row_count = getattr(result, "rowcount", -2) if sanity_check and row_count >= 0 and len(instances) != row_count: # pyright: ignore # noqa: PGH003 # backends will return a -1 if they can't determine impacted rowcount # only compare length of selected instances to results if it's >= 0 await self.session.rollback() raise RepositoryError(detail="Deleted count does not match fetched count. Rollback issued.") await self._flush_or_commit(auto_commit=auto_commit) for instance in instances: self._expunge(instance, auto_expunge=auto_expunge) return instances async def exists( self, *filters: Union[StatementFilter, ColumnElement[bool]], error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, **kwargs: Any, ) -> bool: """Return true if the object specified by ``kwargs`` exists. 
Args: *filters: Types for specific filtering operations. error_messages: An optional dictionary of templates to use for friendlier error messages to clients load: Set default relationships to be loaded execution_options: Set default execution options uniquify: Optionally apply the ``unique()`` method to results before returning. **kwargs: Identifier of the instance to be retrieved. Returns: True if the instance was found. False if not found.. """ error_messages = self._get_error_messages( error_messages=error_messages, default_messages=self.error_messages, ) existing = await self.count( *filters, load=load, execution_options=execution_options, error_messages=error_messages, **kwargs, ) return existing > 0 def _get_base_stmt( self, *, statement: StatementTypeT, loader_options: Optional[list[_AbstractLoad]], execution_options: Optional[dict[str, Any]], ) -> StatementTypeT: """Get base statement with options applied. Args: statement: The select statement to modify loader_options: Options for loading relationships execution_options: Options for statement execution Returns: Modified select statement """ if loader_options: statement = cast("StatementTypeT", statement.options(*loader_options)) if execution_options: statement = cast("StatementTypeT", statement.execution_options(**execution_options)) return statement def _get_delete_many_statement( self, *, model_type: type[ModelT], id_attribute: InstrumentedAttribute[Any], id_chunk: list[Any], supports_returning: bool, statement_type: Literal["delete", "select"] = "delete", loader_options: Optional[list[_AbstractLoad]], execution_options: Optional[dict[str, Any]], ) -> Union[Select[tuple[ModelT]], Delete, ReturningDelete[tuple[ModelT]]]: # Base statement is static statement = self._get_base_stmt( statement=delete(model_type) if statement_type == "delete" else select(model_type), loader_options=loader_options, execution_options=execution_options, ) if execution_options: statement = 
statement.execution_options(**execution_options) if supports_returning and statement_type != "select": statement = cast("ReturningDelete[tuple[ModelT]]", statement.returning(model_type)) # type: ignore[union-attr,assignment] # pyright: ignore[reportUnknownLambdaType,reportUnknownMemberType,reportAttributeAccessIssue,reportUnknownVariableType] if self._prefer_any: return statement.where(any_(id_chunk) == id_attribute) # type: ignore[arg-type] return statement.where(id_attribute.in_(id_chunk)) # pyright: ignore[reportUnknownMemberType,reportUnknownVariableType] async def get( self, item_id: Any, *, auto_expunge: Optional[bool] = None, statement: Optional[Select[tuple[ModelT]]] = None, id_attribute: Optional[Union[str, InstrumentedAttribute[Any]]] = None, error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, ) -> ModelT: """Get instance identified by `item_id`. Args: item_id: Identifier of the instance to be retrieved. auto_expunge: Remove object from session before returning. statement: To facilitate customization of the underlying select query. id_attribute: Allows customization of the unique identifier to use for model fetching. Defaults to `id`, but can reference any surrogate or candidate key for the table. error_messages: An optional dictionary of templates to use for friendlier error messages to clients load: Set relationships to be loaded execution_options: Set default execution options uniquify: Optionally apply the ``unique()`` method to results before returning. Returns: The retrieved instance. Raises: NotFoundError: If no instance found identified by `item_id`. 
""" self.uniquify = self._get_uniquify(uniquify) error_messages = self._get_error_messages( error_messages=error_messages, default_messages=self.error_messages, ) with wrap_sqlalchemy_exception(error_messages=error_messages, dialect_name=self._dialect.name): execution_options = self._get_execution_options(execution_options) statement = self.statement if statement is None else statement loader_options, loader_options_have_wildcard = self._get_loader_options(load) id_attribute = id_attribute if id_attribute is not None else self.id_attribute statement = self._get_base_stmt( statement=statement, loader_options=loader_options, execution_options=execution_options, ) statement = self._filter_select_by_kwargs(statement, [(id_attribute, item_id)]) instance = (await self._execute(statement, uniquify=loader_options_have_wildcard)).scalar_one_or_none() instance = self.check_not_found(instance) self._expunge(instance, auto_expunge=auto_expunge) return instance async def get_one( self, *filters: Union[StatementFilter, ColumnElement[bool]], auto_expunge: Optional[bool] = None, statement: Optional[Select[tuple[ModelT]]] = None, error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, **kwargs: Any, ) -> ModelT: """Get instance identified by ``kwargs``. Args: *filters: Types for specific filtering operations. auto_expunge: Remove object from session before returning. statement: To facilitate customization of the underlying select query. error_messages: An optional dictionary of templates to use for friendlier error messages to clients load: Set relationships to be loaded execution_options: Set default execution options uniquify: Optionally apply the ``unique()`` method to results before returning. **kwargs: Identifier of the instance to be retrieved. Returns: The retrieved instance. Raises: NotFoundError: If no instance found identified by `item_id`. 
""" self.uniquify = self._get_uniquify(uniquify) error_messages = self._get_error_messages( error_messages=error_messages, default_messages=self.error_messages, ) with wrap_sqlalchemy_exception(error_messages=error_messages, dialect_name=self._dialect.name): execution_options = self._get_execution_options(execution_options) statement = self.statement if statement is None else statement loader_options, loader_options_have_wildcard = self._get_loader_options(load) statement = self._get_base_stmt( statement=statement, loader_options=loader_options, execution_options=execution_options, ) statement = self._apply_filters(*filters, apply_pagination=False, statement=statement) statement = self._filter_select_by_kwargs(statement, kwargs) instance = (await self._execute(statement, uniquify=loader_options_have_wildcard)).scalar_one_or_none() instance = self.check_not_found(instance) self._expunge(instance, auto_expunge=auto_expunge) return instance async def get_one_or_none( self, *filters: Union[StatementFilter, ColumnElement[bool]], auto_expunge: Optional[bool] = None, statement: Optional[Select[tuple[ModelT]]] = None, error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, **kwargs: Any, ) -> Union[ModelT, None]: """Get instance identified by ``kwargs`` or None if not found. Args: *filters: Types for specific filtering operations. auto_expunge: Remove object from session before returning. statement: To facilitate customization of the underlying select query. error_messages: An optional dictionary of templates to use for friendlier error messages to clients load: Set relationships to be loaded execution_options: Set default execution options uniquify: Optionally apply the ``unique()`` method to results before returning. **kwargs: Identifier of the instance to be retrieved. 
Returns: The retrieved instance or None """ self.uniquify = self._get_uniquify(uniquify) error_messages = self._get_error_messages( error_messages=error_messages, default_messages=self.error_messages, ) with wrap_sqlalchemy_exception(error_messages=error_messages, dialect_name=self._dialect.name): execution_options = self._get_execution_options(execution_options) statement = self.statement if statement is None else statement loader_options, loader_options_have_wildcard = self._get_loader_options(load) statement = self._get_base_stmt( statement=statement, loader_options=loader_options, execution_options=execution_options, ) statement = self._apply_filters(*filters, apply_pagination=False, statement=statement) statement = self._filter_select_by_kwargs(statement, kwargs) instance = cast( "Result[tuple[ModelT]]", (await self._execute(statement, uniquify=loader_options_have_wildcard)), ).scalar_one_or_none() if instance: self._expunge(instance, auto_expunge=auto_expunge) return instance async def get_or_upsert( self, *filters: Union[StatementFilter, ColumnElement[bool]], match_fields: Optional[Union[list[str], str]] = None, upsert: bool = True, attribute_names: Optional[Iterable[str]] = None, with_for_update: Optional[bool] = None, auto_commit: Optional[bool] = None, auto_expunge: Optional[bool] = None, auto_refresh: Union[bool, None] = None, error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, **kwargs: Any, ) -> tuple[ModelT, bool]: """Get instance identified by ``kwargs`` or create if it doesn't exist. Args: *filters: Types for specific filtering operations. match_fields: a list of keys to use to match the existing model. When empty, all fields are matched. upsert: When using match_fields and actual model values differ from `kwargs`, automatically perform an update operation on the model. 
attribute_names: an iterable of attribute names to pass into the ``update`` method. with_for_update: indicating FOR UPDATE should be used, or may be a dictionary containing flags to indicate a more specific set of FOR UPDATE flags for the SELECT auto_expunge: Remove object from session before returning. auto_refresh: Refresh object from session before returning. auto_commit: Commit objects before returning. error_messages: An optional dictionary of templates to use for friendlier error messages to clients load: Set relationships to be loaded execution_options: Set default execution options uniquify: Optionally apply the ``unique()`` method to results before returning. **kwargs: Identifier of the instance to be retrieved. Returns: a tuple that includes the instance and whether it needed to be created. When using match_fields and actual model values differ from ``kwargs``, the model value will be updated. """ self.uniquify = self._get_uniquify(uniquify) error_messages = self._get_error_messages( error_messages=error_messages, default_messages=self.error_messages, ) with wrap_sqlalchemy_exception(error_messages=error_messages, dialect_name=self._dialect.name): if match_fields := self._get_match_fields(match_fields=match_fields): match_filter = { field_name: kwargs.get(field_name) for field_name in match_fields if kwargs.get(field_name) is not None } else: match_filter = kwargs existing = await self.get_one_or_none( *filters, **match_filter, load=load, execution_options=execution_options, ) if not existing: return ( await self.add( self.model_type(**kwargs), auto_commit=auto_commit, auto_expunge=auto_expunge, auto_refresh=auto_refresh, ), True, ) if upsert: for field_name, new_field_value in kwargs.items(): field = getattr(existing, field_name, MISSING) if field is not MISSING and field != new_field_value: setattr(existing, field_name, new_field_value) existing = await self._attach_to_session(existing, strategy="merge") await 
self._flush_or_commit(auto_commit=auto_commit) await self._refresh( existing, attribute_names=attribute_names, with_for_update=with_for_update, auto_refresh=auto_refresh, ) self._expunge(existing, auto_expunge=auto_expunge) return existing, False async def get_and_update( self, *filters: Union[StatementFilter, ColumnElement[bool]], match_fields: Optional[Union[list[str], str]] = None, attribute_names: Optional[Iterable[str]] = None, with_for_update: Optional[bool] = None, auto_commit: Optional[bool] = None, auto_expunge: Optional[bool] = None, auto_refresh: Optional[bool] = None, error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, **kwargs: Any, ) -> tuple[ModelT, bool]: """Get instance identified by ``kwargs`` and update the model if the arguments are different. Args: *filters: Types for specific filtering operations. match_fields: a list of keys to use to match the existing model. When empty, all fields are matched. attribute_names: an iterable of attribute names to pass into the ``update`` method. with_for_update: indicating FOR UPDATE should be used, or may be a dictionary containing flags to indicate a more specific set of FOR UPDATE flags for the SELECT auto_expunge: Remove object from session before returning. auto_refresh: Refresh object from session before returning. auto_commit: Commit objects before returning. error_messages: An optional dictionary of templates to use for friendlier error messages to clients load: Set relationships to be loaded execution_options: Set default execution options uniquify: Optionally apply the ``unique()`` method to results before returning. **kwargs: Identifier of the instance to be retrieved. Returns: a tuple that includes the instance and whether it needed to be updated. When using match_fields and actual model values differ from ``kwargs``, the model value will be updated. 
Raises: NotFoundError: If no instance found identified by `item_id`. """ self.uniquify = self._get_uniquify(uniquify) error_messages = self._get_error_messages( error_messages=error_messages, default_messages=self.error_messages, ) with wrap_sqlalchemy_exception(error_messages=error_messages, dialect_name=self._dialect.name): if match_fields := self._get_match_fields(match_fields=match_fields): match_filter = { field_name: kwargs.get(field_name) for field_name in match_fields if kwargs.get(field_name) is not None } else: match_filter = kwargs existing = await self.get_one(*filters, **match_filter, load=load, execution_options=execution_options) updated = False for field_name, new_field_value in kwargs.items(): field = getattr(existing, field_name, MISSING) if field is not MISSING and field != new_field_value: updated = True setattr(existing, field_name, new_field_value) existing = await self._attach_to_session(existing, strategy="merge") await self._flush_or_commit(auto_commit=auto_commit) await self._refresh( existing, attribute_names=attribute_names, with_for_update=with_for_update, auto_refresh=auto_refresh, ) self._expunge(existing, auto_expunge=auto_expunge) return existing, updated async def count( self, *filters: Union[StatementFilter, ColumnElement[bool]], statement: Optional[Select[tuple[ModelT]]] = None, error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, **kwargs: Any, ) -> int: """Get the count of records returned by a query. Args: *filters: Types for specific filtering operations. statement: To facilitate customization of the underlying select query. error_messages: An optional dictionary of templates to use for friendlier error messages to clients load: Set relationships to be loaded execution_options: Set default execution options uniquify: Optionally apply the ``unique()`` method to results before returning. 
**kwargs: Instance attribute value filters. Returns: Count of records returned by query, ignoring pagination. """ self.uniquify = self._get_uniquify(uniquify) error_messages = self._get_error_messages( error_messages=error_messages, default_messages=self.error_messages, ) with wrap_sqlalchemy_exception(error_messages=error_messages, dialect_name=self._dialect.name): execution_options = self._get_execution_options(execution_options) statement = self.statement if statement is None else statement loader_options, loader_options_have_wildcard = self._get_loader_options(load) statement = self._get_base_stmt( statement=statement, loader_options=loader_options, execution_options=execution_options, ) statement = self._apply_filters(*filters, apply_pagination=False, statement=statement) statement = self._filter_select_by_kwargs(statement, kwargs) results = await self._execute( statement=self._get_count_stmt( statement=statement, loader_options=loader_options, execution_options=execution_options ), uniquify=loader_options_have_wildcard, ) return cast("int", results.scalar_one()) async def update( self, data: ModelT, *, attribute_names: Optional[Iterable[str]] = None, with_for_update: Optional[bool] = None, auto_commit: Optional[bool] = None, auto_expunge: Optional[bool] = None, auto_refresh: Optional[bool] = None, id_attribute: Optional[Union[str, InstrumentedAttribute[Any]]] = None, error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, ) -> ModelT: """Update instance with the attribute values present on `data`. Args: data: An instance that should have a value for `self.id_attribute` that exists in the collection. attribute_names: an iterable of attribute names to pass into the ``update`` method. 
with_for_update: indicating FOR UPDATE should be used, or may be a dictionary containing flags to indicate a more specific set of FOR UPDATE flags for the SELECT auto_expunge: Remove object from session before returning. auto_refresh: Refresh object from session before returning. auto_commit: Commit objects before returning. id_attribute: Allows customization of the unique identifier to use for model fetching. Defaults to `id`, but can reference any surrogate or candidate key for the table. error_messages: An optional dictionary of templates to use for friendlier error messages to clients load: Set relationships to be loaded execution_options: Set default execution options uniquify: Optionally apply the ``unique()`` method to results before returning. Returns: The updated instance. Raises: NotFoundError: If no instance found with same identifier as `data`. """ self.uniquify = self._get_uniquify(uniquify) error_messages = self._get_error_messages( error_messages=error_messages, default_messages=self.error_messages, ) with wrap_sqlalchemy_exception(error_messages=error_messages, dialect_name=self._dialect.name): item_id = self.get_id_attribute_value( data, id_attribute=id_attribute, ) # this will raise for not found, and will put the item in the session await self.get(item_id, id_attribute=id_attribute, load=load, execution_options=execution_options) # this will merge the inbound data to the instance we just put in the session instance = await self._attach_to_session(data, strategy="merge") await self._flush_or_commit(auto_commit=auto_commit) await self._refresh( instance, attribute_names=attribute_names, with_for_update=with_for_update, auto_refresh=auto_refresh, ) self._expunge(instance, auto_expunge=auto_expunge) return instance async def update_many( self, data: list[ModelT], *, auto_commit: Optional[bool] = None, auto_expunge: Optional[bool] = None, error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty, load: Optional[LoadSpec] = None, 
execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, ) -> list[ModelT]: """Update one or more instances with the attribute values present on `data`. This function has an optimized bulk update based on the configured SQL dialect: - For backends supporting `RETURNING` with `executemany`, a single bulk update with returning clause is executed. - For other backends, it does a bulk update and then returns the updated data after a refresh. Args: data: A list of instances to update. Each should have a value for `self.id_attribute` that exists in the collection. auto_expunge: Remove object from session before returning. auto_commit: Commit objects before returning. error_messages: An optional dictionary of templates to use for friendlier error messages to clients load: Set default relationships to be loaded execution_options: Set default execution options uniquify: Optionally apply the ``unique()`` method to results before returning. Returns: The updated instances. Raises: NotFoundError: If no instance found with same identifier as `data`. 
""" self.uniquify = self._get_uniquify(uniquify) error_messages = self._get_error_messages( error_messages=error_messages, default_messages=self.error_messages, ) data_to_update: list[dict[str, Any]] = [v.to_dict() if isinstance(v, self.model_type) else v for v in data] # type: ignore[misc] with wrap_sqlalchemy_exception(error_messages=error_messages, dialect_name=self._dialect.name): execution_options = self._get_execution_options(execution_options) loader_options = self._get_loader_options(load)[0] supports_returning = self._dialect.update_executemany_returning and self._dialect.name != "oracle" statement = self._get_update_many_statement( self.model_type, supports_returning, loader_options=loader_options, execution_options=execution_options, ) if supports_returning: instances = list( await self.session.scalars( statement, cast("_CoreSingleExecuteParams", data_to_update), # this is not correct but the only way # currently to deal with an SQLAlchemy typing issue. See # https://github.com/sqlalchemy/sqlalchemy/discussions/9925 execution_options=execution_options, ), ) await self._flush_or_commit(auto_commit=auto_commit) for instance in instances: self._expunge(instance, auto_expunge=auto_expunge) return instances await self.session.execute(statement, data_to_update, execution_options=execution_options) await self._flush_or_commit(auto_commit=auto_commit) return data def _get_update_many_statement( self, model_type: type[ModelT], supports_returning: bool, loader_options: Union[list[_AbstractLoad], None], execution_options: Union[dict[str, Any], None], ) -> Union[Update, ReturningUpdate[tuple[ModelT]]]: # Base update statement is static statement = self._get_base_stmt( statement=update(table=model_type), loader_options=loader_options, execution_options=execution_options ) if supports_returning: return statement.returning(model_type) return statement async def list_and_count( self, *filters: Union[StatementFilter, ColumnElement[bool]], statement: 
Optional[Select[tuple[ModelT]]] = None, auto_expunge: Optional[bool] = None, count_with_window_function: Optional[bool] = None, order_by: Optional[Union[list[OrderingPair], OrderingPair]] = None, error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, **kwargs: Any, ) -> tuple[list[ModelT], int]: """List records with total count. Args: *filters: Types for specific filtering operations. statement: To facilitate customization of the underlying select query. auto_expunge: Remove object from session before returning. count_with_window_function: When false, list and count will use two queries instead of an analytical window function. order_by: Set default order options for queries. error_messages: An optional dictionary of templates to use for friendlier error messages to clients load: Set relationships to be loaded execution_options: Set default execution options uniquify: Optionally apply the ``unique()`` method to results before returning. **kwargs: Instance attribute value filters. Returns: Count of records returned by query, ignoring pagination. 
""" count_with_window_function = ( count_with_window_function if count_with_window_function is not None else self.count_with_window_function ) self.uniquify = self._get_uniquify(uniquify) error_messages = self._get_error_messages( error_messages=error_messages, default_messages=self.error_messages, ) if self._dialect.name in {"spanner", "spanner+spanner"} or not count_with_window_function: return await self._list_and_count_basic( *filters, auto_expunge=auto_expunge, statement=statement, load=load, execution_options=execution_options, order_by=order_by, error_messages=error_messages, **kwargs, ) return await self._list_and_count_window( *filters, auto_expunge=auto_expunge, statement=statement, load=load, execution_options=execution_options, error_messages=error_messages, order_by=order_by, **kwargs, ) def _expunge(self, instance: ModelT, auto_expunge: Optional[bool]) -> None: if auto_expunge is None: auto_expunge = self.auto_expunge return self.session.expunge(instance) if auto_expunge else None async def _flush_or_commit(self, auto_commit: Optional[bool]) -> None: if auto_commit is None: auto_commit = self.auto_commit return await self.session.commit() if auto_commit else await self.session.flush() async def _refresh( self, instance: ModelT, auto_refresh: Optional[bool], attribute_names: Optional[Iterable[str]] = None, with_for_update: Optional[bool] = None, ) -> None: if auto_refresh is None: auto_refresh = self.auto_refresh return ( await self.session.refresh( instance=instance, attribute_names=attribute_names, with_for_update=with_for_update, ) if auto_refresh else None ) async def _list_and_count_window( self, *filters: Union[StatementFilter, ColumnElement[bool]], auto_expunge: Optional[bool] = None, statement: Optional[Select[tuple[ModelT]]] = None, order_by: Optional[Union[list[OrderingPair], OrderingPair]] = None, error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = 
None, **kwargs: Any, ) -> tuple[list[ModelT], int]: """List records with total count. Args: *filters: Types for specific filtering operations. auto_expunge: Remove object from session before returning. statement: To facilitate customization of the underlying select query. order_by: List[OrderingPair] | OrderingPair | None = None, error_messages: An optional dictionary of templates to use for friendlier error messages to clients load: Set relationships to be loaded execution_options: Set default execution options **kwargs: Instance attribute value filters. Returns: Count of records returned by query using an analytical window function, ignoring pagination. """ error_messages = self._get_error_messages( error_messages=error_messages, default_messages=self.error_messages, ) with wrap_sqlalchemy_exception(error_messages=error_messages, dialect_name=self._dialect.name): execution_options = self._get_execution_options(execution_options) statement = self.statement if statement is None else statement loader_options, loader_options_have_wildcard = self._get_loader_options(load) statement = self._get_base_stmt( statement=statement, loader_options=loader_options, execution_options=execution_options, ) if order_by is None: order_by = self.order_by or [] statement = self._apply_order_by(statement=statement, order_by=order_by) statement = self._apply_filters(*filters, statement=statement) statement = self._filter_select_by_kwargs(statement, kwargs) result = await self._execute( statement.add_columns(over(sql_func.count())), uniquify=loader_options_have_wildcard ) count: int = 0 instances: list[ModelT] = [] for i, (instance, count_value) in enumerate(result): self._expunge(instance, auto_expunge=auto_expunge) instances.append(instance) if i == 0: count = count_value return instances, count async def _list_and_count_basic( self, *filters: Union[StatementFilter, ColumnElement[bool]], auto_expunge: Optional[bool] = None, statement: Optional[Select[tuple[ModelT]]] = None, order_by: 
Optional[Union[list[OrderingPair], OrderingPair]] = None, error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, **kwargs: Any, ) -> tuple[list[ModelT], int]: """List records with total count. Args: *filters: Types for specific filtering operations. auto_expunge: Remove object from session before returning. statement: To facilitate customization of the underlying select query. order_by: Set default order options for queries. error_messages: An optional dictionary of templates to use for friendlier error messages to clients load: Set relationships to be loaded execution_options: Set default execution options **kwargs: Instance attribute value filters. Returns: Count of records returned by query using 2 queries, ignoring pagination. """ error_messages = self._get_error_messages( error_messages=error_messages, default_messages=self.error_messages, ) with wrap_sqlalchemy_exception(error_messages=error_messages, dialect_name=self._dialect.name): execution_options = self._get_execution_options(execution_options) statement = self.statement if statement is None else statement loader_options, loader_options_have_wildcard = self._get_loader_options(load) statement = self._get_base_stmt( statement=statement, loader_options=loader_options, execution_options=execution_options, ) if order_by is None: order_by = self.order_by or [] statement = self._apply_order_by(statement=statement, order_by=order_by) statement = self._apply_filters(*filters, statement=statement) statement = self._filter_select_by_kwargs(statement, kwargs) count_result = await self.session.execute( self._get_count_stmt( statement, loader_options=loader_options, execution_options=execution_options, ), ) count = count_result.scalar_one() result = await self._execute(statement, uniquify=loader_options_have_wildcard) instances: list[ModelT] = [] for (instance,) in result: self._expunge(instance, 
auto_expunge=auto_expunge) instances.append(instance) return instances, count def _get_count_stmt( self, statement: Select[tuple[ModelT]], loader_options: Optional[list[_AbstractLoad]], execution_options: Optional[dict[str, Any]], ) -> Select[tuple[int]]: # Count statement transformations are static return ( statement.with_only_columns(sql_func.count(text("1")), maintain_column_froms=True) .limit(None) .offset(None) .order_by(None) ) async def upsert( self, data: ModelT, *, attribute_names: Optional[Iterable[str]] = None, with_for_update: Optional[bool] = None, auto_expunge: Optional[bool] = None, auto_commit: Optional[bool] = None, auto_refresh: Optional[bool] = None, match_fields: Optional[Union[list[str], str]] = None, error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, ) -> ModelT: """Modify or create instance. Updates instance with the attribute values present on `data`, or creates a new instance if one doesn't exist. Args: data: Instance to update existing, or be created. Identifier used to determine if an existing instance exists is the value of an attribute on `data` named as value of `self.id_attribute`. attribute_names: an iterable of attribute names to pass into the ``update`` method. with_for_update: indicating FOR UPDATE should be used, or may be a dictionary containing flags to indicate a more specific set of FOR UPDATE flags for the SELECT auto_expunge: Remove object from session before returning. auto_refresh: Refresh object from session before returning. auto_commit: Commit objects before returning. match_fields: a list of keys to use to match the existing model. When empty, all fields are matched. 
error_messages: An optional dictionary of templates to use for friendlier error messages to clients load: Set relationships to be loaded execution_options: Set default execution options uniquify: Optionally apply the ``unique()`` method to results before returning. Returns: The updated or created instance. Raises: NotFoundError: If no instance found with same identifier as `data`. """ self.uniquify = self._get_uniquify(uniquify) error_messages = self._get_error_messages( error_messages=error_messages, default_messages=self.error_messages, ) if match_fields := self._get_match_fields(match_fields=match_fields): match_filter = { field_name: getattr(data, field_name, None) for field_name in match_fields if getattr(data, field_name, None) is not None } elif getattr(data, self.id_attribute, None) is not None: match_filter = {self.id_attribute: getattr(data, self.id_attribute, None)} else: match_filter = data.to_dict(exclude={self.id_attribute}) existing = await self.get_one_or_none(load=load, execution_options=execution_options, **match_filter) if not existing: return await self.add( data, auto_commit=auto_commit, auto_expunge=auto_expunge, auto_refresh=auto_refresh, ) with wrap_sqlalchemy_exception(error_messages=error_messages, dialect_name=self._dialect.name): for field_name, new_field_value in data.to_dict(exclude={self.id_attribute}).items(): field = getattr(existing, field_name, MISSING) if field is not MISSING and field != new_field_value: setattr(existing, field_name, new_field_value) instance = await self._attach_to_session(existing, strategy="merge") await self._flush_or_commit(auto_commit=auto_commit) await self._refresh( instance, attribute_names=attribute_names, with_for_update=with_for_update, auto_refresh=auto_refresh, ) self._expunge(instance, auto_expunge=auto_expunge) return instance async def upsert_many( self, data: list[ModelT], *, auto_expunge: Optional[bool] = None, auto_commit: Optional[bool] = None, no_merge: bool = False, match_fields: 
Optional[Union[list[str], str]] = None, error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, ) -> list[ModelT]: """Modify or create multiple instances. Update instances with the attribute values present on `data`, or create a new instance if one doesn't exist. !!! tip In most cases, you will want to set `match_fields` to the combination of attributes, excluded the primary key, that define uniqueness for a row. Args: data: Instance to update existing, or be created. Identifier used to determine if an existing instance exists is the value of an attribute on ``data`` named as value of :attr:`id_attribute`. auto_expunge: Remove object from session before returning. auto_commit: Commit objects before returning. no_merge: Skip the usage of optimized Merge statements match_fields: a list of keys to use to match the existing model. When empty, automatically uses ``self.id_attribute`` (`id` by default) to match . error_messages: An optional dictionary of templates to use for friendlier error messages to clients load: Set default relationships to be loaded execution_options: Set default execution options uniquify: Optionally apply the ``unique()`` method to results before returning. Returns: The updated or created instance. Raises: NotFoundError: If no instance found with same identifier as ``data``. 
""" self.uniquify = self._get_uniquify(uniquify) error_messages = self._get_error_messages( error_messages=error_messages, default_messages=self.error_messages, ) instances: list[ModelT] = [] data_to_update: list[ModelT] = [] data_to_insert: list[ModelT] = [] match_fields = self._get_match_fields(match_fields=match_fields) if match_fields is None: match_fields = [self.id_attribute] match_filter: list[Union[StatementFilter, ColumnElement[bool]]] = [] if match_fields: for field_name in match_fields: field = get_instrumented_attr(self.model_type, field_name) matched_values = [ field_data for datum in data if (field_data := getattr(datum, field_name)) is not None ] match_filter.append(any_(matched_values) == field if self._prefer_any else field.in_(matched_values)) # type: ignore[arg-type] with wrap_sqlalchemy_exception(error_messages=error_messages, dialect_name=self._dialect.name): existing_objs = await self.list( *match_filter, load=load, execution_options=execution_options, auto_expunge=False, ) for field_name in match_fields: field = get_instrumented_attr(self.model_type, field_name) matched_values = list( {getattr(datum, field_name) for datum in existing_objs if datum}, # ensure the list is unique ) match_filter.append(any_(matched_values) == field if self._prefer_any else field.in_(matched_values)) # type: ignore[arg-type] existing_ids = self._get_object_ids(existing_objs=existing_objs) data = self._merge_on_match_fields(data, existing_objs, match_fields) for datum in data: if getattr(datum, self.id_attribute, None) in existing_ids: data_to_update.append(datum) else: data_to_insert.append(datum) if data_to_insert: instances.extend( await self.add_many(data_to_insert, auto_commit=False, auto_expunge=False), ) if data_to_update: instances.extend( await self.update_many( data_to_update, auto_commit=False, auto_expunge=False, load=load, execution_options=execution_options, ), ) await self._flush_or_commit(auto_commit=auto_commit) for instance in instances: 
self._expunge(instance, auto_expunge=auto_expunge) return instances def _get_object_ids(self, existing_objs: list[ModelT]) -> list[Any]: return [obj_id for datum in existing_objs if (obj_id := getattr(datum, self.id_attribute)) is not None] def _get_match_fields( self, match_fields: Optional[Union[list[str], str]] = None, id_attribute: Optional[str] = None, ) -> Optional[list[str]]: id_attribute = id_attribute or self.id_attribute match_fields = match_fields or self.match_fields if isinstance(match_fields, str): match_fields = [match_fields] return match_fields def _merge_on_match_fields( self, data: list[ModelT], existing_data: list[ModelT], match_fields: Optional[Union[list[str], str]] = None, ) -> list[ModelT]: match_fields = self._get_match_fields(match_fields=match_fields) if match_fields is None: match_fields = [self.id_attribute] for existing_datum in existing_data: for _row_id, datum in enumerate(data): match = all( getattr(datum, field_name) == getattr(existing_datum, field_name) for field_name in match_fields ) if match and getattr(existing_datum, self.id_attribute) is not None: setattr(datum, self.id_attribute, getattr(existing_datum, self.id_attribute)) return data async def list( self, *filters: Union[StatementFilter, ColumnElement[bool]], auto_expunge: Optional[bool] = None, statement: Optional[Select[tuple[ModelT]]] = None, order_by: Optional[Union[list[OrderingPair], OrderingPair]] = None, error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, **kwargs: Any, ) -> list[ModelT]: """Get a list of instances, optionally filtered. Args: *filters: Types for specific filtering operations. auto_expunge: Remove object from session before returning. statement: To facilitate customization of the underlying select query. order_by: Set default order options for queries. 
error_messages: An optional dictionary of templates to use for friendlier error messages to clients load: Set relationships to be loaded execution_options: Set default execution options uniquify: Optionally apply the ``unique()`` method to results before returning. **kwargs: Instance attribute value filters. Returns: The list of instances, after filtering applied. """ self.uniquify = self._get_uniquify(uniquify) error_messages = self._get_error_messages( error_messages=error_messages, default_messages=self.error_messages, ) with wrap_sqlalchemy_exception(error_messages=error_messages, dialect_name=self._dialect.name): execution_options = self._get_execution_options(execution_options) statement = self.statement if statement is None else statement loader_options, loader_options_have_wildcard = self._get_loader_options(load) statement = self._get_base_stmt( statement=statement, loader_options=loader_options, execution_options=execution_options, ) if order_by is None: order_by = self.order_by or [] statement = self._apply_order_by(statement=statement, order_by=order_by) statement = self._apply_filters(*filters, statement=statement) statement = self._filter_select_by_kwargs(statement, kwargs) result = await self._execute(statement, uniquify=loader_options_have_wildcard) instances = list(result.scalars()) for instance in instances: self._expunge(instance, auto_expunge=auto_expunge) return cast("list[ModelT]", instances) @classmethod async def check_health(cls, session: Union[AsyncSession, async_scoped_session[AsyncSession]]) -> bool: """Perform a health check on the database. Args: session: through which we run a check statement Returns: ``True`` if healthy. 
""" with wrap_sqlalchemy_exception(): return ( # type: ignore[no-any-return] await session.execute(cls._get_health_check_statement(session)) ).scalar_one() == 1 @staticmethod def _get_health_check_statement(session: Union[AsyncSession, async_scoped_session[AsyncSession]]) -> TextClause: if session.bind and session.bind.dialect.name == "oracle": return text("SELECT 1 FROM DUAL") return text("SELECT 1") async def _attach_to_session( self, model: ModelT, strategy: Literal["add", "merge"] = "add", load: bool = True ) -> ModelT: """Attach detached instance to the session. Args: model: The instance to be attached to the session. strategy: How the instance should be attached. - "add": New instance added to session - "merge": Instance merged with existing, or new one added. load: Boolean, when False, merge switches into a "high performance" mode which causes it to forego emitting history events as well as all database access. This flag is used for cases such as transferring graphs of objects into a session from a second level cache, or to transfer just-loaded objects into the session owned by a worker thread or process without re-querying the database. Returns: Instance attached to the session - if `"merge"` strategy, may not be same instance that was provided. 
""" if strategy == "add": self.session.add(model) return model if strategy == "merge": return await self.session.merge(model, load=load) msg = "Unexpected value for `strategy`, must be `'add'` or `'merge'`" # type: ignore[unreachable] raise ValueError(msg) async def _execute( self, statement: Select[Any], uniquify: bool = False, ) -> Result[Any]: result = await self.session.execute(statement) if uniquify or self.uniquify: result = result.unique() return result class SQLAlchemyAsyncSlugRepository( SQLAlchemyAsyncRepository[ModelT], SQLAlchemyAsyncSlugRepositoryProtocol[ModelT], ): """Extends the repository to include slug model features..""" async def get_by_slug( self, slug: str, error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, **kwargs: Any, ) -> Optional[ModelT]: """Select record by slug value.""" return await self.get_one_or_none( slug=slug, load=load, execution_options=execution_options, error_messages=error_messages, uniquify=uniquify, ) async def get_available_slug( self, value_to_slugify: str, **kwargs: Any, ) -> str: """Get a unique slug for the supplied value. If the value is found to exist, a random 4 digit character is appended to the end. Override this method to change the default behavior Args: value_to_slugify (str): A string that should be converted to a unique slug. **kwargs: stuff Returns: str: a unique slug for the supplied value. This is safe for URLs and other unique identifiers. 
""" slug = slugify(value_to_slugify) if await self._is_slug_unique(slug): return slug random_string = "".join(random.choices(string.ascii_lowercase + string.digits, k=4)) # noqa: S311 return f"{slug}-{random_string}" async def _is_slug_unique( self, slug: str, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, **kwargs: Any, ) -> bool: return await self.exists(slug=slug, load=load, execution_options=execution_options, **kwargs) is False class SQLAlchemyAsyncQueryRepository: """SQLAlchemy Query Repository. This is a loosely typed helper to query for when you need to select data in ways that don't align to the normal repository pattern. """ error_messages: Optional[ErrorMessages] = None def __init__( self, *, session: Union[AsyncSession, async_scoped_session[AsyncSession]], error_messages: Optional[ErrorMessages] = None, **kwargs: Any, ) -> None: """Repository pattern for SQLAlchemy models. Args: session: Session managing the unit-of-work for the operation. error_messages: A set of error messages to use for operations. **kwargs: Additional arguments. """ super().__init__(**kwargs) self.session = session self.error_messages = error_messages self._dialect = self.session.bind.dialect if self.session.bind is not None else self.session.get_bind().dialect async def get_one( self, statement: Select[tuple[Any]], **kwargs: Any, ) -> Row[Any]: """Get instance identified by ``kwargs``. Args: statement: To facilitate customization of the underlying select query. **kwargs: Instance attribute value filters. Returns: The retrieved instance. Raises: NotFoundError: If no instance found identified by `item_id`. 
""" with wrap_sqlalchemy_exception(error_messages=self.error_messages): statement = self._filter_statement_by_kwargs(statement, **kwargs) instance = (await self.execute(statement)).scalar_one_or_none() return self.check_not_found(instance) async def get_one_or_none( self, statement: Select[Any], **kwargs: Any, ) -> Optional[Row[Any]]: """Get instance identified by ``kwargs`` or None if not found. Args: statement: To facilitate customization of the underlying select query. **kwargs: Instance attribute value filters. Returns: The retrieved instance or None """ with wrap_sqlalchemy_exception(error_messages=self.error_messages): statement = self._filter_statement_by_kwargs(statement, **kwargs) instance = (await self.execute(statement)).scalar_one_or_none() return instance or None async def count(self, statement: Select[Any], **kwargs: Any) -> int: """Get the count of records returned by a query. Args: statement: To facilitate customization of the underlying select query. **kwargs: Instance attribute value filters. Returns: Count of records returned by query, ignoring pagination. """ with wrap_sqlalchemy_exception(error_messages=self.error_messages): statement = statement.with_only_columns(sql_func.count(text("1")), maintain_column_froms=True).order_by( None, ) statement = self._filter_statement_by_kwargs(statement, **kwargs) results = await self.execute(statement) return results.scalar_one() # type: ignore # noqa: PGH003 async def list_and_count( self, statement: Select[Any], count_with_window_function: Optional[bool] = None, **kwargs: Any, ) -> tuple[list[Row[Any]], int]: """List records with total count. Args: statement: To facilitate customization of the underlying select query. count_with_window_function: Force list and count to use two queries instead of an analytical window function. **kwargs: Instance attribute value filters. Returns: Count of records returned by query, ignoring pagination. 
""" if self._dialect.name in {"spanner", "spanner+spanner"} or count_with_window_function: return await self._list_and_count_basic(statement=statement, **kwargs) return await self._list_and_count_window(statement=statement, **kwargs) async def _list_and_count_window( self, statement: Select[Any], **kwargs: Any, ) -> tuple[list[Row[Any]], int]: """List records with total count. Args: *filters: Types for specific filtering operations. statement: To facilitate customization of the underlying select query. **kwargs: Instance attribute value filters. Returns: Count of records returned by query using an analytical window function, ignoring pagination. """ with wrap_sqlalchemy_exception(error_messages=self.error_messages): statement = statement.add_columns(over(sql_func.count(text("1")))) statement = self._filter_statement_by_kwargs(statement, **kwargs) result = await self.execute(statement) count: int = 0 instances: list[Row[Any]] = [] for i, (instance, count_value) in enumerate(result): instances.append(instance) if i == 0: count = count_value return instances, count def _get_count_stmt(self, statement: Select[Any]) -> Select[Any]: return statement.with_only_columns(sql_func.count(text("1")), maintain_column_froms=True).order_by(None) # pyright: ignore[reportUnknownVariable] async def _list_and_count_basic( self, statement: Select[Any], **kwargs: Any, ) -> tuple[list[Row[Any]], int]: """List records with total count. Args: statement: To facilitate customization of the underlying select query. . **kwargs: Instance attribute value filters. Returns: Count of records returned by query using 2 queries, ignoring pagination. 
""" with wrap_sqlalchemy_exception(error_messages=self.error_messages): statement = self._filter_statement_by_kwargs(statement, **kwargs) count_result = await self.session.execute(self._get_count_stmt(statement)) count = count_result.scalar_one() result = await self.execute(statement) instances: list[Row[Any]] = [] for (instance,) in result: instances.append(instance) return instances, count async def list(self, statement: Select[Any], **kwargs: Any) -> list[Row[Any]]: """Get a list of instances, optionally filtered. Args: statement: To facilitate customization of the underlying select query. **kwargs: Instance attribute value filters. Returns: The list of instances, after filtering applied. """ with wrap_sqlalchemy_exception(error_messages=self.error_messages): statement = self._filter_statement_by_kwargs(statement, **kwargs) result = await self.execute(statement) return list(result.all()) def _filter_statement_by_kwargs( self, statement: Select[Any], /, **kwargs: Any, ) -> Select[Any]: """Filter the collection by kwargs. Args: statement: statement to filter **kwargs: key/value pairs such that objects remaining in the statement after filtering have the property that their attribute named `key` has value equal to `value`. """ with wrap_sqlalchemy_exception(error_messages=self.error_messages): return statement.filter_by(**kwargs) # the following is all sqlalchemy implementation detail, and shouldn't be directly accessed @staticmethod def check_not_found(item_or_none: Optional[T]) -> T: """Raise :class:`NotFoundError` if ``item_or_none`` is ``None``. Args: item_or_none: Item to be tested for existence. Returns: The item, if it exists. 
""" if item_or_none is None: msg = "No item found when one was expected" raise NotFoundError(msg) return item_or_none async def execute( self, statement: Union[ ReturningDelete[tuple[Any]], ReturningUpdate[tuple[Any]], Select[tuple[Any]], Update, Delete, Select[Any] ], ) -> Result[Any]: return await self.session.execute(statement) python-advanced-alchemy-1.0.1/advanced_alchemy/repository/_sync.py000066400000000000000000003045041476663714600254220ustar00rootroot00000000000000# Do not edit this file directly. It has been autogenerated from # advanced_alchemy/repository/_async.py import random import string from collections.abc import Iterable, Sequence from typing import ( TYPE_CHECKING, Any, Final, Literal, Optional, Protocol, Union, cast, runtime_checkable, ) from sqlalchemy import ( Delete, Result, Row, Select, TextClause, Update, any_, delete, over, select, text, update, ) from sqlalchemy import func as sql_func from sqlalchemy.orm import InstrumentedAttribute, Session from sqlalchemy.orm.scoping import scoped_session from sqlalchemy.orm.strategy_options import _AbstractLoad # pyright: ignore[reportPrivateUsage] from sqlalchemy.sql import ColumnElement from sqlalchemy.sql.dml import ReturningDelete, ReturningUpdate from advanced_alchemy.exceptions import ErrorMessages, NotFoundError, RepositoryError, wrap_sqlalchemy_exception from advanced_alchemy.filters import StatementFilter, StatementTypeT from advanced_alchemy.repository._util import ( DEFAULT_ERROR_MESSAGE_TEMPLATES, FilterableRepository, FilterableRepositoryProtocol, LoadSpec, get_abstract_loader_options, get_instrumented_attr, ) from advanced_alchemy.repository.typing import MISSING, ModelT, OrderingPair, T from advanced_alchemy.utils.dataclass import Empty, EmptyType from advanced_alchemy.utils.text import slugify if TYPE_CHECKING: from sqlalchemy.engine.interfaces import _CoreSingleExecuteParams # pyright: ignore[reportPrivateUsage] DEFAULT_INSERTMANYVALUES_MAX_PARAMETERS: Final = 950 
POSTGRES_VERSION_SUPPORTING_MERGE: Final = 15 @runtime_checkable class SQLAlchemySyncRepositoryProtocol(FilterableRepositoryProtocol[ModelT], Protocol[ModelT]): """Base Protocol""" id_attribute: str match_fields: Optional[Union[list[str], str]] = None statement: Select[tuple[ModelT]] session: Union[Session, scoped_session[Session]] auto_expunge: bool auto_refresh: bool auto_commit: bool order_by: Optional[Union[list[OrderingPair], OrderingPair]] = None error_messages: Optional[ErrorMessages] = None wrap_exceptions: bool = True def __init__( self, *, statement: Optional[Select[tuple[ModelT]]] = None, session: Union[Session, scoped_session[Session]], auto_expunge: bool = False, auto_refresh: bool = True, auto_commit: bool = False, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, order_by: Optional[Union[list[OrderingPair], OrderingPair]] = None, error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty, wrap_exceptions: bool = True, **kwargs: Any, ) -> None: ... @classmethod def get_id_attribute_value( cls, item: Union[ModelT, type[ModelT]], id_attribute: Optional[Union[str, InstrumentedAttribute[Any]]] = None, ) -> Any: ... @classmethod def set_id_attribute_value( cls, item_id: Any, item: ModelT, id_attribute: Optional[Union[str, InstrumentedAttribute[Any]]] = None, ) -> ModelT: ... @staticmethod def check_not_found(item_or_none: Optional[ModelT]) -> ModelT: ... def add( self, data: ModelT, *, auto_commit: Optional[bool] = None, auto_expunge: Optional[bool] = None, auto_refresh: Optional[bool] = None, error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty, ) -> ModelT: ... def add_many( self, data: list[ModelT], *, auto_commit: Optional[bool] = None, auto_expunge: Optional[bool] = None, error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty, ) -> Sequence[ModelT]: ... 
def delete( self, item_id: Any, *, auto_commit: Optional[bool] = None, auto_expunge: Optional[bool] = None, id_attribute: Optional[Union[str, InstrumentedAttribute[Any]]] = None, error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, ) -> ModelT: ... def delete_many( self, item_ids: list[Any], *, auto_commit: Optional[bool] = None, auto_expunge: Optional[bool] = None, id_attribute: Optional[Union[str, InstrumentedAttribute[Any]]] = None, chunk_size: Optional[int] = None, error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, ) -> Sequence[ModelT]: ... def delete_where( self, *filters: Union[StatementFilter, ColumnElement[bool]], auto_commit: Optional[bool] = None, auto_expunge: Optional[bool] = None, load: Optional[LoadSpec] = None, error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty, execution_options: Optional[dict[str, Any]] = None, sanity_check: bool = True, **kwargs: Any, ) -> Sequence[ModelT]: ... def exists( self, *filters: Union[StatementFilter, ColumnElement[bool]], load: Optional[LoadSpec] = None, error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty, execution_options: Optional[dict[str, Any]] = None, **kwargs: Any, ) -> bool: ... def get( self, item_id: Any, *, auto_expunge: Optional[bool] = None, statement: Optional[Select[tuple[ModelT]]] = None, id_attribute: Optional[Union[str, InstrumentedAttribute[Any]]] = None, error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, ) -> ModelT: ... 
    # Fetch exactly one matching instance; raises ``NotFoundError`` when absent.
    def get_one(
        self,
        *filters: Union[StatementFilter, ColumnElement[bool]],
        auto_expunge: Optional[bool] = None,
        statement: Optional[Select[tuple[ModelT]]] = None,
        error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty,
        load: Optional[LoadSpec] = None,
        execution_options: Optional[dict[str, Any]] = None,
        **kwargs: Any,
    ) -> ModelT: ...

    # Fetch one matching instance, or ``None`` when absent.
    def get_one_or_none(
        self,
        *filters: Union[StatementFilter, ColumnElement[bool]],
        auto_expunge: Optional[bool] = None,
        statement: Optional[Select[tuple[ModelT]]] = None,
        error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty,
        load: Optional[LoadSpec] = None,
        execution_options: Optional[dict[str, Any]] = None,
        **kwargs: Any,
    ) -> Optional[ModelT]: ...

    # Fetch a matching instance or create it; returns ``(instance, created)``.
    def get_or_upsert(
        self,
        *filters: Union[StatementFilter, ColumnElement[bool]],
        match_fields: Optional[Union[list[str], str]] = None,
        upsert: bool = True,
        attribute_names: Optional[Iterable[str]] = None,
        with_for_update: Optional[bool] = None,
        auto_commit: Optional[bool] = None,
        auto_expunge: Optional[bool] = None,
        auto_refresh: Optional[bool] = None,
        error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty,
        load: Optional[LoadSpec] = None,
        execution_options: Optional[dict[str, Any]] = None,
        **kwargs: Any,
    ) -> tuple[ModelT, bool]: ...

    # Fetch a matching instance and update it; returns ``(instance, updated)``.
    def get_and_update(
        self,
        *filters: Union[StatementFilter, ColumnElement[bool]],
        match_fields: Optional[Union[list[str], str]] = None,
        attribute_names: Optional[Iterable[str]] = None,
        with_for_update: Optional[bool] = None,
        auto_commit: Optional[bool] = None,
        auto_expunge: Optional[bool] = None,
        auto_refresh: Optional[bool] = None,
        error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty,
        load: Optional[LoadSpec] = None,
        execution_options: Optional[dict[str, Any]] = None,
        **kwargs: Any,
    ) -> tuple[ModelT, bool]: ...
    # Count instances matching the filters/kwargs.
    def count(
        self,
        *filters: Union[StatementFilter, ColumnElement[bool]],
        statement: Optional[Select[tuple[ModelT]]] = None,
        load: Optional[LoadSpec] = None,
        error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty,
        execution_options: Optional[dict[str, Any]] = None,
        **kwargs: Any,
    ) -> int: ...

    # Update an existing instance with the values from ``data``.
    def update(
        self,
        data: ModelT,
        *,
        attribute_names: Optional[Iterable[str]] = None,
        with_for_update: Optional[bool] = None,
        auto_commit: Optional[bool] = None,
        auto_expunge: Optional[bool] = None,
        auto_refresh: Optional[bool] = None,
        id_attribute: Optional[Union[str, InstrumentedAttribute[Any]]] = None,
        error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty,
        load: Optional[LoadSpec] = None,
        execution_options: Optional[dict[str, Any]] = None,
    ) -> ModelT: ...

    # Update many instances in one unit of work.
    def update_many(
        self,
        data: list[ModelT],
        *,
        auto_commit: Optional[bool] = None,
        auto_expunge: Optional[bool] = None,
        error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty,
        load: Optional[LoadSpec] = None,
        execution_options: Optional[dict[str, Any]] = None,
    ) -> list[ModelT]: ...

    # Build the UPDATE statement used by ``update_many``.
    def _get_update_many_statement(
        self,
        model_type: type[ModelT],
        supports_returning: bool,
        loader_options: Optional[list[_AbstractLoad]],
        execution_options: Optional[dict[str, Any]],
    ) -> Union[Update, ReturningUpdate[tuple[ModelT]]]: ...

    # Update the instance when it exists, otherwise create it.
    def upsert(
        self,
        data: ModelT,
        *,
        attribute_names: Optional[Iterable[str]] = None,
        with_for_update: Optional[bool] = None,
        auto_expunge: Optional[bool] = None,
        auto_commit: Optional[bool] = None,
        auto_refresh: Optional[bool] = None,
        match_fields: Optional[Union[list[str], str]] = None,
        error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty,
        load: Optional[LoadSpec] = None,
        execution_options: Optional[dict[str, Any]] = None,
    ) -> ModelT: ...
    # Upsert many instances; ``no_merge`` skips session-merge based reconciliation.
    def upsert_many(
        self,
        data: list[ModelT],
        *,
        auto_expunge: Optional[bool] = None,
        auto_commit: Optional[bool] = None,
        no_merge: bool = False,
        match_fields: Optional[Union[list[str], str]] = None,
        error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty,
        load: Optional[LoadSpec] = None,
        execution_options: Optional[dict[str, Any]] = None,
    ) -> list[ModelT]: ...

    # List matching instances together with the total count.
    def list_and_count(
        self,
        *filters: Union[StatementFilter, ColumnElement[bool]],
        auto_expunge: Optional[bool] = None,
        statement: Optional[Select[tuple[ModelT]]] = None,
        count_with_window_function: Optional[bool] = None,
        error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty,
        load: Optional[LoadSpec] = None,
        execution_options: Optional[dict[str, Any]] = None,
        order_by: Optional[Union[list[OrderingPair], OrderingPair]] = None,
        **kwargs: Any,
    ) -> tuple[list[ModelT], int]: ...

    # List instances matching the filters/kwargs.
    def list(
        self,
        *filters: Union[StatementFilter, ColumnElement[bool]],
        auto_expunge: Optional[bool] = None,
        statement: Optional[Select[tuple[ModelT]]] = None,
        error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty,
        load: Optional[LoadSpec] = None,
        execution_options: Optional[dict[str, Any]] = None,
        order_by: Optional[Union[list[OrderingPair], OrderingPair]] = None,
        **kwargs: Any,
    ) -> list[ModelT]: ...

    # Health-check hook executed against the given session.
    @classmethod
    def check_health(cls, session: Union[Session, scoped_session[Session]]) -> bool: ...


@runtime_checkable
class SQLAlchemySyncSlugRepositoryProtocol(SQLAlchemySyncRepositoryProtocol[ModelT], Protocol[ModelT]):
    """Protocol for SQLAlchemy repositories that support slug-based operations.

    Extends the base repository protocol to add slug-related functionality.

    Type Parameters:
        ModelT: The SQLAlchemy model type this repository handles.
    """

    def get_by_slug(
        self,
        slug: str,
        *,
        error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty,
        load: Optional[LoadSpec] = None,
        execution_options: Optional[dict[str, Any]] = None,
        **kwargs: Any,
    ) -> Optional[ModelT]:
        """Get a model instance by its slug.
        Args:
            slug: The slug value to search for.
            error_messages: Optional custom error message templates.
            load: Specification for eager loading of relationships.
            execution_options: Options for statement execution.
            **kwargs: Additional filtering criteria.

        Returns:
            ModelT | None: The found model instance or None if not found.
        """
        ...

    def get_available_slug(
        self,
        value_to_slugify: str,
        **kwargs: Any,
    ) -> str:
        """Generate a unique slug for a given value.

        Args:
            value_to_slugify: The string to convert to a slug.
            **kwargs: Additional parameters for slug generation.

        Returns:
            str: A unique slug derived from the input value.
        """
        ...


class SQLAlchemySyncRepository(SQLAlchemySyncRepositoryProtocol[ModelT], FilterableRepository[ModelT]):
    """Synchronous SQLAlchemy repository implementation.

    Provides a complete implementation of synchronous database operations using SQLAlchemy,
    including CRUD operations, filtering, and relationship loading.

    Type Parameters:
        ModelT: The SQLAlchemy model type this repository handles.

    .. seealso::
        :class:`~advanced_alchemy.repository._util.FilterableRepository`
    """

    id_attribute: str = "id"
    """Name of the unique identifier for the model."""
    loader_options: Optional[LoadSpec] = None
    """Default loader options for the repository."""
    error_messages: Optional[ErrorMessages] = None
    """Default error messages for the repository."""
    wrap_exceptions: bool = True
    """Wrap SQLAlchemy exceptions in a ``RepositoryError``. When set to ``False``, the original exception will be raised."""
    inherit_lazy_relationships: bool = True
    """Optionally ignore the default ``lazy`` configuration for model relationships.  This is useful for when you want to
    replace instead of merge the model's loaded relationships with the ones specified in the ``load`` or
    ``default_loader_options`` configuration."""
    merge_loader_options: bool = True
    """Merges the default loader options with the loader options specified in the ``load`` argument.
    This is useful for when you want to totally replace instead of merge the model's loaded relationships with the ones
    specified in the ``load`` or ``default_loader_options`` configuration."""
    execution_options: Optional[dict[str, Any]] = None
    """Default execution options for the repository."""
    match_fields: Optional[Union[list[str], str]] = None
    """Default field name (or list of field names) used to match existing records in
    ``get_or_upsert``/``upsert``-style operations."""
    uniquify: bool = False
    """Optionally apply the ``unique()`` method to results before returning.

    This is useful for certain SQLAlchemy uses cases such as applying ``contains_eager`` to a query containing a
    one-to-many relationship
    """
    count_with_window_function: bool = True
    """Use an analytical window function to count results.  This allows the count to be performed in a single query.
    """

    def __init__(
        self,
        *,
        statement: Optional[Select[tuple[ModelT]]] = None,
        session: Union[Session, scoped_session[Session]],
        auto_expunge: bool = False,
        auto_refresh: bool = True,
        auto_commit: bool = False,
        order_by: Optional[Union[list[OrderingPair], OrderingPair]] = None,
        error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty,
        load: Optional[LoadSpec] = None,
        execution_options: Optional[dict[str, Any]] = None,
        wrap_exceptions: bool = True,
        uniquify: Optional[bool] = None,
        count_with_window_function: Optional[bool] = None,
        **kwargs: Any,
    ) -> None:
        """Repository for SQLAlchemy models.

        Args:
            statement: To facilitate customization of the underlying select query.
            session: Session managing the unit-of-work for the operation.
            auto_expunge: Remove object from session before returning.
            auto_refresh: Refresh object from session before returning.
            auto_commit: Commit objects before returning.
            order_by: Set default order options for queries.
            load: Set default relationships to be loaded
            execution_options: Set default execution options
            error_messages: A set of custom error messages to use for operations
            wrap_exceptions: Wrap SQLAlchemy exceptions in a ``RepositoryError``. When set to ``False``, the original
                exception will be raised.
            uniquify: Optionally apply the ``unique()`` method to results before returning.
            count_with_window_function: When false, list and count will use two queries instead of an analytical
                window function.
            **kwargs: Additional arguments.

        """
        self.auto_expunge = auto_expunge
        self.auto_refresh = auto_refresh
        self.auto_commit = auto_commit
        self.order_by = order_by
        self.session = session
        # Layer per-instance messages over class defaults over library defaults.
        self.error_messages = self._get_error_messages(
            error_messages=error_messages, default_messages=self.error_messages
        )
        self.wrap_exceptions = wrap_exceptions
        self.uniquify = self._get_uniquify(uniquify)
        self.count_with_window_function = (
            count_with_window_function if count_with_window_function is not None else self.count_with_window_function
        )
        # Pre-compute the default eager-loading options and remember whether
        # they contain wildcard loads (which later feeds the ``uniquify`` flag).
        self._default_loader_options, self._loader_options_have_wildcards = get_abstract_loader_options(
            loader_options=load if load is not None else self.loader_options,
            inherit_lazy_relationships=self.inherit_lazy_relationships,
            merge_with_default=self.merge_loader_options,
        )
        execution_options = execution_options if execution_options is not None else self.execution_options
        self._default_execution_options = execution_options or {}
        self.statement = select(self.model_type) if statement is None else statement
        # Resolve the dialect from the bound engine/connection so statement
        # generation can branch on backend capabilities.
        self._dialect = self.session.bind.dialect if self.session.bind is not None else self.session.get_bind().dialect
        self._prefer_any = any(self._dialect.name == engine_type for engine_type in self.prefer_any_dialects or ())

    def _get_uniquify(self, uniquify: Optional[bool] = None) -> bool:
        # Per-call override wins; otherwise fall back to the instance default.
        return bool(uniquify) if uniquify is not None else self.uniquify

    @staticmethod
    def _get_error_messages(
        error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty,
        default_messages:
Optional[Union[ErrorMessages, EmptyType]] = Empty, ) -> Optional[ErrorMessages]: if error_messages == Empty: error_messages = None if default_messages == Empty: default_messages = None messages = DEFAULT_ERROR_MESSAGE_TEMPLATES if default_messages and isinstance(default_messages, dict): messages.update(default_messages) if error_messages: messages.update(cast("ErrorMessages", error_messages)) return messages @classmethod def get_id_attribute_value( cls, item: Union[ModelT, type[ModelT]], id_attribute: Optional[Union[str, InstrumentedAttribute[Any]]] = None, ) -> Any: """Get value of attribute named as :attr:`id_attribute` on ``item``. Args: item: Anything that should have an attribute named as :attr:`id_attribute` value. id_attribute: Allows customization of the unique identifier to use for model fetching. Defaults to `None`, but can reference any surrogate or candidate key for the table. Returns: The value of attribute on ``item`` named as :attr:`id_attribute`. """ if isinstance(id_attribute, InstrumentedAttribute): id_attribute = id_attribute.key return getattr(item, id_attribute if id_attribute is not None else cls.id_attribute) @classmethod def set_id_attribute_value( cls, item_id: Any, item: ModelT, id_attribute: Optional[Union[str, InstrumentedAttribute[Any]]] = None, ) -> ModelT: """Return the ``item`` after the ID is set to the appropriate attribute. Args: item_id: Value of ID to be set on instance item: Anything that should have an attribute named as :attr:`id_attribute` value. id_attribute: Allows customization of the unique identifier to use for model fetching. Defaults to `None`, but can reference any surrogate or candidate key for the table. 
        Returns:
            Item with ``item_id`` set to :attr:`id_attribute`
        """
        if isinstance(id_attribute, InstrumentedAttribute):
            id_attribute = id_attribute.key
        setattr(item, id_attribute if id_attribute is not None else cls.id_attribute, item_id)
        return item

    @staticmethod
    def check_not_found(item_or_none: Optional[ModelT]) -> ModelT:
        """Raise :exc:`advanced_alchemy.exceptions.NotFoundError` if ``item_or_none`` is ``None``.

        Args:
            item_or_none: Item (:class:`T `) to be tested for existence.

        Returns:
            The item, if it exists.
        """
        if item_or_none is None:
            msg = "No item found when one was expected"
            raise NotFoundError(msg)
        return item_or_none

    def _get_execution_options(
        self,
        execution_options: Optional[dict[str, Any]] = None,
    ) -> dict[str, Any]:
        # Per-call execution options override the instance defaults.
        if execution_options is None:
            return self._default_execution_options
        return execution_options

    def _get_loader_options(
        self,
        loader_options: Optional[LoadSpec],
    ) -> Union[tuple[list[_AbstractLoad], bool], tuple[None, bool]]:
        # Returns the resolved loader options plus a flag indicating whether
        # results must be passed through ``unique()`` (wildcard loads or the
        # repository's ``uniquify`` setting).
        if loader_options is None:
            # use the defaults set at initialization
            return self._default_loader_options, self._loader_options_have_wildcards or self.uniquify
        return get_abstract_loader_options(
            loader_options=loader_options,
            default_loader_options=self._default_loader_options,
            default_options_have_wildcards=self._loader_options_have_wildcards or self.uniquify,
            inherit_lazy_relationships=self.inherit_lazy_relationships,
            merge_with_default=self.merge_loader_options,
        )

    def add(
        self,
        data: ModelT,
        *,
        auto_commit: Optional[bool] = None,
        auto_expunge: Optional[bool] = None,
        auto_refresh: Optional[bool] = None,
        error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty,
    ) -> ModelT:
        """Add ``data`` to the collection.

        Args:
            data: Instance to be added to the collection.
            auto_expunge: Remove object from session before returning.
            auto_refresh: Refresh object from session before returning.
            auto_commit: Commit objects before returning.
            error_messages: An optional dictionary of templates to use
                for friendlier error messages to clients

        Returns:
            The added instance.
        """
        error_messages = self._get_error_messages(
            error_messages=error_messages,
            default_messages=self.error_messages,
        )
        with wrap_sqlalchemy_exception(error_messages=error_messages, dialect_name=self._dialect.name):
            instance = self._attach_to_session(data)
            self._flush_or_commit(auto_commit=auto_commit)
            self._refresh(instance, auto_refresh=auto_refresh)
            self._expunge(instance, auto_expunge=auto_expunge)
            return instance

    def add_many(
        self,
        data: list[ModelT],
        *,
        auto_commit: Optional[bool] = None,
        auto_expunge: Optional[bool] = None,
        error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty,
    ) -> Sequence[ModelT]:
        """Add many `data` to the collection.

        Args:
            data: list of Instances to be added to the collection.
            auto_expunge: Remove object from session before returning.
            auto_commit: Commit objects before returning.
            error_messages: An optional dictionary of templates to use
                for friendlier error messages to clients

        Returns:
            The added instances.
        """
        error_messages = self._get_error_messages(
            error_messages=error_messages,
            default_messages=self.error_messages,
        )
        with wrap_sqlalchemy_exception(error_messages=error_messages, dialect_name=self._dialect.name):
            self.session.add_all(data)
            self._flush_or_commit(auto_commit=auto_commit)
            for datum in data:
                self._expunge(datum, auto_expunge=auto_expunge)
            return data

    def delete(
        self,
        item_id: Any,
        *,
        auto_commit: Optional[bool] = None,
        auto_expunge: Optional[bool] = None,
        id_attribute: Optional[Union[str, InstrumentedAttribute[Any]]] = None,
        error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty,
        load: Optional[LoadSpec] = None,
        execution_options: Optional[dict[str, Any]] = None,
        uniquify: Optional[bool] = None,
    ) -> ModelT:
        """Delete instance identified by ``item_id``.

        Args:
            item_id: Identifier of instance to be deleted.
            auto_expunge: Remove object from session before returning.
            auto_commit: Commit objects before returning.
            id_attribute: Allows customization of the unique identifier to use for model fetching.
                Defaults to `id`, but can reference any surrogate or candidate key for the table.
            error_messages: An optional dictionary of templates to use
                for friendlier error messages to clients
            load: Set default relationships to be loaded
            execution_options: Set default execution options
            uniquify: Optionally apply the ``unique()`` method to results before returning.

        Returns:
            The deleted instance.

        Raises:
            NotFoundError: If no instance found identified by ``item_id``.
        """
        # NOTE(review): the per-call ``uniquify`` override is stored on the
        # instance, so it persists for subsequent calls — confirm intended.
        self.uniquify = self._get_uniquify(uniquify)
        error_messages = self._get_error_messages(
            error_messages=error_messages,
            default_messages=self.error_messages,
        )
        with wrap_sqlalchemy_exception(error_messages=error_messages, dialect_name=self._dialect.name):
            execution_options = self._get_execution_options(execution_options)
            # Fetch first (raises NotFoundError when missing), then delete.
            instance = self.get(
                item_id,
                id_attribute=id_attribute,
                load=load,
                execution_options=execution_options,
            )
            self.session.delete(instance)
            self._flush_or_commit(auto_commit=auto_commit)
            self._expunge(instance, auto_expunge=auto_expunge)
            return instance

    def delete_many(
        self,
        item_ids: list[Any],
        *,
        auto_commit: Optional[bool] = None,
        auto_expunge: Optional[bool] = None,
        id_attribute: Optional[Union[str, InstrumentedAttribute[Any]]] = None,
        chunk_size: Optional[int] = None,
        error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty,
        load: Optional[LoadSpec] = None,
        execution_options: Optional[dict[str, Any]] = None,
        uniquify: Optional[bool] = None,
    ) -> Sequence[ModelT]:
        """Delete instances identified by the ids in ``item_ids``.

        Args:
            item_ids: Identifiers of the instances to be deleted.
            auto_expunge: Remove object from session before returning.
            auto_commit: Commit objects before returning.
            id_attribute: Allows customization of the unique identifier to use for model fetching.
                Defaults to `id`, but can reference any surrogate or candidate key for the table.
            chunk_size: Allows customization of the ``insertmanyvalues_max_parameters`` setting for the driver.
                Defaults to `950` if left unset.
            error_messages: An optional dictionary of templates to use
                for friendlier error messages to clients
            load: Set default relationships to be loaded
            execution_options: Set default execution options
            uniquify: Optionally apply the ``unique()`` method to results before returning.

        Returns:
            The deleted instances.

        """
        self.uniquify = self._get_uniquify(uniquify)
        error_messages = self._get_error_messages(
            error_messages=error_messages,
            default_messages=self.error_messages,
        )
        with wrap_sqlalchemy_exception(error_messages=error_messages, dialect_name=self._dialect.name):
            execution_options = self._get_execution_options(execution_options)
            loader_options, _loader_options_have_wildcard = self._get_loader_options(load)
            id_attribute = get_instrumented_attr(
                self.model_type,
                id_attribute if id_attribute is not None else self.id_attribute,
            )
            instances: list[ModelT] = []
            # Dialects that prefer ``= ANY(...)`` can take every id in a single
            # statement; otherwise ids are processed in driver-sized chunks.
            if self._prefer_any:
                chunk_size = len(item_ids) + 1
            chunk_size = self._get_insertmanyvalues_max_parameters(chunk_size)
            for idx in range(0, len(item_ids), chunk_size):
                chunk = item_ids[idx : min(idx + chunk_size, len(item_ids))]
                if self._dialect.delete_executemany_returning:
                    # DELETE ... RETURNING collects the deleted rows directly.
                    instances.extend(
                        self.session.scalars(
                            self._get_delete_many_statement(
                                statement_type="delete",
                                model_type=self.model_type,
                                id_attribute=id_attribute,
                                id_chunk=chunk,
                                supports_returning=self._dialect.delete_executemany_returning,
                                loader_options=loader_options,
                                execution_options=execution_options,
                            ),
                        ),
                    )
                else:
                    # Backend lacks RETURNING for executemany deletes: SELECT
                    # the affected rows first, then issue the DELETE.
                    instances.extend(
                        self.session.scalars(
                            self._get_delete_many_statement(
                                statement_type="select",
                                model_type=self.model_type,
                                id_attribute=id_attribute,
                                id_chunk=chunk,
                                supports_returning=self._dialect.delete_executemany_returning,
                                loader_options=loader_options,
                                execution_options=execution_options,
                            ),
                        ),
                    )
                    self.session.execute(
                        self._get_delete_many_statement(
                            statement_type="delete",
                            model_type=self.model_type,
                            id_attribute=id_attribute,
                            id_chunk=chunk,
                            supports_returning=self._dialect.delete_executemany_returning,
                            loader_options=loader_options,
                            execution_options=execution_options,
                        ),
                    )
            self._flush_or_commit(auto_commit=auto_commit)
            for instance in instances:
                self._expunge(instance, auto_expunge=auto_expunge)
            return instances

    def _get_insertmanyvalues_max_parameters(self, chunk_size: Optional[int] = None) -> int:
        # Fall back to the library default chunk size when none is supplied.
        return chunk_size if chunk_size is not None else DEFAULT_INSERTMANYVALUES_MAX_PARAMETERS

    def delete_where(
        self,
        *filters: Union[StatementFilter, ColumnElement[bool]],
        auto_commit: Optional[bool] = None,
        auto_expunge: Optional[bool] = None,
        error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty,
        sanity_check: bool = True,
        load: Optional[LoadSpec] = None,
        execution_options: Optional[dict[str, Any]] = None,
        uniquify: Optional[bool] = None,
        **kwargs: Any,
    ) -> Sequence[ModelT]:
        """Delete instances specified by referenced kwargs and filters.

        Args:
            *filters: Types for specific filtering operations.
            auto_expunge: Remove object from session before returning.
            auto_commit: Commit objects before returning.
            error_messages: An optional dictionary of templates to use
                for friendlier error messages to clients
            sanity_check: When true, the length of selected instances is compared to the deleted row count
            load: Set default relationships to be loaded
            execution_options: Set default execution options
            uniquify: Optionally apply the ``unique()`` method to results before returning.
            **kwargs: Arguments to apply to a delete

        Returns:
            The deleted instances.
""" self.uniquify = self._get_uniquify(uniquify) error_messages = self._get_error_messages( error_messages=error_messages, default_messages=self.error_messages, ) with wrap_sqlalchemy_exception(error_messages=error_messages, dialect_name=self._dialect.name): execution_options = self._get_execution_options(execution_options) loader_options, _loader_options_have_wildcard = self._get_loader_options(load) model_type = self.model_type statement = self._get_base_stmt( statement=delete(model_type), loader_options=loader_options, execution_options=execution_options, ) statement = self._filter_select_by_kwargs(statement=statement, kwargs=kwargs) statement = self._apply_filters(*filters, statement=statement, apply_pagination=False) instances: list[ModelT] = [] if self._dialect.delete_executemany_returning: instances.extend(self.session.scalars(statement.returning(model_type))) else: instances.extend( self.list( *filters, load=load, execution_options=execution_options, auto_expunge=auto_expunge, **kwargs, ), ) result = self.session.execute(statement) row_count = getattr(result, "rowcount", -2) if sanity_check and row_count >= 0 and len(instances) != row_count: # pyright: ignore # noqa: PGH003 # backends will return a -1 if they can't determine impacted rowcount # only compare length of selected instances to results if it's >= 0 self.session.rollback() raise RepositoryError(detail="Deleted count does not match fetched count. Rollback issued.") self._flush_or_commit(auto_commit=auto_commit) for instance in instances: self._expunge(instance, auto_expunge=auto_expunge) return instances def exists( self, *filters: Union[StatementFilter, ColumnElement[bool]], error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, **kwargs: Any, ) -> bool: """Return true if the object specified by ``kwargs`` exists. Args: *filters: Types for specific filtering operations. 
            error_messages: An optional dictionary of templates to use
                for friendlier error messages to clients
            load: Set default relationships to be loaded
            execution_options: Set default execution options
            uniquify: Optionally apply the ``unique()`` method to results before returning.
            **kwargs: Identifier of the instance to be retrieved.

        Returns:
            True if the instance was found.  False if not found.

        """
        error_messages = self._get_error_messages(
            error_messages=error_messages,
            default_messages=self.error_messages,
        )
        # Delegate to ``count``; any positive count means at least one match.
        existing = self.count(
            *filters,
            load=load,
            execution_options=execution_options,
            error_messages=error_messages,
            **kwargs,
        )
        return existing > 0

    def _get_base_stmt(
        self,
        *,
        statement: StatementTypeT,
        loader_options: Optional[list[_AbstractLoad]],
        execution_options: Optional[dict[str, Any]],
    ) -> StatementTypeT:
        """Get base statement with options applied.

        Args:
            statement: The select statement to modify
            loader_options: Options for loading relationships
            execution_options: Options for statement execution

        Returns:
            Modified select statement
        """
        if loader_options:
            statement = cast("StatementTypeT", statement.options(*loader_options))
        if execution_options:
            statement = cast("StatementTypeT", statement.execution_options(**execution_options))
        return statement

    def _get_delete_many_statement(
        self,
        *,
        model_type: type[ModelT],
        id_attribute: InstrumentedAttribute[Any],
        id_chunk: list[Any],
        supports_returning: bool,
        statement_type: Literal["delete", "select"] = "delete",
        loader_options: Optional[list[_AbstractLoad]],
        execution_options: Optional[dict[str, Any]],
    ) -> Union[Select[tuple[ModelT]], Delete, ReturningDelete[tuple[ModelT]]]:
        # Build either a DELETE or a SELECT restricted to the given id chunk.
        # Base statement is static
        statement = self._get_base_stmt(
            statement=delete(model_type) if statement_type == "delete" else select(model_type),
            loader_options=loader_options,
            execution_options=execution_options,
        )
        # NOTE(review): ``_get_base_stmt`` above already applied
        # ``execution_options``; this re-application looks redundant — confirm.
        if execution_options:
            statement = statement.execution_options(**execution_options)
        if supports_returning and statement_type != "select":
statement = cast("ReturningDelete[tuple[ModelT]]", statement.returning(model_type)) # type: ignore[union-attr,assignment] # pyright: ignore[reportUnknownLambdaType,reportUnknownMemberType,reportAttributeAccessIssue,reportUnknownVariableType] if self._prefer_any: return statement.where(any_(id_chunk) == id_attribute) # type: ignore[arg-type] return statement.where(id_attribute.in_(id_chunk)) # pyright: ignore[reportUnknownMemberType,reportUnknownVariableType] def get( self, item_id: Any, *, auto_expunge: Optional[bool] = None, statement: Optional[Select[tuple[ModelT]]] = None, id_attribute: Optional[Union[str, InstrumentedAttribute[Any]]] = None, error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, ) -> ModelT: """Get instance identified by `item_id`. Args: item_id: Identifier of the instance to be retrieved. auto_expunge: Remove object from session before returning. statement: To facilitate customization of the underlying select query. id_attribute: Allows customization of the unique identifier to use for model fetching. Defaults to `id`, but can reference any surrogate or candidate key for the table. error_messages: An optional dictionary of templates to use for friendlier error messages to clients load: Set relationships to be loaded execution_options: Set default execution options uniquify: Optionally apply the ``unique()`` method to results before returning. Returns: The retrieved instance. Raises: NotFoundError: If no instance found identified by `item_id`. 
""" self.uniquify = self._get_uniquify(uniquify) error_messages = self._get_error_messages( error_messages=error_messages, default_messages=self.error_messages, ) with wrap_sqlalchemy_exception(error_messages=error_messages, dialect_name=self._dialect.name): execution_options = self._get_execution_options(execution_options) statement = self.statement if statement is None else statement loader_options, loader_options_have_wildcard = self._get_loader_options(load) id_attribute = id_attribute if id_attribute is not None else self.id_attribute statement = self._get_base_stmt( statement=statement, loader_options=loader_options, execution_options=execution_options, ) statement = self._filter_select_by_kwargs(statement, [(id_attribute, item_id)]) instance = (self._execute(statement, uniquify=loader_options_have_wildcard)).scalar_one_or_none() instance = self.check_not_found(instance) self._expunge(instance, auto_expunge=auto_expunge) return instance def get_one( self, *filters: Union[StatementFilter, ColumnElement[bool]], auto_expunge: Optional[bool] = None, statement: Optional[Select[tuple[ModelT]]] = None, error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, **kwargs: Any, ) -> ModelT: """Get instance identified by ``kwargs``. Args: *filters: Types for specific filtering operations. auto_expunge: Remove object from session before returning. statement: To facilitate customization of the underlying select query. error_messages: An optional dictionary of templates to use for friendlier error messages to clients load: Set relationships to be loaded execution_options: Set default execution options uniquify: Optionally apply the ``unique()`` method to results before returning. **kwargs: Identifier of the instance to be retrieved. Returns: The retrieved instance. Raises: NotFoundError: If no instance found identified by `item_id`. 
""" self.uniquify = self._get_uniquify(uniquify) error_messages = self._get_error_messages( error_messages=error_messages, default_messages=self.error_messages, ) with wrap_sqlalchemy_exception(error_messages=error_messages, dialect_name=self._dialect.name): execution_options = self._get_execution_options(execution_options) statement = self.statement if statement is None else statement loader_options, loader_options_have_wildcard = self._get_loader_options(load) statement = self._get_base_stmt( statement=statement, loader_options=loader_options, execution_options=execution_options, ) statement = self._apply_filters(*filters, apply_pagination=False, statement=statement) statement = self._filter_select_by_kwargs(statement, kwargs) instance = (self._execute(statement, uniquify=loader_options_have_wildcard)).scalar_one_or_none() instance = self.check_not_found(instance) self._expunge(instance, auto_expunge=auto_expunge) return instance def get_one_or_none( self, *filters: Union[StatementFilter, ColumnElement[bool]], auto_expunge: Optional[bool] = None, statement: Optional[Select[tuple[ModelT]]] = None, error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, **kwargs: Any, ) -> Union[ModelT, None]: """Get instance identified by ``kwargs`` or None if not found. Args: *filters: Types for specific filtering operations. auto_expunge: Remove object from session before returning. statement: To facilitate customization of the underlying select query. error_messages: An optional dictionary of templates to use for friendlier error messages to clients load: Set relationships to be loaded execution_options: Set default execution options uniquify: Optionally apply the ``unique()`` method to results before returning. **kwargs: Identifier of the instance to be retrieved. 
        Returns:
            The retrieved instance or None
        """
        self.uniquify = self._get_uniquify(uniquify)
        error_messages = self._get_error_messages(
            error_messages=error_messages,
            default_messages=self.error_messages,
        )
        with wrap_sqlalchemy_exception(error_messages=error_messages, dialect_name=self._dialect.name):
            execution_options = self._get_execution_options(execution_options)
            statement = self.statement if statement is None else statement
            loader_options, loader_options_have_wildcard = self._get_loader_options(load)
            statement = self._get_base_stmt(
                statement=statement,
                loader_options=loader_options,
                execution_options=execution_options,
            )
            statement = self._apply_filters(*filters, apply_pagination=False, statement=statement)
            statement = self._filter_select_by_kwargs(statement, kwargs)
            instance = cast(
                "Result[tuple[ModelT]]",
                (self._execute(statement, uniquify=loader_options_have_wildcard)),
            ).scalar_one_or_none()
            if instance:
                self._expunge(instance, auto_expunge=auto_expunge)
            return instance

    def get_or_upsert(
        self,
        *filters: Union[StatementFilter, ColumnElement[bool]],
        match_fields: Optional[Union[list[str], str]] = None,
        upsert: bool = True,
        attribute_names: Optional[Iterable[str]] = None,
        with_for_update: Optional[bool] = None,
        auto_commit: Optional[bool] = None,
        auto_expunge: Optional[bool] = None,
        auto_refresh: Union[bool, None] = None,
        error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty,
        load: Optional[LoadSpec] = None,
        execution_options: Optional[dict[str, Any]] = None,
        uniquify: Optional[bool] = None,
        **kwargs: Any,
    ) -> tuple[ModelT, bool]:
        """Get instance identified by ``kwargs`` or create if it doesn't exist.

        Args:
            *filters: Types for specific filtering operations.
            match_fields: a list of keys to use to match the existing model.  When
                empty, all fields are matched.
            upsert: When using match_fields and actual model values differ from `kwargs`, automatically
                perform an update operation on the model.
            attribute_names: an iterable of attribute names to pass into the ``update`` method.
            with_for_update: indicating FOR UPDATE should be used, or may be a
                dictionary containing flags to indicate a more specific set of
                FOR UPDATE flags for the SELECT
            auto_expunge: Remove object from session before returning.
            auto_refresh: Refresh object from session before returning.
            auto_commit: Commit objects before returning.
            error_messages: An optional dictionary of templates to use
                for friendlier error messages to clients
            load: Set relationships to be loaded
            execution_options: Set default execution options
            uniquify: Optionally apply the ``unique()`` method to results before returning.
            **kwargs: Identifier of the instance to be retrieved.

        Returns:
            a tuple that includes the instance and whether it needed to be created.
            When using match_fields and actual model values differ from ``kwargs``, the
            model value will be updated.
        """
        self.uniquify = self._get_uniquify(uniquify)
        error_messages = self._get_error_messages(
            error_messages=error_messages,
            default_messages=self.error_messages,
        )
        with wrap_sqlalchemy_exception(error_messages=error_messages, dialect_name=self._dialect.name):
            if match_fields := self._get_match_fields(match_fields=match_fields):
                # Build the lookup filter from the configured match fields, skipping unset values.
                match_filter = {
                    field_name: kwargs.get(field_name)
                    for field_name in match_fields
                    if kwargs.get(field_name) is not None
                }
            else:
                match_filter = kwargs
            existing = self.get_one_or_none(
                *filters,
                **match_filter,
                load=load,
                execution_options=execution_options,
            )
            if not existing:
                # No match found: create a new instance from the supplied values.
                return (
                    self.add(
                        self.model_type(**kwargs),
                        auto_commit=auto_commit,
                        auto_expunge=auto_expunge,
                        auto_refresh=auto_refresh,
                    ),
                    True,
                )
            if upsert:
                for field_name, new_field_value in kwargs.items():
                    field = getattr(existing, field_name, MISSING)
                    # Only touch attributes that exist on the model and actually changed.
                    if field is not MISSING and field != new_field_value:
                        setattr(existing, field_name, new_field_value)
                existing = self._attach_to_session(existing, strategy="merge")
                self._flush_or_commit(auto_commit=auto_commit)
                self._refresh(
                    existing,
                    attribute_names=attribute_names,
                    with_for_update=with_for_update,
                    auto_refresh=auto_refresh,
                )
                self._expunge(existing, auto_expunge=auto_expunge)
            return existing, False

    def get_and_update(
        self,
        *filters: Union[StatementFilter, ColumnElement[bool]],
        match_fields: Optional[Union[list[str], str]] = None,
        attribute_names: Optional[Iterable[str]] = None,
        with_for_update: Optional[bool] = None,
        auto_commit: Optional[bool] = None,
        auto_expunge: Optional[bool] = None,
        auto_refresh: Optional[bool] = None,
        error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty,
        load: Optional[LoadSpec] = None,
        execution_options: Optional[dict[str, Any]] = None,
        uniquify: Optional[bool] = None,
        **kwargs: Any,
    ) -> tuple[ModelT, bool]:
        """Get instance identified by ``kwargs`` and update the model if the arguments are different.

        Args:
            *filters: Types for specific filtering operations.
            match_fields: a list of keys to use to match the existing model.  When
                empty, all fields are matched.
            attribute_names: an iterable of attribute names to pass into the ``update`` method.
            with_for_update: indicating FOR UPDATE should be used, or may be a
                dictionary containing flags to indicate a more specific set of
                FOR UPDATE flags for the SELECT
            auto_expunge: Remove object from session before returning.
            auto_refresh: Refresh object from session before returning.
            auto_commit: Commit objects before returning.
            error_messages: An optional dictionary of templates to use
                for friendlier error messages to clients
            load: Set relationships to be loaded
            execution_options: Set default execution options
            uniquify: Optionally apply the ``unique()`` method to results before returning.
            **kwargs: Identifier of the instance to be retrieved.

        Returns:
            a tuple that includes the instance and whether it needed to be updated.
            When using match_fields and actual model values differ from ``kwargs``, the
            model value will be updated.

        Raises:
            NotFoundError: If no instance found identified by `item_id`.
""" self.uniquify = self._get_uniquify(uniquify) error_messages = self._get_error_messages( error_messages=error_messages, default_messages=self.error_messages, ) with wrap_sqlalchemy_exception(error_messages=error_messages, dialect_name=self._dialect.name): if match_fields := self._get_match_fields(match_fields=match_fields): match_filter = { field_name: kwargs.get(field_name) for field_name in match_fields if kwargs.get(field_name) is not None } else: match_filter = kwargs existing = self.get_one(*filters, **match_filter, load=load, execution_options=execution_options) updated = False for field_name, new_field_value in kwargs.items(): field = getattr(existing, field_name, MISSING) if field is not MISSING and field != new_field_value: updated = True setattr(existing, field_name, new_field_value) existing = self._attach_to_session(existing, strategy="merge") self._flush_or_commit(auto_commit=auto_commit) self._refresh( existing, attribute_names=attribute_names, with_for_update=with_for_update, auto_refresh=auto_refresh, ) self._expunge(existing, auto_expunge=auto_expunge) return existing, updated def count( self, *filters: Union[StatementFilter, ColumnElement[bool]], statement: Optional[Select[tuple[ModelT]]] = None, error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, **kwargs: Any, ) -> int: """Get the count of records returned by a query. Args: *filters: Types for specific filtering operations. statement: To facilitate customization of the underlying select query. error_messages: An optional dictionary of templates to use for friendlier error messages to clients load: Set relationships to be loaded execution_options: Set default execution options uniquify: Optionally apply the ``unique()`` method to results before returning. **kwargs: Instance attribute value filters. Returns: Count of records returned by query, ignoring pagination. 
""" self.uniquify = self._get_uniquify(uniquify) error_messages = self._get_error_messages( error_messages=error_messages, default_messages=self.error_messages, ) with wrap_sqlalchemy_exception(error_messages=error_messages, dialect_name=self._dialect.name): execution_options = self._get_execution_options(execution_options) statement = self.statement if statement is None else statement loader_options, loader_options_have_wildcard = self._get_loader_options(load) statement = self._get_base_stmt( statement=statement, loader_options=loader_options, execution_options=execution_options, ) statement = self._apply_filters(*filters, apply_pagination=False, statement=statement) statement = self._filter_select_by_kwargs(statement, kwargs) results = self._execute( statement=self._get_count_stmt( statement=statement, loader_options=loader_options, execution_options=execution_options ), uniquify=loader_options_have_wildcard, ) return cast("int", results.scalar_one()) def update( self, data: ModelT, *, attribute_names: Optional[Iterable[str]] = None, with_for_update: Optional[bool] = None, auto_commit: Optional[bool] = None, auto_expunge: Optional[bool] = None, auto_refresh: Optional[bool] = None, id_attribute: Optional[Union[str, InstrumentedAttribute[Any]]] = None, error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, ) -> ModelT: """Update instance with the attribute values present on `data`. Args: data: An instance that should have a value for `self.id_attribute` that exists in the collection. attribute_names: an iterable of attribute names to pass into the ``update`` method. with_for_update: indicating FOR UPDATE should be used, or may be a dictionary containing flags to indicate a more specific set of FOR UPDATE flags for the SELECT auto_expunge: Remove object from session before returning. 
            auto_refresh: Refresh object from session before returning.
            auto_commit: Commit objects before returning.
            id_attribute: Allows customization of the unique identifier to use for model fetching.
                Defaults to `id`, but can reference any surrogate or candidate key for the table.
            error_messages: An optional dictionary of templates to use
                for friendlier error messages to clients
            load: Set relationships to be loaded
            execution_options: Set default execution options
            uniquify: Optionally apply the ``unique()`` method to results before returning.

        Returns:
            The updated instance.

        Raises:
            NotFoundError: If no instance found with same identifier as `data`.
        """
        self.uniquify = self._get_uniquify(uniquify)
        error_messages = self._get_error_messages(
            error_messages=error_messages,
            default_messages=self.error_messages,
        )
        with wrap_sqlalchemy_exception(error_messages=error_messages, dialect_name=self._dialect.name):
            item_id = self.get_id_attribute_value(
                data,
                id_attribute=id_attribute,
            )
            # this will raise for not found, and will put the item in the session
            self.get(item_id, id_attribute=id_attribute, load=load, execution_options=execution_options)
            # this will merge the inbound data to the instance we just put in the session
            instance = self._attach_to_session(data, strategy="merge")
            self._flush_or_commit(auto_commit=auto_commit)
            self._refresh(
                instance,
                attribute_names=attribute_names,
                with_for_update=with_for_update,
                auto_refresh=auto_refresh,
            )
            self._expunge(instance, auto_expunge=auto_expunge)
            return instance

    def update_many(
        self,
        data: list[ModelT],
        *,
        auto_commit: Optional[bool] = None,
        auto_expunge: Optional[bool] = None,
        error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty,
        load: Optional[LoadSpec] = None,
        execution_options: Optional[dict[str, Any]] = None,
        uniquify: Optional[bool] = None,
    ) -> list[ModelT]:
        """Update one or more instances with the attribute values present on `data`.

        This function has an optimized bulk update based on the configured SQL dialect:

        - For backends supporting `RETURNING` with `executemany`, a single bulk update with
          returning clause is executed.
        - For other backends, it does a bulk update and then returns the updated data
          after a refresh.

        Args:
            data: A list of instances to update.  Each should have a value for `self.id_attribute` that
                exists in the collection.
            auto_expunge: Remove object from session before returning.
            auto_commit: Commit objects before returning.
            error_messages: An optional dictionary of templates to use
                for friendlier error messages to clients
            load: Set default relationships to be loaded
            execution_options: Set default execution options
            uniquify: Optionally apply the ``unique()`` method to results before returning.

        Returns:
            The updated instances.

        Raises:
            NotFoundError: If no instance found with same identifier as `data`.
        """
        self.uniquify = self._get_uniquify(uniquify)
        error_messages = self._get_error_messages(
            error_messages=error_messages,
            default_messages=self.error_messages,
        )
        # Normalize inbound models to plain dicts so they can be used as executemany params.
        data_to_update: list[dict[str, Any]] = [v.to_dict() if isinstance(v, self.model_type) else v for v in data]  # type: ignore[misc]
        with wrap_sqlalchemy_exception(error_messages=error_messages, dialect_name=self._dialect.name):
            execution_options = self._get_execution_options(execution_options)
            loader_options = self._get_loader_options(load)[0]
            supports_returning = self._dialect.update_executemany_returning and self._dialect.name != "oracle"
            statement = self._get_update_many_statement(
                self.model_type,
                supports_returning,
                loader_options=loader_options,
                execution_options=execution_options,
            )
            if supports_returning:
                instances = list(
                    self.session.scalars(
                        statement,
                        cast("_CoreSingleExecuteParams", data_to_update),  # this is not correct but the only way
                        # currently to deal with an SQLAlchemy typing issue. See
                        # https://github.com/sqlalchemy/sqlalchemy/discussions/9925
                        execution_options=execution_options,
                    ),
                )
                self._flush_or_commit(auto_commit=auto_commit)
                for instance in instances:
                    self._expunge(instance, auto_expunge=auto_expunge)
                return instances
            # Fallback path: no RETURNING support, so return the inbound data unchanged.
            self.session.execute(statement, data_to_update, execution_options=execution_options)
            self._flush_or_commit(auto_commit=auto_commit)
            return data

    def _get_update_many_statement(
        self,
        model_type: type[ModelT],
        supports_returning: bool,
        loader_options: Union[list[_AbstractLoad], None],
        execution_options: Union[dict[str, Any], None],
    ) -> Union[Update, ReturningUpdate[tuple[ModelT]]]:
        # Base update statement is static
        statement = self._get_base_stmt(
            statement=update(table=model_type), loader_options=loader_options, execution_options=execution_options
        )
        if supports_returning:
            return statement.returning(model_type)
        return statement

    def list_and_count(
        self,
        *filters: Union[StatementFilter, ColumnElement[bool]],
        statement: Optional[Select[tuple[ModelT]]] = None,
        auto_expunge: Optional[bool] = None,
        count_with_window_function: Optional[bool] = None,
        order_by: Optional[Union[list[OrderingPair], OrderingPair]] = None,
        error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty,
        load: Optional[LoadSpec] = None,
        execution_options: Optional[dict[str, Any]] = None,
        uniquify: Optional[bool] = None,
        **kwargs: Any,
    ) -> tuple[list[ModelT], int]:
        """List records with total count.

        Args:
            *filters: Types for specific filtering operations.
            statement: To facilitate customization of the underlying select query.
            auto_expunge: Remove object from session before returning.
            count_with_window_function: When false, list and count will use two queries instead of an analytical window function.
            order_by: Set default order options for queries.
            error_messages: An optional dictionary of templates to use
                for friendlier error messages to clients
            load: Set relationships to be loaded
            execution_options: Set default execution options
            uniquify: Optionally apply the ``unique()`` method to results before returning.
            **kwargs: Instance attribute value filters.

        Returns:
            A tuple of the list of instances and the total count of records matched
            by the query, ignoring pagination.
        """
        count_with_window_function = (
            count_with_window_function if count_with_window_function is not None else self.count_with_window_function
        )
        self.uniquify = self._get_uniquify(uniquify)
        error_messages = self._get_error_messages(
            error_messages=error_messages,
            default_messages=self.error_messages,
        )
        # Spanner dialects have no usable window-function count; fall back to two queries.
        if self._dialect.name in {"spanner", "spanner+spanner"} or not count_with_window_function:
            return self._list_and_count_basic(
                *filters,
                auto_expunge=auto_expunge,
                statement=statement,
                load=load,
                execution_options=execution_options,
                order_by=order_by,
                error_messages=error_messages,
                **kwargs,
            )
        return self._list_and_count_window(
            *filters,
            auto_expunge=auto_expunge,
            statement=statement,
            load=load,
            execution_options=execution_options,
            error_messages=error_messages,
            order_by=order_by,
            **kwargs,
        )

    def _expunge(self, instance: ModelT, auto_expunge: Optional[bool]) -> None:
        # Fall back to the repository-level default when not explicitly given.
        if auto_expunge is None:
            auto_expunge = self.auto_expunge
        return self.session.expunge(instance) if auto_expunge else None

    def _flush_or_commit(self, auto_commit: Optional[bool]) -> None:
        # Fall back to the repository-level default when not explicitly given.
        if auto_commit is None:
            auto_commit = self.auto_commit
        return self.session.commit() if auto_commit else self.session.flush()

    def _refresh(
        self,
        instance: ModelT,
        auto_refresh: Optional[bool],
        attribute_names: Optional[Iterable[str]] = None,
        with_for_update: Optional[bool] = None,
    ) -> None:
        # Fall back to the repository-level default when not explicitly given.
        if auto_refresh is None:
            auto_refresh = self.auto_refresh
        return (
            self.session.refresh(
                instance=instance,
                attribute_names=attribute_names,
                with_for_update=with_for_update,
            )
            if auto_refresh
            else None
        )

    def _list_and_count_window(
        self,
        *filters:
        Union[StatementFilter, ColumnElement[bool]],
        auto_expunge: Optional[bool] = None,
        statement: Optional[Select[tuple[ModelT]]] = None,
        order_by: Optional[Union[list[OrderingPair], OrderingPair]] = None,
        error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty,
        load: Optional[LoadSpec] = None,
        execution_options: Optional[dict[str, Any]] = None,
        **kwargs: Any,
    ) -> tuple[list[ModelT], int]:
        """List records with total count.

        Args:
            *filters: Types for specific filtering operations.
            auto_expunge: Remove object from session before returning.
            statement: To facilitate customization of the underlying select query.
            order_by: Set default order options for queries.
            error_messages: An optional dictionary of templates to use
                for friendlier error messages to clients
            load: Set relationships to be loaded
            execution_options: Set default execution options
            **kwargs: Instance attribute value filters.

        Returns:
            A tuple of the list of instances and the total count of records, computed
            with an analytical window function, ignoring pagination.
        """
        error_messages = self._get_error_messages(
            error_messages=error_messages,
            default_messages=self.error_messages,
        )
        with wrap_sqlalchemy_exception(error_messages=error_messages, dialect_name=self._dialect.name):
            execution_options = self._get_execution_options(execution_options)
            statement = self.statement if statement is None else statement
            loader_options, loader_options_have_wildcard = self._get_loader_options(load)
            statement = self._get_base_stmt(
                statement=statement,
                loader_options=loader_options,
                execution_options=execution_options,
            )
            if order_by is None:
                order_by = self.order_by or []
            statement = self._apply_order_by(statement=statement, order_by=order_by)
            statement = self._apply_filters(*filters, statement=statement)
            statement = self._filter_select_by_kwargs(statement, kwargs)
            # COUNT(*) OVER () appends the total matched-row count to every returned row.
            result = self._execute(statement.add_columns(over(sql_func.count())), uniquify=loader_options_have_wildcard)
            count: int = 0
            instances: list[ModelT] = []
            for i, (instance, count_value) in enumerate(result):
                self._expunge(instance, auto_expunge=auto_expunge)
                instances.append(instance)
                if i == 0:
                    # The window-function count is identical on every row; read it once.
                    count = count_value
            return instances, count

    def _list_and_count_basic(
        self,
        *filters: Union[StatementFilter, ColumnElement[bool]],
        auto_expunge: Optional[bool] = None,
        statement: Optional[Select[tuple[ModelT]]] = None,
        order_by: Optional[Union[list[OrderingPair], OrderingPair]] = None,
        error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty,
        load: Optional[LoadSpec] = None,
        execution_options: Optional[dict[str, Any]] = None,
        **kwargs: Any,
    ) -> tuple[list[ModelT], int]:
        """List records with total count.

        Args:
            *filters: Types for specific filtering operations.
            auto_expunge: Remove object from session before returning.
            statement: To facilitate customization of the underlying select query.
            order_by: Set default order options for queries.
            error_messages: An optional dictionary of templates to use
                for friendlier error messages to clients
            load: Set relationships to be loaded
            execution_options: Set default execution options
            **kwargs: Instance attribute value filters.

        Returns:
            A tuple of the list of instances and the total count of records, computed
            with 2 queries, ignoring pagination.
        """
        error_messages = self._get_error_messages(
            error_messages=error_messages,
            default_messages=self.error_messages,
        )
        with wrap_sqlalchemy_exception(error_messages=error_messages, dialect_name=self._dialect.name):
            execution_options = self._get_execution_options(execution_options)
            statement = self.statement if statement is None else statement
            loader_options, loader_options_have_wildcard = self._get_loader_options(load)
            statement = self._get_base_stmt(
                statement=statement,
                loader_options=loader_options,
                execution_options=execution_options,
            )
            if order_by is None:
                order_by = self.order_by or []
            statement = self._apply_order_by(statement=statement, order_by=order_by)
            statement = self._apply_filters(*filters, statement=statement)
            statement = self._filter_select_by_kwargs(statement, kwargs)
            # First query: total count (limit/offset/order are stripped by _get_count_stmt).
            count_result = self.session.execute(
                self._get_count_stmt(
                    statement,
                    loader_options=loader_options,
                    execution_options=execution_options,
                ),
            )
            count = count_result.scalar_one()
            # Second query: the actual rows.
            result = self._execute(statement, uniquify=loader_options_have_wildcard)
            instances: list[ModelT] = []
            for (instance,) in result:
                self._expunge(instance, auto_expunge=auto_expunge)
                instances.append(instance)
            return instances, count

    def _get_count_stmt(
        self,
        statement: Select[tuple[ModelT]],
        loader_options: Optional[list[_AbstractLoad]],
        execution_options: Optional[dict[str, Any]],
    ) -> Select[tuple[int]]:
        # Count statement transformations are static
        return (
            statement.with_only_columns(sql_func.count(text("1")), maintain_column_froms=True)
            .limit(None)
            .offset(None)
            .order_by(None)
        )

    def upsert(
        self,
        data: ModelT,
        *,
        attribute_names: Optional[Iterable[str]] = None,
        with_for_update: Optional[bool] = None,
        auto_expunge: Optional[bool] = None,
        auto_commit: Optional[bool] = None,
        auto_refresh: Optional[bool] = None,
        match_fields: Optional[Union[list[str], str]] = None,
        error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty,
        load: Optional[LoadSpec] = None,
        execution_options: Optional[dict[str, Any]] = None,
        uniquify: Optional[bool] = None,
    ) -> ModelT:
        """Modify or create instance.

        Updates instance with the attribute values present on `data`, or creates a new instance if
        one doesn't exist.

        Args:
            data: Instance to update existing, or be created. Identifier used to determine if an
                existing instance exists is the value of an attribute on `data` named as value of
                `self.id_attribute`.
            attribute_names: an iterable of attribute names to pass into the ``update`` method.
            with_for_update: indicating FOR UPDATE should be used, or may be a
                dictionary containing flags to indicate a more specific set of
                FOR UPDATE flags for the SELECT
            auto_expunge: Remove object from session before returning.
            auto_refresh: Refresh object from session before returning.
            auto_commit: Commit objects before returning.
            match_fields: a list of keys to use to match the existing model.  When
                empty, all fields are matched.
            error_messages: An optional dictionary of templates to use
                for friendlier error messages to clients
            load: Set relationships to be loaded
            execution_options: Set default execution options
            uniquify: Optionally apply the ``unique()`` method to results before returning.

        Returns:
            The updated or created instance.

        Raises:
            NotFoundError: If no instance found with same identifier as `data`.
""" self.uniquify = self._get_uniquify(uniquify) error_messages = self._get_error_messages( error_messages=error_messages, default_messages=self.error_messages, ) if match_fields := self._get_match_fields(match_fields=match_fields): match_filter = { field_name: getattr(data, field_name, None) for field_name in match_fields if getattr(data, field_name, None) is not None } elif getattr(data, self.id_attribute, None) is not None: match_filter = {self.id_attribute: getattr(data, self.id_attribute, None)} else: match_filter = data.to_dict(exclude={self.id_attribute}) existing = self.get_one_or_none(load=load, execution_options=execution_options, **match_filter) if not existing: return self.add( data, auto_commit=auto_commit, auto_expunge=auto_expunge, auto_refresh=auto_refresh, ) with wrap_sqlalchemy_exception(error_messages=error_messages, dialect_name=self._dialect.name): for field_name, new_field_value in data.to_dict(exclude={self.id_attribute}).items(): field = getattr(existing, field_name, MISSING) if field is not MISSING and field != new_field_value: setattr(existing, field_name, new_field_value) instance = self._attach_to_session(existing, strategy="merge") self._flush_or_commit(auto_commit=auto_commit) self._refresh( instance, attribute_names=attribute_names, with_for_update=with_for_update, auto_refresh=auto_refresh, ) self._expunge(instance, auto_expunge=auto_expunge) return instance def upsert_many( self, data: list[ModelT], *, auto_expunge: Optional[bool] = None, auto_commit: Optional[bool] = None, no_merge: bool = False, match_fields: Optional[Union[list[str], str]] = None, error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, ) -> list[ModelT]: """Modify or create multiple instances. Update instances with the attribute values present on `data`, or create a new instance if one doesn't exist. !!! 
tip In most cases, you will want to set `match_fields` to the combination of attributes, excluded the primary key, that define uniqueness for a row. Args: data: Instance to update existing, or be created. Identifier used to determine if an existing instance exists is the value of an attribute on ``data`` named as value of :attr:`id_attribute`. auto_expunge: Remove object from session before returning. auto_commit: Commit objects before returning. no_merge: Skip the usage of optimized Merge statements match_fields: a list of keys to use to match the existing model. When empty, automatically uses ``self.id_attribute`` (`id` by default) to match . error_messages: An optional dictionary of templates to use for friendlier error messages to clients load: Set default relationships to be loaded execution_options: Set default execution options uniquify: Optionally apply the ``unique()`` method to results before returning. Returns: The updated or created instance. Raises: NotFoundError: If no instance found with same identifier as ``data``. 
""" self.uniquify = self._get_uniquify(uniquify) error_messages = self._get_error_messages( error_messages=error_messages, default_messages=self.error_messages, ) instances: list[ModelT] = [] data_to_update: list[ModelT] = [] data_to_insert: list[ModelT] = [] match_fields = self._get_match_fields(match_fields=match_fields) if match_fields is None: match_fields = [self.id_attribute] match_filter: list[Union[StatementFilter, ColumnElement[bool]]] = [] if match_fields: for field_name in match_fields: field = get_instrumented_attr(self.model_type, field_name) matched_values = [ field_data for datum in data if (field_data := getattr(datum, field_name)) is not None ] match_filter.append(any_(matched_values) == field if self._prefer_any else field.in_(matched_values)) # type: ignore[arg-type] with wrap_sqlalchemy_exception(error_messages=error_messages, dialect_name=self._dialect.name): existing_objs = self.list( *match_filter, load=load, execution_options=execution_options, auto_expunge=False, ) for field_name in match_fields: field = get_instrumented_attr(self.model_type, field_name) matched_values = list( {getattr(datum, field_name) for datum in existing_objs if datum}, # ensure the list is unique ) match_filter.append(any_(matched_values) == field if self._prefer_any else field.in_(matched_values)) # type: ignore[arg-type] existing_ids = self._get_object_ids(existing_objs=existing_objs) data = self._merge_on_match_fields(data, existing_objs, match_fields) for datum in data: if getattr(datum, self.id_attribute, None) in existing_ids: data_to_update.append(datum) else: data_to_insert.append(datum) if data_to_insert: instances.extend( self.add_many(data_to_insert, auto_commit=False, auto_expunge=False), ) if data_to_update: instances.extend( self.update_many( data_to_update, auto_commit=False, auto_expunge=False, load=load, execution_options=execution_options, ), ) self._flush_or_commit(auto_commit=auto_commit) for instance in instances: self._expunge(instance, 
auto_expunge=auto_expunge) return instances def _get_object_ids(self, existing_objs: list[ModelT]) -> list[Any]: return [obj_id for datum in existing_objs if (obj_id := getattr(datum, self.id_attribute)) is not None] def _get_match_fields( self, match_fields: Optional[Union[list[str], str]] = None, id_attribute: Optional[str] = None, ) -> Optional[list[str]]: id_attribute = id_attribute or self.id_attribute match_fields = match_fields or self.match_fields if isinstance(match_fields, str): match_fields = [match_fields] return match_fields def _merge_on_match_fields( self, data: list[ModelT], existing_data: list[ModelT], match_fields: Optional[Union[list[str], str]] = None, ) -> list[ModelT]: match_fields = self._get_match_fields(match_fields=match_fields) if match_fields is None: match_fields = [self.id_attribute] for existing_datum in existing_data: for _row_id, datum in enumerate(data): match = all( getattr(datum, field_name) == getattr(existing_datum, field_name) for field_name in match_fields ) if match and getattr(existing_datum, self.id_attribute) is not None: setattr(datum, self.id_attribute, getattr(existing_datum, self.id_attribute)) return data def list( self, *filters: Union[StatementFilter, ColumnElement[bool]], auto_expunge: Optional[bool] = None, statement: Optional[Select[tuple[ModelT]]] = None, order_by: Optional[Union[list[OrderingPair], OrderingPair]] = None, error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, **kwargs: Any, ) -> list[ModelT]: """Get a list of instances, optionally filtered. Args: *filters: Types for specific filtering operations. auto_expunge: Remove object from session before returning. statement: To facilitate customization of the underlying select query. order_by: Set default order options for queries. 
            error_messages: An optional dictionary of templates to use
                for friendlier error messages to clients
            load: Set relationships to be loaded
            execution_options: Set default execution options
            uniquify: Optionally apply the ``unique()`` method to results before returning.
            **kwargs: Instance attribute value filters.

        Returns:
            The list of instances, after filtering applied.
        """
        self.uniquify = self._get_uniquify(uniquify)
        error_messages = self._get_error_messages(
            error_messages=error_messages,
            default_messages=self.error_messages,
        )
        with wrap_sqlalchemy_exception(error_messages=error_messages, dialect_name=self._dialect.name):
            execution_options = self._get_execution_options(execution_options)
            statement = self.statement if statement is None else statement
            loader_options, loader_options_have_wildcard = self._get_loader_options(load)
            statement = self._get_base_stmt(
                statement=statement,
                loader_options=loader_options,
                execution_options=execution_options,
            )
            if order_by is None:
                order_by = self.order_by or []
            statement = self._apply_order_by(statement=statement, order_by=order_by)
            statement = self._apply_filters(*filters, statement=statement)
            statement = self._filter_select_by_kwargs(statement, kwargs)
            result = self._execute(statement, uniquify=loader_options_have_wildcard)
            instances = list(result.scalars())
            for instance in instances:
                self._expunge(instance, auto_expunge=auto_expunge)
            return cast("list[ModelT]", instances)

    @classmethod
    def check_health(cls, session: Union[Session, scoped_session[Session]]) -> bool:
        """Perform a health check on the database.

        Args:
            session: through which we run a check statement

        Returns:
            ``True`` if healthy.
        """
        with wrap_sqlalchemy_exception():
            return (  # type: ignore[no-any-return]
                session.execute(cls._get_health_check_statement(session))
            ).scalar_one() == 1

    @staticmethod
    def _get_health_check_statement(session: Union[Session, scoped_session[Session]]) -> TextClause:
        # Oracle requires a FROM clause, so select from the DUAL pseudo-table there.
        if session.bind and session.bind.dialect.name == "oracle":
            return text("SELECT 1 FROM DUAL")
        return text("SELECT 1")

    def _attach_to_session(self, model: ModelT, strategy: Literal["add", "merge"] = "add", load: bool = True) -> ModelT:
        """Attach detached instance to the session.

        Args:
            model: The instance to be attached to the session.
            strategy: How the instance should be attached.
                - "add": New instance added to session
                - "merge": Instance merged with existing, or new one added.
            load: Boolean, when False, merge switches into
                a "high performance" mode which causes it to forego emitting history
                events as well as all database access.  This flag is used for
                cases such as transferring graphs of objects into a session
                from a second level cache, or to transfer just-loaded objects
                into the session owned by a worker thread or process
                without re-querying the database.

        Returns:
            Instance attached to the session - if `"merge"` strategy, may not be same instance
            that was provided.
""" if strategy == "add": self.session.add(model) return model if strategy == "merge": return self.session.merge(model, load=load) msg = "Unexpected value for `strategy`, must be `'add'` or `'merge'`" # type: ignore[unreachable] raise ValueError(msg) def _execute( self, statement: Select[Any], uniquify: bool = False, ) -> Result[Any]: result = self.session.execute(statement) if uniquify or self.uniquify: result = result.unique() return result class SQLAlchemySyncSlugRepository( SQLAlchemySyncRepository[ModelT], SQLAlchemySyncSlugRepositoryProtocol[ModelT], ): """Extends the repository to include slug model features..""" def get_by_slug( self, slug: str, error_messages: Optional[Union[ErrorMessages, EmptyType]] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, **kwargs: Any, ) -> Optional[ModelT]: """Select record by slug value.""" return self.get_one_or_none( slug=slug, load=load, execution_options=execution_options, error_messages=error_messages, uniquify=uniquify, ) def get_available_slug( self, value_to_slugify: str, **kwargs: Any, ) -> str: """Get a unique slug for the supplied value. If the value is found to exist, a random 4 digit character is appended to the end. Override this method to change the default behavior Args: value_to_slugify (str): A string that should be converted to a unique slug. **kwargs: stuff Returns: str: a unique slug for the supplied value. This is safe for URLs and other unique identifiers. 
""" slug = slugify(value_to_slugify) if self._is_slug_unique(slug): return slug random_string = "".join(random.choices(string.ascii_lowercase + string.digits, k=4)) # noqa: S311 return f"{slug}-{random_string}" def _is_slug_unique( self, slug: str, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, **kwargs: Any, ) -> bool: return self.exists(slug=slug, load=load, execution_options=execution_options, **kwargs) is False class SQLAlchemySyncQueryRepository: """SQLAlchemy Query Repository. This is a loosely typed helper to query for when you need to select data in ways that don't align to the normal repository pattern. """ error_messages: Optional[ErrorMessages] = None def __init__( self, *, session: Union[Session, scoped_session[Session]], error_messages: Optional[ErrorMessages] = None, **kwargs: Any, ) -> None: """Repository pattern for SQLAlchemy models. Args: session: Session managing the unit-of-work for the operation. error_messages: A set of error messages to use for operations. **kwargs: Additional arguments. """ super().__init__(**kwargs) self.session = session self.error_messages = error_messages self._dialect = self.session.bind.dialect if self.session.bind is not None else self.session.get_bind().dialect def get_one( self, statement: Select[tuple[Any]], **kwargs: Any, ) -> Row[Any]: """Get instance identified by ``kwargs``. Args: statement: To facilitate customization of the underlying select query. **kwargs: Instance attribute value filters. Returns: The retrieved instance. Raises: NotFoundError: If no instance found identified by `item_id`. """ with wrap_sqlalchemy_exception(error_messages=self.error_messages): statement = self._filter_statement_by_kwargs(statement, **kwargs) instance = (self.execute(statement)).scalar_one_or_none() return self.check_not_found(instance) def get_one_or_none( self, statement: Select[Any], **kwargs: Any, ) -> Optional[Row[Any]]: """Get instance identified by ``kwargs`` or None if not found. 
Args: statement: To facilitate customization of the underlying select query. **kwargs: Instance attribute value filters. Returns: The retrieved instance or None """ with wrap_sqlalchemy_exception(error_messages=self.error_messages): statement = self._filter_statement_by_kwargs(statement, **kwargs) instance = (self.execute(statement)).scalar_one_or_none() return instance or None def count(self, statement: Select[Any], **kwargs: Any) -> int: """Get the count of records returned by a query. Args: statement: To facilitate customization of the underlying select query. **kwargs: Instance attribute value filters. Returns: Count of records returned by query, ignoring pagination. """ with wrap_sqlalchemy_exception(error_messages=self.error_messages): statement = statement.with_only_columns(sql_func.count(text("1")), maintain_column_froms=True).order_by( None, ) statement = self._filter_statement_by_kwargs(statement, **kwargs) results = self.execute(statement) return results.scalar_one() # type: ignore # noqa: PGH003 def list_and_count( self, statement: Select[Any], count_with_window_function: Optional[bool] = None, **kwargs: Any, ) -> tuple[list[Row[Any]], int]: """List records with total count. Args: statement: To facilitate customization of the underlying select query. count_with_window_function: Force list and count to use two queries instead of an analytical window function. **kwargs: Instance attribute value filters. Returns: Count of records returned by query, ignoring pagination. """ if self._dialect.name in {"spanner", "spanner+spanner"} or count_with_window_function: return self._list_and_count_basic(statement=statement, **kwargs) return self._list_and_count_window(statement=statement, **kwargs) def _list_and_count_window( self, statement: Select[Any], **kwargs: Any, ) -> tuple[list[Row[Any]], int]: """List records with total count. Args: *filters: Types for specific filtering operations. statement: To facilitate customization of the underlying select query. 
**kwargs: Instance attribute value filters. Returns: Count of records returned by query using an analytical window function, ignoring pagination. """ with wrap_sqlalchemy_exception(error_messages=self.error_messages): statement = statement.add_columns(over(sql_func.count(text("1")))) statement = self._filter_statement_by_kwargs(statement, **kwargs) result = self.execute(statement) count: int = 0 instances: list[Row[Any]] = [] for i, (instance, count_value) in enumerate(result): instances.append(instance) if i == 0: count = count_value return instances, count def _get_count_stmt(self, statement: Select[Any]) -> Select[Any]: return statement.with_only_columns(sql_func.count(text("1")), maintain_column_froms=True).order_by(None) # pyright: ignore[reportUnknownVariable] def _list_and_count_basic( self, statement: Select[Any], **kwargs: Any, ) -> tuple[list[Row[Any]], int]: """List records with total count. Args: statement: To facilitate customization of the underlying select query. . **kwargs: Instance attribute value filters. Returns: Count of records returned by query using 2 queries, ignoring pagination. """ with wrap_sqlalchemy_exception(error_messages=self.error_messages): statement = self._filter_statement_by_kwargs(statement, **kwargs) count_result = self.session.execute(self._get_count_stmt(statement)) count = count_result.scalar_one() result = self.execute(statement) instances: list[Row[Any]] = [] for (instance,) in result: instances.append(instance) return instances, count def list(self, statement: Select[Any], **kwargs: Any) -> list[Row[Any]]: """Get a list of instances, optionally filtered. Args: statement: To facilitate customization of the underlying select query. **kwargs: Instance attribute value filters. Returns: The list of instances, after filtering applied. 
""" with wrap_sqlalchemy_exception(error_messages=self.error_messages): statement = self._filter_statement_by_kwargs(statement, **kwargs) result = self.execute(statement) return list(result.all()) def _filter_statement_by_kwargs( self, statement: Select[Any], /, **kwargs: Any, ) -> Select[Any]: """Filter the collection by kwargs. Args: statement: statement to filter **kwargs: key/value pairs such that objects remaining in the statement after filtering have the property that their attribute named `key` has value equal to `value`. """ with wrap_sqlalchemy_exception(error_messages=self.error_messages): return statement.filter_by(**kwargs) # the following is all sqlalchemy implementation detail, and shouldn't be directly accessed @staticmethod def check_not_found(item_or_none: Optional[T]) -> T: """Raise :class:`NotFoundError` if ``item_or_none`` is ``None``. Args: item_or_none: Item to be tested for existence. Returns: The item, if it exists. """ if item_or_none is None: msg = "No item found when one was expected" raise NotFoundError(msg) return item_or_none def execute( self, statement: Union[ ReturningDelete[tuple[Any]], ReturningUpdate[tuple[Any]], Select[tuple[Any]], Update, Delete, Select[Any] ], ) -> Result[Any]: return self.session.execute(statement) python-advanced-alchemy-1.0.1/advanced_alchemy/repository/_util.py000066400000000000000000000326541476663714600254270ustar00rootroot00000000000000from collections.abc import Iterable, Sequence from typing import Any, Literal, Optional, Protocol, Union, cast, overload from sqlalchemy import ( Delete, Dialect, Select, Update, ) from sqlalchemy.orm import ( InstrumentedAttribute, MapperProperty, RelationshipProperty, joinedload, lazyload, selectinload, ) from sqlalchemy.orm.strategy_options import ( _AbstractLoad, # pyright: ignore[reportPrivateUsage] # pyright: ignore[reportPrivateUsage] ) from sqlalchemy.sql import ColumnElement, ColumnExpressionArgument from sqlalchemy.sql.base import ExecutableOption from 
sqlalchemy.sql.dml import ReturningDelete, ReturningUpdate from typing_extensions import TypeAlias from advanced_alchemy.base import ModelProtocol from advanced_alchemy.exceptions import ErrorMessages from advanced_alchemy.exceptions import wrap_sqlalchemy_exception as _wrap_sqlalchemy_exception from advanced_alchemy.filters import ( InAnyFilter, PaginationFilter, StatementFilter, StatementTypeT, ) from advanced_alchemy.repository.typing import ModelT, OrderingPair WhereClauseT = ColumnExpressionArgument[bool] SingleLoad: TypeAlias = Union[ _AbstractLoad, Literal["*"], InstrumentedAttribute[Any], RelationshipProperty[Any], MapperProperty[Any], ] LoadCollection: TypeAlias = Sequence[Union[SingleLoad, Sequence[SingleLoad]]] ExecutableOptions: TypeAlias = Sequence[ExecutableOption] LoadSpec: TypeAlias = Union[LoadCollection, SingleLoad, ExecutableOption, ExecutableOptions] OrderByT: TypeAlias = Union[ str, InstrumentedAttribute[Any], RelationshipProperty[Any], ] # NOTE: For backward compatibility with Litestar - this is imported from here within the litestar codebase. wrap_sqlalchemy_exception = _wrap_sqlalchemy_exception DEFAULT_ERROR_MESSAGE_TEMPLATES: ErrorMessages = { "integrity": "There was a data validation error during processing", "foreign_key": "A foreign key is missing or invalid", "multiple_rows": "Multiple matching rows found", "duplicate_key": "A record matching the supplied data already exists.", "other": "There was an error during data processing", "check_constraint": "The data failed a check constraint during processing", "not_found": "The requested resource was not found", } """Default error messages for repository errors.""" def get_instrumented_attr( model: type[ModelProtocol], key: Union[str, InstrumentedAttribute[Any]], ) -> InstrumentedAttribute[Any]: """Get an instrumented attribute from a model. Args: model: SQLAlchemy model class. key: Either a string attribute name or an :class:`sqlalchemy.orm.InstrumentedAttribute`. 
Returns: :class:`sqlalchemy.orm.InstrumentedAttribute`: The instrumented attribute from the model. """ if isinstance(key, str): return cast("InstrumentedAttribute[Any]", getattr(model, key)) return key def model_from_dict(model: type[ModelT], **kwargs: Any) -> ModelT: """Create an ORM model instance from a dictionary of attributes. Args: model: The SQLAlchemy model class to instantiate. **kwargs: Keyword arguments containing model attribute values. Returns: ModelT: A new instance of the model populated with the provided values. """ data = { column_name: kwargs[column_name] for column_name in model.__mapper__.columns.keys() # noqa: SIM118 # pyright: ignore[reportUnknownMemberType] if column_name in kwargs } return model(**data) def get_abstract_loader_options( loader_options: Union[LoadSpec, None], default_loader_options: Union[list[_AbstractLoad], None] = None, default_options_have_wildcards: bool = False, merge_with_default: bool = True, inherit_lazy_relationships: bool = True, cycle_count: int = 0, ) -> tuple[list[_AbstractLoad], bool]: """Generate SQLAlchemy loader options for eager loading relationships. Args: loader_options :class:`~advanced_alchemy.repository.typing.LoadSpec`|:class:`None` Specification for how to load relationships. Can be: - None: Use defaults - :class:`sqlalchemy.orm.strategy_options._AbstractLoad`: Direct SQLAlchemy loader option - :class:`sqlalchemy.orm.InstrumentedAttribute`: Model relationship attribute - :class:`sqlalchemy.orm.RelationshipProperty`: SQLAlchemy relationship - str: "*" for wildcard loading - :class:`typing.Sequence` of the above default_loader_options: :class:`typing.Sequence` of :class:`sqlalchemy.orm.strategy_options._AbstractLoad` loader options to start with. default_options_have_wildcards: Whether the default options contain wildcards. merge_with_default: Whether to merge the default options with the loader options. 
inherit_lazy_relationships: Whether to inherit the ``lazy`` configuration from the model's relationships. cycle_count: Number of times this function has been called recursively. Returns: tuple[:class:`list`[:class:`sqlalchemy.orm.strategy_options._AbstractLoad`], bool]: A tuple containing: - :class:`list` of :class:`sqlalchemy.orm.strategy_options._AbstractLoad` SQLAlchemy loader option objects - Boolean indicating if any wildcard loaders are present """ loads: list[_AbstractLoad] = [] if cycle_count == 0 and not inherit_lazy_relationships: loads.append(lazyload("*")) if cycle_count == 0 and merge_with_default and default_loader_options is not None: loads.extend(default_loader_options) options_have_wildcards = default_options_have_wildcards if loader_options is None: return (loads, options_have_wildcards) if isinstance(loader_options, _AbstractLoad): return ([loader_options], options_have_wildcards) if isinstance(loader_options, InstrumentedAttribute): loader_options = [loader_options.property] if isinstance(loader_options, RelationshipProperty): class_ = loader_options.class_attribute return ( [selectinload(class_)] if loader_options.uselist else [joinedload(class_, innerjoin=loader_options.innerjoin)], options_have_wildcards if loader_options.uselist else True, ) if isinstance(loader_options, str) and loader_options == "*": options_have_wildcards = True return ([joinedload("*")], options_have_wildcards) if isinstance(loader_options, (list, tuple)): for attribute in loader_options: # pyright: ignore[reportUnknownVariableType] if isinstance(attribute, (list, tuple)): load_chain, options_have_wildcards = get_abstract_loader_options( loader_options=attribute, # pyright: ignore[reportUnknownArgumentType] default_options_have_wildcards=options_have_wildcards, inherit_lazy_relationships=inherit_lazy_relationships, merge_with_default=merge_with_default, cycle_count=cycle_count + 1, ) loader = load_chain[-1] for sub_load in load_chain[-2::-1]: loader = 
sub_load.options(loader) loads.append(loader) else: load_chain, options_have_wildcards = get_abstract_loader_options( loader_options=attribute, # pyright: ignore[reportUnknownArgumentType] default_options_have_wildcards=options_have_wildcards, inherit_lazy_relationships=inherit_lazy_relationships, merge_with_default=merge_with_default, cycle_count=cycle_count + 1, ) loads.extend(load_chain) return (loads, options_have_wildcards) class FilterableRepositoryProtocol(Protocol[ModelT]): """Protocol defining the interface for filterable repositories. This protocol defines the required attributes and methods that any filterable repository implementation must provide. """ model_type: type[ModelT] """The SQLAlchemy model class this repository manages.""" class FilterableRepository(FilterableRepositoryProtocol[ModelT]): """Default implementation of a filterable repository. Provides core filtering, ordering and pagination functionality for SQLAlchemy models. """ model_type: type[ModelT] """The SQLAlchemy model class this repository manages.""" prefer_any_dialects: Optional[tuple[str]] = ("postgresql",) """List of dialects that prefer to use ``field.id = ANY(:1)`` instead of ``field.id IN (...)``.""" order_by: Optional[Union[list[OrderingPair], OrderingPair]] = None """List or single :class:`~advanced_alchemy.repository.typing.OrderingPair` to use for sorting.""" _prefer_any: bool = False """Whether to prefer ANY() over IN() in queries.""" _dialect: Dialect """The SQLAlchemy :class:`sqlalchemy.dialects.Dialect` being used.""" @overload def _apply_filters( self, *filters: Union[StatementFilter, ColumnElement[bool]], apply_pagination: bool = True, statement: Select[tuple[ModelT]], ) -> Select[tuple[ModelT]]: ... @overload def _apply_filters( self, *filters: Union[StatementFilter, ColumnElement[bool]], apply_pagination: bool = True, statement: Delete, ) -> Delete: ... 
@overload def _apply_filters( self, *filters: Union[StatementFilter, ColumnElement[bool]], apply_pagination: bool = True, statement: Union[ReturningDelete[tuple[ModelT]], ReturningUpdate[tuple[ModelT]]], ) -> Union[ReturningDelete[tuple[ModelT]], ReturningUpdate[tuple[ModelT]]]: ... @overload def _apply_filters( self, *filters: Union[StatementFilter, ColumnElement[bool]], apply_pagination: bool = True, statement: Update, ) -> Update: ... def _apply_filters( self, *filters: Union[StatementFilter, ColumnElement[bool]], apply_pagination: bool = True, statement: StatementTypeT, ) -> StatementTypeT: """Apply filters to a SQL statement. Args: *filters: Filter conditions to apply. apply_pagination: Whether to apply pagination filters. statement: The base SQL statement to filter. Returns: StatementTypeT: The filtered SQL statement. """ for filter_ in filters: if isinstance(filter_, (PaginationFilter,)): if apply_pagination: statement = filter_.append_to_statement(statement, self.model_type) elif isinstance(filter_, (InAnyFilter,)): statement = filter_.append_to_statement(statement, self.model_type) elif isinstance(filter_, ColumnElement): statement = cast("StatementTypeT", statement.where(filter_)) else: statement = filter_.append_to_statement(statement, self.model_type) return statement def _filter_select_by_kwargs( self, statement: StatementTypeT, kwargs: Union[dict[Any, Any], Iterable[tuple[Any, Any]]], ) -> StatementTypeT: """Filter a statement using keyword arguments. Args: statement: :class:`sqlalchemy.sql.Select` The SQL statement to filter. kwargs: Dictionary or iterable of tuples containing filter criteria. Keys should be model attribute names, values are what to filter for. Returns: StatementTypeT: The filtered SQL statement. 
""" for key, val in dict(kwargs).items(): field = get_instrumented_attr(self.model_type, key) statement = cast("StatementTypeT", statement.where(field == val)) return statement def _apply_order_by( self, statement: StatementTypeT, order_by: Union[ list[tuple[Union[str, InstrumentedAttribute[Any]], bool]], tuple[Union[str, InstrumentedAttribute[Any]], bool], ], ) -> StatementTypeT: """Apply ordering to a SQL statement. Args: statement: The SQL statement to order. order_by: Ordering specification. Either a single tuple or list of tuples where: - First element is the field name or :class:`sqlalchemy.orm.InstrumentedAttribute` to order by - Second element is a boolean indicating descending (True) or ascending (False) Returns: StatementTypeT: The ordered SQL statement. """ if not isinstance(order_by, list): order_by = [order_by] for order_field, is_desc in order_by: field = get_instrumented_attr(self.model_type, order_field) statement = self._order_by_attribute(statement, field, is_desc) return statement def _order_by_attribute( self, statement: StatementTypeT, field: InstrumentedAttribute[Any], is_desc: bool, ) -> StatementTypeT: """Apply ordering by a single attribute to a SQL statement. Args: statement: The SQL statement to order. field: The model attribute to order by. is_desc: Whether to order in descending (True) or ascending (False) order. Returns: StatementTypeT: The ordered SQL statement. 
""" if not isinstance(statement, Select): return statement return cast("StatementTypeT", statement.order_by(field.desc() if is_desc else field.asc())) python-advanced-alchemy-1.0.1/advanced_alchemy/repository/memory/000077500000000000000000000000001476663714600252375ustar00rootroot00000000000000python-advanced-alchemy-1.0.1/advanced_alchemy/repository/memory/__init__.py000066400000000000000000000006241476663714600273520ustar00rootroot00000000000000from advanced_alchemy.repository.memory._async import SQLAlchemyAsyncMockRepository, SQLAlchemyAsyncMockSlugRepository from advanced_alchemy.repository.memory._sync import SQLAlchemySyncMockRepository, SQLAlchemySyncMockSlugRepository __all__ = [ "SQLAlchemyAsyncMockRepository", "SQLAlchemyAsyncMockSlugRepository", "SQLAlchemySyncMockRepository", "SQLAlchemySyncMockSlugRepository", ] python-advanced-alchemy-1.0.1/advanced_alchemy/repository/memory/_async.py000066400000000000000000000727731476663714600271050ustar00rootroot00000000000000import datetime import random import re import string from collections import abc from collections.abc import Iterable from typing import Any, Optional, Union, cast, overload from unittest.mock import create_autospec from sqlalchemy import ( ColumnElement, Dialect, Select, StatementLambdaElement, Update, ) from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.ext.asyncio.scoping import async_scoped_session from sqlalchemy.orm import InstrumentedAttribute from sqlalchemy.orm.strategy_options import _AbstractLoad # pyright: ignore[reportPrivateUsage] from sqlalchemy.sql.dml import ReturningUpdate from typing_extensions import Self from advanced_alchemy.exceptions import ErrorMessages, IntegrityError, NotFoundError, RepositoryError from advanced_alchemy.filters import ( BeforeAfter, CollectionFilter, LimitOffset, NotInCollectionFilter, NotInSearchFilter, OnBeforeAfter, OrderBy, SearchFilter, StatementFilter, ) from advanced_alchemy.repository._async import 
SQLAlchemyAsyncRepositoryProtocol, SQLAlchemyAsyncSlugRepositoryProtocol from advanced_alchemy.repository._util import DEFAULT_ERROR_MESSAGE_TEMPLATES, LoadSpec from advanced_alchemy.repository.memory.base import ( AnyObject, InMemoryStore, SQLAlchemyInMemoryStore, SQLAlchemyMultiStore, ) from advanced_alchemy.repository.typing import MISSING, ModelT, OrderingPair from advanced_alchemy.utils.dataclass import Empty, EmptyType from advanced_alchemy.utils.text import slugify class SQLAlchemyAsyncMockRepository(SQLAlchemyAsyncRepositoryProtocol[ModelT]): """In memory repository.""" __database__: SQLAlchemyMultiStore[ModelT] = SQLAlchemyMultiStore(SQLAlchemyInMemoryStore) __database_registry__: dict[type[Self], SQLAlchemyMultiStore[ModelT]] = {} loader_options: Optional[LoadSpec] = None """Default loader options for the repository.""" execution_options: Optional[dict[str, Any]] = None """Default execution options for the repository.""" model_type: type[ModelT] id_attribute: Any = "id" match_fields: Optional[Union[list[str], str]] = None uniquify: bool = False _exclude_kwargs: set[str] = { "statement", "session", "auto_expunge", "auto_refresh", "auto_commit", "attribute_names", "with_for_update", "count_with_window_function", "loader_options", "execution_options", "order_by", "load", "error_messages", "wrap_exceptions", "uniquify", } def __init__( self, *, statement: Union[Select[tuple[ModelT]], StatementLambdaElement, None] = None, session: Union[AsyncSession, async_scoped_session[AsyncSession]], auto_expunge: bool = False, auto_refresh: bool = True, auto_commit: bool = False, order_by: Union[list[OrderingPair], OrderingPair, None] = None, error_messages: Union[ErrorMessages, None, EmptyType] = Empty, wrap_exceptions: bool = True, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, **kwargs: Any, ) -> None: self.session = session self.statement = create_autospec("Select[Tuple[ModelT]]", instance=True) 
self.auto_expunge = auto_expunge self.auto_refresh = auto_refresh self.auto_commit = auto_commit self.error_messages = self._get_error_messages(error_messages=error_messages) self.wrap_exceptions = wrap_exceptions self.order_by = order_by self._dialect: Dialect = create_autospec(Dialect, instance=True) self._dialect.name = "mock" self.__filtered_store__: InMemoryStore[ModelT] = self.__database__.store_type() self._default_options: Any = [] self._default_execution_options: Any = {} self._loader_options: Any = [] self._loader_options_have_wildcards = False self.uniquify = bool(uniquify) def __init_subclass__(cls) -> None: cls.__database_registry__[cls] = cls.__database__ # pyright: ignore[reportGeneralTypeIssues,reportUnknownMemberType] @staticmethod def _get_error_messages( error_messages: Union[ErrorMessages, None, EmptyType] = Empty, default_messages: Union[ErrorMessages, None, EmptyType] = Empty, ) -> Optional[ErrorMessages]: if error_messages == Empty: error_messages = None default_messages = cast( "Optional[ErrorMessages]", default_messages if default_messages != Empty else DEFAULT_ERROR_MESSAGE_TEMPLATES, ) if error_messages is not None and default_messages is not None: default_messages.update(cast("ErrorMessages", error_messages)) return default_messages @classmethod def __database_add__(cls, identity: Any, data: ModelT) -> ModelT: return cast("ModelT", cls.__database__.add(identity, data)) # pyright: ignore[reportUnnecessaryCast,reportGeneralTypeIssues] @classmethod def __database_clear__(cls) -> None: for database in cls.__database_registry__.values(): # pyright: ignore[reportGeneralTypeIssues,reportUnknownMemberType] database.remove_all() @overload def __collection__(self) -> InMemoryStore[ModelT]: ... @overload def __collection__(self, identity: type[AnyObject]) -> InMemoryStore[AnyObject]: ... 
def __collection__( self, identity: Optional[type[AnyObject]] = None, ) -> Union[InMemoryStore[AnyObject], InMemoryStore[ModelT]]: if identity: return self.__database__.store(identity) return self.__filtered_store__ or self.__database__.store(self.model_type) @staticmethod def check_not_found(item_or_none: Union[ModelT, None]) -> ModelT: if item_or_none is None: msg = "No item found when one was expected" raise NotFoundError(msg) return item_or_none @classmethod def get_id_attribute_value( cls, item: Union[ModelT, type[ModelT]], id_attribute: Union[str, InstrumentedAttribute[Any], None] = None, ) -> Any: """Get value of attribute named as :attr:`id_attribute` on ``item``. Args: item: Anything that should have an attribute named as :attr:`id_attribute` value. id_attribute: Allows customization of the unique identifier to use for model fetching. Defaults to `None`, but can reference any surrogate or candidate key for the table. Returns: The value of attribute on ``item`` named as :attr:`id_attribute`. """ if isinstance(id_attribute, InstrumentedAttribute): id_attribute = id_attribute.key return getattr(item, id_attribute if id_attribute is not None else cls.id_attribute) @classmethod def set_id_attribute_value( cls, item_id: Any, item: ModelT, id_attribute: Union[str, InstrumentedAttribute[Any], None] = None, ) -> ModelT: """Return the ``item`` after the ID is set to the appropriate attribute. Args: item_id: Value of ID to be set on instance item: Anything that should have an attribute named as :attr:`id_attribute` value. id_attribute: Allows customization of the unique identifier to use for model fetching. Defaults to `None`, but can reference any surrogate or candidate key for the table. 
Returns: Item with ``item_id`` set to :attr:`id_attribute` """ if isinstance(id_attribute, InstrumentedAttribute): id_attribute = id_attribute.key setattr(item, id_attribute if id_attribute is not None else cls.id_attribute, item_id) return item def _exclude_unused_kwargs(self, kwargs: dict[str, Any]) -> dict[str, Any]: return {key: value for key, value in kwargs.items() if key not in self._exclude_kwargs} def _apply_limit_offset_pagination(self, result: list[ModelT], limit: int, offset: int) -> list[ModelT]: return result[offset:limit] def _filter_in_collection( self, result: list[ModelT], field_name: str, values: abc.Collection[Any], ) -> list[ModelT]: return [item for item in result if getattr(item, field_name) in values] def _filter_not_in_collection( self, result: list[ModelT], field_name: str, values: abc.Collection[Any], ) -> list[ModelT]: if not values: return result return [item for item in result if getattr(item, field_name) not in values] def _filter_on_datetime_field( self, result: list[ModelT], field_name: str, before: Optional[datetime.datetime] = None, after: Optional[datetime.datetime] = None, on_or_before: Optional[datetime.datetime] = None, on_or_after: Optional[datetime.datetime] = None, ) -> list[ModelT]: result_: list[ModelT] = [] for item in result: attr: datetime.datetime = getattr(item, field_name) if before is not None and attr < before: result_.append(item) if after is not None and attr > after: result_.append(item) if on_or_before is not None and attr <= on_or_before: result_.append(item) if on_or_after is not None and attr >= on_or_after: result_.append(item) return result_ def _filter_by_like( self, result: list[ModelT], field_name: Union[str, set[str]], value: str, ignore_case: bool, ) -> list[ModelT]: pattern = re.compile(rf".*{value}.*", re.IGNORECASE) if ignore_case else re.compile(rf".*{value}.*") fields = {field_name} if isinstance(field_name, str) else field_name items: list[ModelT] = [] for field in fields: items.extend( [ item 
for item in result if isinstance(getattr(item, field), str) and pattern.match(getattr(item, field)) ], ) return list(set(items)) def _filter_by_not_like( self, result: list[ModelT], field_name: Union[str, set[str]], value: str, ignore_case: bool, ) -> list[ModelT]: pattern = re.compile(rf".*{value}.*", re.IGNORECASE) if ignore_case else re.compile(rf".*{value}.*") fields = {field_name} if isinstance(field_name, str) else field_name items: list[ModelT] = [] for field in fields: items.extend( [ item for item in result if isinstance(getattr(item, field), str) and pattern.match(getattr(item, field)) ], ) return list(set(result).difference(set(items))) def _filter_result_by_kwargs( self, result: Iterable[ModelT], /, kwargs: Union[dict[Any, Any], Iterable[tuple[Any, Any]]], ) -> list[ModelT]: kwargs_: dict[Any, Any] = kwargs if isinstance(kwargs, dict) else dict(*kwargs) kwargs_ = self._exclude_unused_kwargs(kwargs_) try: return [item for item in result if all(getattr(item, field) == value for field, value in kwargs_.items())] except AttributeError as error: raise RepositoryError from error def _order_by(self, result: list[ModelT], field_name: str, sort_desc: bool = False) -> list[ModelT]: return sorted(result, key=lambda item: getattr(item, field_name), reverse=sort_desc) def _apply_filters( self, result: list[ModelT], *filters: Union[StatementFilter, ColumnElement[bool]], apply_pagination: bool = True, ) -> list[ModelT]: for filter_ in filters: if isinstance(filter_, LimitOffset): if apply_pagination: result = self._apply_limit_offset_pagination(result, filter_.limit, filter_.offset) elif isinstance(filter_, BeforeAfter): result = self._filter_on_datetime_field( result, field_name=filter_.field_name, before=filter_.before, after=filter_.after, ) elif isinstance(filter_, OnBeforeAfter): result = self._filter_on_datetime_field( result, field_name=filter_.field_name, on_or_before=filter_.on_or_before, on_or_after=filter_.on_or_after, ) elif isinstance(filter_, 
NotInCollectionFilter): if filter_.values is not None: # pyright: ignore # noqa: PGH003 result = self._filter_not_in_collection(result, filter_.field_name, filter_.values) # pyright: ignore # noqa: PGH003 elif isinstance(filter_, CollectionFilter): if filter_.values is not None: # pyright: ignore # noqa: PGH003 result = self._filter_in_collection(result, filter_.field_name, filter_.values) # pyright: ignore # noqa: PGH003 elif isinstance(filter_, OrderBy): result = self._order_by( result, filter_.field_name, sort_desc=filter_.sort_order == "desc", ) elif isinstance(filter_, NotInSearchFilter): result = self._filter_by_not_like( result, filter_.field_name, value=filter_.value, ignore_case=bool(filter_.ignore_case), ) elif isinstance(filter_, SearchFilter): result = self._filter_by_like( result, filter_.field_name, value=filter_.value, ignore_case=bool(filter_.ignore_case), ) elif not isinstance(filter_, ColumnElement): msg = f"Unexpected filter: {filter_}" raise RepositoryError(msg) return result def _get_match_fields( self, match_fields: Union[list[str], str, None], id_attribute: Optional[str] = None, ) -> Optional[list[str]]: id_attribute = id_attribute or self.id_attribute match_fields = match_fields or self.match_fields if isinstance(match_fields, str): match_fields = [match_fields] return match_fields async def _list_and_count_basic( self, *filters: Union[StatementFilter, ColumnElement[bool]], **kwargs: Any, ) -> tuple[list[ModelT], int]: result = await self.list(*filters, **kwargs) return result, len(result) async def _list_and_count_window( self, *filters: Union[StatementFilter, ColumnElement[bool]], **kwargs: Any, ) -> tuple[list[ModelT], int]: return await self._list_and_count_basic(*filters, **kwargs) def _find_or_raise_not_found(self, id_: Any) -> ModelT: return self.check_not_found(self.__collection__().get_or_none(id_)) def _find_one_or_raise_error(self, result: list[ModelT]) -> ModelT: if not result: msg = "No item found when one was expected" raise 
IntegrityError(msg) if len(result) > 1: msg = "Multiple objects when one was expected" raise IntegrityError(msg) return result[0] def _get_update_many_statement( self, model_type: type[ModelT], supports_returning: bool, loader_options: Optional[list[_AbstractLoad]], execution_options: Optional[dict[str, Any]], ) -> Union[Update, ReturningUpdate[tuple[ModelT]]]: return self.statement # type: ignore[no-any-return] # pyright: ignore[reportReturnType] @classmethod async def check_health(cls, session: Union[AsyncSession, async_scoped_session[AsyncSession]]) -> bool: return True async def get( self, item_id: Any, *, auto_expunge: Optional[bool] = None, statement: Union[Select[tuple[ModelT]], StatementLambdaElement, None] = None, id_attribute: Union[str, InstrumentedAttribute[Any], None] = None, error_messages: Union[ErrorMessages, None, EmptyType] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, ) -> ModelT: return self._find_or_raise_not_found(item_id) async def get_one( self, *filters: Union[StatementFilter, ColumnElement[bool]], auto_expunge: Optional[bool] = None, statement: Union[Select[tuple[ModelT]], StatementLambdaElement, None] = None, error_messages: Union[ErrorMessages, None, EmptyType] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, **kwargs: Any, ) -> ModelT: return self.check_not_found(await self.get_one_or_none(**kwargs)) async def get_one_or_none( self, *filters: Union[StatementFilter, ColumnElement[bool]], auto_expunge: Optional[bool] = None, statement: Union[Select[tuple[ModelT]], StatementLambdaElement, None] = None, error_messages: Union[ErrorMessages, None, EmptyType] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, **kwargs: Any, ) -> Union[ModelT, None]: result = 
self._filter_result_by_kwargs(self.__collection__().list(), kwargs) if len(result) > 1: msg = "Multiple objects when one was expected" raise IntegrityError(msg) return result[0] if result else None async def get_or_upsert( self, *filters: Union[StatementFilter, ColumnElement[bool]], match_fields: Union[list[str], str, None] = None, upsert: bool = True, attribute_names: Optional[Iterable[str]] = None, with_for_update: Optional[bool] = None, auto_commit: Optional[bool] = None, auto_expunge: Optional[bool] = None, auto_refresh: Optional[bool] = None, error_messages: Union[ErrorMessages, None, EmptyType] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, **kwargs: Any, ) -> tuple[ModelT, bool]: kwargs_ = self._exclude_unused_kwargs(kwargs) if match_fields := self._get_match_fields(match_fields=match_fields): match_filter = { # sourcery skip: remove-none-from-default-get field_name: kwargs_.get(field_name, None) for field_name in match_fields if kwargs_.get(field_name, None) is not None } else: match_filter = kwargs_ existing = await self.get_one_or_none(**match_filter) if not existing: return (await self.add(self.model_type(**kwargs_)), True) if upsert: for field_name, new_field_value in kwargs_.items(): field = getattr(existing, field_name, MISSING) if field is not MISSING and field != new_field_value: setattr(existing, field_name, new_field_value) existing = await self.update(existing) return existing, False async def get_and_update( self, *filters: Union[StatementFilter, ColumnElement[bool]], match_fields: Union[list[str], str, None] = None, attribute_names: Optional[Iterable[str]] = None, with_for_update: Optional[bool] = None, auto_commit: Optional[bool] = None, auto_expunge: Optional[bool] = None, auto_refresh: Optional[bool] = None, error_messages: Union[ErrorMessages, None, EmptyType] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: 
Optional[bool] = None, **kwargs: Any, ) -> tuple[ModelT, bool]: kwargs_ = self._exclude_unused_kwargs(kwargs) if match_fields := self._get_match_fields(match_fields=match_fields): match_filter = { # sourcery skip: remove-none-from-default-get field_name: kwargs_.get(field_name, None) for field_name in match_fields if kwargs_.get(field_name, None) is not None } else: match_filter = kwargs_ existing = await self.get_one(**match_filter) updated = False for field_name, new_field_value in kwargs_.items(): field = getattr(existing, field_name, MISSING) if field is not MISSING and field != new_field_value: updated = True setattr(existing, field_name, new_field_value) existing = await self.update(existing) return existing, updated async def exists( self, *filters: "Union[StatementFilter, ColumnElement[bool]]", uniquify: Optional[bool] = None, **kwargs: Any, ) -> bool: existing = await self.count(*filters, **kwargs) return existing > 0 async def count( self, *filters: "Union[StatementFilter, ColumnElement[bool]]", uniquify: Optional[bool] = None, **kwargs: Any, ) -> int: result = self._apply_filters(self.__collection__().list(), *filters) return len(self._filter_result_by_kwargs(result, kwargs)) async def add( self, data: ModelT, *, auto_commit: Optional[bool] = None, auto_expunge: Optional[bool] = None, auto_refresh: Optional[bool] = None, error_messages: Union[ErrorMessages, None, EmptyType] = Empty, ) -> ModelT: try: self.__database__.add(self.model_type, data) except KeyError as exc: msg = "Item already exist in collection" raise IntegrityError(msg) from exc return data async def add_many( self, data: list[ModelT], *, auto_commit: Optional[bool] = None, auto_expunge: Optional[bool] = None, error_messages: Union[ErrorMessages, None, EmptyType] = Empty, ) -> list[ModelT]: for obj in data: await self.add(obj) # pyright: ignore[reportCallIssue] return data async def update( self, data: ModelT, *, attribute_names: Optional[Iterable[str]] = None, with_for_update: 
Optional[bool] = None, auto_commit: Optional[bool] = None, auto_expunge: Optional[bool] = None, auto_refresh: Optional[bool] = None, id_attribute: Union[str, InstrumentedAttribute[Any], None] = None, error_messages: Union[ErrorMessages, None, EmptyType] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, ) -> ModelT: self._find_or_raise_not_found(self.__collection__().key(data)) return self.__collection__().update(data) async def update_many( self, data: list[ModelT], *, auto_commit: Optional[bool] = None, auto_expunge: Optional[bool] = None, error_messages: Union[ErrorMessages, None, EmptyType] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, ) -> list[ModelT]: return [self.__collection__().update(obj) for obj in data if obj in self.__collection__()] async def delete( self, item_id: Any, *, auto_commit: Optional[bool] = None, auto_expunge: Optional[bool] = None, id_attribute: Union[str, InstrumentedAttribute[Any], None] = None, error_messages: Union[ErrorMessages, None, EmptyType] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, ) -> ModelT: try: return self._find_or_raise_not_found(item_id) finally: self.__collection__().remove(item_id) async def delete_many( self, item_ids: list[Any], *, auto_commit: Optional[bool] = None, auto_expunge: Optional[bool] = None, id_attribute: Union[str, InstrumentedAttribute[Any], None] = None, chunk_size: Optional[int] = None, error_messages: Union[ErrorMessages, None, EmptyType] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, ) -> list[ModelT]: deleted: list[ModelT] = [] for id_ in item_ids: if obj := self.__collection__().get_or_none(id_): deleted.append(obj) self.__collection__().remove(id_) return deleted async def 
delete_where( self, *filters: Union[StatementFilter, ColumnElement[bool]], auto_commit: Optional[bool] = None, auto_expunge: Optional[bool] = None, error_messages: Union[ErrorMessages, None, EmptyType] = Empty, sanity_check: bool = True, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, **kwargs: Any, ) -> list[ModelT]: result = self.__collection__().list() result = self._apply_filters(result, *filters) models = self._filter_result_by_kwargs(result, kwargs) item_ids = [getattr(model, self.id_attribute) for model in models] return await self.delete_many(item_ids=item_ids) async def upsert( self, data: ModelT, *, attribute_names: Optional[Iterable[str]] = None, with_for_update: Optional[bool] = None, auto_expunge: Optional[bool] = None, auto_commit: Optional[bool] = None, auto_refresh: Optional[bool] = None, match_fields: Union[list[str], str, None] = None, error_messages: Union[ErrorMessages, None, EmptyType] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, ) -> ModelT: # sourcery skip: assign-if-exp, reintroduce-else if data in self.__collection__(): return await self.update(data) return await self.add(data) async def upsert_many( self, data: list[ModelT], *, auto_expunge: Optional[bool] = None, auto_commit: Optional[bool] = None, no_merge: bool = False, match_fields: Union[list[str], str, None] = None, error_messages: Union[ErrorMessages, None, EmptyType] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, ) -> list[ModelT]: return [await self.upsert(item) for item in data] async def list_and_count( self, *filters: Union[StatementFilter, ColumnElement[bool]], statement: Union[Select[tuple[ModelT]], StatementLambdaElement, None] = None, auto_expunge: Optional[bool] = None, count_with_window_function: Optional[bool] = None, order_by: 
Union[list[OrderingPair], OrderingPair, None] = None, error_messages: Union[ErrorMessages, None, EmptyType] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, **kwargs: Any, ) -> tuple[list[ModelT], int]: return await self._list_and_count_basic(*filters, **kwargs) async def list( self, *filters: Union[StatementFilter, ColumnElement[bool]], uniquify: Optional[bool] = None, **kwargs: Any, ) -> list[ModelT]: result = self.__collection__().list() result = self._apply_filters(result, *filters) return self._filter_result_by_kwargs(result, kwargs) class SQLAlchemyAsyncMockSlugRepository( SQLAlchemyAsyncMockRepository[ModelT], SQLAlchemyAsyncSlugRepositoryProtocol[ModelT], ): async def get_by_slug( self, slug: str, error_messages: Union[ErrorMessages, None, EmptyType] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, **kwargs: Any, ) -> Union[ModelT, None]: """Select record by slug value.""" return await self.get_one_or_none(slug=slug) async def get_available_slug( self, value_to_slugify: str, **kwargs: Any, ) -> str: """Get a unique slug for the supplied value. If the value is found to exist, a random 4 digit character is appended to the end. Override this method to change the default behavior Args: value_to_slugify (str): A string that should be converted to a unique slug. **kwargs: stuff Returns: str: a unique slug for the supplied value. This is safe for URLs and other unique identifiers. 
""" slug = slugify(value_to_slugify) if await self._is_slug_unique(slug): return slug random_string = "".join(random.choices(string.ascii_lowercase + string.digits, k=4)) # noqa: S311 return f"{slug}-{random_string}" async def _is_slug_unique( self, slug: str, **kwargs: Any, ) -> bool: return await self.exists(slug=slug) is False python-advanced-alchemy-1.0.1/advanced_alchemy/repository/memory/_sync.py000066400000000000000000000724221476663714600267330ustar00rootroot00000000000000# Do not edit this file directly. It has been autogenerated from # advanced_alchemy/repository/memory/_async.py import datetime import random import re import string from collections import abc from collections.abc import Iterable from typing import Any, Optional, Union, cast, overload from unittest.mock import create_autospec from sqlalchemy import ( ColumnElement, Dialect, Select, StatementLambdaElement, Update, ) from sqlalchemy.orm import InstrumentedAttribute, Session from sqlalchemy.orm.scoping import scoped_session from sqlalchemy.orm.strategy_options import _AbstractLoad # pyright: ignore[reportPrivateUsage] from sqlalchemy.sql.dml import ReturningUpdate from typing_extensions import Self from advanced_alchemy.exceptions import ErrorMessages, IntegrityError, NotFoundError, RepositoryError from advanced_alchemy.filters import ( BeforeAfter, CollectionFilter, LimitOffset, NotInCollectionFilter, NotInSearchFilter, OnBeforeAfter, OrderBy, SearchFilter, StatementFilter, ) from advanced_alchemy.repository._sync import SQLAlchemySyncRepositoryProtocol, SQLAlchemySyncSlugRepositoryProtocol from advanced_alchemy.repository._util import DEFAULT_ERROR_MESSAGE_TEMPLATES, LoadSpec from advanced_alchemy.repository.memory.base import ( AnyObject, InMemoryStore, SQLAlchemyInMemoryStore, SQLAlchemyMultiStore, ) from advanced_alchemy.repository.typing import MISSING, ModelT, OrderingPair from advanced_alchemy.utils.dataclass import Empty, EmptyType from advanced_alchemy.utils.text import slugify 
class SQLAlchemySyncMockRepository(SQLAlchemySyncRepositoryProtocol[ModelT]): """In memory repository.""" __database__: SQLAlchemyMultiStore[ModelT] = SQLAlchemyMultiStore(SQLAlchemyInMemoryStore) __database_registry__: dict[type[Self], SQLAlchemyMultiStore[ModelT]] = {} loader_options: Optional[LoadSpec] = None """Default loader options for the repository.""" execution_options: Optional[dict[str, Any]] = None """Default execution options for the repository.""" model_type: type[ModelT] id_attribute: Any = "id" match_fields: Optional[Union[list[str], str]] = None uniquify: bool = False _exclude_kwargs: set[str] = { "statement", "session", "auto_expunge", "auto_refresh", "auto_commit", "attribute_names", "with_for_update", "count_with_window_function", "loader_options", "execution_options", "order_by", "load", "error_messages", "wrap_exceptions", "uniquify", } def __init__( self, *, statement: Union[Select[tuple[ModelT]], StatementLambdaElement, None] = None, session: Union[Session, scoped_session[Session]], auto_expunge: bool = False, auto_refresh: bool = True, auto_commit: bool = False, order_by: Union[list[OrderingPair], OrderingPair, None] = None, error_messages: Union[ErrorMessages, None, EmptyType] = Empty, wrap_exceptions: bool = True, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, **kwargs: Any, ) -> None: self.session = session self.statement = create_autospec("Select[Tuple[ModelT]]", instance=True) self.auto_expunge = auto_expunge self.auto_refresh = auto_refresh self.auto_commit = auto_commit self.error_messages = self._get_error_messages(error_messages=error_messages) self.wrap_exceptions = wrap_exceptions self.order_by = order_by self._dialect: Dialect = create_autospec(Dialect, instance=True) self._dialect.name = "mock" self.__filtered_store__: InMemoryStore[ModelT] = self.__database__.store_type() self._default_options: Any = [] self._default_execution_options: Any = {} 
self._loader_options: Any = [] self._loader_options_have_wildcards = False self.uniquify = bool(uniquify) def __init_subclass__(cls) -> None: cls.__database_registry__[cls] = cls.__database__ # pyright: ignore[reportGeneralTypeIssues,reportUnknownMemberType] @staticmethod def _get_error_messages( error_messages: Union[ErrorMessages, None, EmptyType] = Empty, default_messages: Union[ErrorMessages, None, EmptyType] = Empty, ) -> Optional[ErrorMessages]: if error_messages == Empty: error_messages = None default_messages = cast( "Optional[ErrorMessages]", default_messages if default_messages != Empty else DEFAULT_ERROR_MESSAGE_TEMPLATES, ) if error_messages is not None and default_messages is not None: default_messages.update(cast("ErrorMessages", error_messages)) return default_messages @classmethod def __database_add__(cls, identity: Any, data: ModelT) -> ModelT: return cast("ModelT", cls.__database__.add(identity, data)) # pyright: ignore[reportUnnecessaryCast,reportGeneralTypeIssues] @classmethod def __database_clear__(cls) -> None: for database in cls.__database_registry__.values(): # pyright: ignore[reportGeneralTypeIssues,reportUnknownMemberType] database.remove_all() @overload def __collection__(self) -> InMemoryStore[ModelT]: ... @overload def __collection__(self, identity: type[AnyObject]) -> InMemoryStore[AnyObject]: ... 
def __collection__( self, identity: Optional[type[AnyObject]] = None, ) -> Union[InMemoryStore[AnyObject], InMemoryStore[ModelT]]: if identity: return self.__database__.store(identity) return self.__filtered_store__ or self.__database__.store(self.model_type) @staticmethod def check_not_found(item_or_none: Union[ModelT, None]) -> ModelT: if item_or_none is None: msg = "No item found when one was expected" raise NotFoundError(msg) return item_or_none @classmethod def get_id_attribute_value( cls, item: Union[ModelT, type[ModelT]], id_attribute: Union[str, InstrumentedAttribute[Any], None] = None, ) -> Any: """Get value of attribute named as :attr:`id_attribute` on ``item``. Args: item: Anything that should have an attribute named as :attr:`id_attribute` value. id_attribute: Allows customization of the unique identifier to use for model fetching. Defaults to `None`, but can reference any surrogate or candidate key for the table. Returns: The value of attribute on ``item`` named as :attr:`id_attribute`. """ if isinstance(id_attribute, InstrumentedAttribute): id_attribute = id_attribute.key return getattr(item, id_attribute if id_attribute is not None else cls.id_attribute) @classmethod def set_id_attribute_value( cls, item_id: Any, item: ModelT, id_attribute: Union[str, InstrumentedAttribute[Any], None] = None, ) -> ModelT: """Return the ``item`` after the ID is set to the appropriate attribute. Args: item_id: Value of ID to be set on instance item: Anything that should have an attribute named as :attr:`id_attribute` value. id_attribute: Allows customization of the unique identifier to use for model fetching. Defaults to `None`, but can reference any surrogate or candidate key for the table. 
Returns: Item with ``item_id`` set to :attr:`id_attribute` """ if isinstance(id_attribute, InstrumentedAttribute): id_attribute = id_attribute.key setattr(item, id_attribute if id_attribute is not None else cls.id_attribute, item_id) return item def _exclude_unused_kwargs(self, kwargs: dict[str, Any]) -> dict[str, Any]: return {key: value for key, value in kwargs.items() if key not in self._exclude_kwargs} def _apply_limit_offset_pagination(self, result: list[ModelT], limit: int, offset: int) -> list[ModelT]: return result[offset:limit] def _filter_in_collection( self, result: list[ModelT], field_name: str, values: abc.Collection[Any], ) -> list[ModelT]: return [item for item in result if getattr(item, field_name) in values] def _filter_not_in_collection( self, result: list[ModelT], field_name: str, values: abc.Collection[Any], ) -> list[ModelT]: if not values: return result return [item for item in result if getattr(item, field_name) not in values] def _filter_on_datetime_field( self, result: list[ModelT], field_name: str, before: Optional[datetime.datetime] = None, after: Optional[datetime.datetime] = None, on_or_before: Optional[datetime.datetime] = None, on_or_after: Optional[datetime.datetime] = None, ) -> list[ModelT]: result_: list[ModelT] = [] for item in result: attr: datetime.datetime = getattr(item, field_name) if before is not None and attr < before: result_.append(item) if after is not None and attr > after: result_.append(item) if on_or_before is not None and attr <= on_or_before: result_.append(item) if on_or_after is not None and attr >= on_or_after: result_.append(item) return result_ def _filter_by_like( self, result: list[ModelT], field_name: Union[str, set[str]], value: str, ignore_case: bool, ) -> list[ModelT]: pattern = re.compile(rf".*{value}.*", re.IGNORECASE) if ignore_case else re.compile(rf".*{value}.*") fields = {field_name} if isinstance(field_name, str) else field_name items: list[ModelT] = [] for field in fields: items.extend( [ item 
for item in result if isinstance(getattr(item, field), str) and pattern.match(getattr(item, field)) ], ) return list(set(items)) def _filter_by_not_like( self, result: list[ModelT], field_name: Union[str, set[str]], value: str, ignore_case: bool, ) -> list[ModelT]: pattern = re.compile(rf".*{value}.*", re.IGNORECASE) if ignore_case else re.compile(rf".*{value}.*") fields = {field_name} if isinstance(field_name, str) else field_name items: list[ModelT] = [] for field in fields: items.extend( [ item for item in result if isinstance(getattr(item, field), str) and pattern.match(getattr(item, field)) ], ) return list(set(result).difference(set(items))) def _filter_result_by_kwargs( self, result: Iterable[ModelT], /, kwargs: Union[dict[Any, Any], Iterable[tuple[Any, Any]]], ) -> list[ModelT]: kwargs_: dict[Any, Any] = kwargs if isinstance(kwargs, dict) else dict(*kwargs) kwargs_ = self._exclude_unused_kwargs(kwargs_) try: return [item for item in result if all(getattr(item, field) == value for field, value in kwargs_.items())] except AttributeError as error: raise RepositoryError from error def _order_by(self, result: list[ModelT], field_name: str, sort_desc: bool = False) -> list[ModelT]: return sorted(result, key=lambda item: getattr(item, field_name), reverse=sort_desc) def _apply_filters( self, result: list[ModelT], *filters: Union[StatementFilter, ColumnElement[bool]], apply_pagination: bool = True, ) -> list[ModelT]: for filter_ in filters: if isinstance(filter_, LimitOffset): if apply_pagination: result = self._apply_limit_offset_pagination(result, filter_.limit, filter_.offset) elif isinstance(filter_, BeforeAfter): result = self._filter_on_datetime_field( result, field_name=filter_.field_name, before=filter_.before, after=filter_.after, ) elif isinstance(filter_, OnBeforeAfter): result = self._filter_on_datetime_field( result, field_name=filter_.field_name, on_or_before=filter_.on_or_before, on_or_after=filter_.on_or_after, ) elif isinstance(filter_, 
NotInCollectionFilter): if filter_.values is not None: # pyright: ignore # noqa: PGH003 result = self._filter_not_in_collection(result, filter_.field_name, filter_.values) # pyright: ignore # noqa: PGH003 elif isinstance(filter_, CollectionFilter): if filter_.values is not None: # pyright: ignore # noqa: PGH003 result = self._filter_in_collection(result, filter_.field_name, filter_.values) # pyright: ignore # noqa: PGH003 elif isinstance(filter_, OrderBy): result = self._order_by( result, filter_.field_name, sort_desc=filter_.sort_order == "desc", ) elif isinstance(filter_, NotInSearchFilter): result = self._filter_by_not_like( result, filter_.field_name, value=filter_.value, ignore_case=bool(filter_.ignore_case), ) elif isinstance(filter_, SearchFilter): result = self._filter_by_like( result, filter_.field_name, value=filter_.value, ignore_case=bool(filter_.ignore_case), ) elif not isinstance(filter_, ColumnElement): msg = f"Unexpected filter: {filter_}" raise RepositoryError(msg) return result def _get_match_fields( self, match_fields: Union[list[str], str, None], id_attribute: Optional[str] = None, ) -> Optional[list[str]]: id_attribute = id_attribute or self.id_attribute match_fields = match_fields or self.match_fields if isinstance(match_fields, str): match_fields = [match_fields] return match_fields def _list_and_count_basic( self, *filters: Union[StatementFilter, ColumnElement[bool]], **kwargs: Any, ) -> tuple[list[ModelT], int]: result = self.list(*filters, **kwargs) return result, len(result) def _list_and_count_window( self, *filters: Union[StatementFilter, ColumnElement[bool]], **kwargs: Any, ) -> tuple[list[ModelT], int]: return self._list_and_count_basic(*filters, **kwargs) def _find_or_raise_not_found(self, id_: Any) -> ModelT: return self.check_not_found(self.__collection__().get_or_none(id_)) def _find_one_or_raise_error(self, result: list[ModelT]) -> ModelT: if not result: msg = "No item found when one was expected" raise IntegrityError(msg) if 
len(result) > 1: msg = "Multiple objects when one was expected" raise IntegrityError(msg) return result[0] def _get_update_many_statement( self, model_type: type[ModelT], supports_returning: bool, loader_options: Optional[list[_AbstractLoad]], execution_options: Optional[dict[str, Any]], ) -> Union[Update, ReturningUpdate[tuple[ModelT]]]: return self.statement # type: ignore[no-any-return] # pyright: ignore[reportReturnType] @classmethod def check_health(cls, session: Union[Session, scoped_session[Session]]) -> bool: return True def get( self, item_id: Any, *, auto_expunge: Optional[bool] = None, statement: Union[Select[tuple[ModelT]], StatementLambdaElement, None] = None, id_attribute: Union[str, InstrumentedAttribute[Any], None] = None, error_messages: Union[ErrorMessages, None, EmptyType] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, ) -> ModelT: return self._find_or_raise_not_found(item_id) def get_one( self, *filters: Union[StatementFilter, ColumnElement[bool]], auto_expunge: Optional[bool] = None, statement: Union[Select[tuple[ModelT]], StatementLambdaElement, None] = None, error_messages: Union[ErrorMessages, None, EmptyType] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, **kwargs: Any, ) -> ModelT: return self.check_not_found(self.get_one_or_none(**kwargs)) def get_one_or_none( self, *filters: Union[StatementFilter, ColumnElement[bool]], auto_expunge: Optional[bool] = None, statement: Union[Select[tuple[ModelT]], StatementLambdaElement, None] = None, error_messages: Union[ErrorMessages, None, EmptyType] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, **kwargs: Any, ) -> Union[ModelT, None]: result = self._filter_result_by_kwargs(self.__collection__().list(), kwargs) if len(result) > 1: msg = "Multiple objects when one was 
expected" raise IntegrityError(msg) return result[0] if result else None def get_or_upsert( self, *filters: Union[StatementFilter, ColumnElement[bool]], match_fields: Union[list[str], str, None] = None, upsert: bool = True, attribute_names: Optional[Iterable[str]] = None, with_for_update: Optional[bool] = None, auto_commit: Optional[bool] = None, auto_expunge: Optional[bool] = None, auto_refresh: Optional[bool] = None, error_messages: Union[ErrorMessages, None, EmptyType] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, **kwargs: Any, ) -> tuple[ModelT, bool]: kwargs_ = self._exclude_unused_kwargs(kwargs) if match_fields := self._get_match_fields(match_fields=match_fields): match_filter = { # sourcery skip: remove-none-from-default-get field_name: kwargs_.get(field_name, None) for field_name in match_fields if kwargs_.get(field_name, None) is not None } else: match_filter = kwargs_ existing = self.get_one_or_none(**match_filter) if not existing: return (self.add(self.model_type(**kwargs_)), True) if upsert: for field_name, new_field_value in kwargs_.items(): field = getattr(existing, field_name, MISSING) if field is not MISSING and field != new_field_value: setattr(existing, field_name, new_field_value) existing = self.update(existing) return existing, False def get_and_update( self, *filters: Union[StatementFilter, ColumnElement[bool]], match_fields: Union[list[str], str, None] = None, attribute_names: Optional[Iterable[str]] = None, with_for_update: Optional[bool] = None, auto_commit: Optional[bool] = None, auto_expunge: Optional[bool] = None, auto_refresh: Optional[bool] = None, error_messages: Union[ErrorMessages, None, EmptyType] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, **kwargs: Any, ) -> tuple[ModelT, bool]: kwargs_ = self._exclude_unused_kwargs(kwargs) if match_fields := 
self._get_match_fields(match_fields=match_fields): match_filter = { # sourcery skip: remove-none-from-default-get field_name: kwargs_.get(field_name, None) for field_name in match_fields if kwargs_.get(field_name, None) is not None } else: match_filter = kwargs_ existing = self.get_one(**match_filter) updated = False for field_name, new_field_value in kwargs_.items(): field = getattr(existing, field_name, MISSING) if field is not MISSING and field != new_field_value: updated = True setattr(existing, field_name, new_field_value) existing = self.update(existing) return existing, updated def exists( self, *filters: "Union[StatementFilter, ColumnElement[bool]]", uniquify: Optional[bool] = None, **kwargs: Any, ) -> bool: existing = self.count(*filters, **kwargs) return existing > 0 def count( self, *filters: "Union[StatementFilter, ColumnElement[bool]]", uniquify: Optional[bool] = None, **kwargs: Any, ) -> int: result = self._apply_filters(self.__collection__().list(), *filters) return len(self._filter_result_by_kwargs(result, kwargs)) def add( self, data: ModelT, *, auto_commit: Optional[bool] = None, auto_expunge: Optional[bool] = None, auto_refresh: Optional[bool] = None, error_messages: Union[ErrorMessages, None, EmptyType] = Empty, ) -> ModelT: try: self.__database__.add(self.model_type, data) except KeyError as exc: msg = "Item already exist in collection" raise IntegrityError(msg) from exc return data def add_many( self, data: list[ModelT], *, auto_commit: Optional[bool] = None, auto_expunge: Optional[bool] = None, error_messages: Union[ErrorMessages, None, EmptyType] = Empty, ) -> list[ModelT]: for obj in data: self.add(obj) # pyright: ignore[reportCallIssue] return data def update( self, data: ModelT, *, attribute_names: Optional[Iterable[str]] = None, with_for_update: Optional[bool] = None, auto_commit: Optional[bool] = None, auto_expunge: Optional[bool] = None, auto_refresh: Optional[bool] = None, id_attribute: Union[str, InstrumentedAttribute[Any], None] = 
None, error_messages: Union[ErrorMessages, None, EmptyType] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, ) -> ModelT: self._find_or_raise_not_found(self.__collection__().key(data)) return self.__collection__().update(data) def update_many( self, data: list[ModelT], *, auto_commit: Optional[bool] = None, auto_expunge: Optional[bool] = None, error_messages: Union[ErrorMessages, None, EmptyType] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, ) -> list[ModelT]: return [self.__collection__().update(obj) for obj in data if obj in self.__collection__()] def delete( self, item_id: Any, *, auto_commit: Optional[bool] = None, auto_expunge: Optional[bool] = None, id_attribute: Union[str, InstrumentedAttribute[Any], None] = None, error_messages: Union[ErrorMessages, None, EmptyType] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, ) -> ModelT: try: return self._find_or_raise_not_found(item_id) finally: self.__collection__().remove(item_id) def delete_many( self, item_ids: list[Any], *, auto_commit: Optional[bool] = None, auto_expunge: Optional[bool] = None, id_attribute: Union[str, InstrumentedAttribute[Any], None] = None, chunk_size: Optional[int] = None, error_messages: Union[ErrorMessages, None, EmptyType] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, ) -> list[ModelT]: deleted: list[ModelT] = [] for id_ in item_ids: if obj := self.__collection__().get_or_none(id_): deleted.append(obj) self.__collection__().remove(id_) return deleted def delete_where( self, *filters: Union[StatementFilter, ColumnElement[bool]], auto_commit: Optional[bool] = None, auto_expunge: Optional[bool] = None, error_messages: Union[ErrorMessages, None, EmptyType] = Empty, sanity_check: 
bool = True, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, **kwargs: Any, ) -> list[ModelT]: result = self.__collection__().list() result = self._apply_filters(result, *filters) models = self._filter_result_by_kwargs(result, kwargs) item_ids = [getattr(model, self.id_attribute) for model in models] return self.delete_many(item_ids=item_ids) def upsert( self, data: ModelT, *, attribute_names: Optional[Iterable[str]] = None, with_for_update: Optional[bool] = None, auto_expunge: Optional[bool] = None, auto_commit: Optional[bool] = None, auto_refresh: Optional[bool] = None, match_fields: Union[list[str], str, None] = None, error_messages: Union[ErrorMessages, None, EmptyType] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, ) -> ModelT: # sourcery skip: assign-if-exp, reintroduce-else if data in self.__collection__(): return self.update(data) return self.add(data) def upsert_many( self, data: list[ModelT], *, auto_expunge: Optional[bool] = None, auto_commit: Optional[bool] = None, no_merge: bool = False, match_fields: Union[list[str], str, None] = None, error_messages: Union[ErrorMessages, None, EmptyType] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, ) -> list[ModelT]: return [self.upsert(item) for item in data] def list_and_count( self, *filters: Union[StatementFilter, ColumnElement[bool]], statement: Union[Select[tuple[ModelT]], StatementLambdaElement, None] = None, auto_expunge: Optional[bool] = None, count_with_window_function: Optional[bool] = None, order_by: Union[list[OrderingPair], OrderingPair, None] = None, error_messages: Union[ErrorMessages, None, EmptyType] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, **kwargs: Any, ) -> tuple[list[ModelT], int]: 
return self._list_and_count_basic(*filters, **kwargs) def list( self, *filters: Union[StatementFilter, ColumnElement[bool]], uniquify: Optional[bool] = None, **kwargs: Any, ) -> list[ModelT]: result = self.__collection__().list() result = self._apply_filters(result, *filters) return self._filter_result_by_kwargs(result, kwargs) class SQLAlchemySyncMockSlugRepository( SQLAlchemySyncMockRepository[ModelT], SQLAlchemySyncSlugRepositoryProtocol[ModelT], ): def get_by_slug( self, slug: str, error_messages: Union[ErrorMessages, None, EmptyType] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, **kwargs: Any, ) -> Union[ModelT, None]: """Select record by slug value.""" return self.get_one_or_none(slug=slug) def get_available_slug( self, value_to_slugify: str, **kwargs: Any, ) -> str: """Get a unique slug for the supplied value. If the value is found to exist, a random 4 digit character is appended to the end. Override this method to change the default behavior Args: value_to_slugify (str): A string that should be converted to a unique slug. **kwargs: stuff Returns: str: a unique slug for the supplied value. This is safe for URLs and other unique identifiers. 
""" slug = slugify(value_to_slugify) if self._is_slug_unique(slug): return slug random_string = "".join(random.choices(string.ascii_lowercase + string.digits, k=4)) # noqa: S311 return f"{slug}-{random_string}" def _is_slug_unique( self, slug: str, **kwargs: Any, ) -> bool: return self.exists(slug=slug) is False python-advanced-alchemy-1.0.1/advanced_alchemy/repository/memory/base.py000066400000000000000000000301701476663714600265240ustar00rootroot00000000000000# ruff: noqa: PD011 import builtins import contextlib from collections import defaultdict from inspect import isclass, signature from typing import TYPE_CHECKING, Any, Generic, Union, cast, overload from sqlalchemy import ColumnElement, inspect from sqlalchemy.orm import RelationshipProperty, Session, class_mapper, object_mapper from typing_extensions import TypeVar from advanced_alchemy.exceptions import AdvancedAlchemyError from advanced_alchemy.repository.typing import _MISSING, MISSING, ModelT # pyright: ignore[reportPrivateUsage] if TYPE_CHECKING: from collections.abc import Iterable from sqlalchemy.orm import Mapper CollectionT = TypeVar("CollectionT") T = TypeVar("T") AnyObject = TypeVar("AnyObject", bound="Any") class _NotSet: pass class InMemoryStore(Generic[T]): def __init__(self) -> None: self._store: dict[Any, T] = {} def _resolve_key(self, key: Any) -> Any: """Test different key representations Args: key: The key to test Raises: KeyError: Raised if key is not present Returns: The key representation that is present in the store """ for key_ in (key, str(key)): if key_ in self._store: return key_ raise KeyError def key(self, obj: T) -> Any: return hash(obj) def add(self, obj: T) -> T: if (key := self.key(obj)) not in self._store: self._store[key] = obj return obj raise KeyError def update(self, obj: T) -> T: key = self._resolve_key(self.key(obj)) self._store[key] = obj return obj @overload def get(self, key: Any, default: type[_NotSet] = _NotSet) -> T: ... 
@overload def get(self, key: Any, default: AnyObject) -> "Union[T, AnyObject]": ... def get( self, key: Any, default: "Union[AnyObject, type[_NotSet]]" = _NotSet ) -> "Union[T, AnyObject]": # pragma: no cover """Get the object identified by `key`, or return `default` if set or raise a `KeyError` otherwise Args: key: The key to test default: Value to return if key is not present. Defaults to _NotSet. Raises: KeyError: Raised if key is not present Returns: The object identified by key """ try: key = self._resolve_key(key) except KeyError as error: if isclass(default) and not issubclass(default, _NotSet): # pyright: ignore[reportUnnecessaryIsInstance] return cast("AnyObject", default) raise KeyError from error return self._store[key] def get_or_none(self, key: Any, default: Any = _NotSet) -> "Union[T, None]": return self.get(key) if default is _NotSet else self.get(key, default) def remove(self, key: Any) -> T: return self._store.pop(self._resolve_key(key)) def list(self) -> list[T]: return list(self._store.values()) def remove_all(self) -> None: self._store = {} def __contains__(self, obj: T) -> bool: try: self._resolve_key(self.key(obj)) except KeyError: return False else: return True def __bool__(self) -> bool: return bool(self._store) class MultiStore(Generic[T]): def __init__(self, store_type: "type[InMemoryStore[T]]") -> None: self.store_type = store_type self._store: defaultdict[Any, InMemoryStore[T]] = defaultdict(store_type) def add(self, identity: Any, obj: T) -> T: return self._store[identity].add(obj) def store(self, identity: Any) -> "InMemoryStore[T]": return self._store[identity] def identity(self, obj: T) -> Any: return type(obj) def remove_all(self) -> None: self._store = defaultdict(self.store_type) class SQLAlchemyInMemoryStore(InMemoryStore[ModelT]): id_attribute: str = "id" def _update_relationship(self, data: ModelT, ref: ModelT) -> None: # pragma: no cover """Set relationship data fields targeting ref class to ref. 
Example: ```python class Parent(Base): child = relationship("Child") class Child(Base): pass ``` If data and ref are respectively a `Parent` and `Child` instances, then `data.child` will be set to `ref` Args: data: Model instance on which to update relationships ref: Target model instance to set on data relationships """ ref_mapper = object_mapper(ref) for relationship in object_mapper(data).relationships: local = next(iter(relationship.local_columns)) remote = next(iter(relationship.remote_side)) if not local.key or not remote.key: msg = f"Cannot update relationship {relationship} for model {ref_mapper.class_}" raise AdvancedAlchemyError(msg) value = getattr(data, relationship.key) if not value and relationship.mapper.class_ is ref_mapper.class_: if relationship.uselist: for elem in value: if local_value := getattr(data, local.key): setattr(elem, remote.key, local_value) else: setattr(data, relationship.key, ref) def _update_fks(self, data: ModelT) -> None: # pragma: no cover """Update foreign key fields according to their corresponding relationships. 
This make sure that `data.child_id` == `data.child.id` or `data.children[0].parent_id` == `data.id` Args: data: Instance to be updated """ ref_mapper = object_mapper(data) for relationship in ref_mapper.relationships: if value := getattr(data, relationship.key): local = next(iter(relationship.local_columns)) remote = next(iter(relationship.remote_side)) if not local.key or not remote.key: msg = f"Cannot update relationship {relationship} for model {ref_mapper.class_}" raise AdvancedAlchemyError(msg) if relationship.uselist: for elem in value: if local_value := getattr(data, local.key): setattr(elem, remote.key, local_value) self._update_relationship(elem, data) # Remove duplicates added by orm when updating list items if isinstance(value, list): setattr(data, relationship.key, type(value)(set(value))) # pyright: ignore[reportUnknownArgumentType] else: if remote_value := getattr(value, remote.key): setattr(data, local.key, remote_value) self._update_relationship(value, data) def _set_defaults(self, data: ModelT) -> None: # pragma: no cover """Set fields with dynamic defaults. Args: data: Instance to be updated """ for elem in object_mapper(data).c: default = getattr(elem, "default", MISSING) value = getattr(data, elem.key, MISSING) # If value is MISSING, it may be a declared_attr whose name can't be # determined from the column/relationship element returned if value is not MISSING and not value and not isinstance(default, _MISSING) and default is not None: if default.is_scalar: default_value: Any = default.arg elif default.is_callable: default_callable = default.arg.__func__ if isinstance(default.arg, staticmethod) else default.arg # pyright: ignore[reportUnknownMemberType] if ( # Eager test because inspect.signature() does not # recognize builtins hasattr(builtins, default_callable.__name__) # If present, context contains information about the current # statement and can be used to access values from other columns. 
# As we can't reproduce such context in Pydantic, we don't want # include a default_factory in that case. or "context" not in signature(default_callable).parameters ): default_value = default.arg({}) # pyright: ignore[reportUnknownMemberType, reportCallIssue] else: continue else: continue setattr(data, elem.key, default_value) def changed_attrs(self, data: ModelT) -> "Iterable[str]": # pragma: no cover res: list[str] = [] mapper = inspect(data) if mapper is None: msg = f"Cannot inspect {data.__class__} model" raise AdvancedAlchemyError(msg) attrs = class_mapper(data.__class__).column_attrs for attr in attrs: hist = getattr(mapper.attrs, attr.key).history if hist.has_changes(): res.append(attr.key) return res def key(self, obj: ModelT) -> str: return str(getattr(obj, self.id_attribute)) def add(self, obj: ModelT) -> ModelT: self._set_defaults(obj) self._update_fks(obj) return super().add(obj) def update(self, obj: ModelT) -> ModelT: existing = self.get(self.key(obj)) for attr in self.changed_attrs(obj): setattr(existing, attr, getattr(obj, attr)) self._update_fks(existing) return super().update(existing) class SQLAlchemyMultiStore(MultiStore[ModelT]): def _new_instances(self, instance: ModelT) -> "Iterable[ModelT]": session = Session() session.add(instance) relations = list(session.new) session.expunge_all() return relations def _set_relationships_for_fks(self, data: ModelT) -> None: # pragma: no cover """Set relationships matching newly added foreign keys on the instance. 
Example: ```python class Parent(Base): id: Mapped[UUID] class Child(Base): id: Mapped[UUID] parent_id: Mapped[UUID] = mapped_column(ForeignKey("parent.id")) parent: Mapped[Parent] = relationship(Parent) ``` If `data` is a Child instance and `parent_id` is set, `parent` will be set to the matching Parent instance if found in the repository Args: data: The model to update """ obj_mapper = object_mapper(data) mappers: dict[str, Mapper[Any]] = {} column_relationships: dict[ColumnElement[Any], RelationshipProperty[Any]] = {} for mapper in obj_mapper.registry.mappers: for table in mapper.tables: mappers[table.name] = mapper for relationship in obj_mapper.relationships: for column in relationship.local_columns: column_relationships[column] = relationship # sourcery skip: assign-if-exp if state := inspect(data): new_attrs: dict[str, Any] = state.dict else: new_attrs = {} for column in obj_mapper.columns: if column.key not in new_attrs or not column.foreign_keys: continue remote_mapper = mappers[next(iter(column.foreign_keys))._table_key()] # noqa: SLF001 # pyright: ignore[reportPrivateUsage] try: obj = self.store(remote_mapper.class_).get(new_attrs.get(column.key, None)) except KeyError: continue with contextlib.suppress(KeyError): setattr(data, column_relationships[column].key, obj) def add(self, identity: Any, obj: ModelT) -> ModelT: for relation in self._new_instances(obj): instance_type = self.identity(relation) self._set_relationships_for_fks(relation) if relation in self.store(instance_type): continue self.store(instance_type).add(relation) return obj python-advanced-alchemy-1.0.1/advanced_alchemy/repository/typing.py000066400000000000000000000051501476663714600256140ustar00rootroot00000000000000from typing import TYPE_CHECKING, Any, Union from sqlalchemy.orm import InstrumentedAttribute from typing_extensions import TypeAlias, TypeVar if TYPE_CHECKING: from sqlalchemy import RowMapping, Select from advanced_alchemy import base from advanced_alchemy.repository._async 
import SQLAlchemyAsyncRepository from advanced_alchemy.repository._sync import SQLAlchemySyncRepository from advanced_alchemy.repository.memory._async import SQLAlchemyAsyncMockRepository from advanced_alchemy.repository.memory._sync import SQLAlchemySyncMockRepository __all__ = ( "MISSING", "ModelOrRowMappingT", "ModelT", "OrderingPair", "RowMappingT", "RowT", "SQLAlchemyAsyncRepositoryT", "SQLAlchemySyncRepositoryT", "SelectT", "T", ) T = TypeVar("T") ModelT = TypeVar("ModelT", bound="base.ModelProtocol") """Type variable for SQLAlchemy models. :class:`~advanced_alchemy.base.ModelProtocol` """ SelectT = TypeVar("SelectT", bound="Select[Any]") """Type variable for SQLAlchemy select statements. :class:`~sqlalchemy.sql.Select` """ RowT = TypeVar("RowT", bound=tuple[Any, ...]) """Type variable for rows. :class:`~sqlalchemy.engine.Row` """ RowMappingT = TypeVar("RowMappingT", bound="RowMapping") """Type variable for row mappings. :class:`~sqlalchemy.engine.RowMapping` """ ModelOrRowMappingT = TypeVar("ModelOrRowMappingT", bound="Union[base.ModelProtocol, RowMapping]") """Type variable for models or row mappings. :class:`~advanced_alchemy.base.ModelProtocol` | :class:`~sqlalchemy.engine.RowMapping` """ SQLAlchemySyncRepositoryT = TypeVar( "SQLAlchemySyncRepositoryT", bound="Union[SQLAlchemySyncRepository[Any], SQLAlchemySyncMockRepository[Any]]", default="Any", ) """Type variable for synchronous SQLAlchemy repositories. :class:`~advanced_alchemy.repository.SQLAlchemySyncRepository` """ SQLAlchemyAsyncRepositoryT = TypeVar( "SQLAlchemyAsyncRepositoryT", bound="Union[SQLAlchemyAsyncRepository[Any], SQLAlchemyAsyncMockRepository[Any]]", default="Any", ) """Type variable for asynchronous SQLAlchemy repositories. :class:`~advanced_alchemy.repository.SQLAlchemyAsyncRepository` """ OrderingPair: TypeAlias = tuple[Union[str, InstrumentedAttribute[Any]], bool] """Type alias for ordering pairs. 
A tuple of (column, ascending) where: - column: Union[str, :class:`sqlalchemy.orm.InstrumentedAttribute`] - ascending: bool This type is used to specify ordering criteria for repository queries. """ class _MISSING: """Placeholder for missing values.""" MISSING = _MISSING() """Missing value placeholder. :class:`~advanced_alchemy.repository.typing._MISSING` """ python-advanced-alchemy-1.0.1/advanced_alchemy/service/000077500000000000000000000000001476663714600231505ustar00rootroot00000000000000python-advanced-alchemy-1.0.1/advanced_alchemy/service/__init__.py000066400000000000000000000044631476663714600252700ustar00rootroot00000000000000from advanced_alchemy.repository import ( DEFAULT_ERROR_MESSAGE_TEMPLATES, Empty, EmptyType, ErrorMessages, LoadSpec, ModelOrRowMappingT, ModelT, OrderingPair, model_from_dict, ) from advanced_alchemy.service._async import ( SQLAlchemyAsyncQueryService, SQLAlchemyAsyncRepositoryReadService, SQLAlchemyAsyncRepositoryService, ) from advanced_alchemy.service._sync import ( SQLAlchemySyncQueryService, SQLAlchemySyncRepositoryReadService, SQLAlchemySyncRepositoryService, ) from advanced_alchemy.service._util import ResultConverter, find_filter from advanced_alchemy.service.pagination import OffsetPagination from advanced_alchemy.service.typing import ( FilterTypeT, ModelDictListT, ModelDictT, ModelDTOT, SupportedSchemaModel, is_dict, is_dict_with_field, is_dict_without_field, is_dto_data, is_msgspec_struct, is_msgspec_struct_with_field, is_msgspec_struct_without_field, is_pydantic_model, is_pydantic_model_with_field, is_pydantic_model_without_field, is_schema, is_schema_or_dict, is_schema_or_dict_with_field, is_schema_or_dict_without_field, is_schema_with_field, is_schema_without_field, schema_dump, ) __all__ = ( "DEFAULT_ERROR_MESSAGE_TEMPLATES", "Empty", "EmptyType", "ErrorMessages", "FilterTypeT", "LoadSpec", "ModelDTOT", "ModelDictListT", "ModelDictT", "ModelOrRowMappingT", "ModelT", "OffsetPagination", "OrderingPair", 
"ResultConverter", "SQLAlchemyAsyncQueryService", "SQLAlchemyAsyncRepositoryReadService", "SQLAlchemyAsyncRepositoryService", "SQLAlchemySyncQueryService", "SQLAlchemySyncRepositoryReadService", "SQLAlchemySyncRepositoryService", "SupportedSchemaModel", "find_filter", "is_dict", "is_dict_with_field", "is_dict_without_field", "is_dto_data", "is_msgspec_struct", "is_msgspec_struct_with_field", "is_msgspec_struct_without_field", "is_pydantic_model", "is_pydantic_model_with_field", "is_pydantic_model_without_field", "is_schema", "is_schema_or_dict", "is_schema_or_dict_with_field", "is_schema_or_dict_without_field", "is_schema_with_field", "is_schema_without_field", "model_from_dict", "schema_dump", ) python-advanced-alchemy-1.0.1/advanced_alchemy/service/_async.py000066400000000000000000001352421476663714600250050ustar00rootroot00000000000000"""Service object implementation for SQLAlchemy. RepositoryService object is generic on the domain model type which should be a SQLAlchemy model. """ from collections.abc import AsyncIterator, Iterable, Sequence from contextlib import asynccontextmanager from functools import cached_property from typing import Any, ClassVar, Generic, Optional, Union, cast from sqlalchemy import Select from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.ext.asyncio.scoping import async_scoped_session from sqlalchemy.orm import InstrumentedAttribute from sqlalchemy.sql import ColumnElement from typing_extensions import Self from advanced_alchemy.config.asyncio import SQLAlchemyAsyncConfig from advanced_alchemy.exceptions import AdvancedAlchemyError, ErrorMessages, ImproperConfigurationError, RepositoryError from advanced_alchemy.filters import StatementFilter from advanced_alchemy.repository import ( SQLAlchemyAsyncQueryRepository, ) from advanced_alchemy.repository._util import LoadSpec, model_from_dict from advanced_alchemy.repository.typing import ModelT, OrderingPair, SQLAlchemyAsyncRepositoryT from advanced_alchemy.service._util 
import ResultConverter from advanced_alchemy.service.typing import ( BulkModelDictT, ModelDictListT, ModelDictT, is_dict, is_dto_data, is_msgspec_struct, is_pydantic_model, ) from advanced_alchemy.utils.dataclass import Empty, EmptyType class SQLAlchemyAsyncQueryService(ResultConverter): """Simple service to execute the basic Query repository..""" def __init__( self, session: Union[AsyncSession, async_scoped_session[AsyncSession]], **repo_kwargs: Any, ) -> None: """Configure the service object. Args: session: Session managing the unit-of-work for the operation. **repo_kwargs: Optional configuration values to pass into the repository """ self.repository = SQLAlchemyAsyncQueryRepository( session=session, **repo_kwargs, ) @classmethod @asynccontextmanager async def new( cls, session: Optional[Union[AsyncSession, async_scoped_session[AsyncSession]]] = None, config: Optional[SQLAlchemyAsyncConfig] = None, ) -> AsyncIterator[Self]: """Context manager that returns instance of service object. Handles construction of the database session._create_select_for_model Returns: The service object instance. 
""" if not config and not session: raise AdvancedAlchemyError(detail="Please supply an optional configuration or session to use.") if session: yield cls(session=session) elif config: async with config.get_session() as db_session: yield cls(session=db_session) class SQLAlchemyAsyncRepositoryReadService(ResultConverter, Generic[ModelT, SQLAlchemyAsyncRepositoryT]): """Service object that operates on a repository object.""" repository_type: type[SQLAlchemyAsyncRepositoryT] """Type of the repository to use.""" loader_options: ClassVar[Optional[LoadSpec]] = None """Default loader options for the repository.""" execution_options: ClassVar[Optional[dict[str, Any]]] = None """Default execution options for the repository.""" match_fields: ClassVar[Optional[Union[list[str], str]]] = None """List of dialects that prefer to use ``field.id = ANY(:1)`` instead of ``field.id IN (...)``.""" uniquify: ClassVar[bool] = False """Optionally apply the ``unique()`` method to results before returning.""" count_with_window_function: ClassVar[bool] = True """Use an analytical window function to count results. This allows the count to be performed in a single query.""" _repository_instance: SQLAlchemyAsyncRepositoryT def __init__( self, session: Union[AsyncSession, async_scoped_session[AsyncSession]], *, statement: Optional[Select[Any]] = None, auto_expunge: bool = False, auto_refresh: bool = True, auto_commit: bool = False, order_by: Optional[Union[list[OrderingPair], OrderingPair]] = None, error_messages: Union[ErrorMessages, None, EmptyType] = Empty, wrap_exceptions: bool = True, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, count_with_window_function: Optional[bool] = None, **repo_kwargs: Any, ) -> None: """Configure the service object. Args: session: Session managing the unit-of-work for the operation. statement: To facilitate customization of the underlying select query. 
auto_expunge: Remove object from session before returning. auto_refresh: Refresh object from session before returning. auto_commit: Commit objects before returning. order_by: Set default order options for queries. error_messages: A set of custom error messages to use for operations wrap_exceptions: Wrap exceptions in a RepositoryError load: Set default relationships to be loaded execution_options: Set default execution options uniquify: Optionally apply the ``unique()`` method to results before returning. count_with_window_function: When false, list and count will use two queries instead of an analytical window function. **repo_kwargs: passed as keyword args to repo instantiation. """ load = load if load is not None else self.loader_options execution_options = execution_options if execution_options is not None else self.execution_options count_with_window_function = ( count_with_window_function if count_with_window_function is not None else self.count_with_window_function ) self._repository_instance: SQLAlchemyAsyncRepositoryT = self.repository_type( # type: ignore[assignment] session=session, statement=statement, auto_expunge=auto_expunge, auto_refresh=auto_refresh, auto_commit=auto_commit, order_by=order_by, error_messages=error_messages, wrap_exceptions=wrap_exceptions, load=load, execution_options=execution_options, uniquify=self._get_uniquify(uniquify), count_with_window_function=count_with_window_function, **repo_kwargs, ) def _get_uniquify(self, uniquify: Optional[bool] = None) -> bool: return bool(uniquify or self.uniquify) @property def repository(self) -> SQLAlchemyAsyncRepositoryT: """Return the repository instance.""" if not self._repository_instance: msg = "Repository not initialized" raise ImproperConfigurationError(msg) return self._repository_instance @cached_property def model_type(self) -> type[ModelT]: """Return the model type.""" return cast("type[ModelT]", self.repository.model_type) async def count( self, *filters: Union[StatementFilter, 
ColumnElement[bool]], statement: Optional[Select[tuple[ModelT]]] = None, error_messages: Union[ErrorMessages, None, EmptyType] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, **kwargs: Any, ) -> int: """Count of records returned by query. Args: *filters: arguments for filtering. statement: To facilitate customization of the underlying select query. error_messages: An optional dictionary of templates to use for friendlier error messages to clients load: Set relationships to be loaded execution_options: Set default execution options uniquify: Optionally apply the ``unique()`` method to results before returning. **kwargs: key value pairs of filter types. Returns: A count of the collection, filtered, but ignoring pagination. """ return await self.repository.count( *filters, statement=statement, error_messages=error_messages, load=load, execution_options=execution_options, uniquify=self._get_uniquify(uniquify), **kwargs, ) async def exists( self, *filters: Union[StatementFilter, ColumnElement[bool]], error_messages: Union[ErrorMessages, None, EmptyType] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, **kwargs: Any, ) -> bool: """Wrap repository exists operation. Args: *filters: Types for specific filtering operations. error_messages: An optional dictionary of templates to use for friendlier error messages to clients load: Set relationships to be loaded execution_options: Set default execution options uniquify: Optionally apply the ``unique()`` method to results before returning. **kwargs: Keyword arguments for attribute based filtering. Returns: Representation of instance with identifier `item_id`. 
""" return await self.repository.exists( *filters, error_messages=error_messages, load=load, execution_options=execution_options, uniquify=self._get_uniquify(uniquify), **kwargs, ) async def get( self, item_id: Any, *, statement: Optional[Select[tuple[ModelT]]] = None, id_attribute: Optional[Union[str, InstrumentedAttribute[Any]]] = None, auto_expunge: Optional[bool] = None, error_messages: Union[ErrorMessages, None, EmptyType] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, ) -> ModelT: """Wrap repository scalar operation. Args: item_id: Identifier of instance to be retrieved. auto_expunge: Remove object from session before returning. statement: To facilitate customization of the underlying select query. id_attribute: Allows customization of the unique identifier to use for model fetching. Defaults to `id`, but can reference any surrogate or candidate key for the table. error_messages: An optional dictionary of templates to use for friendlier error messages to clients load: Set relationships to be loaded execution_options: Set default execution options uniquify: Optionally apply the ``unique()`` method to results before returning. Returns: Representation of instance with identifier `item_id`. """ return cast( "ModelT", await self.repository.get( item_id=item_id, auto_expunge=auto_expunge, statement=statement, id_attribute=id_attribute, error_messages=error_messages, load=load, execution_options=execution_options, uniquify=self._get_uniquify(uniquify), ), ) async def get_one( self, *filters: Union[StatementFilter, ColumnElement[bool]], statement: Optional[Select[tuple[ModelT]]] = None, auto_expunge: Optional[bool] = None, load: Optional[LoadSpec] = None, error_messages: Union[ErrorMessages, None, EmptyType] = Empty, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, **kwargs: Any, ) -> ModelT: """Wrap repository scalar operation. 
Args: *filters: Types for specific filtering operations. auto_expunge: Remove object from session before returning. statement: To facilitate customization of the underlying select query. error_messages: An optional dictionary of templates to use for friendlier error messages to clients load: Set default relationships to be loaded execution_options: Set default execution options uniquify: Optionally apply the ``unique()`` method to results before returning. **kwargs: Identifier of the instance to be retrieved. Returns: Representation of instance with identifier `item_id`. """ return cast( "ModelT", await self.repository.get_one( *filters, auto_expunge=auto_expunge, statement=statement, error_messages=error_messages, load=load, execution_options=execution_options, uniquify=self._get_uniquify(uniquify), **kwargs, ), ) async def get_one_or_none( self, *filters: Union[StatementFilter, ColumnElement[bool]], statement: Optional[Select[tuple[ModelT]]] = None, auto_expunge: Optional[bool] = None, error_messages: Union[ErrorMessages, None, EmptyType] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, **kwargs: Any, ) -> Optional[ModelT]: """Wrap repository scalar operation. Args: *filters: Types for specific filtering operations. auto_expunge: Remove object from session before returning. statement: To facilitate customization of the underlying select query. error_messages: An optional dictionary of templates to use for friendlier error messages to clients load: Set default relationships to be loaded execution_options: Set default execution options uniquify: Optionally apply the ``unique()`` method to results before returning. **kwargs: Identifier of the instance to be retrieved. Returns: Representation of instance with identifier `item_id`. 
""" return cast( "Optional[ModelT]", await self.repository.get_one_or_none( *filters, auto_expunge=auto_expunge, statement=statement, error_messages=error_messages, load=load, execution_options=execution_options, uniquify=self._get_uniquify(uniquify), **kwargs, ), ) async def to_model_on_create(self, data: "ModelDictT[ModelT]") -> "ModelDictT[ModelT]": """Convenience method to allow for custom behavior on create. Args: data: The data to be converted to a model. Returns: The data to be converted to a model. """ return data async def to_model_on_update(self, data: "ModelDictT[ModelT]") -> "ModelDictT[ModelT]": """Convenience method to allow for custom behavior on update. Args: data: The data to be converted to a model. Returns: The data to be converted to a model. """ return data async def to_model_on_delete(self, data: "ModelDictT[ModelT]") -> "ModelDictT[ModelT]": """Convenience method to allow for custom behavior on delete. Args: data: The data to be converted to a model. Returns: The data to be converted to a model. """ return data async def to_model_on_upsert(self, data: "ModelDictT[ModelT]") -> "ModelDictT[ModelT]": """Convenience method to allow for custom behavior on upsert. Args: data: The data to be converted to a model. Returns: The data to be converted to a model. """ return data async def to_model( self, data: "ModelDictT[ModelT]", operation: Optional[str] = None, ) -> ModelT: """Parse and Convert input into a model. Args: data: Representations to be created. operation: Optional operation flag so that you can provide behavior based on CRUD operation Returns: Representation of created instances. 
""" operation_map = { "create": self.to_model_on_create, "update": self.to_model_on_update, "delete": self.to_model_on_delete, "upsert": self.to_model_on_upsert, } if operation and (op := operation_map.get(operation)): data = await op(data) if is_dict(data): return model_from_dict(model=self.model_type, **data) if is_pydantic_model(data): return model_from_dict( model=self.model_type, **data.model_dump(exclude_unset=True), ) if is_msgspec_struct(data): from msgspec import UNSET return model_from_dict( model=self.model_type, **{f: val for f in data.__struct_fields__ if (val := getattr(data, f, None)) != UNSET}, ) if is_dto_data(data): return cast("ModelT", data.create_instance()) return cast("ModelT", data) async def list_and_count( self, *filters: Union[StatementFilter, ColumnElement[bool]], statement: Optional[Select[tuple[ModelT]]] = None, auto_expunge: Optional[bool] = None, count_with_window_function: Optional[bool] = None, order_by: Optional[Union[list[OrderingPair], OrderingPair]] = None, error_messages: Union[ErrorMessages, None, EmptyType] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, **kwargs: Any, ) -> tuple[Sequence[ModelT], int]: """List of records and total count returned by query. Args: *filters: Types for specific filtering operations. statement: To facilitate customization of the underlying select query. auto_expunge: Remove object from session before returning. count_with_window_function: When false, list and count will use two queries instead of an analytical window function. order_by: Set default order options for queries. error_messages: An optional dictionary of templates to use for friendlier error messages to clients load: Set relationships to be loaded execution_options: Set default execution options uniquify: Optionally apply the ``unique()`` method to results before returning. **kwargs: Instance attribute value filters. 
Returns: List of instances and count of total collection, ignoring pagination. """ return cast( "tuple[Sequence[ModelT], int]", await self.repository.list_and_count( *filters, statement=statement, auto_expunge=auto_expunge, count_with_window_function=count_with_window_function, order_by=order_by, error_messages=error_messages, load=load, execution_options=execution_options, uniquify=self._get_uniquify(uniquify), **kwargs, ), ) @classmethod @asynccontextmanager async def new( cls, session: Optional[Union[AsyncSession, async_scoped_session[AsyncSession]]] = None, statement: Optional[Select[tuple[ModelT]]] = None, config: Optional[SQLAlchemyAsyncConfig] = None, error_messages: Union[ErrorMessages, None, EmptyType] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, count_with_window_function: Optional[bool] = None, ) -> AsyncIterator[Self]: """Context manager that returns instance of service object. Handles construction of the database session._create_select_for_model Returns: The service object instance. 
""" if not config and not session: raise AdvancedAlchemyError(detail="Please supply an optional configuration or session to use.") if session: yield cls( statement=statement, session=session, error_messages=error_messages, load=load, execution_options=execution_options, uniquify=uniquify, count_with_window_function=count_with_window_function, ) elif config: async with config.get_session() as db_session: yield cls( statement=statement, session=db_session, error_messages=error_messages, load=load, execution_options=execution_options, uniquify=uniquify, count_with_window_function=count_with_window_function, ) async def list( self, *filters: Union[StatementFilter, ColumnElement[bool]], statement: Optional[Select[tuple[ModelT]]] = None, auto_expunge: Optional[bool] = None, order_by: Optional[Union[list[OrderingPair], OrderingPair]] = None, error_messages: Union[ErrorMessages, None, EmptyType] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, **kwargs: Any, ) -> Sequence[ModelT]: """Wrap repository scalars operation. Args: *filters: Types for specific filtering operations. auto_expunge: Remove object from session before returning. statement: To facilitate customization of the underlying select query. order_by: Set default order options for queries. error_messages: An optional dictionary of templates to use for friendlier error messages to clients load: Set default relationships to be loaded execution_options: Set default execution options uniquify: Optionally apply the ``unique()`` method to results before returning. **kwargs: Instance attribute value filters. Returns: The list of instances retrieved from the repository. 
""" return cast( "Sequence[ModelT]", await self.repository.list( *filters, statement=statement, auto_expunge=auto_expunge, order_by=order_by, error_messages=error_messages, load=load, execution_options=execution_options, uniquify=self._get_uniquify(uniquify), **kwargs, ), ) class SQLAlchemyAsyncRepositoryService( SQLAlchemyAsyncRepositoryReadService[ModelT, SQLAlchemyAsyncRepositoryT], Generic[ModelT, SQLAlchemyAsyncRepositoryT], ): """Service object that operates on a repository object.""" async def create( self, data: "ModelDictT[ModelT]", *, auto_commit: Optional[bool] = None, auto_expunge: Optional[bool] = None, auto_refresh: Optional[bool] = None, error_messages: Union[ErrorMessages, None, EmptyType] = Empty, ) -> "ModelT": """Wrap repository instance creation. Args: data: Representation to be created. auto_expunge: Remove object from session before returning. auto_refresh: Refresh object from session before returning. auto_commit: Commit objects before returning. error_messages: An optional dictionary of templates to use for friendlier error messages to clients Returns: Representation of created instance. """ data = await self.to_model(data, "create") return cast( "ModelT", await self.repository.add( data=data, auto_commit=auto_commit, auto_expunge=auto_expunge, auto_refresh=auto_refresh, error_messages=error_messages, ), ) async def create_many( self, data: "BulkModelDictT[ModelT]", *, auto_commit: Optional[bool] = None, auto_expunge: Optional[bool] = None, error_messages: Union[ErrorMessages, None, EmptyType] = Empty, ) -> Sequence[ModelT]: """Wrap repository bulk instance creation. Args: data: Representations to be created. auto_expunge: Remove object from session before returning. auto_commit: Commit objects before returning. error_messages: An optional dictionary of templates to use for friendlier error messages to clients uniquify: Optionally apply the ``unique()`` method to results before returning. Returns: Representation of created instances. 
""" if is_dto_data(data): data = data.create_instance() data = [(await self.to_model(datum, "create")) for datum in cast("ModelDictListT[ModelT]", data)] return cast( "Sequence[ModelT]", await self.repository.add_many( data=cast("list[ModelT]", data), # pyright: ignore[reportUnnecessaryCast] auto_commit=auto_commit, auto_expunge=auto_expunge, error_messages=error_messages, ), ) async def update( self, data: "ModelDictT[ModelT]", item_id: Optional[Any] = None, *, attribute_names: Optional[Iterable[str]] = None, with_for_update: Optional[bool] = None, auto_commit: Optional[bool] = None, auto_expunge: Optional[bool] = None, auto_refresh: Optional[bool] = None, id_attribute: Optional[Union[str, InstrumentedAttribute[Any]]] = None, error_messages: Union[ErrorMessages, None, EmptyType] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, ) -> "ModelT": """Wrap repository update operation. Args: data: Representation to be updated. item_id: Identifier of item to be updated. attribute_names: an iterable of attribute names to pass into the ``update`` method. with_for_update: indicating FOR UPDATE should be used, or may be a dictionary containing flags to indicate a more specific set of FOR UPDATE flags for the SELECT auto_expunge: Remove object from session before returning. auto_refresh: Refresh object from session before returning. auto_commit: Commit objects before returning. id_attribute: Allows customization of the unique identifier to use for model fetching. Defaults to `id`, but can reference any surrogate or candidate key for the table. error_messages: An optional dictionary of templates to use for friendlier error messages to clients load: Set default relationships to be loaded execution_options: Set default execution options uniquify: Optionally apply the ``unique()`` method to results before returning. Returns: Updated representation. 
""" data = await self.to_model(data, "update") if ( item_id is None and self.repository.get_id_attribute_value( # pyright: ignore[reportUnknownMemberType] item=data, id_attribute=id_attribute, ) is None ): msg = ( "Could not identify ID attribute value. One of the following is required: " f"``item_id`` or ``data.{id_attribute or self.repository.id_attribute}``" ) raise RepositoryError(msg) if item_id is not None: data = self.repository.set_id_attribute_value(item_id=item_id, item=data, id_attribute=id_attribute) # pyright: ignore[reportUnknownMemberType] return cast( "ModelT", await self.repository.update( data=data, attribute_names=attribute_names, with_for_update=with_for_update, auto_commit=auto_commit, auto_expunge=auto_expunge, auto_refresh=auto_refresh, id_attribute=id_attribute, error_messages=error_messages, load=load, execution_options=execution_options, uniquify=self._get_uniquify(uniquify), ), ) async def update_many( self, data: "BulkModelDictT[ModelT]", *, auto_commit: Optional[bool] = None, auto_expunge: Optional[bool] = None, error_messages: Union[ErrorMessages, None, EmptyType] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, ) -> Sequence[ModelT]: """Wrap repository bulk instance update. Args: data: Representations to be updated. auto_expunge: Remove object from session before returning. auto_commit: Commit objects before returning. error_messages: An optional dictionary of templates to use for friendlier error messages to clients load: Set default relationships to be loaded execution_options: Set default execution options uniquify: Optionally apply the ``unique()`` method to results before returning. Returns: Representation of updated instances. 
""" if is_dto_data(data): data = data.create_instance() data = [(await self.to_model(datum, "update")) for datum in cast("ModelDictListT[ModelT]", data)] return cast( "Sequence[ModelT]", await self.repository.update_many( cast("list[ModelT]", data), # pyright: ignore[reportUnnecessaryCast] auto_commit=auto_commit, auto_expunge=auto_expunge, error_messages=error_messages, load=load, execution_options=execution_options, uniquify=self._get_uniquify(uniquify), ), ) async def upsert( self, data: "ModelDictT[ModelT]", item_id: Optional[Any] = None, *, attribute_names: Optional[Iterable[str]] = None, with_for_update: Optional[bool] = None, auto_expunge: Optional[bool] = None, auto_commit: Optional[bool] = None, auto_refresh: Optional[bool] = None, match_fields: Optional[Union[list[str], str]] = None, error_messages: Union[ErrorMessages, None, EmptyType] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, ) -> ModelT: """Wrap repository upsert operation. Args: data: Instance to update existing, or be created. Identifier used to determine if an existing instance exists is the value of an attribute on `data` named as value of `self.id_attribute`. item_id: Identifier of the object for upsert. attribute_names: an iterable of attribute names to pass into the ``update`` method. with_for_update: indicating FOR UPDATE should be used, or may be a dictionary containing flags to indicate a more specific set of FOR UPDATE flags for the SELECT auto_expunge: Remove object from session before returning. auto_refresh: Refresh object from session before returning. auto_commit: Commit objects before returning. match_fields: a list of keys to use to match the existing model. When empty, all fields are matched. 
error_messages: An optional dictionary of templates to use for friendlier error messages to clients load: Set default relationships to be loaded execution_options: Set default execution options uniquify: Optionally apply the ``unique()`` method to results before returning. Returns: Updated or created representation. """ data = await self.to_model(data, "upsert") item_id = item_id if item_id is not None else self.repository.get_id_attribute_value(item=data) # pyright: ignore[reportUnknownMemberType] if item_id is not None: self.repository.set_id_attribute_value(item_id, data) # pyright: ignore[reportUnknownMemberType] return cast( "ModelT", await self.repository.upsert( data=data, attribute_names=attribute_names, with_for_update=with_for_update, auto_expunge=auto_expunge, auto_commit=auto_commit, auto_refresh=auto_refresh, match_fields=match_fields, error_messages=error_messages, load=load, execution_options=execution_options, uniquify=self._get_uniquify(uniquify), ), ) async def upsert_many( self, data: "BulkModelDictT[ModelT]", *, auto_expunge: Optional[bool] = None, auto_commit: Optional[bool] = None, no_merge: bool = False, match_fields: Optional[Union[list[str], str]] = None, error_messages: Union[ErrorMessages, None, EmptyType] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, ) -> Sequence[ModelT]: """Wrap repository upsert operation. Args: data: Instance to update existing, or be created. auto_expunge: Remove object from session before returning. auto_commit: Commit objects before returning. no_merge: Skip the usage of optimized Merge statements (**reserved for future use**) match_fields: a list of keys to use to match the existing model. When empty, all fields are matched. 
error_messages: An optional dictionary of templates to use for friendlier error messages to clients load: Set default relationships to be loaded execution_options: Set default execution options uniquify: Optionally apply the ``unique()`` method to results before returning. Returns: Updated or created representation. """ if is_dto_data(data): data = data.create_instance() data = [(await self.to_model(datum, "upsert")) for datum in cast("ModelDictListT[ModelT]", data)] return cast( "Sequence[ModelT]", await self.repository.upsert_many( data=cast("list[ModelT]", data), # pyright: ignore[reportUnnecessaryCast] auto_expunge=auto_expunge, auto_commit=auto_commit, no_merge=no_merge, match_fields=match_fields, error_messages=error_messages, load=load, execution_options=execution_options, uniquify=self._get_uniquify(uniquify), ), ) async def get_or_upsert( self, *filters: Union[StatementFilter, ColumnElement[bool]], match_fields: Optional[Union[list[str], str]] = None, upsert: bool = True, attribute_names: Optional[Iterable[str]] = None, with_for_update: Optional[bool] = None, auto_commit: Optional[bool] = None, auto_expunge: Optional[bool] = None, auto_refresh: Optional[bool] = None, error_messages: Union[ErrorMessages, None, EmptyType] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, **kwargs: Any, ) -> tuple[ModelT, bool]: """Wrap repository instance creation. Args: *filters: Types for specific filtering operations. match_fields: a list of keys to use to match the existing model. When empty, all fields are matched. upsert: When using match_fields and actual model values differ from `kwargs`, perform an update operation on the model. create: Should a model be created. If no model is found, an exception is raised. attribute_names: an iterable of attribute names to pass into the ``update`` method. 
with_for_update: indicating FOR UPDATE should be used, or may be a dictionary containing flags to indicate a more specific set of FOR UPDATE flags for the SELECT auto_expunge: Remove object from session before returning. auto_refresh: Refresh object from session before returning. auto_commit: Commit objects before returning. error_messages: An optional dictionary of templates to use for friendlier error messages to clients load: Set default relationships to be loaded execution_options: Set default execution options uniquify: Optionally apply the ``unique()`` method to results before returning. **kwargs: Identifier of the instance to be retrieved. Returns: Representation of created instance. """ match_fields = match_fields or self.match_fields validated_model = await self.to_model(kwargs, "create") return cast( "tuple[ModelT, bool]", await self.repository.get_or_upsert( *filters, match_fields=match_fields, upsert=upsert, attribute_names=attribute_names, with_for_update=with_for_update, auto_commit=auto_commit, auto_expunge=auto_expunge, auto_refresh=auto_refresh, error_messages=error_messages, load=load, execution_options=execution_options, uniquify=self._get_uniquify(uniquify), **validated_model.to_dict(), ), ) async def get_and_update( self, *filters: Union[StatementFilter, ColumnElement[bool]], match_fields: Optional[Union[list[str], str]] = None, attribute_names: Optional[Iterable[str]] = None, with_for_update: Optional[bool] = None, auto_commit: Optional[bool] = None, auto_expunge: Optional[bool] = None, auto_refresh: Optional[bool] = None, error_messages: Union[ErrorMessages, None, EmptyType] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, **kwargs: Any, ) -> tuple[ModelT, bool]: """Wrap repository instance creation. Args: *filters: Types for specific filtering operations. match_fields: a list of keys to use to match the existing model. When empty, all fields are matched. 
attribute_names: an iterable of attribute names to pass into the ``update`` method. with_for_update: indicating FOR UPDATE should be used, or may be a dictionary containing flags to indicate a more specific set of FOR UPDATE flags for the SELECT auto_expunge: Remove object from session before returning. auto_refresh: Refresh object from session before returning. auto_commit: Commit objects before returning. error_messages: An optional dictionary of templates to use for friendlier error messages to clients load: Set default relationships to be loaded execution_options: Set default execution options uniquify: Optionally apply the ``unique()`` method to results before returning. **kwargs: Identifier of the instance to be retrieved. Returns: Representation of updated instance. """ match_fields = match_fields or self.match_fields validated_model = await self.to_model(kwargs, "update") return cast( "tuple[ModelT, bool]", await self.repository.get_and_update( *filters, match_fields=match_fields, attribute_names=attribute_names, with_for_update=with_for_update, auto_commit=auto_commit, auto_expunge=auto_expunge, auto_refresh=auto_refresh, error_messages=error_messages, load=load, execution_options=execution_options, uniquify=self._get_uniquify(uniquify), **validated_model.to_dict(), ), ) async def delete( self, item_id: Any, *, auto_commit: Optional[bool] = None, auto_expunge: Optional[bool] = None, id_attribute: Optional[Union[str, InstrumentedAttribute[Any]]] = None, error_messages: Union[ErrorMessages, None, EmptyType] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, ) -> ModelT: """Wrap repository delete operation. Args: item_id: Identifier of instance to be deleted. auto_commit: Commit objects before returning. auto_expunge: Remove object from session before returning. id_attribute: Allows customization of the unique identifier to use for model fetching. 
Defaults to `id`, but can reference any surrogate or candidate key for the table. error_messages: An optional dictionary of templates to use for friendlier error messages to clients load: Set default relationships to be loaded execution_options: Set default execution options uniquify: Optionally apply the ``unique()`` method to results before returning. Returns: Representation of the deleted instance. """ return cast( "ModelT", await self.repository.delete( item_id=item_id, auto_commit=auto_commit, auto_expunge=auto_expunge, id_attribute=id_attribute, error_messages=error_messages, load=load, execution_options=execution_options, uniquify=self._get_uniquify(uniquify), ), ) async def delete_many( self, item_ids: list[Any], *, auto_commit: Optional[bool] = None, auto_expunge: Optional[bool] = None, id_attribute: Optional[Union[str, InstrumentedAttribute[Any]]] = None, chunk_size: Optional[int] = None, error_messages: Union[ErrorMessages, None, EmptyType] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, ) -> Sequence[ModelT]: """Wrap repository bulk instance deletion. Args: item_ids: Identifier of instance to be deleted. auto_expunge: Remove object from session before returning. auto_commit: Commit objects before returning. id_attribute: Allows customization of the unique identifier to use for model fetching. Defaults to `id`, but can reference any surrogate or candidate key for the table. chunk_size: Allows customization of the ``insertmanyvalues_max_parameters`` setting for the driver. Defaults to `950` if left unset. error_messages: An optional dictionary of templates to use for friendlier error messages to clients load: Set default relationships to be loaded execution_options: Set default execution options uniquify: Optionally apply the ``unique()`` method to results before returning. Returns: Representation of removed instances. 
""" return cast( "Sequence[ModelT]", await self.repository.delete_many( item_ids=item_ids, auto_commit=auto_commit, auto_expunge=auto_expunge, id_attribute=id_attribute, chunk_size=chunk_size, error_messages=error_messages, load=load, execution_options=execution_options, uniquify=self._get_uniquify(uniquify), ), ) async def delete_where( self, *filters: Union[StatementFilter, ColumnElement[bool]], auto_commit: Optional[bool] = None, auto_expunge: Optional[bool] = None, error_messages: Union[ErrorMessages, None, EmptyType] = Empty, sanity_check: bool = True, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, **kwargs: Any, ) -> Sequence[ModelT]: """Wrap repository scalars operation. Args: *filters: Types for specific filtering operations. auto_expunge: Remove object from session before returning. auto_commit: Commit objects before returning. error_messages: An optional dictionary of templates to use for friendlier error messages to clients sanity_check: When true, the length of selected instances is compared to the deleted row count load: Set default relationships to be loaded execution_options: Set default execution options uniquify: Optionally apply the ``unique()`` method to results before returning. **kwargs: Instance attribute value filters. Returns: The list of instances deleted from the repository. """ return cast( "Sequence[ModelT]", await self.repository.delete_where( *filters, auto_commit=auto_commit, auto_expunge=auto_expunge, error_messages=error_messages, sanity_check=sanity_check, load=load, execution_options=execution_options, uniquify=self._get_uniquify(uniquify), **kwargs, ), ) python-advanced-alchemy-1.0.1/advanced_alchemy/service/_sync.py000066400000000000000000001344241476663714600246450ustar00rootroot00000000000000# Do not edit this file directly. It has been autogenerated from # advanced_alchemy/service/_async.py """Service object implementation for SQLAlchemy. 
RepositoryService object is generic on the domain model type which should be a SQLAlchemy model. """ from collections.abc import Iterable, Iterator, Sequence from contextlib import contextmanager from functools import cached_property from typing import Any, ClassVar, Generic, Optional, Union, cast from sqlalchemy import Select from sqlalchemy.orm import InstrumentedAttribute, Session from sqlalchemy.orm.scoping import scoped_session from sqlalchemy.sql import ColumnElement from typing_extensions import Self from advanced_alchemy.config.sync import SQLAlchemySyncConfig from advanced_alchemy.exceptions import AdvancedAlchemyError, ErrorMessages, ImproperConfigurationError, RepositoryError from advanced_alchemy.filters import StatementFilter from advanced_alchemy.repository import SQLAlchemySyncQueryRepository from advanced_alchemy.repository._util import LoadSpec, model_from_dict from advanced_alchemy.repository.typing import ModelT, OrderingPair, SQLAlchemySyncRepositoryT from advanced_alchemy.service._util import ResultConverter from advanced_alchemy.service.typing import ( BulkModelDictT, ModelDictListT, ModelDictT, is_dict, is_dto_data, is_msgspec_struct, is_pydantic_model, ) from advanced_alchemy.utils.dataclass import Empty, EmptyType class SQLAlchemySyncQueryService(ResultConverter): """Simple service to execute the basic Query repository..""" def __init__( self, session: Union[Session, scoped_session[Session]], **repo_kwargs: Any, ) -> None: """Configure the service object. Args: session: Session managing the unit-of-work for the operation. **repo_kwargs: Optional configuration values to pass into the repository """ self.repository = SQLAlchemySyncQueryRepository( session=session, **repo_kwargs, ) @classmethod @contextmanager def new( cls, session: Optional[Union[Session, scoped_session[Session]]] = None, config: Optional[SQLAlchemySyncConfig] = None, ) -> Iterator[Self]: """Context manager that returns instance of service object. 
Handles construction of the database session._create_select_for_model Returns: The service object instance. """ if not config and not session: raise AdvancedAlchemyError(detail="Please supply an optional configuration or session to use.") if session: yield cls(session=session) elif config: with config.get_session() as db_session: yield cls(session=db_session) class SQLAlchemySyncRepositoryReadService(ResultConverter, Generic[ModelT, SQLAlchemySyncRepositoryT]): """Service object that operates on a repository object.""" repository_type: type[SQLAlchemySyncRepositoryT] """Type of the repository to use.""" loader_options: ClassVar[Optional[LoadSpec]] = None """Default loader options for the repository.""" execution_options: ClassVar[Optional[dict[str, Any]]] = None """Default execution options for the repository.""" match_fields: ClassVar[Optional[Union[list[str], str]]] = None """List of dialects that prefer to use ``field.id = ANY(:1)`` instead of ``field.id IN (...)``.""" uniquify: ClassVar[bool] = False """Optionally apply the ``unique()`` method to results before returning.""" count_with_window_function: ClassVar[bool] = True """Use an analytical window function to count results. This allows the count to be performed in a single query.""" _repository_instance: SQLAlchemySyncRepositoryT def __init__( self, session: Union[Session, scoped_session[Session]], *, statement: Optional[Select[Any]] = None, auto_expunge: bool = False, auto_refresh: bool = True, auto_commit: bool = False, order_by: Optional[Union[list[OrderingPair], OrderingPair]] = None, error_messages: Union[ErrorMessages, None, EmptyType] = Empty, wrap_exceptions: bool = True, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, count_with_window_function: Optional[bool] = None, **repo_kwargs: Any, ) -> None: """Configure the service object. Args: session: Session managing the unit-of-work for the operation. 
statement: To facilitate customization of the underlying select query. auto_expunge: Remove object from session before returning. auto_refresh: Refresh object from session before returning. auto_commit: Commit objects before returning. order_by: Set default order options for queries. error_messages: A set of custom error messages to use for operations wrap_exceptions: Wrap exceptions in a RepositoryError load: Set default relationships to be loaded execution_options: Set default execution options uniquify: Optionally apply the ``unique()`` method to results before returning. count_with_window_function: When false, list and count will use two queries instead of an analytical window function. **repo_kwargs: passed as keyword args to repo instantiation. """ load = load if load is not None else self.loader_options execution_options = execution_options if execution_options is not None else self.execution_options count_with_window_function = ( count_with_window_function if count_with_window_function is not None else self.count_with_window_function ) self._repository_instance: SQLAlchemySyncRepositoryT = self.repository_type( # type: ignore[assignment] session=session, statement=statement, auto_expunge=auto_expunge, auto_refresh=auto_refresh, auto_commit=auto_commit, order_by=order_by, error_messages=error_messages, wrap_exceptions=wrap_exceptions, load=load, execution_options=execution_options, uniquify=self._get_uniquify(uniquify), count_with_window_function=count_with_window_function, **repo_kwargs, ) def _get_uniquify(self, uniquify: Optional[bool] = None) -> bool: return bool(uniquify or self.uniquify) @property def repository(self) -> SQLAlchemySyncRepositoryT: """Return the repository instance.""" if not self._repository_instance: msg = "Repository not initialized" raise ImproperConfigurationError(msg) return self._repository_instance @cached_property def model_type(self) -> type[ModelT]: """Return the model type.""" return cast("type[ModelT]", 
self.repository.model_type) def count( self, *filters: Union[StatementFilter, ColumnElement[bool]], statement: Optional[Select[tuple[ModelT]]] = None, error_messages: Union[ErrorMessages, None, EmptyType] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, **kwargs: Any, ) -> int: """Count of records returned by query. Args: *filters: arguments for filtering. statement: To facilitate customization of the underlying select query. error_messages: An optional dictionary of templates to use for friendlier error messages to clients load: Set relationships to be loaded execution_options: Set default execution options uniquify: Optionally apply the ``unique()`` method to results before returning. **kwargs: key value pairs of filter types. Returns: A count of the collection, filtered, but ignoring pagination. """ return self.repository.count( *filters, statement=statement, error_messages=error_messages, load=load, execution_options=execution_options, uniquify=self._get_uniquify(uniquify), **kwargs, ) def exists( self, *filters: Union[StatementFilter, ColumnElement[bool]], error_messages: Union[ErrorMessages, None, EmptyType] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, **kwargs: Any, ) -> bool: """Wrap repository exists operation. Args: *filters: Types for specific filtering operations. error_messages: An optional dictionary of templates to use for friendlier error messages to clients load: Set relationships to be loaded execution_options: Set default execution options uniquify: Optionally apply the ``unique()`` method to results before returning. **kwargs: Keyword arguments for attribute based filtering. Returns: Representation of instance with identifier `item_id`. 
""" return self.repository.exists( *filters, error_messages=error_messages, load=load, execution_options=execution_options, uniquify=self._get_uniquify(uniquify), **kwargs, ) def get( self, item_id: Any, *, statement: Optional[Select[tuple[ModelT]]] = None, id_attribute: Optional[Union[str, InstrumentedAttribute[Any]]] = None, auto_expunge: Optional[bool] = None, error_messages: Union[ErrorMessages, None, EmptyType] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, ) -> ModelT: """Wrap repository scalar operation. Args: item_id: Identifier of instance to be retrieved. auto_expunge: Remove object from session before returning. statement: To facilitate customization of the underlying select query. id_attribute: Allows customization of the unique identifier to use for model fetching. Defaults to `id`, but can reference any surrogate or candidate key for the table. error_messages: An optional dictionary of templates to use for friendlier error messages to clients load: Set relationships to be loaded execution_options: Set default execution options uniquify: Optionally apply the ``unique()`` method to results before returning. Returns: Representation of instance with identifier `item_id`. """ return cast( "ModelT", self.repository.get( item_id=item_id, auto_expunge=auto_expunge, statement=statement, id_attribute=id_attribute, error_messages=error_messages, load=load, execution_options=execution_options, uniquify=self._get_uniquify(uniquify), ), ) def get_one( self, *filters: Union[StatementFilter, ColumnElement[bool]], statement: Optional[Select[tuple[ModelT]]] = None, auto_expunge: Optional[bool] = None, load: Optional[LoadSpec] = None, error_messages: Union[ErrorMessages, None, EmptyType] = Empty, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, **kwargs: Any, ) -> ModelT: """Wrap repository scalar operation. 
Args: *filters: Types for specific filtering operations. auto_expunge: Remove object from session before returning. statement: To facilitate customization of the underlying select query. error_messages: An optional dictionary of templates to use for friendlier error messages to clients load: Set default relationships to be loaded execution_options: Set default execution options uniquify: Optionally apply the ``unique()`` method to results before returning. **kwargs: Identifier of the instance to be retrieved. Returns: Representation of instance with identifier `item_id`. """ return cast( "ModelT", self.repository.get_one( *filters, auto_expunge=auto_expunge, statement=statement, error_messages=error_messages, load=load, execution_options=execution_options, uniquify=self._get_uniquify(uniquify), **kwargs, ), ) def get_one_or_none( self, *filters: Union[StatementFilter, ColumnElement[bool]], statement: Optional[Select[tuple[ModelT]]] = None, auto_expunge: Optional[bool] = None, error_messages: Union[ErrorMessages, None, EmptyType] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, **kwargs: Any, ) -> Optional[ModelT]: """Wrap repository scalar operation. Args: *filters: Types for specific filtering operations. auto_expunge: Remove object from session before returning. statement: To facilitate customization of the underlying select query. error_messages: An optional dictionary of templates to use for friendlier error messages to clients load: Set default relationships to be loaded execution_options: Set default execution options uniquify: Optionally apply the ``unique()`` method to results before returning. **kwargs: Identifier of the instance to be retrieved. Returns: Representation of instance with identifier `item_id`. 
""" return cast( "Optional[ModelT]", self.repository.get_one_or_none( *filters, auto_expunge=auto_expunge, statement=statement, error_messages=error_messages, load=load, execution_options=execution_options, uniquify=self._get_uniquify(uniquify), **kwargs, ), ) def to_model_on_create(self, data: "ModelDictT[ModelT]") -> "ModelDictT[ModelT]": """Convenience method to allow for custom behavior on create. Args: data: The data to be converted to a model. Returns: The data to be converted to a model. """ return data def to_model_on_update(self, data: "ModelDictT[ModelT]") -> "ModelDictT[ModelT]": """Convenience method to allow for custom behavior on update. Args: data: The data to be converted to a model. Returns: The data to be converted to a model. """ return data def to_model_on_delete(self, data: "ModelDictT[ModelT]") -> "ModelDictT[ModelT]": """Convenience method to allow for custom behavior on delete. Args: data: The data to be converted to a model. Returns: The data to be converted to a model. """ return data def to_model_on_upsert(self, data: "ModelDictT[ModelT]") -> "ModelDictT[ModelT]": """Convenience method to allow for custom behavior on upsert. Args: data: The data to be converted to a model. Returns: The data to be converted to a model. """ return data def to_model( self, data: "ModelDictT[ModelT]", operation: Optional[str] = None, ) -> ModelT: """Parse and Convert input into a model. Args: data: Representations to be created. operation: Optional operation flag so that you can provide behavior based on CRUD operation Returns: Representation of created instances. 
""" operation_map = { "create": self.to_model_on_create, "update": self.to_model_on_update, "delete": self.to_model_on_delete, "upsert": self.to_model_on_upsert, } if operation and (op := operation_map.get(operation)): data = op(data) if is_dict(data): return model_from_dict(model=self.model_type, **data) if is_pydantic_model(data): return model_from_dict( model=self.model_type, **data.model_dump(exclude_unset=True), ) if is_msgspec_struct(data): from msgspec import UNSET return model_from_dict( model=self.model_type, **{f: val for f in data.__struct_fields__ if (val := getattr(data, f, None)) != UNSET}, ) if is_dto_data(data): return cast("ModelT", data.create_instance()) return cast("ModelT", data) def list_and_count( self, *filters: Union[StatementFilter, ColumnElement[bool]], statement: Optional[Select[tuple[ModelT]]] = None, auto_expunge: Optional[bool] = None, count_with_window_function: Optional[bool] = None, order_by: Optional[Union[list[OrderingPair], OrderingPair]] = None, error_messages: Union[ErrorMessages, None, EmptyType] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, **kwargs: Any, ) -> tuple[Sequence[ModelT], int]: """List of records and total count returned by query. Args: *filters: Types for specific filtering operations. statement: To facilitate customization of the underlying select query. auto_expunge: Remove object from session before returning. count_with_window_function: When false, list and count will use two queries instead of an analytical window function. order_by: Set default order options for queries. error_messages: An optional dictionary of templates to use for friendlier error messages to clients load: Set relationships to be loaded execution_options: Set default execution options uniquify: Optionally apply the ``unique()`` method to results before returning. **kwargs: Instance attribute value filters. 
Returns: List of instances and count of total collection, ignoring pagination. """ return cast( "tuple[Sequence[ModelT], int]", self.repository.list_and_count( *filters, statement=statement, auto_expunge=auto_expunge, count_with_window_function=count_with_window_function, order_by=order_by, error_messages=error_messages, load=load, execution_options=execution_options, uniquify=self._get_uniquify(uniquify), **kwargs, ), ) @classmethod @contextmanager def new( cls, session: Optional[Union[Session, scoped_session[Session]]] = None, statement: Optional[Select[tuple[ModelT]]] = None, config: Optional[SQLAlchemySyncConfig] = None, error_messages: Union[ErrorMessages, None, EmptyType] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, count_with_window_function: Optional[bool] = None, ) -> Iterator[Self]: """Context manager that returns instance of service object. Handles construction of the database session._create_select_for_model Returns: The service object instance. 
""" if not config and not session: raise AdvancedAlchemyError(detail="Please supply an optional configuration or session to use.") if session: yield cls( statement=statement, session=session, error_messages=error_messages, load=load, execution_options=execution_options, uniquify=uniquify, count_with_window_function=count_with_window_function, ) elif config: with config.get_session() as db_session: yield cls( statement=statement, session=db_session, error_messages=error_messages, load=load, execution_options=execution_options, uniquify=uniquify, count_with_window_function=count_with_window_function, ) def list( self, *filters: Union[StatementFilter, ColumnElement[bool]], statement: Optional[Select[tuple[ModelT]]] = None, auto_expunge: Optional[bool] = None, order_by: Optional[Union[list[OrderingPair], OrderingPair]] = None, error_messages: Union[ErrorMessages, None, EmptyType] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, **kwargs: Any, ) -> Sequence[ModelT]: """Wrap repository scalars operation. Args: *filters: Types for specific filtering operations. auto_expunge: Remove object from session before returning. statement: To facilitate customization of the underlying select query. order_by: Set default order options for queries. error_messages: An optional dictionary of templates to use for friendlier error messages to clients load: Set default relationships to be loaded execution_options: Set default execution options uniquify: Optionally apply the ``unique()`` method to results before returning. **kwargs: Instance attribute value filters. Returns: The list of instances retrieved from the repository. 
""" return cast( "Sequence[ModelT]", self.repository.list( *filters, statement=statement, auto_expunge=auto_expunge, order_by=order_by, error_messages=error_messages, load=load, execution_options=execution_options, uniquify=self._get_uniquify(uniquify), **kwargs, ), ) class SQLAlchemySyncRepositoryService( SQLAlchemySyncRepositoryReadService[ModelT, SQLAlchemySyncRepositoryT], Generic[ModelT, SQLAlchemySyncRepositoryT], ): """Service object that operates on a repository object.""" def create( self, data: "ModelDictT[ModelT]", *, auto_commit: Optional[bool] = None, auto_expunge: Optional[bool] = None, auto_refresh: Optional[bool] = None, error_messages: Union[ErrorMessages, None, EmptyType] = Empty, ) -> "ModelT": """Wrap repository instance creation. Args: data: Representation to be created. auto_expunge: Remove object from session before returning. auto_refresh: Refresh object from session before returning. auto_commit: Commit objects before returning. error_messages: An optional dictionary of templates to use for friendlier error messages to clients Returns: Representation of created instance. """ data = self.to_model(data, "create") return cast( "ModelT", self.repository.add( data=data, auto_commit=auto_commit, auto_expunge=auto_expunge, auto_refresh=auto_refresh, error_messages=error_messages, ), ) def create_many( self, data: "BulkModelDictT[ModelT]", *, auto_commit: Optional[bool] = None, auto_expunge: Optional[bool] = None, error_messages: Union[ErrorMessages, None, EmptyType] = Empty, ) -> Sequence[ModelT]: """Wrap repository bulk instance creation. Args: data: Representations to be created. auto_expunge: Remove object from session before returning. auto_commit: Commit objects before returning. error_messages: An optional dictionary of templates to use for friendlier error messages to clients uniquify: Optionally apply the ``unique()`` method to results before returning. Returns: Representation of created instances. 
""" if is_dto_data(data): data = data.create_instance() data = [(self.to_model(datum, "create")) for datum in cast("ModelDictListT[ModelT]", data)] return cast( "Sequence[ModelT]", self.repository.add_many( data=cast("list[ModelT]", data), # pyright: ignore[reportUnnecessaryCast] auto_commit=auto_commit, auto_expunge=auto_expunge, error_messages=error_messages, ), ) def update( self, data: "ModelDictT[ModelT]", item_id: Optional[Any] = None, *, attribute_names: Optional[Iterable[str]] = None, with_for_update: Optional[bool] = None, auto_commit: Optional[bool] = None, auto_expunge: Optional[bool] = None, auto_refresh: Optional[bool] = None, id_attribute: Optional[Union[str, InstrumentedAttribute[Any]]] = None, error_messages: Union[ErrorMessages, None, EmptyType] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, ) -> "ModelT": """Wrap repository update operation. Args: data: Representation to be updated. item_id: Identifier of item to be updated. attribute_names: an iterable of attribute names to pass into the ``update`` method. with_for_update: indicating FOR UPDATE should be used, or may be a dictionary containing flags to indicate a more specific set of FOR UPDATE flags for the SELECT auto_expunge: Remove object from session before returning. auto_refresh: Refresh object from session before returning. auto_commit: Commit objects before returning. id_attribute: Allows customization of the unique identifier to use for model fetching. Defaults to `id`, but can reference any surrogate or candidate key for the table. error_messages: An optional dictionary of templates to use for friendlier error messages to clients load: Set default relationships to be loaded execution_options: Set default execution options uniquify: Optionally apply the ``unique()`` method to results before returning. Returns: Updated representation. 
""" data = self.to_model(data, "update") if ( item_id is None and self.repository.get_id_attribute_value( # pyright: ignore[reportUnknownMemberType] item=data, id_attribute=id_attribute, ) is None ): msg = ( "Could not identify ID attribute value. One of the following is required: " f"``item_id`` or ``data.{id_attribute or self.repository.id_attribute}``" ) raise RepositoryError(msg) if item_id is not None: data = self.repository.set_id_attribute_value(item_id=item_id, item=data, id_attribute=id_attribute) # pyright: ignore[reportUnknownMemberType] return cast( "ModelT", self.repository.update( data=data, attribute_names=attribute_names, with_for_update=with_for_update, auto_commit=auto_commit, auto_expunge=auto_expunge, auto_refresh=auto_refresh, id_attribute=id_attribute, error_messages=error_messages, load=load, execution_options=execution_options, uniquify=self._get_uniquify(uniquify), ), ) def update_many( self, data: "BulkModelDictT[ModelT]", *, auto_commit: Optional[bool] = None, auto_expunge: Optional[bool] = None, error_messages: Union[ErrorMessages, None, EmptyType] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, ) -> Sequence[ModelT]: """Wrap repository bulk instance update. Args: data: Representations to be updated. auto_expunge: Remove object from session before returning. auto_commit: Commit objects before returning. error_messages: An optional dictionary of templates to use for friendlier error messages to clients load: Set default relationships to be loaded execution_options: Set default execution options uniquify: Optionally apply the ``unique()`` method to results before returning. Returns: Representation of updated instances. 
""" if is_dto_data(data): data = data.create_instance() data = [(self.to_model(datum, "update")) for datum in cast("ModelDictListT[ModelT]", data)] return cast( "Sequence[ModelT]", self.repository.update_many( cast("list[ModelT]", data), # pyright: ignore[reportUnnecessaryCast] auto_commit=auto_commit, auto_expunge=auto_expunge, error_messages=error_messages, load=load, execution_options=execution_options, uniquify=self._get_uniquify(uniquify), ), ) def upsert( self, data: "ModelDictT[ModelT]", item_id: Optional[Any] = None, *, attribute_names: Optional[Iterable[str]] = None, with_for_update: Optional[bool] = None, auto_expunge: Optional[bool] = None, auto_commit: Optional[bool] = None, auto_refresh: Optional[bool] = None, match_fields: Optional[Union[list[str], str]] = None, error_messages: Union[ErrorMessages, None, EmptyType] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, ) -> ModelT: """Wrap repository upsert operation. Args: data: Instance to update existing, or be created. Identifier used to determine if an existing instance exists is the value of an attribute on `data` named as value of `self.id_attribute`. item_id: Identifier of the object for upsert. attribute_names: an iterable of attribute names to pass into the ``update`` method. with_for_update: indicating FOR UPDATE should be used, or may be a dictionary containing flags to indicate a more specific set of FOR UPDATE flags for the SELECT auto_expunge: Remove object from session before returning. auto_refresh: Refresh object from session before returning. auto_commit: Commit objects before returning. match_fields: a list of keys to use to match the existing model. When empty, all fields are matched. 
error_messages: An optional dictionary of templates to use for friendlier error messages to clients load: Set default relationships to be loaded execution_options: Set default execution options uniquify: Optionally apply the ``unique()`` method to results before returning. Returns: Updated or created representation. """ data = self.to_model(data, "upsert") item_id = item_id if item_id is not None else self.repository.get_id_attribute_value(item=data) # pyright: ignore[reportUnknownMemberType] if item_id is not None: self.repository.set_id_attribute_value(item_id, data) # pyright: ignore[reportUnknownMemberType] return cast( "ModelT", self.repository.upsert( data=data, attribute_names=attribute_names, with_for_update=with_for_update, auto_expunge=auto_expunge, auto_commit=auto_commit, auto_refresh=auto_refresh, match_fields=match_fields, error_messages=error_messages, load=load, execution_options=execution_options, uniquify=self._get_uniquify(uniquify), ), ) def upsert_many( self, data: "BulkModelDictT[ModelT]", *, auto_expunge: Optional[bool] = None, auto_commit: Optional[bool] = None, no_merge: bool = False, match_fields: Optional[Union[list[str], str]] = None, error_messages: Union[ErrorMessages, None, EmptyType] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, ) -> Sequence[ModelT]: """Wrap repository upsert operation. Args: data: Instance to update existing, or be created. auto_expunge: Remove object from session before returning. auto_commit: Commit objects before returning. no_merge: Skip the usage of optimized Merge statements (**reserved for future use**) match_fields: a list of keys to use to match the existing model. When empty, all fields are matched. 
error_messages: An optional dictionary of templates to use for friendlier error messages to clients load: Set default relationships to be loaded execution_options: Set default execution options uniquify: Optionally apply the ``unique()`` method to results before returning. Returns: Updated or created representation. """ if is_dto_data(data): data = data.create_instance() data = [(self.to_model(datum, "upsert")) for datum in cast("ModelDictListT[ModelT]", data)] return cast( "Sequence[ModelT]", self.repository.upsert_many( data=cast("list[ModelT]", data), # pyright: ignore[reportUnnecessaryCast] auto_expunge=auto_expunge, auto_commit=auto_commit, no_merge=no_merge, match_fields=match_fields, error_messages=error_messages, load=load, execution_options=execution_options, uniquify=self._get_uniquify(uniquify), ), ) def get_or_upsert( self, *filters: Union[StatementFilter, ColumnElement[bool]], match_fields: Optional[Union[list[str], str]] = None, upsert: bool = True, attribute_names: Optional[Iterable[str]] = None, with_for_update: Optional[bool] = None, auto_commit: Optional[bool] = None, auto_expunge: Optional[bool] = None, auto_refresh: Optional[bool] = None, error_messages: Union[ErrorMessages, None, EmptyType] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, **kwargs: Any, ) -> tuple[ModelT, bool]: """Wrap repository instance creation. Args: *filters: Types for specific filtering operations. match_fields: a list of keys to use to match the existing model. When empty, all fields are matched. upsert: When using match_fields and actual model values differ from `kwargs`, perform an update operation on the model. create: Should a model be created. If no model is found, an exception is raised. attribute_names: an iterable of attribute names to pass into the ``update`` method. 
with_for_update: indicating FOR UPDATE should be used, or may be a dictionary containing flags to indicate a more specific set of FOR UPDATE flags for the SELECT auto_expunge: Remove object from session before returning. auto_refresh: Refresh object from session before returning. auto_commit: Commit objects before returning. error_messages: An optional dictionary of templates to use for friendlier error messages to clients load: Set default relationships to be loaded execution_options: Set default execution options uniquify: Optionally apply the ``unique()`` method to results before returning. **kwargs: Identifier of the instance to be retrieved. Returns: Representation of created instance. """ match_fields = match_fields or self.match_fields validated_model = self.to_model(kwargs, "create") return cast( "tuple[ModelT, bool]", self.repository.get_or_upsert( *filters, match_fields=match_fields, upsert=upsert, attribute_names=attribute_names, with_for_update=with_for_update, auto_commit=auto_commit, auto_expunge=auto_expunge, auto_refresh=auto_refresh, error_messages=error_messages, load=load, execution_options=execution_options, uniquify=self._get_uniquify(uniquify), **validated_model.to_dict(), ), ) def get_and_update( self, *filters: Union[StatementFilter, ColumnElement[bool]], match_fields: Optional[Union[list[str], str]] = None, attribute_names: Optional[Iterable[str]] = None, with_for_update: Optional[bool] = None, auto_commit: Optional[bool] = None, auto_expunge: Optional[bool] = None, auto_refresh: Optional[bool] = None, error_messages: Union[ErrorMessages, None, EmptyType] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, **kwargs: Any, ) -> tuple[ModelT, bool]: """Wrap repository instance creation. Args: *filters: Types for specific filtering operations. match_fields: a list of keys to use to match the existing model. When empty, all fields are matched. 
attribute_names: an iterable of attribute names to pass into the ``update`` method. with_for_update: indicating FOR UPDATE should be used, or may be a dictionary containing flags to indicate a more specific set of FOR UPDATE flags for the SELECT auto_expunge: Remove object from session before returning. auto_refresh: Refresh object from session before returning. auto_commit: Commit objects before returning. error_messages: An optional dictionary of templates to use for friendlier error messages to clients load: Set default relationships to be loaded execution_options: Set default execution options uniquify: Optionally apply the ``unique()`` method to results before returning. **kwargs: Identifier of the instance to be retrieved. Returns: Representation of updated instance. """ match_fields = match_fields or self.match_fields validated_model = self.to_model(kwargs, "update") return cast( "tuple[ModelT, bool]", self.repository.get_and_update( *filters, match_fields=match_fields, attribute_names=attribute_names, with_for_update=with_for_update, auto_commit=auto_commit, auto_expunge=auto_expunge, auto_refresh=auto_refresh, error_messages=error_messages, load=load, execution_options=execution_options, uniquify=self._get_uniquify(uniquify), **validated_model.to_dict(), ), ) def delete( self, item_id: Any, *, auto_commit: Optional[bool] = None, auto_expunge: Optional[bool] = None, id_attribute: Optional[Union[str, InstrumentedAttribute[Any]]] = None, error_messages: Union[ErrorMessages, None, EmptyType] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, ) -> ModelT: """Wrap repository delete operation. Args: item_id: Identifier of instance to be deleted. auto_commit: Commit objects before returning. auto_expunge: Remove object from session before returning. id_attribute: Allows customization of the unique identifier to use for model fetching. 
Defaults to `id`, but can reference any surrogate or candidate key for the table. error_messages: An optional dictionary of templates to use for friendlier error messages to clients load: Set default relationships to be loaded execution_options: Set default execution options uniquify: Optionally apply the ``unique()`` method to results before returning. Returns: Representation of the deleted instance. """ return cast( "ModelT", self.repository.delete( item_id=item_id, auto_commit=auto_commit, auto_expunge=auto_expunge, id_attribute=id_attribute, error_messages=error_messages, load=load, execution_options=execution_options, uniquify=self._get_uniquify(uniquify), ), ) def delete_many( self, item_ids: list[Any], *, auto_commit: Optional[bool] = None, auto_expunge: Optional[bool] = None, id_attribute: Optional[Union[str, InstrumentedAttribute[Any]]] = None, chunk_size: Optional[int] = None, error_messages: Union[ErrorMessages, None, EmptyType] = Empty, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, ) -> Sequence[ModelT]: """Wrap repository bulk instance deletion. Args: item_ids: Identifier of instance to be deleted. auto_expunge: Remove object from session before returning. auto_commit: Commit objects before returning. id_attribute: Allows customization of the unique identifier to use for model fetching. Defaults to `id`, but can reference any surrogate or candidate key for the table. chunk_size: Allows customization of the ``insertmanyvalues_max_parameters`` setting for the driver. Defaults to `950` if left unset. error_messages: An optional dictionary of templates to use for friendlier error messages to clients load: Set default relationships to be loaded execution_options: Set default execution options uniquify: Optionally apply the ``unique()`` method to results before returning. Returns: Representation of removed instances. 
""" return cast( "Sequence[ModelT]", self.repository.delete_many( item_ids=item_ids, auto_commit=auto_commit, auto_expunge=auto_expunge, id_attribute=id_attribute, chunk_size=chunk_size, error_messages=error_messages, load=load, execution_options=execution_options, uniquify=self._get_uniquify(uniquify), ), ) def delete_where( self, *filters: Union[StatementFilter, ColumnElement[bool]], auto_commit: Optional[bool] = None, auto_expunge: Optional[bool] = None, error_messages: Union[ErrorMessages, None, EmptyType] = Empty, sanity_check: bool = True, load: Optional[LoadSpec] = None, execution_options: Optional[dict[str, Any]] = None, uniquify: Optional[bool] = None, **kwargs: Any, ) -> Sequence[ModelT]: """Wrap repository scalars operation. Args: *filters: Types for specific filtering operations. auto_expunge: Remove object from session before returning. auto_commit: Commit objects before returning. error_messages: An optional dictionary of templates to use for friendlier error messages to clients sanity_check: When true, the length of selected instances is compared to the deleted row count load: Set default relationships to be loaded execution_options: Set default execution options uniquify: Optionally apply the ``unique()`` method to results before returning. **kwargs: Instance attribute value filters. Returns: The list of instances deleted from the repository. """ return cast( "Sequence[ModelT]", self.repository.delete_where( *filters, auto_commit=auto_commit, auto_expunge=auto_expunge, error_messages=error_messages, sanity_check=sanity_check, load=load, execution_options=execution_options, uniquify=self._get_uniquify(uniquify), **kwargs, ), ) python-advanced-alchemy-1.0.1/advanced_alchemy/service/_typing.py000066400000000000000000000074251476663714600252030ustar00rootroot00000000000000"""This is a simple wrapper around a few important classes in each library. This is used to ensure compatibility when one or more of the libraries are installed. 
""" from typing import ( Any, ClassVar, Optional, Protocol, cast, runtime_checkable, ) from typing_extensions import TypeVar, dataclass_transform T = TypeVar("T") T_co = TypeVar("T_co", covariant=True) try: from pydantic import BaseModel, FailFast, TypeAdapter # pyright: ignore[reportGeneralTypeIssues] PYDANTIC_INSTALLED = True except ImportError: @runtime_checkable class BaseModel(Protocol): # type: ignore[no-redef] """Placeholder Implementation""" model_fields: "ClassVar[dict[str, Any]]" def model_dump(self, *args: Any, **kwargs: Any) -> "dict[str, Any]": """Placeholder""" return {} @runtime_checkable class TypeAdapter(Protocol[T_co]): # type: ignore[no-redef] """Placeholder Implementation""" def __init__( self, type: Any, # noqa: A002 *, config: "Optional[Any]" = None, _parent_depth: int = 2, module: "Optional[str]" = None, ) -> None: """Init""" def validate_python( self, object: Any, # noqa: A002 /, *, strict: "Optional[bool]" = None, from_attributes: "Optional[bool]" = None, context: "Optional[dict[str, Any]]" = None, ) -> T_co: """Stub""" return cast("T_co", object) @runtime_checkable class FailFast(Protocol): # type: ignore[no-redef] """Placeholder Implementation for FailFast""" def __init__(self, *args: Any, **kwargs: Any) -> None: """Init""" PYDANTIC_INSTALLED = False # pyright: ignore[reportConstantRedefinition] try: from msgspec import ( UNSET, Struct, UnsetType, # pyright: ignore[reportAssignmentType,reportGeneralTypeIssues] convert, # pyright: ignore[reportGeneralTypeIssues] ) MSGSPEC_INSTALLED: bool = True except ImportError: import enum @dataclass_transform() @runtime_checkable class Struct(Protocol): # type: ignore[no-redef] """Placeholder Implementation""" __struct_fields__: "ClassVar[tuple[str, ...]]" def convert(*args: Any, **kwargs: Any) -> Any: # type: ignore[no-redef] # noqa: ARG001 """Placeholder implementation""" return {} class UnsetType(enum.Enum): # type: ignore[no-redef] UNSET = "UNSET" UNSET = UnsetType.UNSET # pyright: 
ignore[reportConstantRedefinition] MSGSPEC_INSTALLED = False # pyright: ignore[reportConstantRedefinition] try: from litestar.dto.data_structures import DTOData LITESTAR_INSTALLED = True except ImportError: @runtime_checkable class DTOData(Protocol[T]): # type: ignore[no-redef] """Placeholder implementation""" __slots__ = ("_backend", "_data_as_builtins") def __init__(self, backend: Any, data_as_builtins: Any) -> None: """Placeholder init""" def create_instance(self, **kwargs: Any) -> T: """Placeholder implementation""" return cast("T", kwargs) def update_instance(self, instance: T, **kwargs: Any) -> T: """Placeholder implementation""" return cast("T", kwargs) def as_builtins(self) -> Any: """Placeholder implementation""" return {} LITESTAR_INSTALLED = False # pyright: ignore[reportConstantRedefinition] __all__ = ( "LITESTAR_INSTALLED", "MSGSPEC_INSTALLED", "PYDANTIC_INSTALLED", "UNSET", "BaseModel", "DTOData", "FailFast", "Struct", "TypeAdapter", "UnsetType", "convert", ) python-advanced-alchemy-1.0.1/advanced_alchemy/service/_util.py000066400000000000000000000252221476663714600246410ustar00rootroot00000000000000"""Service object implementation for SQLAlchemy. RepositoryService object is generic on the domain model type which should be a SQLAlchemy model. 
""" import datetime from collections.abc import Sequence from enum import Enum from functools import partial from pathlib import Path, PurePath from typing import TYPE_CHECKING, Any, Callable, Optional, Union, cast, overload from uuid import UUID from advanced_alchemy.exceptions import AdvancedAlchemyError from advanced_alchemy.filters import LimitOffset, StatementFilter from advanced_alchemy.repository.typing import ModelOrRowMappingT from advanced_alchemy.service.pagination import OffsetPagination from advanced_alchemy.service.typing import ( MSGSPEC_INSTALLED, PYDANTIC_INSTALLED, BaseModel, FilterTypeT, ModelDTOT, Struct, convert, get_type_adapter, ) if TYPE_CHECKING: from sqlalchemy import ColumnElement, RowMapping from advanced_alchemy.base import ModelProtocol __all__ = ("ResultConverter", "find_filter") DEFAULT_TYPE_DECODERS = [ # pyright: ignore[reportUnknownVariableType] (lambda x: x is UUID, lambda t, v: t(v.hex)), # pyright: ignore[reportUnknownLambdaType,reportUnknownMemberType] (lambda x: x is datetime.datetime, lambda t, v: t(v.isoformat())), # pyright: ignore[reportUnknownLambdaType,reportUnknownMemberType] (lambda x: x is datetime.date, lambda t, v: t(v.isoformat())), # pyright: ignore[reportUnknownLambdaType,reportUnknownMemberType] (lambda x: x is datetime.time, lambda t, v: t(v.isoformat())), # pyright: ignore[reportUnknownLambdaType,reportUnknownMemberType] (lambda x: x is Enum, lambda t, v: t(v.value)), # pyright: ignore[reportUnknownLambdaType,reportUnknownMemberType] ] def _default_msgspec_deserializer( target_type: Any, value: Any, type_decoders: "Union[Sequence[tuple[Callable[[Any], bool], Callable[[Any, Any], Any]]], None]" = None, ) -> Any: # pragma: no cover """Transform values non-natively supported by ``msgspec`` Args: target_type: Encountered type value: Value to coerce type_decoders: Optional sequence of type decoders Returns: A ``msgspec``-supported type """ if isinstance(value, target_type): return value if type_decoders: for 
predicate, decoder in type_decoders: if predicate(target_type): return decoder(target_type, value) if issubclass(target_type, (Path, PurePath, UUID)): return target_type(value) try: return target_type(value) except Exception as e: msg = f"Unsupported type: {type(value)!r}" raise TypeError(msg) from e def find_filter( filter_type: type[FilterTypeT], filters: "Union[Sequence[Union[StatementFilter, ColumnElement[bool]]], Sequence[StatementFilter]]", ) -> "Union[FilterTypeT, None]": """Get the filter specified by filter type from the filters. Args: filter_type: The type of filter to find. filters: filter types to apply to the query Returns: The match filter instance or None """ return next( (cast("Optional[FilterTypeT]", filter_) for filter_ in filters if isinstance(filter_, filter_type)), None, ) class ResultConverter: """Simple mixin to help convert to a paginated response model. Single objects are transformed to the supplied schema type, and lists of objects are automatically transformed into an `OffsetPagination` response of the supplied schema type. Args: data: A database model instance or row mapping. Type: :class:`~advanced_alchemy.repository.typing.ModelOrRowMappingT` Returns: The converted schema object. """ @overload def to_schema( self, data: "ModelOrRowMappingT", *, schema_type: None = None, ) -> "ModelOrRowMappingT": ... @overload def to_schema( self, data: "Union[ModelProtocol, RowMapping]", *, schema_type: "type[ModelDTOT]", ) -> "ModelDTOT": ... @overload def to_schema( self, data: "ModelOrRowMappingT", total: "Optional[int]" = None, *, schema_type: None = None, ) -> "ModelOrRowMappingT": ... @overload def to_schema( self, data: "Union[ModelProtocol, RowMapping]", total: "Optional[int]" = None, *, schema_type: "type[ModelDTOT]", ) -> "ModelDTOT": ... 
@overload def to_schema( self, data: "ModelOrRowMappingT", total: "Optional[int]" = None, filters: "Union[Sequence[Union[StatementFilter, ColumnElement[bool]]], Sequence[StatementFilter], None]" = None, *, schema_type: None = None, ) -> "ModelOrRowMappingT": ... @overload def to_schema( self, data: "Union[ModelProtocol, RowMapping]", total: "Optional[int]" = None, filters: "Union[Sequence[Union[StatementFilter, ColumnElement[bool]]], Sequence[StatementFilter], None]" = None, *, schema_type: "type[ModelDTOT]", ) -> "ModelDTOT": ... @overload def to_schema( self, data: "Sequence[ModelOrRowMappingT]", *, schema_type: None = None, ) -> "OffsetPagination[ModelOrRowMappingT]": ... @overload def to_schema( self, data: "Union[Sequence[ModelProtocol], Sequence[RowMapping]]", *, schema_type: "type[ModelDTOT]", ) -> "OffsetPagination[ModelDTOT]": ... @overload def to_schema( self, data: "Sequence[ModelOrRowMappingT]", total: "Optional[int]" = None, filters: "Union[Sequence[Union[StatementFilter, ColumnElement[bool]]], Sequence[StatementFilter], None]" = None, *, schema_type: None = None, ) -> "OffsetPagination[ModelOrRowMappingT]": ... @overload def to_schema( self, data: "Union[Sequence[ModelProtocol], Sequence[RowMapping]]", total: "Optional[int]" = None, filters: "Union[Sequence[Union[StatementFilter, ColumnElement[bool]]], Sequence[StatementFilter], None]" = None, *, schema_type: "type[ModelDTOT]", ) -> "OffsetPagination[ModelDTOT]": ... def to_schema( self, data: "Union[ModelOrRowMappingT, Sequence[ModelOrRowMappingT], ModelProtocol, Sequence[ModelProtocol], RowMapping, Sequence[RowMapping]]", total: "Optional[int]" = None, filters: "Union[Sequence[Union[StatementFilter, ColumnElement[bool]]], Sequence[StatementFilter], None]" = None, *, schema_type: "Optional[type[ModelDTOT]]" = None, ) -> "Union[ModelOrRowMappingT, OffsetPagination[ModelOrRowMappingT], ModelDTOT, OffsetPagination[ModelDTOT]]": """Convert the object to a response schema. 
When `schema_type` is None, the model is returned with no conversion. Args: data: The return from one of the service calls. Type: :class:`~advanced_alchemy.repository.typing.ModelOrRowMappingT` total: The total number of rows in the data. filters: :class:`~advanced_alchemy.filters.StatementFilter`| :class:`sqlalchemy.sql.expression.ColumnElement` Collection of route filters. schema_type: :class:`~advanced_alchemy.service.typing.ModelDTOT` Optional schema type to convert the data to Returns: - :class:`~advanced_alchemy.base.ModelProtocol` | :class:`sqlalchemy.orm.RowMapping` | :class:`~advanced_alchemy.service.pagination.OffsetPagination` | :class:`msgspec.Struct` | :class:`pydantic.BaseModel` """ if filters is None: filters = [] if schema_type is None: if not isinstance(data, Sequence): return cast("ModelOrRowMappingT", data) # type: ignore[unreachable,unused-ignore] limit_offset = find_filter(LimitOffset, filters=filters) total = total or len(data) limit_offset = limit_offset if limit_offset is not None else LimitOffset(limit=len(data), offset=0) return OffsetPagination[ModelOrRowMappingT]( items=cast("Sequence[ModelOrRowMappingT]", data), limit=limit_offset.limit, offset=limit_offset.offset, total=total, ) if MSGSPEC_INSTALLED and issubclass(schema_type, Struct): if not isinstance(data, Sequence): return cast( "ModelDTOT", convert( obj=data, type=schema_type, from_attributes=True, dec_hook=partial( _default_msgspec_deserializer, type_decoders=DEFAULT_TYPE_DECODERS, ), ), ) limit_offset = find_filter(LimitOffset, filters=filters) total = total or len(data) limit_offset = limit_offset if limit_offset is not None else LimitOffset(limit=len(data), offset=0) return OffsetPagination[ModelDTOT]( items=convert( obj=data, type=list[schema_type], # type: ignore[valid-type] from_attributes=True, dec_hook=partial( _default_msgspec_deserializer, type_decoders=DEFAULT_TYPE_DECODERS, ), ), limit=limit_offset.limit, offset=limit_offset.offset, total=total, ) if 
PYDANTIC_INSTALLED and issubclass(schema_type, BaseModel): if not isinstance(data, Sequence): return cast( "ModelDTOT", get_type_adapter(schema_type).validate_python(data, from_attributes=True), ) limit_offset = find_filter(LimitOffset, filters=filters) total = total if total else len(data) limit_offset = limit_offset if limit_offset is not None else LimitOffset(limit=len(data), offset=0) return OffsetPagination[ModelDTOT]( items=get_type_adapter(list[schema_type]).validate_python(data, from_attributes=True), # type: ignore[valid-type] # pyright: ignore[reportUnknownArgumentType] limit=limit_offset.limit, offset=limit_offset.offset, total=total, ) if not MSGSPEC_INSTALLED and not PYDANTIC_INSTALLED: msg = "Either Msgspec or Pydantic must be installed to use schema conversion" raise AdvancedAlchemyError(msg) msg = "`schema_type` should be a valid Pydantic or Msgspec schema" raise AdvancedAlchemyError(msg) python-advanced-alchemy-1.0.1/advanced_alchemy/service/pagination.py000066400000000000000000000011531476663714600256530ustar00rootroot00000000000000from collections.abc import Sequence from dataclasses import dataclass from typing import Generic, TypeVar T = TypeVar("T") __all__ = ("OffsetPagination",) @dataclass class OffsetPagination(Generic[T]): """Container for data returned using limit/offset pagination.""" __slots__ = ("items", "limit", "offset", "total") items: Sequence[T] """List of data being sent as part of the response.""" limit: int """Maximal number of items to send.""" offset: int """Offset from the beginning of the query. Identical to an index. """ total: int """Total number of items.""" python-advanced-alchemy-1.0.1/advanced_alchemy/service/typing.py000066400000000000000000000263601476663714600250430ustar00rootroot00000000000000"""Service object implementation for SQLAlchemy. RepositoryService object is generic on the domain model type which should be a SQLAlchemy model. 
""" from functools import lru_cache from typing import ( TYPE_CHECKING, Annotated, Any, TypeVar, Union, cast, overload, ) from typing_extensions import TypeAlias, TypeGuard from advanced_alchemy.repository.typing import ModelT from advanced_alchemy.service._typing import ( LITESTAR_INSTALLED, MSGSPEC_INSTALLED, PYDANTIC_INSTALLED, UNSET, BaseModel, DTOData, FailFast, Struct, TypeAdapter, convert, ) if TYPE_CHECKING: from collections.abc import Sequence from advanced_alchemy.filters import StatementFilter PYDANTIC_USE_FAILFAST = False # leave permanently disabled for now T = TypeVar("T") FilterTypeT = TypeVar("FilterTypeT", bound="StatementFilter") """Type variable for filter types. :class:`~advanced_alchemy.filters.StatementFilter` """ SupportedSchemaModel: TypeAlias = Union[Struct, BaseModel] """Type alias for objects that support to_dict or model_dump methods.""" ModelDTOT = TypeVar("ModelDTOT", bound="SupportedSchemaModel") """Type variable for model DTOs. :class:`msgspec.Struct`|:class:`pydantic.BaseModel` """ PydanticOrMsgspecT = SupportedSchemaModel """Type alias for pydantic or msgspec models. :class:`msgspec.Struct` or :class:`pydantic.BaseModel` """ ModelDictT: TypeAlias = "Union[dict[str, Any], ModelT, SupportedSchemaModel, DTOData[ModelT]]" """Type alias for model dictionaries. Represents: - :type:`dict[str, Any]` | :class:`~advanced_alchemy.base.ModelProtocol` | :class:`msgspec.Struct` | :class:`pydantic.BaseModel` | :class:`litestar.dto.data_structures.DTOData` | :class:`~advanced_alchemy.base.ModelProtocol` """ ModelDictListT: TypeAlias = "Sequence[Union[dict[str, Any], ModelT, SupportedSchemaModel]]" """Type alias for model dictionary lists. 
A list or sequence of any of the following: - :type:`Sequence`[:type:`dict[str, Any]` | :class:`~advanced_alchemy.base.ModelProtocol` | :class:`msgspec.Struct` | :class:`pydantic.BaseModel`] """ BulkModelDictT: TypeAlias = ( "Union[Sequence[Union[dict[str, Any], ModelT, SupportedSchemaModel]], DTOData[list[ModelT]]]" ) """Type alias for bulk model dictionaries. :type:`Sequence`[ :type:`dict[str, Any]` | :class:`~advanced_alchemy.base.ModelProtocol` | :class:`msgspec.Struct` :class:`pydantic.BaseModel`] | :class:`litestar.dto.data_structures.DTOData` """ @lru_cache(typed=True) def get_type_adapter(f: type[T]) -> TypeAdapter[T]: """Caches and returns a pydantic type adapter. Args: f: Type to create a type adapter for. Returns: :class:`pydantic.TypeAdapter`[:class:`typing.TypeVar`[T]] """ if PYDANTIC_USE_FAILFAST: return TypeAdapter( Annotated[f, FailFast()], ) return TypeAdapter(f) def is_dto_data(v: Any) -> TypeGuard[DTOData[Any]]: """Check if a value is a Litestar DTOData object. Args: v: Value to check. Returns: bool """ return LITESTAR_INSTALLED and isinstance(v, DTOData) def is_pydantic_model(v: Any) -> TypeGuard[BaseModel]: """Check if a value is a pydantic model. Args: v: Value to check. Returns: bool """ return PYDANTIC_INSTALLED and isinstance(v, BaseModel) def is_msgspec_struct(v: Any) -> TypeGuard[Struct]: """Check if a value is a msgspec struct. Args: v: Value to check. 
Returns: bool """ return MSGSPEC_INSTALLED and isinstance(v, Struct) def is_dataclass(obj: Any) -> TypeGuard[Any]: """Check if an object is a dataclass.""" return hasattr(obj, "__dataclass_fields__") def is_dataclass_with_field(obj: Any, field_name: str) -> TypeGuard[object]: # Can't specify dataclass type directly """Check if an object is a dataclass and has a specific field.""" return is_dataclass(obj) and hasattr(obj, field_name) def is_dataclass_without_field(obj: Any, field_name: str) -> TypeGuard[object]: """Check if an object is a dataclass and does not have a specific field.""" return is_dataclass(obj) and not hasattr(obj, field_name) def is_dict(v: Any) -> TypeGuard[dict[str, Any]]: """Check if a value is a dictionary. Args: v: Value to check. Returns: bool """ return isinstance(v, dict) def is_dict_with_field(v: Any, field_name: str) -> TypeGuard[dict[str, Any]]: """Check if a dictionary has a specific field. Args: v: Value to check. field_name: Field name to check for. Returns: bool """ return is_dict(v) and field_name in v def is_dict_without_field(v: Any, field_name: str) -> TypeGuard[dict[str, Any]]: """Check if a dictionary does not have a specific field. Args: v: Value to check. field_name: Field name to check for. Returns: bool """ return is_dict(v) and field_name not in v def is_pydantic_model_with_field(v: Any, field_name: str) -> TypeGuard[BaseModel]: """Check if a pydantic model has a specific field. Args: v: Value to check. field_name: Field name to check for. Returns: bool """ return is_pydantic_model(v) and hasattr(v, field_name) def is_pydantic_model_without_field(v: Any, field_name: str) -> TypeGuard[BaseModel]: """Check if a pydantic model does not have a specific field. Args: v: Value to check. field_name: Field name to check for. Returns: bool """ return is_pydantic_model(v) and not hasattr(v, field_name) def is_msgspec_struct_with_field(v: Any, field_name: str) -> TypeGuard[Struct]: """Check if a msgspec struct has a specific field. 
Args: v: Value to check. field_name: Field name to check for. Returns: bool """ return is_msgspec_struct(v) and hasattr(v, field_name) def is_msgspec_struct_without_field(v: Any, field_name: str) -> "TypeGuard[Struct]": """Check if a msgspec struct does not have a specific field. Args: v: Value to check. field_name: Field name to check for. Returns: bool """ return is_msgspec_struct(v) and not hasattr(v, field_name) def is_schema(v: Any) -> "TypeGuard[SupportedSchemaModel]": """Check if a value is a msgspec Struct or Pydantic model. Args: v: Value to check. Returns: bool """ return is_msgspec_struct(v) or is_pydantic_model(v) def is_schema_or_dict(v: Any) -> "TypeGuard[Union[SupportedSchemaModel, dict[str, Any]]]": """Check if a value is a msgspec Struct, Pydantic model, or dict. Args: v: Value to check. Returns: bool """ return is_schema(v) or is_dict(v) def is_schema_with_field(v: Any, field_name: str) -> "TypeGuard[SupportedSchemaModel]": """Check if a value is a msgspec Struct or Pydantic model with a specific field. Args: v: Value to check. field_name: Field name to check for. Returns: bool """ return is_msgspec_struct_with_field(v, field_name) or is_pydantic_model_with_field(v, field_name) def is_schema_without_field(v: Any, field_name: str) -> "TypeGuard[SupportedSchemaModel]": """Check if a value is a msgspec Struct or Pydantic model without a specific field. Args: v: Value to check. field_name: Field name to check for. Returns: bool """ return not is_schema_with_field(v, field_name) def is_schema_or_dict_with_field(v: Any, field_name: str) -> "TypeGuard[Union[SupportedSchemaModel, dict[str, Any]]]": """Check if a value is a msgspec Struct, Pydantic model, or dict with a specific field. Args: v: Value to check. field_name: Field name to check for. 
Returns: bool """ return is_schema_with_field(v, field_name) or is_dict_with_field(v, field_name) def is_schema_or_dict_without_field( v: Any, field_name: str ) -> "TypeGuard[Union[SupportedSchemaModel, dict[str, Any]]]": """Check if a value is a msgspec Struct, Pydantic model, or dict without a specific field. Args: v: Value to check. field_name: Field name to check for. Returns: bool """ return not is_schema_or_dict_with_field(v, field_name) @overload def schema_dump( data: "Union[dict[str, Any], SupportedSchemaModel, DTOData[ModelT]]", exclude_unset: bool = True ) -> "Union[dict[str, Any], ModelT]": ... @overload def schema_dump(data: ModelT, exclude_unset: bool = True) -> ModelT: ... def schema_dump( # noqa: PLR0911 data: "Union[dict[str, Any], ModelT, SupportedSchemaModel, DTOData[ModelT]]", exclude_unset: bool = True ) -> "Union[dict[str, Any], ModelT]": """Dump a data object to a dictionary. Args: data: :type:`dict[str, Any]` | :class:`advanced_alchemy.base.ModelProtocol` | :class:`msgspec.Struct` | :class:`pydantic.BaseModel` | :class:`litestar.dto.data_structures.DTOData[ModelT]` exclude_unset: :type:`bool` Whether to exclude unset values. 
Returns: Union[:type: dict[str, Any], :class:`~advanced_alchemy.base.ModelProtocol`] """ if is_dict(data): return data if is_pydantic_model(data): return data.model_dump(exclude_unset=exclude_unset) if is_msgspec_struct(data): if exclude_unset: return {f: val for f in data.__struct_fields__ if (val := getattr(data, f, None)) != UNSET} return {f: getattr(data, f, None) for f in data.__struct_fields__} if is_dto_data(data): return cast("dict[str, Any]", data.as_builtins()) if hasattr(data, "__dict__"): return data.__dict__ return cast("ModelT", data) # type: ignore[no-return-any] __all__ = ( "LITESTAR_INSTALLED", "MSGSPEC_INSTALLED", "PYDANTIC_INSTALLED", "PYDANTIC_USE_FAILFAST", "UNSET", "BaseModel", "BulkModelDictT", "DTOData", "FailFast", "FilterTypeT", "ModelDTOT", "ModelDictListT", "ModelDictT", "PydanticOrMsgspecT", "Struct", "SupportedSchemaModel", "TypeAdapter", "UnsetType", "convert", "get_type_adapter", "is_dataclass", "is_dataclass_with_field", "is_dataclass_without_field", "is_dict", "is_dict_with_field", "is_dict_without_field", "is_dto_data", "is_msgspec_struct", "is_msgspec_struct_with_field", "is_msgspec_struct_without_field", "is_pydantic_model", "is_pydantic_model_with_field", "is_pydantic_model_without_field", "is_schema", "is_schema_or_dict", "is_schema_or_dict_with_field", "is_schema_or_dict_without_field", "is_schema_with_field", "is_schema_without_field", "schema_dump", ) if TYPE_CHECKING: if not PYDANTIC_INSTALLED: from advanced_alchemy.service._typing import BaseModel, FailFast, TypeAdapter else: from pydantic import BaseModel, FailFast, TypeAdapter # type: ignore[assignment] # noqa: TC004 if not MSGSPEC_INSTALLED: from advanced_alchemy.service._typing import UNSET, Struct, UnsetType, convert else: from msgspec import UNSET, Struct, UnsetType, convert # type: ignore[assignment] # noqa: TC004 if not LITESTAR_INSTALLED: from advanced_alchemy.service._typing import DTOData else: from litestar.dto import DTOData # type: ignore[assignment] # noqa: 
TC004 python-advanced-alchemy-1.0.1/advanced_alchemy/types/000077500000000000000000000000001476663714600226545ustar00rootroot00000000000000python-advanced-alchemy-1.0.1/advanced_alchemy/types/__init__.py000066400000000000000000000012471476663714600247710ustar00rootroot00000000000000from advanced_alchemy.types.datetime import DateTimeUTC from advanced_alchemy.types.encrypted_string import ( EncryptedString, EncryptedText, EncryptionBackend, FernetBackend, PGCryptoBackend, ) from advanced_alchemy.types.guid import GUID, NANOID_INSTALLED, UUID_UTILS_INSTALLED from advanced_alchemy.types.identity import BigIntIdentity from advanced_alchemy.types.json import ORA_JSONB, JsonB __all__ = ( "GUID", "NANOID_INSTALLED", "ORA_JSONB", "UUID_UTILS_INSTALLED", "BigIntIdentity", "DateTimeUTC", "EncryptedString", "EncryptedText", "EncryptionBackend", "FernetBackend", "JsonB", "PGCryptoBackend", ) python-advanced-alchemy-1.0.1/advanced_alchemy/types/datetime.py000066400000000000000000000022251476663714600250230ustar00rootroot00000000000000# ruff: noqa: FA100 import datetime from typing import Optional from sqlalchemy import DateTime from sqlalchemy.engine import Dialect from sqlalchemy.types import TypeDecorator __all__ = ("DateTimeUTC",) class DateTimeUTC(TypeDecorator[datetime.datetime]): """Timezone Aware DateTime. Ensure UTC is stored in the database and that TZ aware dates are returned for all dialects. 
""" impl = DateTime(timezone=True) cache_ok = True @property def python_type(self) -> type[datetime.datetime]: return datetime.datetime def process_bind_param(self, value: Optional[datetime.datetime], dialect: Dialect) -> Optional[datetime.datetime]: if value is None: return value if not value.tzinfo: msg = "tzinfo is required" raise TypeError(msg) return value.astimezone(datetime.timezone.utc) def process_result_value(self, value: Optional[datetime.datetime], dialect: Dialect) -> Optional[datetime.datetime]: if value is None: return value if value.tzinfo is None: return value.replace(tzinfo=datetime.timezone.utc) return value python-advanced-alchemy-1.0.1/advanced_alchemy/types/encrypted_string.py000066400000000000000000000313711476663714600266160ustar00rootroot00000000000000import abc import base64 import contextlib import os from typing import TYPE_CHECKING, Any, Callable, Optional, Union from sqlalchemy import String, Text, TypeDecorator from sqlalchemy import func as sql_func from advanced_alchemy.exceptions import IntegrityError if TYPE_CHECKING: from sqlalchemy.engine import Dialect cryptography = None # type: ignore[var-annotated,unused-ignore] with contextlib.suppress(ImportError): from cryptography.fernet import Fernet from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives import hashes __all__ = ("EncryptedString", "EncryptedText", "EncryptionBackend", "FernetBackend", "PGCryptoBackend") class EncryptionBackend(abc.ABC): """Abstract base class for encryption backends. This class defines the interface that all encryption backends must implement. Concrete implementations should provide the actual encryption/decryption logic. Attributes: passphrase (bytes): The encryption passphrase used by the backend. """ def mount_vault(self, key: "Union[str, bytes]") -> None: """Mounts the vault with the provided encryption key. Args: key (str | bytes): The encryption key used to initialize the backend. 
""" if isinstance(key, str): key = key.encode() @abc.abstractmethod def init_engine(self, key: "Union[bytes, str]") -> None: # pragma: nocover """Initializes the encryption engine with the provided key. Args: key (bytes | str): The encryption key. Raises: NotImplementedError: If the method is not implemented by the subclass. """ @abc.abstractmethod def encrypt(self, value: Any) -> str: # pragma: nocover """Encrypts the given value. Args: value (Any): The value to encrypt. Returns: str: The encrypted value. Raises: NotImplementedError: If the method is not implemented by the subclass. """ @abc.abstractmethod def decrypt(self, value: Any) -> str: # pragma: nocover """Decrypts the given value. Args: value (Any): The value to decrypt. Returns: str: The decrypted value. Raises: NotImplementedError: If the method is not implemented by the subclass. """ class PGCryptoBackend(EncryptionBackend): """PostgreSQL pgcrypto-based encryption backend. This backend uses PostgreSQL's pgcrypto extension for encryption/decryption operations. Requires the pgcrypto extension to be installed in the database. Attributes: passphrase (bytes): The base64-encoded passphrase used for encryption and decryption. """ def init_engine(self, key: "Union[bytes, str]") -> None: """Initializes the pgcrypto engine with the provided key. Args: key (bytes | str): The encryption key. """ if isinstance(key, str): key = key.encode() self.passphrase = base64.urlsafe_b64encode(key) def encrypt(self, value: Any) -> str: """Encrypts the given value using pgcrypto. Args: value (Any): The value to encrypt. Returns: str: The encrypted value. Raises: TypeError: If the value is not a string. """ if not isinstance(value, str): # pragma: nocover value = repr(value) value = value.encode() return sql_func.pgp_sym_encrypt(value, self.passphrase) # type: ignore[return-value] def decrypt(self, value: Any) -> str: """Decrypts the given value using pgcrypto. Args: value (Any): The value to decrypt. 
Returns: str: The decrypted value. Raises: TypeError: If the value is not a string. """ if not isinstance(value, str): # pragma: nocover value = str(value) return sql_func.pgp_sym_decrypt(value, self.passphrase) # type: ignore[return-value] class FernetBackend(EncryptionBackend): """Fernet-based encryption backend. This backend uses the Python cryptography library's Fernet implementation for encryption/decryption operations. Provides symmetric encryption with built-in rotation support. Attributes: key (bytes): The base64-encoded key used for encryption and decryption. fernet (cryptography.fernet.Fernet): The Fernet instance used for encryption/decryption. """ def mount_vault(self, key: "Union[str, bytes]") -> None: """Mounts the vault with the provided encryption key. This method hashes the key using SHA256 before initializing the engine. Args: key (str | bytes): The encryption key. """ if isinstance(key, str): key = key.encode() digest = hashes.Hash(hashes.SHA256(), backend=default_backend()) # pyright: ignore[reportPossiblyUnboundVariable] digest.update(key) engine_key = digest.finalize() self.init_engine(engine_key) def init_engine(self, key: "Union[bytes, str]") -> None: """Initializes the Fernet engine with the provided key. Args: key (bytes | str): The encryption key. """ if isinstance(key, str): key = key.encode() self.key = base64.urlsafe_b64encode(key) self.fernet = Fernet(self.key) # pyright: ignore[reportPossiblyUnboundVariable] def encrypt(self, value: Any) -> str: """Encrypts the given value using Fernet. Args: value (Any): The value to encrypt. Returns: str: The encrypted value. Raises: TypeError: If the value is not a string. cryptography.fernet.InvalidToken: If encryption fails. """ if not isinstance(value, str): value = repr(value) value = value.encode() encrypted = self.fernet.encrypt(value) return encrypted.decode("utf-8") def decrypt(self, value: Any) -> str: """Decrypts the given value using Fernet. Args: value (Any): The value to decrypt. 
Returns: str: The decrypted value. Raises: TypeError: If the value is not a string. cryptography.fernet.InvalidToken: If decryption fails. """ if not isinstance(value, str): # pragma: nocover value = str(value) decrypted: Union[str, bytes] = self.fernet.decrypt(value.encode()) if not isinstance(decrypted, str): decrypted = decrypted.decode("utf-8") # pyright: ignore[reportAttributeAccessIssue] return decrypted DEFAULT_ENCRYPTION_KEY = os.urandom(32) class EncryptedString(TypeDecorator[str]): """SQLAlchemy TypeDecorator for storing encrypted string values in a database. This type provides transparent encryption/decryption of string values using the specified backend. It extends :class:`sqlalchemy.types.TypeDecorator` and implements String as its underlying type. Args: key (str | bytes | Callable[[], str | bytes] | None): The encryption key. Can be a string, bytes, or callable returning either. Defaults to os.urandom(32). backend (Type[EncryptionBackend] | None): The encryption backend class to use. Defaults to FernetBackend. length (int | None): The length of the unencrypted string. This is used for documentation and validation purposes only, as encrypted strings will be longer. **kwargs (Any | None): Additional arguments passed to the underlying String type. Attributes: key (str | bytes | Callable[[], str | bytes]): The encryption key. backend (EncryptionBackend): The encryption backend instance. length (int | None): The unencrypted string length. """ impl = String cache_ok = True def __init__( self, key: "Union[str, bytes, Callable[[], Union[str, bytes]]]" = DEFAULT_ENCRYPTION_KEY, backend: "type[EncryptionBackend]" = FernetBackend, length: "Optional[int]" = None, **kwargs: Any, ) -> None: """Initializes the EncryptedString TypeDecorator. Args: key (str | bytes | Callable[[], str | bytes] | None): The encryption key. Can be a string, bytes, or callable returning either. Defaults to os.urandom(32). 
backend (Type[EncryptionBackend] | None): The encryption backend class to use. Defaults to FernetBackend. length (int | None): The length of the unencrypted string. This is used for documentation and validation purposes only. **kwargs (Any | None): Additional arguments passed to the underlying String type. """ super().__init__() self.key = key self.backend = backend() self.length = length @property def python_type(self) -> type[str]: """Returns the Python type for this type decorator. Returns: Type[str]: The Python string type. """ return str def load_dialect_impl(self, dialect: "Dialect") -> Any: """Loads the appropriate dialect implementation based on the database dialect. Note: The actual column length will be larger than the specified length due to encryption overhead. For most encryption methods, the encrypted string will be approximately 1.35x longer than the original. Args: dialect (Dialect): The SQLAlchemy dialect. Returns: Any: The dialect-specific type descriptor. """ if dialect.name in {"mysql", "mariadb"}: # For MySQL/MariaDB, always use Text to avoid length limitations return dialect.type_descriptor(Text()) if dialect.name == "oracle": # Oracle has a 4000-byte limit for VARCHAR2 (by default) return dialect.type_descriptor(String(length=4000)) return dialect.type_descriptor(String()) def process_bind_param(self, value: Any, dialect: "Dialect") -> "Union[str, None]": """Processes the value before binding it to the SQL statement. This method encrypts the value using the specified backend and validates length if specified. Args: value (Any): The value to process. dialect (Dialect): The SQLAlchemy dialect. Returns: str | None: The encrypted value or None if the input is None. Raises: ValueError: If the value exceeds the specified length. 
""" if value is None: return value # Validate length if specified if self.length is not None and len(str(value)) > self.length: msg = f"Unencrypted value exceeds maximum unencrypted length of {self.length}" raise IntegrityError(msg) self.mount_vault() return self.backend.encrypt(value) def process_result_value(self, value: Any, dialect: "Dialect") -> "Union[str, None]": """Processes the value after retrieving it from the database. This method decrypts the value using the specified backend. Args: value (Any): The value to process. dialect (Dialect): The SQLAlchemy dialect. Returns: str | None: The decrypted value or None if the input is None. """ if value is None: return value self.mount_vault() return self.backend.decrypt(value) def mount_vault(self) -> None: """Mounts the vault with the encryption key. If the key is callable, it is called to retrieve the key. Otherwise, the key is used directly. """ key = self.key() if callable(self.key) else self.key self.backend.mount_vault(key) class EncryptedText(EncryptedString): """SQLAlchemy TypeDecorator for storing encrypted text/CLOB values in a database. This type provides transparent encryption/decryption of text values using the specified backend. It extends :class:`EncryptedString` and implements Text as its underlying type. This is suitable for storing larger encrypted text content compared to EncryptedString. Args: key (str | bytes | Callable[[], str | bytes] | None): The encryption key. Can be a string, bytes, or callable returning either. Defaults to os.urandom(32). backend (Type[EncryptionBackend] | None): The encryption backend class to use. Defaults to FernetBackend. **kwargs (Any | None): Additional arguments passed to the underlying String type. """ impl = Text cache_ok = True def load_dialect_impl(self, dialect: "Dialect") -> Any: """Loads the appropriate dialect implementation for Text type. Args: dialect (Dialect): The SQLAlchemy dialect. Returns: Any: The dialect-specific Text type descriptor. 
""" return dialect.type_descriptor(Text()) python-advanced-alchemy-1.0.1/advanced_alchemy/types/guid.py000066400000000000000000000064531476663714600241660ustar00rootroot00000000000000# ruff: noqa: FA100 from base64 import b64decode from importlib.util import find_spec from typing import Any, Optional, Union, cast from uuid import UUID from sqlalchemy.dialects.mssql import UNIQUEIDENTIFIER as MSSQL_UNIQUEIDENTIFIER from sqlalchemy.dialects.oracle import RAW as ORA_RAW from sqlalchemy.dialects.postgresql import UUID as PG_UUID from sqlalchemy.engine import Dialect from sqlalchemy.types import BINARY, CHAR, TypeDecorator from typing_extensions import Buffer __all__ = ("GUID",) UUID_UTILS_INSTALLED = find_spec("uuid_utils") NANOID_INSTALLED = find_spec("fastnanoid") class GUID(TypeDecorator[UUID]): """Platform-independent GUID type. Uses PostgreSQL's UUID type (Postgres, DuckDB, Cockroach), MSSQL's UNIQUEIDENTIFIER type, Oracle's RAW(16) type, otherwise uses BINARY(16) or CHAR(32), storing as stringified hex values. 
Will accept stringified UUIDs as a hexstring or an actual UUID """ impl = BINARY(16) cache_ok = True @property def python_type(self) -> type[UUID]: return UUID def __init__(self, *args: Any, binary: bool = True, **kwargs: Any) -> None: self.binary = binary def load_dialect_impl(self, dialect: Dialect) -> Any: if dialect.name in {"postgresql", "duckdb", "cockroachdb"}: return dialect.type_descriptor(PG_UUID()) if dialect.name == "oracle": return dialect.type_descriptor(ORA_RAW(16)) if dialect.name == "mssql": return dialect.type_descriptor(MSSQL_UNIQUEIDENTIFIER()) if self.binary: return dialect.type_descriptor(BINARY(16)) return dialect.type_descriptor(CHAR(32)) def process_bind_param( self, value: Optional[Union[bytes, str, UUID]], dialect: Dialect, ) -> Optional[Union[bytes, str]]: if value is None: return value if dialect.name in {"postgresql", "duckdb", "cockroachdb", "mssql"}: return str(value) value = self.to_uuid(value) if value is None: return value if dialect.name in {"oracle", "spanner+spanner"}: return value.bytes return value.bytes if self.binary else value.hex def process_result_value( self, value: Optional[Union[bytes, str, UUID]], dialect: Dialect, ) -> Optional[UUID]: if value is None: return value if value.__class__.__name__ == "UUID": return cast("UUID", value) if dialect.name == "spanner+spanner": return UUID(bytes=b64decode(cast("str | Buffer", value))) if self.binary: return UUID(bytes=cast("bytes", value)) return UUID(hex=cast("str", value)) @staticmethod def to_uuid(value: Any) -> Optional[UUID]: if value.__class__.__name__ == "UUID" or value is None: return cast("Optional[UUID]", value) try: value = UUID(hex=value) except (TypeError, ValueError): value = UUID(bytes=value) return cast("Optional[UUID]", value) def compare_values(self, x: Any, y: Any) -> bool: """Compare two values for equality.""" if x.__class__.__name__ == "UUID" and y.__class__.__name__ == "UUID": return cast("bool", x.bytes == y.bytes) return cast("bool", x == y) 
python-advanced-alchemy-1.0.1/advanced_alchemy/types/identity.py000066400000000000000000000003101476663714600250510ustar00rootroot00000000000000from sqlalchemy.types import BigInteger, Integer BigIntIdentity = BigInteger().with_variant(Integer, "sqlite") """A ``BigInteger`` variant that reverts to an ``Integer`` for unsupported variants.""" python-advanced-alchemy-1.0.1/advanced_alchemy/types/json.py000066400000000000000000000061601476663714600242020ustar00rootroot00000000000000# ruff: noqa: FA100 from typing import Any, Optional, Union, cast from sqlalchemy import text, util from sqlalchemy.dialects.oracle import BLOB as ORA_BLOB from sqlalchemy.dialects.postgresql import JSONB as PG_JSONB from sqlalchemy.engine import Dialect from sqlalchemy.types import JSON as _JSON from sqlalchemy.types import SchemaType, TypeDecorator, TypeEngine from advanced_alchemy._serialization import decode_json, encode_json __all__ = ("ORA_JSONB",) class ORA_JSONB(TypeDecorator[dict[str, Any]], SchemaType): # noqa: N801 """Oracle Binary JSON type. 
JsonB = _JSON().with_variant(PG_JSONB, "postgresql").with_variant(ORA_JSONB, "oracle") """ impl = ORA_BLOB cache_ok = True @property def python_type(self) -> type[dict[str, Any]]: return dict def __init__(self, *args: Any, **kwargs: Any) -> None: """Initialize JSON type""" self.name = kwargs.pop("name", None) self.oracle_strict = kwargs.pop("oracle_strict", True) def coerce_compared_value(self, op: Any, value: Any) -> Any: return self.impl.coerce_compared_value(op=op, value=value) # type: ignore[no-untyped-call, call-arg] def load_dialect_impl(self, dialect: Dialect) -> TypeEngine[Any]: return dialect.type_descriptor(ORA_BLOB()) def process_bind_param(self, value: Any, dialect: Dialect) -> Optional[Any]: return value if value is None else encode_json(value) def process_result_value(self, value: Union[bytes, None], dialect: Dialect) -> Optional[Any]: if dialect.oracledb_ver < (2,): # type: ignore[attr-defined] return value if value is None else decode_json(value) return value def _should_create_constraint(self, compiler: Any, **kw: Any) -> bool: return cast("bool", compiler.dialect.name == "oracle") def _variant_mapping_for_set_table(self, column: Any) -> Optional[dict[str, Any]]: if column.type._variant_mapping: # noqa: SLF001 variant_mapping = dict(column.type._variant_mapping) # noqa: SLF001 variant_mapping["_default"] = column.type else: variant_mapping = None return variant_mapping @util.preload_module("sqlalchemy.sql.schema") def _set_table(self, column: Any, table: Any) -> None: schema = util.preloaded.sql_schema variant_mapping = self._variant_mapping_for_set_table(column) constraint_options = "(strict)" if self.oracle_strict else "" sqltext = text(f"{column.name} is json {constraint_options}") e = schema.CheckConstraint( sqltext, name=f"{column.name}_is_json", _create_rule=util.portable_instancemethod( # type: ignore[no-untyped-call] self._should_create_constraint, {"variant_mapping": variant_mapping}, ), _type_bound=True, ) table.append_constraint(e) JsonB 
= ( _JSON().with_variant(PG_JSONB, "postgresql").with_variant(ORA_JSONB, "oracle").with_variant(PG_JSONB, "cockroachdb") ) """A JSON type that uses native ``JSONB`` where possible and ``Binary`` or ``Blob`` as an alternative. """ python-advanced-alchemy-1.0.1/advanced_alchemy/utils/000077500000000000000000000000001476663714600226505ustar00rootroot00000000000000python-advanced-alchemy-1.0.1/advanced_alchemy/utils/__init__.py000066400000000000000000000000001476663714600247470ustar00rootroot00000000000000python-advanced-alchemy-1.0.1/advanced_alchemy/utils/dataclass.py000066400000000000000000000121241476663714600251610ustar00rootroot00000000000000from dataclasses import Field, fields, is_dataclass from inspect import isclass from typing import TYPE_CHECKING, Any, ClassVar, Optional, Protocol, final, runtime_checkable if TYPE_CHECKING: from collections.abc import Iterable from collections.abc import Set as AbstractSet from typing_extensions import TypeAlias, TypeGuard __all__ = ( "DataclassProtocol", "Empty", "EmptyType", "extract_dataclass_fields", "extract_dataclass_items", "is_dataclass_class", "is_dataclass_instance", "simple_asdict", ) @final class Empty: """A sentinel class used as placeholder.""" EmptyType: "TypeAlias" = type[Empty] """Type alias for the :class:`~advanced_alchemy.utils.dataclass.Empty` sentinel class.""" @runtime_checkable class DataclassProtocol(Protocol): """Protocol for instance checking dataclasses""" __dataclass_fields__: "ClassVar[dict[str, Any]]" def extract_dataclass_fields( dt: "DataclassProtocol", exclude_none: bool = False, exclude_empty: bool = False, include: "Optional[AbstractSet[str]]" = None, exclude: "Optional[AbstractSet[str]]" = None, ) -> "tuple[Field[Any], ...]": """Extract dataclass fields. Args: dt: :class:`DataclassProtocol` instance. exclude_none: Whether to exclude None values. exclude_empty: Whether to exclude Empty values. include: An iterable of fields to include. exclude: An iterable of fields to exclude. 
Returns: A tuple of dataclass fields. """ include = include or set() exclude = exclude or set() if common := (include & exclude): msg = f"Fields {common} are both included and excluded." raise ValueError(msg) dataclass_fields: Iterable[Field[Any]] = fields(dt) if exclude_none: dataclass_fields = (field for field in dataclass_fields if getattr(dt, field.name) is not None) if exclude_empty: dataclass_fields = (field for field in dataclass_fields if getattr(dt, field.name) is not Empty) if include: dataclass_fields = (field for field in dataclass_fields if field.name in include) if exclude: dataclass_fields = (field for field in dataclass_fields if field.name not in exclude) return tuple(dataclass_fields) def extract_dataclass_items( dt: "DataclassProtocol", exclude_none: bool = False, exclude_empty: bool = False, include: "Optional[AbstractSet[str]]" = None, exclude: "Optional[AbstractSet[str]]" = None, ) -> tuple[tuple[str, Any], ...]: """Extract dataclass name, value pairs. Unlike the 'asdict' method exports by the stdlib, this function does not pickle values. Args: dt: :class:`DataclassProtocol` instance. exclude_none: Whether to exclude None values. exclude_empty: Whether to exclude Empty values. include: An iterable of fields to include. exclude: An iterable of fields to exclude. Returns: A tuple of key/value pairs. """ dataclass_fields = extract_dataclass_fields(dt, exclude_none, exclude_empty, include, exclude) return tuple((field.name, getattr(dt, field.name)) for field in dataclass_fields) def simple_asdict( obj: "DataclassProtocol", exclude_none: bool = False, exclude_empty: bool = False, convert_nested: bool = True, exclude: "Optional[AbstractSet[str]]" = None, ) -> "dict[str, Any]": """Convert a dataclass to a dictionary. This method has important differences to the standard library version: - it does not deepcopy values - it does not recurse into collections Args: obj: :class:`DataclassProtocol` instance. exclude_none: Whether to exclude None values. 
exclude_empty: Whether to exclude Empty values. convert_nested: Whether to recursively convert nested dataclasses. exclude: An iterable of fields to exclude. Returns: A dictionary of key/value pairs. """ ret: dict[str, Any] = {} for field in extract_dataclass_fields(obj, exclude_none, exclude_empty, exclude=exclude): value = getattr(obj, field.name) if is_dataclass_instance(value) and convert_nested: ret[field.name] = simple_asdict(value, exclude_none, exclude_empty) else: ret[field.name] = getattr(obj, field.name) return ret def is_dataclass_instance(obj: Any) -> "TypeGuard[DataclassProtocol]": """Check if an object is a dataclass instance. Args: obj: An object to check. Returns: True if the object is a dataclass instance. """ return hasattr(type(obj), "__dataclass_fields__") # pyright: ignore[reportUnknownArgumentType] def is_dataclass_class(annotation: Any) -> "TypeGuard[type[DataclassProtocol]]": """Wrap :func:`is_dataclass ` in a :data:`typing.TypeGuard`. Args: annotation: tested to determine if instance or type of :class:`dataclasses.dataclass`. Returns: ``True`` if instance or type of ``dataclass``. 
""" try: return isclass(annotation) and is_dataclass(annotation) except TypeError: # pragma: no cover return False python-advanced-alchemy-1.0.1/advanced_alchemy/utils/deprecation.py000066400000000000000000000075071476663714600255300ustar00rootroot00000000000000import inspect from functools import wraps from typing import Callable, Literal, Optional from warnings import warn from typing_extensions import ParamSpec, TypeVar __all__ = ("deprecated", "warn_deprecation") T = TypeVar("T") P = ParamSpec("P") DeprecatedKind = Literal["function", "method", "classmethod", "attribute", "property", "class", "parameter", "import"] def warn_deprecation( version: str, deprecated_name: str, kind: DeprecatedKind, *, removal_in: Optional[str] = None, alternative: Optional[str] = None, info: Optional[str] = None, pending: bool = False, ) -> None: """Warn about a call to a (soon to be) deprecated function. Args: version: Advanced Alchemy version where the deprecation will occur deprecated_name: Name of the deprecated function removal_in: Advanced Alchemy version where the deprecated function will be removed alternative: Name of a function that should be used instead info: Additional information pending: Use :class:`warnings.PendingDeprecationWarning` instead of :class:`warnings.DeprecationWarning` kind: Type of the deprecated thing """ parts = [] if kind == "import": access_type = "Import of" elif kind in {"function", "method"}: access_type = "Call to" else: access_type = "Use of" if pending: parts.append(f"{access_type} {kind} awaiting deprecation {deprecated_name!r}") # pyright: ignore[reportUnknownMemberType] else: parts.append(f"{access_type} deprecated {kind} {deprecated_name!r}") # pyright: ignore[reportUnknownMemberType] parts.extend( # pyright: ignore[reportUnknownMemberType] ( f"Deprecated in advanced-alchemy {version}", f"This {kind} will be removed in {removal_in or 'the next major version'}", ), ) if alternative: parts.append(f"Use {alternative!r} instead") # pyright: 
ignore[reportUnknownMemberType] if info: parts.append(info) # pyright: ignore[reportUnknownMemberType] text = ". ".join(parts) # pyright: ignore[reportUnknownArgumentType] warning_class = PendingDeprecationWarning if pending else DeprecationWarning warn(text, warning_class, stacklevel=2) def deprecated( version: str, *, removal_in: Optional[str] = None, alternative: Optional[str] = None, info: Optional[str] = None, pending: bool = False, kind: Optional[Literal["function", "method", "classmethod", "property"]] = None, ) -> Callable[[Callable[P, T]], Callable[P, T]]: """Create a decorator wrapping a function, method or property with a warning call about a (pending) deprecation. Args: version: Advanced Alchemy version where the deprecation will occur removal_in: Advanced Alchemy version where the deprecated function will be removed alternative: Name of a function that should be used instead info: Additional information pending: Use :class:`warnings.PendingDeprecationWarning` instead of :class:`warnings.DeprecationWarning` kind: Type of the deprecated callable. 
If ``None``, will use ``inspect`` to figure out if it's a function or method Returns: A decorator wrapping the function call with a warning """ def decorator(func: Callable[P, T]) -> Callable[P, T]: @wraps(func) def wrapped(*args: P.args, **kwargs: P.kwargs) -> T: warn_deprecation( version=version, deprecated_name=func.__name__, info=info, alternative=alternative, pending=pending, removal_in=removal_in, kind=kind or ("method" if inspect.ismethod(func) else "function"), ) return func(*args, **kwargs) return wrapped return decorator python-advanced-alchemy-1.0.1/advanced_alchemy/utils/fixtures.py000066400000000000000000000042311476663714600250730ustar00rootroot00000000000000from typing import TYPE_CHECKING, Any, Union from advanced_alchemy._serialization import decode_json from advanced_alchemy.exceptions import MissingDependencyError if TYPE_CHECKING: from pathlib import Path from anyio import Path as AsyncPath __all__ = ("open_fixture", "open_fixture_async") def open_fixture(fixtures_path: "Union[Path, AsyncPath]", fixture_name: str) -> Any: """Loads JSON file with the specified fixture name Args: fixtures_path: :class:`pathlib.Path` | :class:`anyio.Path` The path to look for fixtures fixture_name (str): The fixture name to load. Raises: :class:`FileNotFoundError`: Fixtures not found. Returns: Any: The parsed JSON data """ from pathlib import Path fixture = Path(fixtures_path / f"{fixture_name}.json") if fixture.exists(): with fixture.open(mode="r", encoding="utf-8") as f: f_data = f.read() return decode_json(f_data) msg = f"Could not find the {fixture_name} fixture" raise FileNotFoundError(msg) async def open_fixture_async(fixtures_path: "Union[Path, AsyncPath]", fixture_name: str) -> Any: """Loads JSON file with the specified fixture name Args: fixtures_path: :class:`pathlib.Path` | :class:`anyio.Path` The path to look for fixtures fixture_name (str): The fixture name to load. 
Raises: :class:`~advanced_alchemy.exceptions.MissingDependencyError`: The `anyio` library is required to use this function. :class:`FileNotFoundError`: Fixtures not found. Returns: Any: The parsed JSON data """ try: from anyio import Path as AsyncPath except ImportError as exc: msg = "The `anyio` library is required to use this function. Please install it with `pip install anyio`." raise MissingDependencyError(msg) from exc fixture = AsyncPath(fixtures_path / f"{fixture_name}.json") if await fixture.exists(): async with await fixture.open(mode="r", encoding="utf-8") as f: f_data = await f.read() return decode_json(f_data) msg = f"Could not find the {fixture_name} fixture" raise FileNotFoundError(msg) python-advanced-alchemy-1.0.1/advanced_alchemy/utils/module_loader.py000066400000000000000000000052131476663714600260360ustar00rootroot00000000000000"""General utility functions.""" import sys from importlib import import_module from importlib.util import find_spec from pathlib import Path from typing import TYPE_CHECKING, Any, Optional if TYPE_CHECKING: from types import ModuleType __all__ = ( "import_string", "module_to_os_path", ) def module_to_os_path(dotted_path: str = "app") -> Path: """Find Module to OS Path. Return a path to the base directory of the project or the module specified by `dotted_path`. Args: dotted_path: The path to the module. Defaults to "app". Raises: TypeError: The module could not be found. Returns: Path: The path to the module. """ try: if (src := find_spec(dotted_path)) is None: # pragma: no cover msg = f"Couldn't find the path for {dotted_path}" raise TypeError(msg) except ModuleNotFoundError as e: msg = f"Couldn't find the path for {dotted_path}" raise TypeError(msg) from e path = Path(str(src.origin)) return path.parent if path.is_file() else path def import_string(dotted_path: str) -> Any: """Dotted Path Import. Import a dotted module path and return the attribute/class designated by the last name in the path. 
Raise ImportError if the import failed. Args: dotted_path: The path of the module to import. Raises: ImportError: Could not import the module. Returns: object: The imported object. """ def _is_loaded(module: "Optional[ModuleType]") -> bool: spec = getattr(module, "__spec__", None) initializing = getattr(spec, "_initializing", False) return bool(module and spec and not initializing) def _cached_import(module_path: str, class_name: str) -> Any: """Import and cache a class from a module. Args: module_path: dotted path to module. class_name: Class or function name. Returns: object: The imported class or function """ # Check whether module is loaded and fully initialized. module = sys.modules.get(module_path) if not _is_loaded(module): module = import_module(module_path) return getattr(module, class_name) try: module_path, class_name = dotted_path.rsplit(".", 1) except ValueError as e: msg = "%s doesn't look like a module path" raise ImportError(msg, dotted_path) from e try: return _cached_import(module_path, class_name) except AttributeError as e: msg = "Module '%s' does not define a '%s' attribute/class" raise ImportError(msg, module_path, class_name) from e python-advanced-alchemy-1.0.1/advanced_alchemy/utils/portals.py000066400000000000000000000147651476663714600247230ustar00rootroot00000000000000"""This module provides a portal provider and portal for calling async functions from synchronous code.""" import asyncio import functools import queue import threading from typing import TYPE_CHECKING, Any, Callable, ClassVar, Optional, TypeVar, cast from warnings import warn from advanced_alchemy.exceptions import ImproperConfigurationError if TYPE_CHECKING: from collections.abc import Coroutine __all__ = ("Portal", "PortalProvider", "PortalProviderSingleton") _R = TypeVar("_R") class PortalProviderSingleton(type): """A singleton metaclass for PortalProvider.""" _instances: "ClassVar[dict[type, PortalProvider]]" = {} def __call__(cls, *args: Any, **kwargs: Any) -> 
"PortalProvider": if cls not in cls._instances: # pyright: ignore[reportUnnecessaryContains] cls._instances[cls] = super().__call__(*args, **kwargs) return cls._instances[cls] # pyright: ignore[reportUnknownVariableType,reportUnknownMemberType] class PortalProvider(metaclass=PortalProviderSingleton): """A provider for creating and managing threaded portals.""" def __init__(self) -> None: """Initialize the PortalProvider.""" self._request_queue: queue.Queue[ tuple[ Callable[..., Coroutine[Any, Any, Any]], tuple[Any, ...], dict[str, Any], queue.Queue[tuple[Optional[Any], Optional[Exception]]], ] ] = queue.Queue() self._result_queue: queue.Queue[tuple[Optional[Any], Optional[Exception]]] = queue.Queue() self._loop: Optional[asyncio.AbstractEventLoop] = None self._thread: Optional[threading.Thread] = None self._ready_event: threading.Event = threading.Event() @property def portal(self) -> "Portal": """The portal instance.""" return Portal(self) @property def is_running(self) -> bool: """Whether the portal provider is running.""" return self._thread is not None and self._thread.is_alive() @property def is_ready(self) -> bool: """Whether the portal provider is ready.""" return self._ready_event.is_set() @property def loop(self) -> "asyncio.AbstractEventLoop": # pragma: no cover """The event loop.""" if self._loop is None: msg = "The PortalProvider is not started. Did you forget to call .start()?" 
raise ImproperConfigurationError(msg) return self._loop def start(self) -> None: """Starts the background thread and event loop.""" if self._thread is not None: # pragma: no cover warn("PortalProvider already started", stacklevel=2) return self._thread = threading.Thread(target=self._run_event_loop, daemon=True) self._thread.start() self._ready_event.wait() # Wait for the loop to be ready def stop(self) -> None: """Stops the background thread and event loop.""" if self._loop is None or self._thread is None: return self._loop.call_soon_threadsafe(self._loop.stop) self._thread.join() self._loop.close() self._loop = None self._thread = None self._ready_event.clear() def _run_event_loop(self) -> None: # pragma: no cover """The main function of the background thread.""" self._loop = asyncio.new_event_loop() asyncio.set_event_loop(self._loop) self._ready_event.set() # Signal that the loop is ready self._loop.run_forever() async def _async_caller( self, func: "Callable[..., Coroutine[Any, Any, _R]]", args: tuple[Any, ...], kwargs: dict[str, Any], ) -> _R: """Wrapper to run the async function and send the result to the result queue.""" result: _R = await func(*args, **kwargs) return result def call(self, func: "Callable[..., Coroutine[Any, Any, _R]]", *args: Any, **kwargs: Any) -> _R: """Calls an async function from a synchronous context. Args: func: The async function to call. *args: Positional arguments to the function. **kwargs: Keyword arguments to the function. Returns: The result of the async function. Raises: Exception: If the async function raises an exception. """ if self._loop is None: msg = "The PortalProvider is not started. Did you forget to call .start()?" 
raise ImproperConfigurationError(msg) # Create a new result queue local_result_queue: queue.Queue[tuple[Optional[_R], Optional[Exception]]] = queue.Queue() # Send the request to the background thread self._request_queue.put((func, args, kwargs, local_result_queue)) # Trigger the execution in the event loop _handle = self._loop.call_soon_threadsafe(self._process_request) # Wait for the result from the background thread result, exception = local_result_queue.get() if exception: raise exception return cast("_R", result) def _process_request(self) -> None: # pragma: no cover """Processes a request from the request queue in the event loop.""" assert self._loop is not None # noqa: S101 if not self._request_queue.empty(): func, args, kwargs, local_result_queue = self._request_queue.get() future = asyncio.run_coroutine_threadsafe(self._async_caller(func, args, kwargs), self._loop) # Attach a callback to handle the result/exception future.add_done_callback( functools.partial(self._handle_future_result, local_result_queue=local_result_queue), # pyright: ignore[reportArgumentType] ) def _handle_future_result( self, future: "asyncio.Future[Any]", local_result_queue: "queue.Queue[tuple[Optional[Any], Optional[Exception]]]", ) -> None: # pragma: no cover """Handles the result or exception from the completed future.""" try: result = future.result() local_result_queue.put((result, None)) except Exception as e: # noqa: BLE001 local_result_queue.put((None, e)) class Portal: def __init__(self, provider: "PortalProvider") -> None: self._provider = provider def call(self, func: "Callable[..., Coroutine[Any, Any, _R]]", *args: Any, **kwargs: Any) -> _R: """Calls an async function using the associated PortalProvider.""" return self._provider.call(func, *args, **kwargs) python-advanced-alchemy-1.0.1/advanced_alchemy/utils/text.py000066400000000000000000000027021476663714600242070ustar00rootroot00000000000000"""General utility functions.""" import re import unicodedata from typing import 
Optional __all__ = ( "check_email", "slugify", ) def check_email(email: str) -> str: """Validate an email.""" if "@" not in email: msg = "Invalid email!" raise ValueError(msg) return email.lower() def slugify(value: str, allow_unicode: bool = False, separator: Optional[str] = None) -> str: """Slugify. Convert to ASCII if ``allow_unicode`` is ``False``. Convert spaces or repeated dashes to single dashes. Remove characters that aren't alphanumerics, underscores, or hyphens. Convert to lowercase. Also strip leading and trailing whitespace, dashes, and underscores. Args: value (str): the string to slugify allow_unicode (bool, optional): allow unicode characters in slug. Defaults to False. separator (str, optional): by default a `-` is used to delimit word boundaries. Set this to configure something different. Returns: str: a slugified string of the value parameter """ if allow_unicode: value = unicodedata.normalize("NFKC", value) else: value = unicodedata.normalize("NFKD", value).encode("ascii", "ignore").decode("ascii") value = re.sub(r"[^\w\s-]", "", value.lower()) if separator is not None: return re.sub(r"[-\s]+", "-", value).strip("-_").replace("-", separator) return re.sub(r"[-\s]+", "-", value).strip("-_") python-advanced-alchemy-1.0.1/codecov.yml000066400000000000000000000002531476663714600204060ustar00rootroot00000000000000coverage: status: project: default: target: auto threshold: 2% patch: default: target: auto comment: require_changes: true python-advanced-alchemy-1.0.1/docs/000077500000000000000000000000001476663714600171715ustar00rootroot00000000000000python-advanced-alchemy-1.0.1/docs/Makefile000066400000000000000000000011721476663714600206320ustar00rootroot00000000000000# Minimal makefile for Sphinx documentation # # You can set these variables from the command line, and also # from the environment for the first two. SPHINXOPTS ?= SPHINXBUILD ?= sphinx-build SOURCEDIR = . 
BUILDDIR = _build # Put it first so that "make" without argument is like "make help". help: @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) .PHONY: help Makefile # Catch-all target: route all unknown targets to Sphinx using the new # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). %: Makefile @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) python-advanced-alchemy-1.0.1/docs/PYPI_README.md000066400000000000000000000022341476663714600213120ustar00rootroot00000000000000

Litestar Logo - Light

[litestar-org]: https://github.com/litestar-org [contributing]: https://docs.advanced-alchemy.litestar.dev/latest/contribution-guide.html [discord]: https://discord.gg/litestar [litestar-discussions]: https://github.com/orgs/litestar-org/discussions [project-discussions]: https://github.com/litestar-org/advanced-alchemy/discussions [project-docs]: https://docs.advanced-alchemy.litestar.dev [install-guide]: https://docs.advanced-alchemy.litestar.dev/latest/#installation [fastapi-example]: https://github.com/litestar-org/advanced-alchemy/blob/main/examples/fastapi_service.py [flask-example]: https://github.com/litestar-org/advanced-alchemy/blob/main/examples/flask/flask_services.py [litestar-example]: https://github.com/litestar-org/advanced-alchemy/blob/main/examples/litestar.py [standalone-example]: https://github.com/litestar-org/advanced-alchemy/blob/main/examples/standalone.py python-advanced-alchemy-1.0.1/docs/__init__.py000066400000000000000000000000001476663714600212700ustar00rootroot00000000000000python-advanced-alchemy-1.0.1/docs/_static/000077500000000000000000000000001476663714600206175ustar00rootroot00000000000000python-advanced-alchemy-1.0.1/docs/_static/aa-banner-dark.svg000066400000000000000000000414701476663714600241110ustar00rootroot00000000000000 python-advanced-alchemy-1.0.1/docs/_static/aa-banner-light.svg000066400000000000000000000411271476663714600242760ustar00rootroot00000000000000 python-advanced-alchemy-1.0.1/docs/_static/custom.css000066400000000000000000000063611476663714600226510ustar00rootroot00000000000000/* Theme color definitions */ :root { --brand-font-size-xl: 6rem; --brand-font-size-lg: 5rem; --brand-font-size-md: 4rem; --brand-font-size-sm: 2.5rem; --brand-font-size-xs: 1.8rem; --brand-font-size-xxs: 1.6rem; --brand-letter-spacing-xl: 0.25em; --brand-letter-spacing-lg: 0.2em; --brand-letter-spacing-md: 0.1em; --brand-letter-spacing-sm: 0.05em; --brand-letter-spacing-xs: 0.03em; } html.light { --sl-color-primary: #202235; 
--sl-color-secondary: #edb641; --sl-color-accent: #ffd480; --sl-color-text-1: var(--sl-color-primary); --sl-color-text-2: var(--sl-color-secondary); --sy-c-foot-background: #f0f0f0; --yue-c-text: #000; --brand-text-glow: 0 0 10px rgba(32, 34, 53, 0.3), 0 0 20px rgba(32, 34, 53, 0.2), 0 0 30px rgba(237, 182, 65, 0.1); } html.dark { --sl-color-text-1: var(--sl-color-secondary); --sy-c-foot-background: black; --yue-c-text: #fff; --brand-text-glow: 0 0 10px rgba(237, 182, 65, 0.4), 0 0 20px rgba(237, 182, 65, 0.3), 0 0 30px rgba(237, 182, 65, 0.2); } .title-with-logo { display: flex; align-items: center; justify-content: center; margin: 5rem auto 4rem; width: 100%; padding: 0 2rem; user-select: none; -webkit-user-select: none; -moz-user-select: none; -ms-user-select: none; } html[class] .title-with-logo .brand-text { font-family: var(--sl-font-sans); font-weight: 300; font-size: var(--brand-font-size-lg); letter-spacing: var(--brand-letter-spacing-xl); text-transform: uppercase; text-align: center; line-height: 1.4; max-width: 100%; word-break: break-word; word-wrap: break-word; overflow-wrap: break-word; hyphens: auto; -webkit-hyphens: auto; -ms-hyphens: auto; transition: color var(--sl-transition), text-shadow var(--sl-transition); } html.light .title-with-logo .brand-text { color: var(--sl-color-text-1); text-shadow: var(--brand-text-glow); } html.dark .title-with-logo .brand-text { color: var(--sl-color-text-2); text-shadow: var(--brand-text-glow); } /* Large screens */ @media (min-width: 1200px) { html[class] .title-with-logo .brand-text { font-size: var(--brand-font-size-xl); } } /* Medium-small screens */ @media (max-width: 991px) { html[class] .title-with-logo .brand-text { font-size: var(--brand-font-size-md); letter-spacing: var(--brand-letter-spacing-lg); } } /* Small screens */ @media (max-width: 767px) { html[class] .title-with-logo .brand-text { font-size: var(--brand-font-size-sm); letter-spacing: var(--brand-letter-spacing-md); } html[class] 
.title-with-logo { margin: 2rem auto 1.5rem; } } /* Extra small screens */ @media (max-width: 480px) { html[class] .title-with-logo .brand-text { font-size: var(--brand-font-size-xs); letter-spacing: var(--brand-letter-spacing-sm); line-height: 1.2; } html[class] .title-with-logo { margin: 1.5rem auto 1rem; padding: 0 1rem; } } /* Smallest screens */ @media (max-width: 360px) { html[class] .title-with-logo .brand-text { font-size: var(--brand-font-size-xxs); letter-spacing: var(--brand-letter-spacing-xs); } } /* Preserve existing layout styles */ #badges img { margin-top: 0; margin-bottom: 0; } #badges { display: flex; flex-wrap: wrap; gap: 10px; margin-bottom: 3em; } python-advanced-alchemy-1.0.1/docs/_static/favicon.png000066400000000000000000000764041476663714600227650ustar00rootroot00000000000000‰PNG  IHDR๔๔หึ฿ŠขiTXtXML:com.adobe.xmp kQีjsRGBฎฮ้ IDATx^์ ˜Uี๗งz&!„คซ'dwA‘ลQBืLBาี BwE„ไU5ข lโ†โŠ"ฒ!้๊ษึี„EQQ@ลๅ•}ฯtuBXฒฬtฯšไU–ษLw฿[U]งŸ'๒ฬ=sฮ๏™ำต{ ๒B@! bO€bŸ$ „€B@@บL! „€@@zQRB@! ]ๆ€B@ ฝŠ()! „€†.s@! „@†E”„€B@HC—98k๗ํ^Ka;ต๑‰K>‚ ง(U๓ฉ6ะ58๔๔”นทญŽ`ˆ’ˆi่ฑ(“ู ีึพตlรเYฬฦŽo`b3ZbำR˜n๓ vzrๅrKฝ‹3!SาะcZ8 {dีพƒแืๆ3่0; งŽ ฐ ‡ 3นา‘‘$!B =จ"ูz…์~Dt ‡ดปxl!๛๔™ด]ZิBŸโJฤ‚€4๔X”I‚ฯษ๎ศ  84Sn‡อ9๎_”ณค*F% ]&Hl x๋ .mธ:ฦef=C]H„@ HC —AuูฬmAjฯM\๒’๐+ ๎ขมิ‘้ร–?"x„@’ HCOr๕c˜๛SKึ=4x ฏa๘rxV†eฺ+๎ ฯ…( h†ํ๚Ht/"IQ๑# =~5“ˆƒ}N,0ผท฿r 1‚eI“J" ๘ฮะœ=อ*?—H’ดxi่2bM ฒจ๗UH๑Eผ7ึ‰H๐ เ฿๘†qJฯœา4”แB c HC๏ุา&+ฑชcศ@pไ๊›’•yโฒ}”™ฮษไK?K\ๆ’ฐƒ€4t™"Eภ+๔อ๙ŸะืQ‰I2๗2ำ%ๆึตd•‡‡ฏ$ ]fEGX:kšI€อภ~๒V|์ส๎๒V˜sฟฦ. Xด˜€4๔wํ!๐ ›jโ ุ‹;Œ{|ฆr4Œ๔˜ฒN\ˆฟ :~ โeฬผ ”z฿ธ/=‡฿ฌชฎุ ค†žดŠKพร6-}ฃe>hฮ-กฌSฯฑ^0A%|9ฏ\…žุ †.ณ!‘‚+๖ษ๋hr๒dผำฬญ๘ฒNL<วzภึ*แ๛ฤVOฎ\Vั[! i่2 Kภs,VMž€ƒาถ{ปชN\ํ=ว พmฅ?๔vู{]… ุ MคกหLH, r…ฎ|…{ษKo‰5”ฤ5†ฎฆHล‹€Ž็ฟr…ฎ~…๎ณฑGO~ล?ใ5{$Z!=าะฃW‰จE<วzภd%wr…ฎ|…n0ํ:5_zXฉb,„€r—9\žcญ0E…€\ก~…ผค0๖U๏dฺๅไ.S™€b+^D`์฿5ม%:”@ลฑVฌIo#W่สW่ƒฉ๎ํฆฯ^บช๙"ˆฅi่2Kภsฌงlง@บrC๏gd&ฯ\แ)ีAŒ…€†.s นt4tนๅฎRœฑงN}WyMrgขd.๔+t=E%†<วzภJกหบ๒zmO’๓ฬ•fก aาะe"$–€Ž†.W่๊W่jทูaข` Y๙! @@บ<17ฯฑ‚7ซ_ฅ’…4t๕†พvOุอ*ฏWฉƒุ ! 
W่2Lภsฌ`ํ๓ฮJไ–ป๒-wำvๅยBiŠฑุD@~‘d&$–€็XุE €4ti่JHŒ…€>าะ๕ฑฅ˜ะัะๅ–ป๚-wนBู/Ž„Yาะ#[ ,l^มz„”ศy่ส็กKCWšb,C@บL†ฤ๐๋~ป+[๎rห]i‰ฑะG@บ>–ข3žc=`W•ฐๅ–ป๒-๗!ำvปUj ถB@l" ]fBb HCW/ฝ็(7๔AำvวฉG" B@HC—9XาะีKฏกกo4mwผz$ข „€4t™‰% /ลฉ—s,y)Nฃ(-คกkม("q$ W่๊Uำp…พด ๊‘ˆ‚าะe$–€\กซ—^ร๚ ฆํnฃ‰(! ]ๆ@b ศบz้5\ก?oฺ๎D๕HDAi่2K@ึกซ—s,ีu่ฯ™ถ;I=QB@บฬฤ+t๕าkธBึดษ๊‘ˆ‚าะe$–€\กซ—^ร๚3ฆํNQD„€†.s ฑไp๕าkธB—†ฎ^QรคกหDH, r8‹๊:๔5ฆํNM์$”ฤ…€Fาะ5ยฉx๐๋;ฉD-{น+oบฺดดJ ฤVMคกหLH,ฯฑฐณ9mM๕-๗ชiปฆR ฤXi่2’MภsฌวผJ…‚\กหบส[! “€\ก๋ค)Zฑ" ฃกCฎะๅ =Vณ^‚ํdาะ;นบ’จ<วzภ๖*˜ไ ]ฎะUๆุ คก๋ค)Zฑ"เ9ึใvP Zฎะๅ ]i‰ฑะG@บ>–ข3žc=`;•ฐๅ ]ฎะUๆุ คก๋ค)Zฑ"เ9ึำฆ4ืตDบ4๔XMz ถฃ HC๏่๒Jrฃจ8ึ*zT( mgปทซhฤูVรNqฒ=ฮ@bi่‘*‡ำJ:บ์gฝS\๐W&ธแ1๒Gz+'ฝ๘๊hาะ;บผ’h<วชPฺิDnนห-w๙-Q! =*•8ZNภs,€าถฃาะฅกท|โŠC!ฐาะej$–€Ž†.หXฒl-ฑฟA’xิHCZE$ž–๐ซ @ํค/9mm์g่ฃWTžกทlฦ‹ฃN' ฝำ+,๙m‘€็XซLQA$ทๅ–ปส[! “€4t4E+V<วZ`[• ฅกKCW™?b+t†ฎ“ฆhลЀކ.ฯะๅzฌ&ฝัคกwty%นัxŽ๕ €ษJ”d/wy)Ni‰ฑะG@บ>–ข3žc= `ขJุrห]nนซฬฑ: HCืISดbEภsฌ็lฃด4ti่*๓Gl…€NาะuาญX๐ๅ็ฟ†. =V“^‚ํhาะ;บผ’ฯะUŸB^ŠSRT5mWi๛]™ๅB@l" ]fBb h8)LฎะนBO์/$9าะ#W จU*Žตž€๑*ไ–ป4t•๙#ถB@'i่:iŠVฌxŽตภ8• ฅกKCW™?b+t†ฎ“ฆhลŠ€็X[>ฅปLdบ๊{๒ ฝน&ใ„ภคกหI,ฯฑtฉ+tนBW™?b+t†ฎ“ฆhลŠ€็XCR*AKC—†ฎ2ฤV่$ ]'MัŠฯฑj • ฅกKCW™?b+t†ฎ“ฆhลŠ€$–€–†.หศฦ2‰ ’ฤฃF@zิ*"๑ด„แญีU“7ช:“[๎rห]u‰ฝะE@บ.’ข+ผ๐่qี‰•`/wฅ4ti่JHŒ…€Fาะ5ยฉx[๎๊๕’u่๊ EA่" ]Iั‰์V“ืั:ๅ ๅบrห]nน+M 1 HCืSคโCเ‰E๓ถŸz.ธบT๛ศฒ5Yถฆ6ƒฤZh# ]JЁUnvbj=ซณ\กหบ๊{! ‹€4t]$E'Vt5tศ3ty†ซ™/มv2i่\]ษm‹n:p’ั=nญ2"i่าะ•'‘=คก๋แ(*1#ฐ๚ฦ์<ญV [nนห-wี9$๖B@i่บHŠNฌ่j่rห’+๔Xอ| ถ“ HC๏ไ๊Jn[$ฐf๑ASkFwU‘r—†ฎ<‰D@่! ]GQ‰ต‹ฒ™ม จ†-ทๅ–ป๊{! ‹€4t]$E'Vžu๎ูˆฎUชAKC—†ฎ:‡ฤ^่" ]Iั‰] ]žกห3๔XM| ถฃ HC๏่๒Jr["ฐชž"zR•\กหบ๊{! ‹€4t]$E'V๚๐ปžPZ^Š“—โ”'‘=คก๋แ(*1#0ฐtึฦะะใสaห^๎ฒ—ป๒$! 
‡€4t=E%fไ ]OมไB@Œ@@บL‹Ž!P้ฯฮ$฿8เฃZ–”์งบS\#ฅ š๙ี์ืพž™ป๒oสX!าะ“Pๅฮ๑ฉ%‡N๋<ž'ุฝีฉส-wท๋ฌแ f|ห!‹hมฟN&:š€4๔Ž.o็&็9ฝ‡๑'˜qx[ณ”+๔V^กT๊Gˆ่ป]๔ƒษ3Wxm โ\ด™€4๔6@ืO€๘*6|เำ์Yฟex# ๆ7Oอ—๏ฯCด•=ว šh:Qห฿ฎ%ฎ]–ฮฏsโ‘„@ห HCo9rqุ(อg—Ÿสภว ่iิ>ฬ๑]ฦ8s9Kชa๚ˆฒถ็XA๓+b1ฦŒK2y๗ฆˆล%แP HCฏˆซ๐๚ญ7ขFŸ๑‡UtBดฝด7‡จy้ชc}“Sฃ(ƒd_HฅEQŒObบ HCืMT๔” ฒ๛D_`+‹…)@8ัฬน฿ำEิตซ‹ณ{ฑAQฟล}/็›ywaิyJ|B@…€4tzbซ•ภๆF~>€Cต ‡!ฦx0ฝ5ฟŽฌ๒P๒qา๔+h”Gว ๆ๛ˆ่หS๏œqฝผƒjIˆ †021ะMภsฒ๏๘๗ม ฬึญ’:&ใLnล๏Cา•lฐtฐป6๘;ปD.๐เ/ฟ4*ลL˜๙า•‘‹W คก+ภS5รœ้K ฬTSjฉ๕c cพ™[๑›–zธณีล์ฎพOืƒฐฤC}qx๔ g๕ไ\ ‡๔ฤ(k ตc HC๏ุาF71ฯ้จ]PN=สเ๒ซ%ำ๘y}{cm›๓d๛ั-Wญโ๔žBเslง^ึ(ฑK>ฮH๒๒รึ/ahษ_ยฐ“xx๚ฆพํบฦ๑—ม์๊‡OL‹.n๐'.–F^ษ*No/มŸะ‘vฎ฿ฒญ#ฏ๔กszๆ๚d[ฃ็B Iาะ›'f๕xtแถfยงA)ลณณ๋wฺศ` ัเYpัgv2v๙.ข—?mN8ษVEkƒ)8~.่Š0`็ป 7ิ&^*_เ"\% mDาะeb„Jภ+fำบฮ)ุ_๘ฦ”1พ˜ไMbBb๛ู๊ฒ™bศท}6๖Nj…฿&|<ลภง3ถ{Uถb"ฺB@z[ฐwพำJฑ๏mฤ~ฐF๛Mceฒงเ/„๙ึๆr๋บ!=k๙3cล(?‡@ต฿สณwNศ‹bsqํ8ูN6œ๚‹ช^าะ๕๒Lผš็ไ&3ฏ >ภˆ๛˜q5uีฎ6gฏ|4bฑ%:ร[ปW?=i6 šฟ๙ภษ2ฤฬ—˜4aูล ŠKB/! ]&„6•B๏ˆ๘โˆฝแ<ฏฌฦ•=sJิ–ฌ…J` hf๘๘มKu‘๘0p 8~ชํ‰€$!๐2าะeJ(Xใ๔พบ1€สb๚ๆ๏m๐']#/7้ƒฺjฅี…]|๐i  WํมขRw๊,yTำู๊ ฦ" },B๒๓Q T‹ฝŸaๆฏDำ๓ฬ๘i*…๏Oใ‘˜$ Vนู‰ฦz:Ž|œยn$U%žใ4ู^ฃุ๋$ ]'อi ฏ)๏ฎ  YmO›๐ 1]ผพถอOๅjผํี=€Š“=ยฦ@6tgc:เSใ>>}๖าUc•B dาะC‰๒žำ{(ภ?is~+™๑5ำvo–๕โmฎDฏvfผู‡๑9€Žhีv[Hณส“™S^ โR‡€4t™ u`'7พส๋/แ“u้8ฦ ี.žjฏ“~yQŒแญ„ู?„๙RmŠŸ ๘สิ฿๒99ษญMทญู[8วŸภฆพ“นOฒ!|อOu]ฺs่ฒ'ฺƒ8Ž,5Kf์V๓ป>ฮญ… (wch$๛ึศ‚’ภ:–€\กwli๕%Vqฌ๐=[้Sญ_‰€› ะS์า๕[ษศคจmjฎPซ}‹ฟไxbัผญวงž๛ €ฯต8๋็4?m—Š-๖+๎D@z‚Š=Zชk4ต–๊^ ฦ;[Š„๑wƒ๐qyNR๊‰wๆ-™ฑjฉ˜B>@7ํRเW>B@;iฺ่‘ฦOpU๖5)Ÿ–ุต…ั?ะ‚๔sๆทh ต๚WBเ?ผ‚๕nพเUญรB—ฆsฅณไึOŠ'i่Iฉ๔๒ฌ๔gg’Oฟj๑Yิ?๎ฎ๑ง'ฯ+WŽ_า`Ÿ๘ิ:ใหiู‡o\;๏อ*ฏo™Kqิ๑คกw|‰ทœ W่ˆƒ-ู]‹๔!yŽ˜เIแิซลพ}˜Ÿตl๓$ย]C์พ$O7๚ฆ]๚zธ>Dฝ“HC๏คjพ(—j฿ม๐~€• Gส-๖H’ึˆ6ํฎh8ฟ6TDL_4๓ฅก๚๑Ž! 
ฝcJ๙฿D*๋ํDX`bธ้๑oบŒ๑semyธ”E=š<'7X๗ €f…!ใ23๏žช๏าะ;ขŒMb๓nW+ljjŒ_ฅŸฯK๓oุช&ภ ีท฿r1a7/˜ถ{^„QHh =EะBe๑Œ7‘บ @Z—ๆH:D๘J:็~6Lข-โD ๊d฿ว ‡๙พ  +_'.kk HCo-๏ะผญY2cทกZ๊ฮฐ—ฆ1๑‡3น๒OCKD„…@L l~Ynq˜บˆO็\'ฆˆ$์ HCp+ไƒณฬ}ฃ๛w ผ&Dฯ๘พ๑ฎžน+}ˆดˆ5jaฦLฉเ•i!%ฒž :$3ง๔ป๔E6ฦคกวธxA่ผ๐่qี‰•[ผ#ฤTV†eฺ+๎ ั‡H Ž ฐบ˜•™–…๘{M ด฿ปtG“$ด†ฎ e{„<'๛+€ข๗'RฉฺASfฏ|0D"-:Šภฺๅ}ๆฦผŒภo)ฑG†ๅœ„่ฦTVzL „]-Z0ใ์ะR`{้๓แศ‹jHC[ล6ว๋ณวƒ้‡แ…O‡มพI๖ญแ๙e!ะ๙*Žu5๏ 'S^fฺๅฐพ0„ฒจ†F@zhhรฎ:ึ ฯอS›ผ'Eh,%แ.c=ฯœ๚ฎ๒š๐ฒe! มZuo[ฎ ซฉ3ใ;™ผ{r2hJ–ฃะุt+หำjติL ล๓ญƒ]ใrr/บ"š`Uว๚) ำqfพte(ฺ"าะcS*`•›˜ZGwุ3”ฐ wmš8c‡y‹^E_D…@‚ (_ฉ~l๋ ภ3Lป#N|๊าะc2˜AีขU0;ค๏ฅ๎ิŒ๔ฌๅฯ„ค/ฒB ๑”›๚่W๙]]o๎9tู‰PาะcRxฯฑ‚}œ?Nธ๔ท.ฃ๛ 9d%บข*^L`x๗o๙€ใt“!เSงญ}ํwื nmั‹>i่ัฏ*E๋Hb2ŒP ๘ืฦT๗ำg/]†พh !02ฯฑ~FS๐Sำv?,“G@zฤkพ้eบแœkจo ฝฝgฮญOFƒ„':Žภฆ๏+o"๐\ษ๛ '3v้[บuE/ฺคกGป>๐๋/B˜^*U{›์Y‘u6Ÿ้ช ฦŽrjปม๒V๙$„€4๔บR่ฟ?„ื๙ิ3ง,“m$ฐชž"๚=€u†มภ@—?ธว”นทญึฉ+Zั% =ขต๑œย๓‰0OŽ` ฌH & Tฯxฉ`ษูไ&%F4#ยM้œ{„NMัŠ.i่ฌMตุทฯ๏ฏ=<ย‰fฮ ใ‹‚๖PEP$‰ภๆ๓ิ—่ึš7๑ fฎผ€'Ÿ' =b~tแถžธU๐|wํกพfๆ3ต๋Š ZT ึ{™๐s-by!ๅ{O™ปโอบ"1าะ#Vชc}“SC๋—ฆํ‚ฎH ! ‘€W่]โ/h” ค๎4m๗š5E.bคกGจ โwยเC้๎ต๘€ฌ๒๚ดERอ<'๋”ำ,๛yำvฟคYSไ"D@zDŠม ทzbๅฏ ผFsHฯL{Mอ—ึฌ+rB@„D บlๆถIงฆhEƒ€4๔ิม+XwƒฐฏๆPไ%อ@ENดƒ€็๔~เำด๚๖้@sn้ญš"ึvาะ\ฯ้ภkc ืhฏฬผาใšuENเ…Gงช+ทะ๗–:ใ๏้็3{ัj-NG…H@zˆpว’่?x{ร๏๚—ฮgdO207=ว-Œๅ_~.„@<l[๑wญ;ษ1Ÿnๆห฿ˆ‰ฒาะ๋กาฏh]ฦ|๒vฺ.‡ฑŽ]g˜ข%„@ƒ*…๑U š6Y฿ฺCN[ิHดอRาะT€ล}–a๘%อ๎นv๏+๋อ5S9!ฺืง3ฎ5๓๎{#’ž„กH@บ"ภfฬ๙oํฎฎš>ำนฝ๋F๒๙ญ้นๅ`X๙!ะžZr่ด๎ฺเ฿คuฅGภAi cC+]!ŠNคกื J็ฐŠc}–€๓ujt–i—.ัซ)jB@D@ลษA ตลผ ทฺ}hฟปตiŠP[HCo1๖gƒ{6ข๋!อ/ยญL็,ธล้ˆ;! ฺ@ โXW๐>]ฎ ๘tฺv/าฅ':ํ! 
ฝล=ว Ž1a๏žœ๛š"%„@Œฌ*dงงˆภDaแฆtฮ=B‡–hดž€4๔0gU‹VฐœlO]๎ธ(cปฺOgำŸ่!ะUง๗$งaoม_^ฃeƒž™S๚]รzbะvาะ[P‚vxzlCmโ;ฬ[๔B ยB@Dœ€็Xฟัตื;ๅดํZOYย€4๔L ฯฑ‚gR{่rลDGfr%}๋Pu&:B Mn:p๏.5dL๓Sธ-…‘"ฺ>๏ลlL€แ31ึ3c=X๏๋77 7j๔, ำ.?ึฆด๊vซ…[65๓+–ี€ Œi่!—มsฒวt67„~3็ๆด้‰ˆ8ฏ฿z#1vaฆW<ํ ะ๖ ๎cถำผฏร+ˆ0p?3๛ภCLฉจ‹๎KฯZLV๋|>ซ%ย]fฮO‹–ˆดŒ€4๔Q{ฮ๐ณ๓7๊rS3๘ตำๆ”ƒฺไ#:†@uูฬm1X‹A{‚๙ต0ฐ7ฏำผ=rXผž&โ๛|ฆฟ๔7 —jk๕•ƒnvซษ๋(xIvg-‰2Ž2๓๎/ตh‰HKHCsฅhI mฟDธ<sO 1d‘กnใฦฝŒM{˜บใึ;จpAX^#.ตโ‹ธๆ๗uiฺฎถG…ญวŸ<าะCฌน็X๐&M.^‡ก]'ูทhา!ะ2โwRŠ็๚Lsๆ–9Ž–ฃวมดŒ‰]๎๊Zs่ฒ'ยฯsz๏xoฺLแLฎSZข>i่!1ฎ๖[y๖ฑXฃL=OฃžH P ญร ะ<0ฟ €ชณxŠ฿อภ ] ๋งุฅ๛uฅ ๙oฯ}ฆํj{dจ+Gั™€4๔f†็Xห๔i’ฏlจME–ฉiข)2กxbัผญวw=Ÿ๘(0ๆ†ขZ(IดO๔nฟ๒ูธพ'ฟโŸชaxŽu-ปHศNฅขjLb>i่!0๖œพ=ฏบค ๔‰ด]๚ฎ.=ับ ฟˆต˜วฬG๐^]บ ื๙+ƒฏๅฎ๎+›ฝ-๏๛๛w่เ(๋าuPl†4๔8Wœ์w ๔qMาฅŸหผ†ๆ฿Pำค'2B@™ภšล}ปืศ? „˜ฌ,(#x`\‘ฮ•–5zมึqฏl Dฦพ้ ญK5– ฎ‹€4๔บ0ี?h๕ู)xz\ใํฦฟ๓gา๙ฒฮ็๑๕'$#…ภ‹ ,๎ณ รถž-`ZH€๑ ๔ฎ!‡“็•ƒท็วhฝSศธึฬปrfL๊ํ ]3ฏh?0พฆY6ป—™.ษไK? A[$…ภจ ู ขฏj|/Dˆ7M€ฎ#บ _๙็ฑ$*Ž๕s]Bh(ตK๚ฐๅŒๅS~>าะ5ณฏ8ึฟxตfูห=B„ Ÿv•;ออบˆด@ล้ํ%ppE~จเˆB?๛Xษปwn)ฒ`—=๘6ทRพfๆ3ี…D!,าะ5’ญ-›’[”b`€@mจm๓y๛ฝฤ“ๅร+X๓A8[ใ> ษฺุl—LLอ—VŽไถ๊X71p˜†ึSwjzTถบีOวIHCืXRฟ8uG%ฝnT2ฐUว:oK#ฏV๔†6ฮ{๙ก*›—^GธLฺ._จCK4๔†ฎ‰้@ม~W(;?ีb…A_ฬุฅเฑ|„@C }ฏ#โK ฌ—Oผ ฌHN|๑f5ีbถฤL:ŽD}ฬดโงsฃ—†ฎฉถ•b๏นฤ%Mr*2%เฤดํฎ""ถษ ฐvyŸนqƒ> +Y'"ห.\;/ุอ*ฏ๗ }ณ@R-™หัชZ0†!" 
]Uฯฑ‚ท?ฃ๔อ๕€ฯh๕‰OšpŠL T‹ึYฬ8ภถ-p'.ฺCเa"|"sฯฑ‚๎สGข2๐๓Œํพฟ=้ˆืัHCื0?<ง๗P€—hา.A ๓7ฆบพ2}๖า็ต‹‹`, Ty>๘ฒWcดƒMฐ๊#๘ท~๓ฟu ฌc/ู˜‰†wOแ €ฑ€ญ6เ‡MlG๐แ๚ไF‰ภ฿าเgฝ?ธqZฯแท?ซAK$4†ฎฆ็Xฟ๐n RกH/ฮ /ศ๖ฑกเ่ำ7๕mื=ฮ3Mะ/ ๔adเ?`0=ศ†๑เะF<ฐแ+žึ™ำ*7;ฑ{ฝบVรฎม"Aปดภปmn:]ถB+๘ข|aQ0gๆKWช ‰‚NาะiV—อ–kkeZdN เำ.้y–ึขจล:J1๛!b๚F\nฏ๐G๎"โป|ค˜ษญะ๒–ถ:ษM ซ‹ู]ด'3ฟl์๐ž ์ู™W๗#Pcพีฬ—g่โ):zHCWไX-f฿ฯLฑฺฝม‹บิ้Sๆฎx@1}18ส’์๋ฉF—่pจOƒฑDw๚์ฉ'_C„c5ด๊อ3w๖Sต7ม|ืฎcMฅjปO™ฝ๒มธึชใ–†ฎXีชำ{#ƒP”i‹9 ฅบฯ“็๋mมชำg๚gง ็k<$Hgผ/€แ‚h9 ^fฮqตLจ3HZมฉg3อxoš‘ั`๚ข™/-ˆL<ศว๊”บ ฯูR๋(ึ/†ฯืA|V&Wi‰หภHจ8ฝง๘(‚ฦภp=Kงฺ๎-‘Rp’;a(u4ˆ่€ดR๖ำvwiฅC๑5:นBW˜!ž“= ๋$ขdz; œุษWKQ‚F,›ืo1๏†~šOt ]ตgเMไขี$h๎T3ห ๗ฤyW>ƒ้-m9ซ˜ˆีE@z]˜Fไ9ึBG+HDฮ”—ฃ+uŽ์ืนาl1 U‹fผึH—6~๖u(9zDธŽ|Z(่๋ใ์าg€๐k๋ณŠศ(ฦฬผ{BDขI|าะ›œ.<`ยึทZี่[ญŒแฐ/๙๑ว™IŸ`Z“!้4๓ˆ้tพt…NQัาK`อโƒฆ๚F๗8UฏrSjทƒ๑M3๏_rๅำ$M{้ำ)W๎q๘xฆํfโhb”†d•ซ…์\&Zิค๙‹อึื&pฯ4ซ;น๑ซฑแ8€]5hซI0๎๑ว๗ฬu๏RkxแัใVOชœ]‚uอืื.ซ็ln :]kU!;=œ ข`K6}ษ้๒ใ•ไๅถ{tfฃ4๔&kแฌ+@8ฑI๓šš9๗˜—๋x…0๑9‘ุอ‹ฐฐF|ฮด9ๅ)็+J*…๑vTR3~’ˆพ™ข๎๏o;gIUMJฌว"๐๏mฅใ“ ์;ึุถœq™™wฯh‹oq๚าะ›œžc=ชใชO8ผ'็ผฅ0<'{`|>K^˜~‚ฎกฯ›ณWนหง…ชNoŽม_ฐO พU…ˆฯM็ส฿kc ‰u]-Z63.†7ฐ‰า็!ำvƒ๓ไำfาะ›(@ตุทณOฆ/7Ycฺ๎ิzt*๋p"k>฿Tฯ๘Pว0พ—bใ"ู˜&Tสรโ^1{<˜NฐW๘ถ์!8 6ธแBูฟปUุไ{๓œ๎าด้V+๛ฆs+๎m?dG ฝ‰๚W ึูD~กT??0m7xFV๗งZฬพ‡™พ`๛บBศ7แk้\๙ถะ\$Px๓ฆ0'่ไถื™้'์ใฬผาใ ,EdS^ส0q๚,€ญ#่L=/q$:i่M”฿sฌ ุ„้KL ฬ&O,šท๕ธิ๓Ÿ'๐งUcะcฯม>—šv๙z=zษTYoํหŒ3™๑v bH.W]ํฎฤ่ƒwบบเโโธvFส ?e์า[ฺƒ๘~ๅ *a2ี7fง๘ใiตP›ถซ๔bำฆ๕ซตo4KC<:$ž๐qบb’}๋€ม$hT์๛˜่`ผ3๙ฎaโำe็ภTขVฒo๒‰~ ใผ๓พd( ฅvIถ‘fํลN€\ก7ศฐR์}1ชAณ†ำฅฆ]๚”บPqฒG(ุ!L้ ‚ŽXซAืpUฺ.๕๊v†ฺ๊B๏Œ๘"ฬ•uผN๙๘i๙๒SA9yYT‹ู™้+๊z7G'!&œ–ษน฿ิฉ)Z†/xN๖2`๘%%ฅOl๕ไสe%‘฿†7ž}F—ฆ&ี9๛ฦตๆาš4c)ใญูฬcFฐiH”NแZCฤงฆsๅซc V‚~ ตห๛ฬมEญพ <ฆI็สQ>ีฏใgŠ4๔K์ญ?€๑ึอ^>|iปกผศ2Pด๖HฟหL–bŒa˜?ฆ~€ำธิ๒N฿^6xนmจถั‚๙`ไžมค0 *jะ]ใOLžWฎ(๊ˆyฤlฺu฿oๅ2ทฺžl’1‰ GzฅึuบZpyฦ.ึ€๋†‡nZฟN—E๋6+าธภR6๘<~๗?kœW๛ฬog`ู’2าy€งฆsฎำ๐ไƒX๐œำb+N฿#-ฺู7=คก7ภ+d็€H๙™03Ÿ’ษ—/oภuSCtณ[mปฮ8—ม็4%ะr#๚3ภแทLtท_๓‰๊๓ง–:-ๅo3ใฟƒyxีCTž…Vนu >?c—ƒใUๅ“ร[ษ’qYุ{ฤ3pQฦv#ฒ๚&!ล}Qšาะจyตh]ภŒณ0qhอเืถrีเสฑ^VฑUco‡=bเa˜e‚g0{lภcีฎ<ฌUงพซผFW|มsศก๕ตํู0v`๒ท7`lฯฬฏแ๕`ผก/ฉๆฦภ5l 
}ชgฮญมj๙$ภjว:ฤ฿tuอฅ?ึ๎{ำ.๏฿œถXฉ†AฯฑnpP&# }ภดW+j4e>|  ่› t๒6kX ๐jญ†ีd ๊๛La"ƒถูtJk˜ฤรŸ“›Y#๚›๏ำษ=sWธ‘ Qk)อทแฟ`Šnว๒]7ั๚๕คกืฯ žcญฐU&ฏสภw3ถ๛  U[ฯฑ> ฿i˜ ช%๖‘&ฐš d)Qคkิถเึ.สfปŒฏ€Y๋yๆฤฏชฃห~ เํษO}ž@ืฅ):m#ฐ หปวัW&ฯ\แต- qK›€ žฏซ๚Bธหฬน๛ลBฬƒ–†^gซŽ๕MNญs๘ˆร๘cฺvUืฐซ„ฐE๊อ3wๆTํ๓ |@W(ND4 l ฦ๗ปบฯŸ>{้ชPœˆh"่๘;ภฏMเmใพ 5Ž—†^gีtศB„หำ9๗”:]ถeุช์kRพ๑ฅฐ—ทด%นNtJ๔Cย9 ญ‹๚œชลc™๙Uฯd`nzŽ[Pี๛ฦHCฏƒ/X`T๗ฟ%ุH้%2>”ฑซ๊pู๖!ซ ฝป๘เ“Aรง8Ei›าถณ‰@O€้5๘WDu~IMุผฤ๕_M˜พิ„้‹fพด@YG" ฝ\žำท'เตŽกฃฉีjฏ›6oๅช๊ดฺ~๓๛มหsooตo๑๗_ม^ู>Œoerฅ…‹‹@ลฑ‚ว6=jอo4ํ๒‘aล(บ#PซYBจzk>ชg}ฏ5mw8#>ขt วe/๚8๓ )๖gภ๘iอฏ};Ž_Cb"ฒ!๐ ึb๒Š.ฺถ฿†bฑ6—†^G๙<ว๚<€`/d…Mปหฺ^ž๔๐™๐[ัGผOรA5 L;ุ”๙V๑ณž[w๕N๓์ !ะ•b๏นฤ%UgQ[ัฃšO์ฅกืQ%ฯ้ฝV๙%ฑ}ฆด้%::qฐ%ช|š%@ธ ฬื๚]ื๖บ์‰feฤNจ๐ }ณ@Ra[ŸL๚‘ษส †^0ฏ` ยพu ๒ๆœ™/๗+iDxuฟตoองc œ๗ฝKฤรHx๔7€ฏญ|m+๗๗H๒F T—อ–kส็"ำI้|้Šฆุฑ!ICฏฃดžcฝ ๚†ปฑง๊<<คŽฐ:ฤ[๛N8’^ฟนญมDฯ๙oมผHั/ฆฮq๏‰^xQา xŽ๕wีใฃฐอuา๊( }ŒŠฏY2cทZ-๕€โฤxาด5bkL์๔ oฬ@/3,{ฦ6™ฦ๗ ธ›ม%"r‡ถโ•ฒแFใลขต*N๖fอS๒Jธรฬนมฑย๒ii่c€๖Šึl0”n•หานro‹jy7มYโใ†g2! ฦส3ข•๑zใ6จœZ๏ปIบ3ญRl9oษŒม๎ืคศ฿ž‰ฆ1mฦ*†?ภŒวz๒ๅ?ฤ%—0โ๔ ึE œฅจฌiปvrก"‘อฅกธ๊๔žฤเ๏จิมWd์๒I*nฌ๕g๒_O>ํยเ_ด_ฒcะŸ1ˆb๗MฑK๗wzญโ˜_ีฑdฆYLœ%เ-&ี‘ว?๘ฝAผธซ;ต,I๛ใ{ล์๑`๚aŒF’JีvŸ2{ๅƒช:b_i่cpชฒ_!ขฯิ‡s ฃgšy๗kJ 5๔ข๏;ผ#1vแUvฐkˆXึ๔ภ๐8€วภ๔(ื˜๎๏ษฏ๘gˆพEZ‘ภภMN2บฦ๐\fPฟR$EŒ%5ƒ~ู3ง๔Gล#m>ธM5H6xVfNyนชŽุืG@๚X ฑฎฆ`ฝตยG๖5V€7†้ชBvบaคv"Sต๑ Oภ‹ืODใHฐม'ฌ7ภ|`ƒcƒ`6ิ˜Ÿ๏๒ฑ&5>๕h’ฎฦยซN๋•7ฟก}&6ค4%ฤ–ผภดหฟ ัGค‡WNั€rฬ5๓eๅ+}ๅ8" }ŒB{Žu+€ƒTๆC\ท|UษYl…@ซ TŠูำ%ญ<{€kฦี๘ดษ๓ส•V็ถ?ฯฑžQพณมe3_\ุฑŠ&าะวn่ฉฎฉ6mW8หoœ‰@p—ฆหภ5ฬฌ hวg ˆ>fๆJ7ดรyX>='{@จ่แg้œ๛A ฑญŸ€4šQX1ƒชEซฆ๘ล็ฆํพพ’ศH! ๊%์wภš€žzmยG—ฆŸ3?M๓ofฤใzฏ๑‡•aพีฬ—g(iˆqคก‚ชฒจ๗U”โว๊ฆ9ยภฟQ{sฺvWั[! ^I ZฬžศL‘ฺ‰Œ€2cซรMปธ6๎5ำs†6m7ฬ—WใŽYkาะGม9ะ฿๛ร็ปTˆ๘iปผ #! 
4พsๆX—‚๐4ศ…!๑OJอJถ‘0ฤ[ฅ้9ฝ๘๛Š8sSD`E1ฏƒ€4๔ัฎะ๛ณ3ษงeupโŸ›ฑห็ซhˆญ%เ9ูห:=โLํ2ฦฝi9Kชs‹แU ูนLดH5~ƒiืฉ๙ารช:b?6i่ฃ0๒œ์1]76ฦQFศฒ %|b,^L ZฬพŸ™~*ทฅŸหdใ๚L}`ฑ๕Vร€๒Žy#ฯ• IDATำ!S๓ฅ•1ฉYฌร”†>J๙t์'kะc๛!มGˆ@ฅฟw๒๙ฮ…4v(Œหฬผ{ฦุฃ7"X="zR52bผ/wฏQี๛ฑ HC๕ ึOž76ฦ-`2๖ฯไV^ECl…@า ๏ึ=๎w s\ฟิkZๅƒฟ๒๐ษŒ]๚V(pE๔%คกฺะu<ซใLปฌ๔ฆผฬY!t^มบ„ccสaอะ ๑๚ํ_๑tโ๗๋ ซฤอDŸหไJ_Vั๚HC…Sตh]ลŒิ‡rไQ้ikวั~w ชhˆญH2ฏะ7 ไ/7บฮดKฑ๛BRuฌปxำa6 พฤดหช'ท)๘OŽฉ4๔ั๚ฏ™กฒ†ผjฺฎ™œ้$™ ฝžZr่6ตมร๒ฤ๛C˜cๆ%qJยsฌ[1ๆ˜ถ๛1E 1ฏƒ€4๔Q yŽ๒Zว- กฟ™viฯๆํลR$›@ีษ~šA_ํ 1mw๏8ๅโญ๋ม˜ฏ3aก™sQาใบHCญกฒ+Atp]$Gฤฟ1ํ๒;›ทK!\.<`ย„‰[=โถฎแn฿วใ†เ ’มุ‰lhิ™sfพšพfaฯฑ~เ8%YBฟ™ssJb\i่ฃ_กoง๏Wษฑ›ฮ•{›ต;!d^ั๚฿ำอ ุฝัgบ:“wG\ท๙ึw๔)€฿ ื?/3ํฒย]?ฝัŒฅVuฒ฿bะ)c็raฃฦฏ~ki่ฃ7๔ฟxc8_>’‹ฆ]V}ิผ{ฑ1&เ)฿!{i๒มQงตAใŒF6ฏณ๏aฆเKลd](SMŸฝt•.ฝ0u*Žu!ฃๆC=ช๑ซ฿Z๚h ฝ`=ยn๕ใ|้H:m—ีฌฝุ ค๐œ์Ž{ก๋๘ล œ›ฑ šแ9P่{A~ฐ๔ฮอุฟโk~Œึe{ล์ภด@1๏'MAQCฬ๋  ใ—ฅ7๑ขพ3žKUโY-‰บ“T‹ึ'˜qนŽœ™ด]พPEห[2c'ิR๗˜ชขณูvฉiปณ5่„.แ9ฝมc‡‹ญ1mW7ล0:฿\๚hW่Žl1ญ้iภ๔3_๚Hำ๖b(Jภ+XฟแH๕๔๕}ฉฎ๖๗ฬพฏcO๒uฆํnญž[๘ :ถฟ๐ŒiปSยVSo1dœ@ี้อ1ุQeแC;๔ฬนU๙ดฐ—วแญ"sTโ#เ[iคŠF+l+…๑UŠพคก+ฌื\๚่ =8Tฅ้-' t~ฺ.[o1dœ@ล้=…ภjงs๎2snำ{HŒV=/์ลcIซW่ˆฌ8/e lE€๕šKCฝกหfvชๆ+วัฅฆ]๚T๓๖b)’G ZดฮbฦEŠ™ุด]ๅg#ล ๅ\vฦ=f}“bŽก›{N๏Gพข#yห]`ฝๆาะGo่ซฌ= vคJๅS๋-†ŒBจฌณ‰ะิš๑๐c\lๆ]ล QFฎ†ฆ็่O™ถซt,i+ๆŠžปXmฺnบ๑&‡4๔Qf@ลฑEภซ&‰œ2คOL“I โXŸ%เ|•์|nฦ.+iŒๆ฿s,V‰/ฐ5m7๒+N๏ฉฆbฎrห]`ฝๆ‘ŸP๕&ฦ8ฯฑ‚$๖iV›?K็6k/vB ‰ผB๖4}]-๗๐V˜ฌ]gn๔+//่๐[ƒเฅ_ž5mWvฒjฌถlํฌ3@ธ๔ล#Fอsd)i่a่eบาะGํญมhด496ฐEำXtฏ˜=L?TหI฿†2/ฃZ˜ฑ7S๊^ต๘๐„iปMฟpซ่ปn๓ชcJป์๑ศตn(( }ด†๎Xม1‡Moั({นGxๆKh‘%เฌwƒ๐ ฅšyww%-{…์ ๚ขv,ฮFื๑๘@,rUฌg$ฬฅกึะีทŸ\bฺฎาzีHฬ Bดภ๊~k_฿วช.}ๆท๕ไหPีyนฝ็X+(‹—/๛^มบ„ณ”๎0s๎Jb\i่ฃ`ช:ึOh๚8ๅดํZuUB !0L€๘*ึฏWฦม๘‘™wOPึy‘@uqv/6่ฯชšD๘J:็~VU'l{ฯฑ~เ8E?ŽiปyE 1ฏƒ€4๔ัzั๚63Nฎƒใ–†iฺ๎;์ลT$’€็Xฐ‡j๒\ฃ3๓Jซ๊ŸฝŽซ๓a-ข๙fฎtƒฎธยาฉ:ฝ72๘}fบ:“/}@ECl๋# }ด†๎X็3ะ๔ทh๎ฯุ๎k๊+…Œ‰ภ*7;1๕ผฑRผปฒOƒxภD Ÿ๙ ƒŒG@Ÿk๏้9๖g…d x๋ NTฮDใ‹ฉีb๖ฬ๔3ๅ˜t32:ัำHžc`†Šพ์วกBฏ1[i่ฃ๐าฐ|ๆำvทiฌ$2บRด๚ศวl"๔1๐–†ˆ0๎ัr0-1๓+–5d+ƒ#Cภ+X๓Aธ^G@Dtv:W๚ชŠึš%3vซี†฿lŸจข3l“]โ‚P=ว๚ €7*ๅฬe3_œ’†ืE@๚hW่ลc™๙šบHnaPmOšf•ŸSัH‚ํ๐!เc@9ะuV๔:.3]—ษ—ด\Y%กQศq๕ู)xZญ-ฦef=ฃฝb6k0]งr”๒ห~ษดฯ7Kซm<ว ท™ฎๆ—ฮ2ํา%jb]i่ฃPช8ฝฝhm๚“๒WO™ปโฆ:ฐZ์=ฬ็1๒ฃ 
๚_"ผtฎ|u‡#ํ˜๔ชE๋ืฬ8\cB๗๚ฬวื๛ๆ๛‹ๆm=>๕์๙ฎ1๘„ื๗ไ่ิ KKวŽx >มฬ•ƒ—๋ไ2i่ฃึ๒F+๏4s+~rc'?P่{ฺAtp‹ƒ_YซีN˜6oๅถุฏธk€ฮ๎/vอเ+@ฉgr+~?RHk4ต–๊>๚฿;ป- wฟ๕Ok๏˜รn:p’ั=nํ˜วภ„wgr๎ฏTuฤ~lาะGa๔ฌspฯFtญใ–G0๘]ปkNณi;ษV็ศ S3v้ญ๖+#เ9–าIcx{€@๗‚8x ~ญ๏ำN—0ฟdั{ำนาตQhฯ่Uูืค|Rโหd์ฟฅ/OํษฌsฝJCฅถฬ jัชถE๓XSƒ˜NJ็KWŒ5. ?tแถž8แ*€ŠBพม^๛Syซ’]…x$†Wจฒ'QG|๑"เ_Swศด`‡Z{…พY ฉjฌCƒฦ๔ํ_๑ดชŽุM@๚Œ4ผ๒ำvฯป=โ้›๚ถ๋๎๖†฿Zห5ž;y^๙‡m„๏Z<ิCภsฌG์Xฯุ(‰ำีyภQำY่ฑ8U.ส๓ฆ‘ุคกะ เํ@}๑ุเy]ฦ.Ÿิฌ}'ุm~a €"šฯฃ>3{๒+ั๘–็d†฿2๑‡oฺๅใ”@UqŽอนรดืว)๏8ว* }์†!9Fกศ+Lฉ`kำU…์๔QฐŸvิO–z(ๅพeส๔-•Šuๅขผ๒ษ‡mN'Žฯ‘=ง๗Z€฿ฃˆNฮณPุˆน4๔1hU๋Bงจ/๛ธiปฑฟ]ุL›๖ไ^w+@okฦพ 6ทฅŸหdi ม{๒‰ƒท'ฟ๋z"V}กฮ0s๎e๕ Žฮ(ฏ` ‚๊]…˜ถ๛ฑ่dีู‘HCฃพUง๗$Ge ฆบ'NŸฝ๔y8ฺVœ์w ๔๑8ล—C3โฤTWฌ^ฑ๏ฐ‡.ฝ้\iฺฎ๊แ&- ๕ฅn<ว ^d›ฆโœs2ถ{Š†ุึO@๚X ฝ฿สณล๕#}ๅHตทLตWIE#nถ•‚u8^ฒ\U– ดLูฉ๙าสึนO๕จณ"ฆŸิ;พอใnOO[kั~w ถ9ކ๋:๑Žˆ? ›95Œฟii่c ซ,ž๑2R๗5Mxุ฿cฺe-๛Rซลั๋อฯอƒฐ&ทฦฃv/O๚ƒ๗ƒ^ดsี"่ฌฏ๐ดˆ…'๒hw฿ืีžำท'เU ƒงsๅิtฤบ^าะว ตi๛ว็To—'j้šŽ#๋ภa“ี a‘ีฃ๋ฌo€๐I=jฺU๎ฃกT.}ุ๒Gด+ทHPื.}q9UฎEXCw# ฝฤžc=`—:†Ž8„Ÿgl๗อฺวษฎZศฮeขEqŠyKฑฦ๑อไNเ^oีข๕ f\^๏๘–Œc,งqฉฃาณ–?ำ!9ฉญ ˜qถขSฆํ๊:W1 N7—†^G…='๋”ซc่ศCbดsำ9n6๔+ธMทงชND์ำv๓‰Eย@ตฟ๏`๖˜ํฤ—ค—t\v‚—๒฿ผ@œฑฬปณฺ_—ไD ฝŽZ{๋"ฮชc่–†”ฑซฺ’DBJCฏฃ๐…์~ัˆG-ึa><คำ—oฌ]”อ ฆh ^uŒ๓8b,Wegxำิ9๎=ช:b_?i่uฐzjษกtืŸซc่‡แ๒tฮ=EE#สถ:6เyQ~คกิฌf฿พหด -{HศNฅb”๙Kl/%0ุ7ึN`ฆ๋šžaเš”๏urฃ๒œ์™]ข:งL•ข ฑA{^'0ฏ`=ยnuiุฆํพCม>าฆ'{3ๆirม๔ฆฉ๙RpvำŸี…]|โ`3ŸฉM‹Ÿ!ใ›f=MYGฺB า฿ป?๙œx@4๗@๐R์ฒด]๊ˆีcๅ_)๔ŒˆUWๅีดฝฦ๒%?ืK@z<=วZเ่:‡8ฌ“ฟฑzŽตภ$>-ƒ๚2vฉคชุWœ^ฏะ ๕ำv๗ึ # PYิ๛ชTjhZอ0Lƒ1A=ฬ˜@ฬซ|ƒV‘งปบjซฆฬ^๙`ยmy^ม๚›๚] บฮดKวถ<๘„;”†^็จ:V๐ยี…uqXงฎk|tง ›a[ฦฏฬผ๛ne ฺ่ไ&ๅฆๅ$6•ญ(xึ9ธg#บVฉฦFเฯคํฒา฿Kี’h/ ฝฮช,๎ณ รWบr$ยษ้œซtะKแถt˜็Xมแ?RuฦJ=[Xฟิ([XชX์#O@ืqิ่ ญ‘‡ƒฅกืYค๊ฒ™๒`mMรGFภUiŠFm='{1@ŸRŒ-ดฺžc๛สฟN)>ๆš๙๒•4ฤXDœ@ลฑพCภIŠaฎOOเId•‡uฤผAาะฆl‰๑w3๏พก—ฑ๊9ึuŽฉ7ุ‘N]#ะ๙iปtnฝŒ๓œK>ณ›—•cUU่‰m\xŽ๕oT‰—ˆtฎซข!ถอ†ทŠc]Mภ๛0yลPJํา์r,ฟaฺjyNM|‚™++฿ถ)Oฏ˜=LชWืฑ=ื:ฬฺ‹v็xฆvzศ฿่iศ่Kฆํ~^ƒŽH4H@zภ*N๏ฉf&ฏฺทn+N๏"ฯUแยŒ#2y๗&-ูV๚ณ3ษงeJฺŒ‚™w•rT๒/ฦB dž“=  Tฐมณ2sสสำจฦ‘D{i่ 
TฝZ˜ฑ7S๊LFJฟ0ํ’า๒75๚ญซE๋ืฬ8\E9ฬอ[*ลพท๛ฟS‰ภJำvQิs!Yีข๕mfœฌเ†ฺฤmv˜ท่Uฑoœ€4๔™yŽ์qฌฒ่Zำvทmะmค‡W๋็ผW%H&H&W‰Šฦ–lซN๏<ม*|ธhฺeญ๛ิ+#ฆB@;M'%v๔Zฺกk”† Pฯฑ~@mญดOšsKw4่:ฒรฝ‚uJ˜0ใณ™ผ๛•0’ฌz{g&GUํ๑ฉž™,d›ฎž@ล ~Šฌฒ#aาีูฆซ"K* ่ำ‡ ่c…'‘EADA|\@@ฒ™้๊$0]M FQAT“™๎ž“mfบฮฃ&อ Iฆง๋Vwu๗ฉ๏ห๘ๆ๙Ÿ฿ํฬ้ฎพuฯyL|ปGํtำvปบษ%jŽ@w*๚n‡ษ๓A: 1ํ/ื *IH ๚0Jั™ๅ5ตi$oลฎa๐ๅรDนใ๐ดnฺำ9ใ้ ๑•NlY๚ดgQš d“ั๋ˆ่2ฏd็ฎW‚2?จ฿๔บC=.ัMปอฃ†L๗@@ z ๐Tlซฅ่ฏฺั‘ใ6‘’wๅ๑ไๆxๆo%,ห;ฆธM8(ฤ/้Uฯ้๋ื2๗ฉทผ๊ศ|!4๙T์ฃฬ|ŸW_ฬ…H"s›W™_:)่%ฐห&cg๑%LืตGฬดŠvฃ^l(››ณŒ'ด*|:ผj๊ ด`ใE‹”OEž~ฃ›๖ั^ศ\!Tร=้qWy8Z-mหิ<๋ม—๔Vyํ’™ }žฟ๋ญฅO}นT๔J0-(็;ฆจ86›Š]AฬWซ๐รฬ฿Œ$2_Qก%B hr–แyใั—oฝ<๚ชซ้RะK\๎œeภQ%N˜ๆ็ณื^|•27฿=˜5zพ”นป˜๓0๒3บ™r๛ฌ} ~๘ฟร9[~(qวมQ-ณํg†'?ีF@ษ)Š๎๏2yำˆฅ—‚^โ2ไ’ัซAไญ™aฑทใ%Zดผe…๗*4๖:i8/f'‹ัฬฆŒSˆq €}Š_ไ˜Wtำ~O‘ce˜จ*นd์ว >วซii/์• š๙RะKไ˜k}?Uโ๔Mkะš๔๑mKฆUๅ-ใRฎ๗!‘4˜ตpวO์mm{๛็่,E฿—?๛ \1ํo๘“H Šฬฺ `ฌG#yดฝœž้1ผL฿F@ บ‡ืBฮ2วฮผใZCอZ{ฦฟฎเ๛ธญŠ๛ h ภ}ํ`ข‡%j๊m ๏S+•ฌผพไSั3™้ง^ณfฆ{"‰ด๛†Zฎ ‚๎aT<พฦzยžแมF ฆ*:5.9๐ฐi{=/นˆ !ฐ#œeธั<ŸฮH “ยfz‘ฎ<)่ึ kE?B Gู‹FะๆๆO;‘gYะ|ํฮ“vL$แ$—จIyหธ‹ณฝ'Gฅ›iฯ-Wฝ๛…mค +x-d“ฑŸ๑™ฅœ๓ป&&2k=๊jz61$Pฆve†hพO?X^ลค(ภ‹g…๛^๗ุ๊†ฆo?ลiB^cอๅ]udบBRะภฬ%งอ9KH]ฅ›๖• t%‘ณb฿๘‹2ตƒyD-ศซ#TPีใ€…ใ๖GT๙5ค ซแˆœeธ-?ƒNJพG@"ฆํ็A)%{๓:1—Œ-q ฿ฌแาp–ืeพ:œeฌ0ษซOŸ13zี‘๙j HAWฤS]ท1žง›™Ÿ+ฒ(U'S)Lj#3>Iุ)ิ)!H9+z:@๗+0ทZ7m•X —€tEฏƒœ ืถสฑดหtำžชศVเdบฺ#5 n}W…อ ะบู๑b…}Hx!Pน”๑๖Œ_$์๋ผ๊ศ|๕ค +dšK)0ฺ;Kฯ:xsQ4าข๏(x~ฟด ‰๎ธhธญYK &ณ„@ๅ t[ญr0๐ฉ็ซฑI‹Œ›‘๓,$ส HAWˆ4—4Nม๛ํ๒kซบ+ฤy+g๐ํ^๗ น„๎ซฝiผ@ฤ็…ใ™Cฮ‘B †ไ,ร}าไ R๚‘nฺŸR #>‚ฎjึ2: h๑*๋88ชeถŒWj˜ŸOE“™&(๛๚ไ๗ฯฤ๘z8a฿็“พศ ภ่NE฿ํ0ฝชย ฦกๆDๆYZขกž€tลL๓)ใฬ๘ŠWYตGฬ๔ฏ:ี2Ÿฮ uหถฑƒsธy7y๔พ€‡ ฤ?Žดež$Œ๎QTฆ j#Kw‚เS5๓r=‘iญถ๋ษฏtลซล๔ธก๐wฒ๕๚nธำŽŽกอ#Fh๏มพ็ปj‚ำรภ:b^าViษ›ฃ ห'™อ*ึA4„@ต่Nฦ๖wˆGj\ต๛Ž8‚๎ร2ไSฦฃฬ˜ซ@๚!ดOS Snกo๊uZ๚ฦˆ)TภบๆDZษ›งš$I(๛tศฃjU๐๊’‚๎ร"ๅRฦ,0ซ‘ึ’Gซิ!POT~wNเหยfๆ๚zโWนJA๗iีฒ–๑2๏๑.O๗๋f๚ฃuDAz" ๐ำywa๏7ัศ๔ิฟjฬU บOซ–ณขtฃ yr๘๐์ฬTh‰†ตO`p/ฯ_4aƒฏˆ˜™kkŸZ๕g(ง5\฿>ฅน 5บ'วํแ5แx&ๆUGๆืuํำhะpจp3@เI€6pF‘รฬš{@ศ๋็e@{^ๅฌj62๋๋‹Tํe›ณŒOŒxฝบพ[ๆ>๕–W!™๏?)่>2ฮ&ฃืัe*B!Ž– -ัจ]๙”aฒƒ“ˆ0ƒJส”ฑ „ลh.๛]I2ฉbบ“ัร"๗T8ฟ฿ฟฆ›๖ีKF‹€ŠVภz์qฺขุีใVรม๑็๐ชฉา‚ฮp&ษฺุ'เ๖`h็๙"ๅgไ? 
~จทฅ๏ฌ}’ต‘aฮ2žpฌ‚lึF๑พ๒น’e’‚๎3่œปเ‹”„aดžศศ/V%0ซ_ค'k‹F_ใ‚ฯb[ท๕็bƒˆพ6ำท๚@TUPุQ ^ ว3_WแK4สC@ บฯœ?ฅฏVp๒™๋ดณ/ิxภ^ณ–n๐ูถศœภเฆK๗VจŠป?Egห ฿iภๅa3*z’ , ถโ#๒ุ์๑๊น฿9€žย(ž$Ÿฮหฒtส‚HAW†rืBนคq ( %๏š•`ฌV‘l{๋ค…พ`Zes ๏่f๚ฟ*๋AขoO g_{๛{sUŸจฏาM๛J!\]ค —aฝบŸ8Isึ( ตฑฟO;`ฯน๋้‰L•่JEฯะ˜~`t ,~้8ฺน-‰Ž?ยO›ศ.Šฝ‹Bบ  tq_๏{dgป šๅี‚^&JฟKะM๛Œ2Y—0 O—0ใ†XูัBงใ8gดฬ~า ทบฑ”K๗ ไ*b๚l8‘AภซกDฅ —i1๓Oฯ}M[ฦซษ„S#q๛aZขl9หpoฃบทSƒzˆp’U‰–ˆŽ@ฮŠ}เ œฑ]" โวอสผT-žซg>eสŒฯ+สราM;กHKd*@@ z™ก็Sฦ็˜q›ชฐDัpทฟฏDLD*J@ z…๐็ฃณFฯ+ VศัŸ0ปใ…š"Uf9หX`f™ร๚Ž.ัอ๔>‰ืฅ์เฦฺgผ_€งuำ>^‘–ศT˜€๔ .@.i„ThแูpOไXš`ฏBM‘*ึงŒd™ย๙†Nฝ'ฒ7อฐเ{ฐ: Kw‚๐)E้๖Cรแz›‚"=‘ฉ0)่\€๕ํSš ZฃปAnœ:t‹nฆฟคNO”สE gE-€โ~ฤ#b›™V†—ธภyhi“ฮกŠ๘ q™๑ลHยฎฺ๕ฆ™MลN&fe‡Iฺ่ฐ™พข8ึrพRะ+ผบนคq!7ฉดA„„œฺฅ’จZ๙d๋!Lก็TGbเ†>โ'ล3๎ว]^]–1Us7โ1NQ์แ9ดSฌYwrnฯ{€^0VM๒๔—p~ฐอSC3(*Rะ+ผlG๒›ศNLๅ#J๋>D73Jš5TQ]„ฯงŒo0ใ+ช’e Mะฮือท}ๅ๓ษก.zา‰๘ฤp<ฃ๊yiUถชFgเwฤFz „cT™fฦq‘„+Uzข RะฐV๋‡„~ คฬcU๘ืSง eš"ไฌeผDŠ6:1pWฤดฯ)ีlถ}๚*< ฦJีุ~žบฉูL_ฌBซ5Tฟูัz<้zdY๋9KAศ +๎ฦถ5+ๆk๔Dๆฟ’ขุุbใ 8๘ƒ@ŠŽฮ.Š~€4ZBDฏuำVyJฅ๊|Œั=1Pี๏๊uิzxฦoTq9ช^$ร‰)cwB`เไง1#ฃ๏V ˆะฆวm๗Q(นJ@ีqภฬXภ}‡จ: $›2N!E m้ํฟ๗IO# KH[ษ่^!ขgT5^q“ts[ไภŸ@ฎท SRะUPTคัŒต:ฤO*’&๓†C8ถ%nIฑฎศ)"ทŒป8ซŸ‰g~โUg๛๙๊๚l๓<ฬ\ฅทZึb+>"อซช0ฯ;tำŒB=‘ )่[œeภนŠmฝBก#ไ6›bชŠไ๒–๑;๗$วxIOุzาุษไ์ขุ1b›งฎาM๛JีjU/g๎›uฏพ9š?8ูศlฎUf’—บ๏e„ฅ"9+>ุ์ถŸœคHr›LGxีิ™ฒIN1UrYหุ@ภhRW๊ฆ}•GNฯYัUํM›๏ือฬGฝiิว์l*v1_ญ0~ …cšอeฟSจ)R$ Ÿะธ(]ฉhTcฒ•[cฌ'lU ”ซGม5‹ข‘!Rะ)ือŒฒ๓ฝท_‹lส๘yคn…nฺ'ึใ'็|2:›‰ฆ6ส~7๘ฒฐ™น~8>dluP๖ขฉฮ๔ƒ๋:Ÿ2neฦ็U;dโs"๑ฬ]ชuEฏ4])ใรฝ#ใ้ cไH2S[<‰์bฒ๛l:<เIงฏ)่_#^8/”“uฯม>NนUฦซNจ„–ถๅTฎ-‚Eศ-i…็W4โษอC4`)สะNe“ฦDธฅิ๙๎<~6mฅEห‹Ÿ ฬๅ…๓š๒cฒhU์็้pOdŠดญULต ไค Wม"ญ]2sbcกฯ}ไฤ‡ช๔—&๔0ึ\ฎ`งuภ ˜Eอ‘๙ฮqž๛ืig…ใ๗๘‘^.e<ฦ|ฺ–nฺ 53}ฐ)ำcLลIu๖…ูkึาNลบ"Wค Wม"นปวŽะvwภ6ชทLlะงŒo[’Wฏ-ŠCศYฦ฿x0ฆ่ ๗ฝ๒/•cdŒ‘CๅฑปŸ่ึฐ™>฿‹Fญฬๅ ด๎c–=ฤเ(ฮฉ๐ฟ_T์Uไ| จ~Iๆ’ฑsA์ž$็ว๕5†Zๅ49?ะ๎^3—4R ดyฌ4vฉCลฬ% AธiจqCœ@Ÿ ›้‡W๋?gu/6๎bฦYชseะ๙3}ซj]ัซRะซgญœfญ่ํ:ฯ„gจ!4MŠบ/tw)ชnใฃฺำุX<+๏๔บ=™xV7mฅผU3็z๖^ไฟศzFผก๘ต๒VซReพัแ้ใๆdฒมหพ๖ nŒ j„F˜์ฑEf>ปŽ™/SC›ัอฬ)jดชS%g ฬ๓ม}^#>าฏ'|๐+’>‚๎#\?ฅณํญ’r‹๚>ล๙SŸ6uฯน๋|าูํ(๏ฎๅ๐่ณ3?,r6iF„ฯ•2wงs˜>ฉ'า?VฆWEB[฿ฌ}  ธถ7h(œ(MW| [ฅ’Rะซtแ\นได 
g1อง4^แตFๆคW๛ค/ฒƒrIc>ศใy้;ะ$เn tี3ืb@็ญXœ‰ฏV|ศIa7O42=ลxจฅ1ฏ-<~ิ่1#˜ๆC^ š1ำiดEฒJ HAฏา…f;kลพ@เ๏๙˜ฦ฿฿๎6ต9‘vญ’หGูค๑:ฅ:ใoDห๖9ฅ#ทฝ~็โ่{5G›Aเ3|8เ บSง?ญ:ง ๋ญ]2sฦB฿พ0ˆNืใ้ƒฮA•—€๔๒๒๖%š๒[ค๏t๙z-๖“ž/IึhŠ^ฮ k|Nีmิ‘g ถ6็g<‡๙่–Dฦ๋Y๕~ZTฎ=ฐวฅ+พำ๑oŸฬ_าO'๗)OZA@ z –มป‰ฌ[Dเู•vฉะI\˜N,s;BษๅN;:&ด‰ณ๕'๘ _~IยB=nŸ^ภ•‹8ุื=ฮu__\0พฅ'์K}ััช' ฝ๊—pk๎๗u{Œน‚#|Li#3>Iุ๎‘•r๙@ Ÿ2.aฦ >H—]ฒ€ยอe๎‘ลuq vฅ[`ผO ?ŽงKosŸ่ึ€ฌ๔Xฤm)ธท๚๚Cด’๗๚™ฎ ›้+ŒQฯฺy+๖[VJณ<๙Fฬ\Rธ•‰˜Kง‚๐s฿ข๋q๒พYแ๒‚^~ๆพF์Nฦ๖wˆ๏,=Ÿ๎ต;ฃDxls˜ํ=gัF_ชC๑๕)cZแnจชึ๋ล๐*๛ZงZŽ๏ผe\สภ7‹?ig8๊cำแžHœๆ?่น‰ฯฐ#ห„ช" ฝช–ซ8ณƒ\–๙๘Œ๚6#ะˆ็ศกลญหpFๅ,ใ๋พ6œ9Aห i๕๐8•d%ฬ“?เ็.๔›ฃ81ูศlส๚Šเ‚ต๑ไl๐๕ฅžDŠ›ผ!>น%žษ7\FK gE่ไbว—2nธ'OธP77ธzGu->qRศiธจYtผ9ŠgK1๗‘pIKAฏฑ>lส˜FๅบuหธHOุ฿ฎaœeOmอข9ฃG4๔ุ`S๖เฅ$ฌวํ K™ZMs๒)รdฦt}ง7๖lžฝ๏•›|Œ!า5F@ z-่Ž้tฅขQษm1สT9ี 8S๚ชซ#ณข๛€ษmฏzฐ:U”˜๏ิ?o=๛`zx’วธฎw# oๆฐGฏุR3K๖ง ›[O‚^/\{์ระุ}œfLา]อiKฏ*Cฌบ1Pิก= ๐ัAL˜฿'์ฯั›*O๋ญุ{ ฬp˜*อ]่H1๗p-หKAฏๅี.ทฎvใHMƒ  ,m, ๘rุดkโy๊ ผD:Fว4Œฅ{˜17~ถ๓pฅnฺWฬ“R;นd์\ะภ๑ส~5Bฺๆ7jldี;ก;=!๊.Or ^5็ผe\หภWU๋– —ใณzยv‚ึไีHt‚3Bปเำ|O๑ฐžฐO๕=ŽจiRะkzy฿™œŒ#ไกษŸฃ)฿r5ŸŽgVิj฿าอ-Žถมกไ[ ‚๙"ฬผ^‘๘ešตb1ป฿”7หู‰;tำLา’5N@ z/๐ฮา|ไf™฿'สm}:๊Nm _ฺ|rf}"๗%ๅl*v˜/'`ไ๐”๖ฐฯ1ั5ตๅ‹ฮkส้บ /๚xPฬฟ–‹AืGฬ๔eร[?-vN@ zพ2|bšหSyํ$โ‹ย๑ฬ=uŠ]yฺ[๛ีพฤเฯ๚๕U ฟeเ6ดค< ๆ“ญ‡05<๐eฐล บ bฆo-C, Q'ค ืษB๏,Mทgs“ำปˆ™Œ2cX}Rฺฑชฃ๎žZ–;vููไ๐้ ด)P๎๐;ณศ์L5C;$ ทฒๆSฑ‹| #†œเ}€Cฤว3๗{—!๐oRะๅี€œeธฟXส฿ๆ’xฯธGœสฅภ๚๖)อ…Pำt€ง8 <๐]๛๎;€1Vƒ๘9ฐถJ ั“อm๎5ๅฌ่qบอ็.…sฬŒ3คcaอฟด*’ ๔Š`^ะผ2ƒๅพ^่ณบ™.ว1ตๅฮ-0๑ฒNl้ฃฆIOF0˜ mtBœใทx๕ฤ๙™žภ˜-ƒ‘ม}$฿dเ์2„โoDฺpผใน2ฦ”PuD@ z-๖Pฉ๎์}ภ„กฦช9ง8„ #ณ2/ฉืE!ฐ•[๑9ฺr 1ปั~ฎ|;์œr๚๚No™๛ิ[ฒBภ/Rะ"[ฅบํWม•แDฌ๊'๐Bฺˆ+ๅ๘ุ*}ุvฮŠžะทผปŒ6-bfฎ-cL Uงค ื้ย•v6๛)Ÿ9ิ8Ÿ~พ„ซ๊กk—ODv;]ษ่QัMo๗o-3˜nVญdหฬUยํ‚€tyi์’@6<นิš*ƒ‰โ_ทQ™๘ตš ไฌid๐๕ž]๎<๔;0'2'ฝบฑ%^‚^ฟk_TๆƒŸn.ใษr๏๔Ex†\-;ƒ‹Zฒบด~I๋dงZPๆ oโNเ[รfๆบ_PvRะหŽผ๚ๆŸ>ž๛๚Pผ’๎N๐ี3๓h%}H์`Xปdๆฤ†B๏ื ๔)pูรเณไ๕Y๒r€€ty!M o—2p}ั|Hฯ๓7ร ๛>฿Bˆpีp7r2๑E ธุ‡y ฎข4 ฯhเำšใ™ฟ)R!0lRะ‡ฌพ'ไO;‘ว=ˆf๏J“`เฏฎŠ˜ถDCฎ:#0๘uะฅๆU4uฦอzยพฐข$ธO่๒(…@ฮŠ#l^ฅพฃ‰็0ใM฿1nN&[JN2ง:ธGๆ~๒d_ะ๑vi๘xธอNVุ‡„ไบผJ&MลN&ๆ;่%‹(žศภ}๐ฐi?ฅXZไ*H ำŽŽiุLgณƒ‹A˜\A+ƒก๙ืิ฿pZ๘ค'Qy/โ@l% ]^ žฌ{lฺž ฮAHxR=™๑{"๚Ÿ 
=›~ฒ๏•›Tห‹^yไฌุL‚€•'โะQ˜่ฟ#๑๔5C”B ผค ——wอFห'c็1ฑปan\ภ’|‹€Ÿ8Œ{# ๛W๓&vvB sQ๋๛B Ÿ`๐ุ3(ˆุึX๛ดt สŠˆ HA—ื„2[?ญ๓7A|Ž2Q…B๎&: tฟใ๔฿™ฝ์ ฅEส#G#{S@์>rv‚G9ีำณDtA8ž™jaั* HAWISดtตGj!Gษภ-yฑvs"๗ภ๚ฌac๎ๆJฆMงc>@ำ+๔์๘๎3?าz๙โๆ“3๋kx)$ต! ฝF2ˆiไRัO1ำuดั฿ฟ=๑J=ศงค›ฟ+ๅnn mข๙`œฒป}\๙ >&โO…ใ™u! Ž€tu,Ei'Oaภฆ่โ*๔7"$ ภา‚ึุฑืฌฅชฤw`mv-ฑทV่Ÿอฬณ 4'ฐFท๋xnff.r ช" ฝช–ซzอฎoŸv@Asnpjeั `9ภ) ”าอŽซศ{ลฌฎ]2sฆB เฬ˜ย*fฆ๘ภฮ ~ุXเฏสYลC“‘ม" =X๋Q๓n๒–แnx๚.GTaฒY"<ๆ•ฤฺส๕ฃU“ฬๆ*ฬCฉๅWํ่ศ๑›q3ลภ<D'* เณ˜ป{Ž๓ลpbู๓>‡y!เ+)่พโ๑`ๅฌุ™D|€wU1ฅ~~OŒ_‚h%มYY๋gy๓ยyก๕cึ๊P่h€Ž†รGƒp€†j[G^vภ—H3•j[9๑ป+RะๅตQ1kอ=ฒaรฬ์žว\1#jฟเ ๚#/€๘`~A73ฏซ ใฏฺ›‹ข‘Bศูทภก@ด1ฟ„c็)ฒ๛ว๑VูซŒ+๕„ํสฺ่B@-)่jyŠZ w>ปอ-?ใKจ†)opฟƒ‘—‰eP—งซ๊jขฆฎ๑mK๒~'าHtBShbH+Ld‡๖$ข‰ ny๛๑ฌ@ผ@๛ฝ]๒พF๛ํฅ๚=oฟพnึถ๐ทๅ1ด ะ—พ‚๎;b P,ญ}ื ๘€ฑลฮซฑq๎ฃ d ุย ^oa`‹๛`ธ๕ถlC฿>กo๑@ะ ๗oAภH†3ยo5xb•ฝแe™฿|๛ะ-Rศฝ ”นี@@ z5ฌRy\฿>ฅน_k๚2?Wว…ฝฮV—tป‰่†B๏–Zๆ>ๅ!‘Kิ4)่5ฝผีk 5z์ศณภ๔€ฉ๎lฤ} ไ|3๗๕}W yฉKจА‚^๑%ล่NฦZยg>ฃ˜๑2ฆ 0^๘–พ†ฆ;ๅ@ :\IYฺงสk บ œ:ึ฿ท‰ว•s€รI+—|ะ"[`žžศ,š1๑#สI@>ก—“ถฤRF€,ะ๒ว>9L็``S˜\พ˜๕`X"อu|[tฎ2Rะซlมฤ๎; t=vยXญqฤm[FตL€ž'rn๐ึ–ป๗ฟrS-g*น แ‚>\b2>ะึ/i\่ื>IDg0๐@›sลศq7‡่H[zUฑ“dœจ7Rะ๋mล๋(฿bใ 0Ÿฆyภภ๑คrUfz”เะ๒8อฐP=ึลฉจ )่•แ.QหL sQ๋๛ด††ำษแำ@8ฌฬแ%\q๚,%ฦฝ6l~DnฉMF mค หkก๎tงข๏.ฐ6›˜@ˆUw‚“p'ฌ๑าF๑‹rœิล‰PK@ บZžขVexแผฆ๙ฉ ‚ Pภ๛ซ)…ภํ=ฦท฿D-๓Dกวร๑Ž็†ž"#„€(†€๔b(ษ˜บ!ฐพ}ฺโฉ >Ž€ฃชดoปโ๕๒๚ถWศ.8ฺ-ณ;lลๆDNARะๅฅ vCภ=~vฬฃŽfโใ˜q,h }hด rนgฆฏฐย!^3OO62›๏Z  ฝQR(/uMณก‡‚๘0">”๗ฟq €ฆ๒: Dดบล›ษC+"Oท>K 8p&&„@‚^g .้๚G Ÿl=šv;x?@€[|—A™ฟ๐'0^€†—5ฦŸ๚˜41‘Y[†๐B"HA/’ ^ ไฌ่>pด(ไ์ฆ<‰A‰แ๖)o =ษk๓_๘ŸDดฦaฌใ5Œ^า์<่สT! สD@ z™@K!P 7ฯ ๗ั–‰šƒLh4๋อŒ1ถฟ๛7ow†={มิ ` ศ›ท่%๐&๊%`‹ฺHŽณ–H[M่_ืธfฯน๋Š๑&c„€6)่ม^q'„€B (Rะ‹ย$ƒ„€B@›€๔`ฏธB@!P)่Ea’AB@! ‚M@ zฐืG ! „€(Š€๔ข0ษ ! „€ม& =ุ๋#๎„€B@E@ zQ˜dB@`‚์๕wB@! Š" ฝ(L2H! „@ฐ HA๖๚ˆ;! „€E‚^&$„€B ุFฤ Yช@IENDฎB`‚python-advanced-alchemy-1.0.1/docs/_static/logo-default.png000066400000000000000000000764041476663714600237220ustar00rootroot00000000000000‰PNG  IHDR๔๔หึ฿ŠขiTXtXML:com.adobe.xmp kQีjsRGBฎฮ้ IDATx^์ ˜Uี๗งz&!„คซ'dwA‘ลQBืLBาี BwE„ไU5ข lโ†โŠ"ฒ!้๊ษึี„EQQ@ลๅ•}ฯtuBXฒฬtฯšไU–ษLw฿[U]งŸ'๒ฬ=sฮ๏™ำต{ ๒B@! 
bO€bŸ$ „€B@@บL! „€@@zQRB@! ]ๆ€B@ ฝŠ()! „€†.s@! „@†E”„€B@HC—98k๗ํ^Ka;ต๑‰K>‚ ง(U๓ฉ6ะ58๔๔”นทญŽ`ˆ’ˆi่ฑ(“ู ีึพตlรเYฬฦŽo`b3ZbำR˜n๓ vzrๅrKฝ‹3!SาะcZ8 {dีพƒแืๆ3่0; งŽ ฐ ‡ 3นา‘‘$!B =จ"ูz…์~Dt ‡ดปxl!๛๔™ด]ZิBŸโJฤ‚€4๔X”I‚ฯษ๎ศ  84Sn‡อ9๎_”ณค*F% ]&Hl x๋ .mธ:ฦef=C]H„@ HC —AuูฬmAjฯM\๒’๐+ ๎ขมิ‘้ร–?"x„@’ HCOr๕c˜๛SKึ=4x ฏa๘rxV†eฺ+๎ ฯ…( h†ํ๚Ht/"IQ๑# =~5“ˆƒ}N,0ผท฿r 1‚eI“J" ๘ฮะœ=อ*?—H’ดxi่2bM ฒจ๗UH๑Eผ7ึ‰H๐ เ฿๘†qJฯœา4”แB c HC๏ุา&+ฑชcศ@pไ๊›’•yโฒ}”™ฮษไK?K\ๆ’ฐƒ€4t™"Eภ+๔อ๙ŸะืQ‰I2๗2ำ%ๆึตd•‡‡ฏ$ ]fEGX:kšI€อภ~๒V|์ส๎๒V˜sฟฦ. Xด˜€4๔wํ!๐ ›jโ ุ‹;Œ{|ฆr4Œ๔˜ฒN\ˆฟ :~ โeฬผ ”z฿ธ/=‡฿ฌชฎุ ค†žดŠKพร6-}ฃe>hฮ-กฌSฯฑ^0A%|9ฏ\…žุ †.ณ!‘‚+๖ษ๋hr๒dผำฬญ๘ฒNL<วzภึ*แ๛ฤVOฎ\Vั[! i่2 Kภs,VMž€ƒาถ{ปชN\ํ=ว พmฅ?๔vู{]… ุ MคกหLH, r…ฎ|…{ษKo‰5”ฤ5†ฎฆHล‹€Ž็ฟr…ฎ~…๎ณฑGO~ล?ใ5{$Z!=าะฃW‰จE<วzภd%wr…ฎ|…n0ํ:5_zXฉb,„€r—9\žcญ0E…€\ก~…ผค0๖U๏dฺๅไ.S™€b+^D`์฿5ม%:”@ลฑVฌIo#W่สW่ƒฉ๎ํฆฯ^บช๙"ˆฅi่2Kภsฌงlง@บrC๏gd&ฯ\แ)ีAŒ…€†.s นt4tนๅฎRœฑงN}WyMrgขd.๔+t=E%†<วzภJกหบ๒zmO’๓ฬ•fก aาะe"$–€Ž†.W่๊W่jทูaข` Y๙! @@บ<17ฯฑ‚7ซ_ฅ’…4t๕†พvOุอ*ฏWฉƒุ ! W่2Lภsฌ`ํ๓ฮJไ–ป๒-wำvๅยBiŠฑุD@~‘d&$–€็XุE €4ti่JHŒ…€>าะ๕ฑฅ˜ะัะๅ–ป๚-wนBู/Ž„Yาะ#[ ,l^มz„”ศy่ส็กKCWšb,C@บL†ฤ๐๋~ป+[๎rห]i‰ฑะG@บ>–ข3žc=`W•ฐๅ–ป๒-๗!ำvปUj ถB@l" ]fBb HCW/ฝ็(7๔AำvวฉG" B@HC—9XาะีKฏกกo4mwผz$ข „€4t™‰% /ลฉ—s,y)Nฃ(-คกkม("q$ W่๊Uำp…พด ๊‘ˆ‚าะe$–€\กซ—^ร๚ ฆํnฃ‰(! ]ๆ@b ศบz้5\ก?oฺ๎D๕HDAi่2K@ึกซ—s,ีu่ฯ™ถ;I=QB@บฬฤ+t๕าkธBึดษ๊‘ˆ‚าะe$–€\กซ—^ร๚3ฆํNQD„€†.s ฑไp๕าkธB—†ฎ^QรคกหDH, r8‹๊:๔5ฆํNM์$”ฤ…€Fาะ5ยฉx๐๋;ฉD-{น+oบฺดดJ ฤVMคกหLH,ฯฑฐณ9mM๕-๗ชiปฆR ฤXi่2’MภsฌวผJ…‚\กหบส[! “€\ก๋ค)Zฑ" ฃกCฎะๅ =Vณ^‚ํdาะ;นบ’จ<วzภ๖*˜ไ ]ฎะUๆุ คก๋ค)Zฑ"เ9ึใvP Zฎะๅ ]i‰ฑะG@บ>–ข3žc=`;•ฐๅ ]ฎะUๆุ คก๋ค)Zฑ"เ9ึำฆ4ืตDบ4๔XMz ถฃ HC๏่๒Jrฃจ8ึ*zT( mgปทซhฤูVรNqฒ=ฮ@bi่‘*‡ำJ:บ์gฝS\๐W&ธแ1๒Gz+'ฝ๘๊hาะ;บผ’h<วชPฺิDnนห-w๙-Q! =*•8ZNภs,€าถฃาะฅกท|โŠC!ฐาะej$–€Ž†.หXฒl-ฑฟA’xิHCZE$ž–๐ซ @ํค/9mm์g่ฃWTžกทlฦ‹ฃN' ฝำ+,๙m‘€็XซLQA$ทๅ–ปส[! “€4t4E+V<วZ`[• ฅกKCW™?b+t†ฎ“ฆhลЀކ.ฯะๅzฌ&ฝัคกwty%นัxŽ๕ €ษJ”d/wy)Ni‰ฑะG@บ>–ข3žc= `ขJุrห]nนซฬฑ: HCืISดbEภsฌ็lฃด4ti่*๓Gl…€NาะuาญX๐ๅ็ฟ†. 
=V“^‚ํhาะ;บผ’ฯะUŸB^ŠSRT5mWi๛]™ๅB@l" ]fBb h8)LฎะนBO์/$9าะ#W จU*Žตž€๑*ไ–ป4t•๙#ถB@'i่:iŠVฌxŽตภ8• ฅกKCW™?b+t†ฎ“ฆhลŠ€็X[>ฅปLdบ๊{๒ ฝน&ใ„ภคกหI,ฯฑtฉ+tนBW™?b+t†ฎ“ฆhลŠ€็XCR*AKC—†ฎ2ฤV่$ ]'MัŠฯฑj • ฅกKCW™?b+t†ฎ“ฆhลŠ€$–€–†.หศฦ2‰ ’ฤฃF@zิ*"๑ด„แญีU“7ช:“[๎rห]u‰ฝะE@บ.’ข+ผ๐่qี‰•`/wฅ4ti่JHŒ…€Fาะ5ยฉx[๎๊๕’u่๊ EA่" ]Iั‰์V“ืั:ๅ ๅบrห]nน+M 1 HCืSคโCเ‰E๓ถŸz.ธบT๛ศฒ5Yถฆ6ƒฤZh# ]JЁUnvbj=ซณ\กหบ๊{! ‹€4t]$E'Vt5tศ3ty†ซ™/มv2i่\]ษm‹n:p’ั=nญ2"i่าะ•'‘=คก๋แ(*1#ฐ๚ฦ์<ญV [nนห-wี9$๖B@i่บHŠNฌ่j่rห’+๔Xอ| ถ“ HC๏ไ๊Jn[$ฐf๑ASkFwU‘r—†ฎ<‰D@่! ]GQ‰ต‹ฒ™ม จ†-ทๅ–ป๊{! ‹€4t]$E'Vžu๎ูˆฎUชAKC—†ฎ:‡ฤ^่" ]Iั‰] ]žกห3๔XM| ถฃ HC๏่๒Jr["ฐชž"zR•\กหบ๊{! ‹€4t]$E'V๚๐ปžPZ^Š“—โ”'‘=คก๋แ(*1#0ฐtึฦะะใสaห^๎ฒ—ป๒$! ‡€4t=E%fไ ]OมไB@Œ@@บL‹Ž!P้ฯฮ$฿8เฃZ–”์งบS\#ฅ š๙ี์ืพž™ป๒oสX!าะ“Pๅฮ๑ฉ%‡N๋<ž'ุฝีฉส-wท๋ฌแ f|ห!‹hมฟN&:š€4๔Ž.o็&็9ฝ‡๑'˜qx[ณ”+๔V^กT๊Gˆ่ป]๔ƒษ3Wxm โ\ด™€4๔6@ืO€๘*6|เำ์Yฟex# ๆ7Oอ—๏ฯCด•=ว šh:Qห฿ฎ%ฎ]–ฮฏsโ‘„@ห HCo9rqุ(อg—Ÿสภว ่iิ>ฬ๑]ฦ8s9Kชa๚ˆฒถ็XA๓+b1ฦŒK2y๗ฆˆล%แP HCฏˆซ๐๚ญ7ขFŸ๑‡UtBดฝด7‡จy้ชc}“Sฃ(ƒd_HฅEQŒObบ HCืMT๔” ฒ๛D_`+‹…)@8ัฬน฿ำEิตซ‹ณ{ฑAQฟล}/็›ywaิyJ|B@…€4tzbซ•ภๆF~>€Cต ‡!ฦx0ฝ5ฟŽฌ๒P๒qา๔+h”Gว ๆ๛ˆ่หS๏œqฝผƒjIˆ †021ะMภsฒ๏๘๗ม ฬึญ’:&ใLnล๏Cา•lฐtฐป6๘;ปD.๐เ/ฟ4*ลL˜๙า•‘‹W คก+ภS5รœ้K ฬTSjฉ๕c cพ™[๑›–zธณีล์ฎพOืƒฐฤC}qx๔ g๕ไ\ ‡๔ฤ(k ตc HC๏ุาF71ฯ้จ]PN=สเ๒ซ%ำ๘y}{cm›๓d๛ั-Wญโ๔žBเslง^ึ(ฑK>ฮH๒๒รึ/ahษ_ยฐ“xx๚ฆพํบฦ๑—ม์๊‡OL‹.n๐'.–F^ษ*No/มŸะ‘vฎ฿ฒญ#ฏ๔กszๆ๚d[ฃ็B Iาะ›'f๕xtแถfยงA)ลณณ๋wฺศ` ัเYpัgv2v๙.ข—?mN8ษVEkƒ)8~.่Š0`็ป 7ิ&^*_เ"\% mDาะeb„Jภ+fำบฮ)ุ_๘ฦ”1พ˜ไMbBb๛ู๊ฒ™bศท}6๖Nj…฿&|<ลภง3ถ{Uถb"ฺB@z[ฐwพำJฑ๏mฤ~ฐF๛Mceฒงเ/„๙ึๆr๋บ!=k๙3cล(?‡@ต฿สณwNศ‹bsqํ8ูN6œ๚‹ช^าะ๕๒Lผš็ไ&3ฏ >ภˆ๛˜q5uีฎ6gฏ|4bฑ%:ร[ปW?=i6 šฟ๙ภษ2ฤฬ—˜4aูล ŠKB/! 
]&„6•B๏ˆ๘โˆฝแ<ฏฌฦ•=sJิ–ฌ…J` hf๘๘มKu‘๘0p 8~ชํ‰€$!๐2าะeJ(Xใ๔พบ1€สb๚ๆ๏m๐']#/7้ƒฺjฅี…]|๐i  WํมขRw๊,yTำู๊ ฦ" },B๒๓Q T‹ฝŸaๆฏDำ๓ฬ๘i*…๏Oใ‘˜$ Vนู‰ฦz:Ž|œยn$U%žใ4ู^ฃุ๋$ ]'อi ฏ)๏ฎ  YmO›๐ 1]ผพถอOๅjผํี=€Š“=ยฦ@6tgc:เSใ>>}๖าUc•B dาะC‰๒žำ{(ภ?is~+™๑5ำvo–๕โmฎDฏvfผู‡๑9€Žhีv[Hณส“™S^ โR‡€4t™ u`'7พส๋/แ“u้8ฦ ี.žjฏ“~yQŒแญ„ู?„๙RmŠŸ ๘สิ฿๒99ษญMทญู[8วŸภฆพ“นOฒ!|อOu]ฺs่ฒ'ฺƒ8Ž,5Kf์V๓ป>ฮญ… (wch$๛ึศ‚’ภ:–€\กwli๕%Vqฌ๐=[้Sญ_‰€› ะS์า๕[ษศคจmjฎPซ}‹ฟไxbัผญวงž๛ €ฯต8๋็4?m—Š-๖+๎D@z‚Š=Zชk4ต–๊^ ฦ;[Š„๑wƒ๐qyNR๊‰wๆ-™ฑjฉ˜B>@7ํRเW>B@;iฺ่‘ฦOpU๖5)Ÿ–ุต…ั?ะ‚๔sๆทh ต๚WBเ?ผ‚๕nพเUญรB—ฆsฅณไึOŠ'i่Iฉ๔๒ฌ๔gg’Oฟj๑Yิ?๎ฎ๑ง'ฯ+WŽ_า`Ÿ๘ิ:ใหiู‡o\;๏อ*ฏo™Kqิ๑คกw|‰ทœ W่ˆƒ-ู]‹๔!yŽ˜เIแิซลพ}˜Ÿตl๓$ย]C์พ$O7๚ฆ]๚zธ>Dฝ“HC๏คjพ(—j฿ม๐~€• Gส-๖H’ึˆ6ํฎh8ฟ6TDL_4๓ฅก๚๑Ž! ฝcJ๙฿D*๋ํDX`bธ้๑oบŒ๑semyธ”E=š<'7X๗ €f…!ใ23๏žช๏าะ;ขŒMb๓nW+ljjŒ_ฅŸฯK๓oุช&ภ ีท฿r1a7/˜ถ{^„QHh =EะBe๑Œ7‘บ @Z—ๆH:D๘J:็~6Lข-โD ๊d฿ว ‡๙พ  +_'.kk HCo-๏ะผญY2cทกZ๊ฮฐ—ฆ1๑‡3น๒OCKD„…@L l~Ynq˜บˆO็\'ฆˆ$์ HCp+ไƒณฬ}ฃ๛w ผ&Dฯ๘พ๑ฎžน+}ˆดˆ5jaฦLฉเ•i!%ฒž :$3ง๔ป๔E6ฦคกวธxA่ผ๐่qี‰•[ผ#ฤTV†eฺ+๎ ั‡H Ž ฐบ˜•™–…๘{M ด฿ปtG“$ด†ฎ e{„<'๛+€ข๗'RฉฺASfฏ|0D"-:Šภฺๅ}ๆฦผŒภo)ฑG†ๅœ„่ฦTVzL „]-Z0ใ์ะR`{้๓แศ‹jHC[ล6ว๋ณวƒ้‡แ…O‡มพI๖ญแ๙e!ะ๙*Žu5๏ 'S^fฺๅฐพ0„ฒจ†F@zhhรฎ:ึ ฯอS›ผ'Eh,%แ.c=ฯœ๚ฎ๒š๐ฒe! มZuo[ฎ ซฉ3ใ;™ผ{r2hJ–ฃะุt+หำjติL ล๓ญƒ]ใrr/บ"š`Uว๚) ำqfพte(ฺ"าะcS*`•›˜ZGwุ3”ฐ wmš8c‡y‹^E_D…@‚ (_ฉ~l๋ ภ3Lป#N|๊าะc2˜AีขU0;ค๏ฅ๎ิŒ๔ฌๅฯ„ค/ฒB ๑”›๚่W๙]]o๎9tู‰PาะcRxฯฑ‚}œ?Nธ๔ท.ฃ๛ 9d%บข*^L`x๗o๙€ใt“!เSงญ}ํwื nmั‹>i่ัฏ*E๋Hb2ŒP ๘ืฦT๗ำg/]†พh !02ฯฑ~FS๐Sำv?,“G@zฤkพ้eบแœkจo ฝฝgฮญOFƒ„':Žภฆ๏+o"๐\ษ๛ '3v้[บuE/ฺคกGป>๐๋/B˜^*U{›์Y‘u6Ÿ้ช ฦŽrjปม๒V๙$„€4๔บR่ฟ?„ื๙ิ3ง,“m$ฐชž"๚=€u†มภ@—?ธว”นทญึฉ+Zั% =ขต๑œย๓‰0OŽ` ฌH & Tฯxฉ`ษูไ&%F4#ยM้œ{„NMัŠ.i่ฌMตุทฯ๏ฏ=<ย‰fฮ ใ‹‚๖PEP$‰ภๆ๓ิ—่ึš7๑ fฎผ€'Ÿ' =b~tแถžธU๐|wํกพfๆ3ต๋Š ZT ึ{™๐s-by!ๅ{O™ปโอบ"1าะ#Vชc}“SC๋—ฆํ‚ฎH ! 
‘€W่]โ/h” ค๎4m๗š5E.bคกGจ โwยเC้๎ต๘€ฌ๒๚ดERอ<'๋”ำ,๛yำvฟคYSไ"D@zDŠม ทzbๅฏ ผFsHฯL{Mอ—ึฌ+rB@„D บlๆถIงฆhEƒ€4๔ิม+XwƒฐฏๆPไ%อ@ENดƒ€็๔~เำด๚๖้@sn้ญš"ึvาะ\ฯ้ภkc ืhฏฬผาใšuENเ…Gงช+ทะ๗–:ใ๏้็3{ัj-NG…H@zˆpว’่?x{ร๏๚—ฮgdO207=ว-Œๅ_~.„@<l[๑wญ;ษ1Ÿnๆห฿ˆ‰ฒาะ๋กาฏh]ฦ|๒vฺ.‡ฑŽ]g˜ข%„@ƒ*…๑U š6Y฿ฺCN[ิHดอRาะT€ล}–a๘%อ๎นv๏+๋อ5S9!ฺืง3ฎ5๓๎{#’ž„กH@บ"ภfฬ๙oํฎฎš>ำนฝ๋F๒๙ญ้นๅ`X๙!ะžZr่ด๎ฺเ฿คuฅGภAi cC+]!ŠNคกื J็ฐŠc}–€๓ujt–i—.ัซ)jB@D@ลษA ตลผ ทฺ}hฟปตiŠP[HCo1๖gƒ{6ข๋!อ/ยญL็,ธล้ˆ;! ฺ@ โXW๐>]ฎ ๘tฺv/าฅ':ํ! ฝล=ว Ž1a๏žœ๛š"%„@Œฌ*dงงˆภDaแฆtฮ=B‡–hดž€4๔0gU‹VฐœlO]๎ธ(cปฺOgำŸ่!ะUง๗$งaoม_^ฃeƒž™S๚]รzbะvาะ[P‚vxzlCmโ;ฬ[๔B ยB@Dœ€็Xฟัตื;ๅดํZOYย€4๔L ฯฑ‚gR{่rลDGfr%}๋Pu&:B Mn:p๏.5dL๓Sธ-…‘"ฺ>๏ลlL€แ31ึ3c=X๏๋77 7j๔, ำ.?ึฆด๊vซ…[65๓+–ี€ Œi่!—มsฒวt67„~3็ๆด้‰ˆ8ฏ฿z#1vaฆW<ํ ะ๖ ๎cถำผฏร+ˆ0p?3๛ภCLฉจ‹๎KฯZLV๋|>ซ%ย]fฮO‹–ˆดŒ€4๔Q{ฮ๐ณ๓7๊rS3๘ตำๆ”ƒฺไ#:†@uูฬm1X‹A{‚๙ต0ฐ7ฏำผ=rXผž&โ๛|ฆฟ๔7 —jk๕•ƒnvซษ๋(xIvg-‰2Ž2๓๎/ตh‰HKHCsฅhI mฟDธ<sO 1d‘กnใฦฝŒM{˜บใึ;จpAX^#.ตโ‹ธๆ๗uiฺฎถG…ญวŸ<าะCฌน็X๐&M.^‡ก]'ูทhา!ะ2โwRŠ็๚Lsๆ–9Ž–ฃวมดŒ‰]๎๊Zs่ฒ'ยฯsz๏xoฺLแLฎSZข>i่!1ฎ๖[y๖ฑXฃL=OฃžH P ญร ะ<0ฟ €ชณxŠ฿อภ ] ๋งุฅ๛uฅ ๙oฯ}ฆํj{dจ+Gั™€4๔f†็Xห๔i’ฏlจME–ฉiข)2กxbัผญวw=Ÿ๘(0ๆ†ขZ(IดO๔nฟ๒ูธพ'ฟโŸชaxŽu-ปHศNฅขjLb>i่!0๖œพ=ฏบค ๔‰ด]๚ฎ.=ับ ฟˆต˜วฬG๐^]บ ื๙+ƒฏๅฎ๎+›ฝ-๏๛๛w่เ(๋าuPl†4๔8Wœ์w ๔qMาฅŸหผ†ๆ฿Pำค'2B@™ภšล}ปืศ? „˜ฌ,(#x`\‘ฮ•–5zมึqฏl Dฦพ้ ญK5– ฎ‹€4๔บ0ี?h๕ู)xz\ใํฦฟ๓gา๙ฒฮ็๑๕'$#…ภ‹ ,๎ณ รถž-`ZH€๑ ๔ฎ!‡“็•ƒท็วhฝSศธึฬปrfL๊ํ ]3ฏh?0พฆY6ป—™.ษไK? A[$…ภจ ู ขฏj|/Dˆ7M€ฎ#บ _๙็ฑ$*Ž๕s]Bh(ตK๚ฐๅŒๅS~>าะ5ณฏ8ึฟxตfูห=B„ Ÿv•;ออบˆด@ล้ํ%ppE~จเˆB?๛Xษปwn)ฒ`—=๘6ทRพfๆ3ี…D!,าะ5’ญ-›’[”b`€@mจm๓y๛ฝฤ“ๅร+X๓A8[ใ> ษฺุl—LLอ—VŽไถ๊X71p˜†ึSwjzTถบีOวIHCืXRฟ8uG%ฝnT2ฐUว:oK#ฏV๔†6ฮ{๙ก*›—^GธLฺ._จCK4๔†ฎ‰้@ม~W(;?ีb…A_ฬุฅเฑ|„@C }ฏ#โK ฌ—Oผ ฌHN|๑f5ีbถฤL:ŽD}ฬดโงsฃ—†ฎฉถ•b๏นฤ%Mr*2%เฤดํฎ""ถษ ฐvyŸนqƒ> +Y'"ห.\;/ุอ*ฏ๗ }ณ@R-™หัชZ0†!" 
]Uฯฑ‚ท?ฃ๔อ๕€ฯh๕‰OšpŠL T‹ึYฬ8ภถ-p'.ฺCเa"|"sฯฑ‚๎สGข2๐๓Œํพฟ=้ˆืัHCื0?<ง๗P€—hา.A ๓7ฆบพ2}๖า็ต‹‹`, Ty>๘ฒWcดƒMฐ๊#๘ท~๓ฟu ฌc/ู˜‰†wOแ €ฑ€ญ6เ‡MlG๐แ๚ไF‰ภ฿าเgฝ?ธqZฯแท?ซAK$4†ฎฆ็Xฟ๐n RกH/ฮ /ศ๖ฑกเ่ำ7๕mื=ฮ3Mะ/ ๔adเ?`0=ศ†๑เะF<ฐแ+žึ™ำ*7;ฑ{ฝบVรฎม"Aปดภปmn:]ถB+๘ข|aQ0gๆKWช ‰‚NาะiV—อ–kkeZdN เำ.้y–ึขจล:J1๛!b๚F\nฏ๐G๎"โป|ค˜ษญะ๒–ถ:ษM ซ‹ู]ด'3ฟl์๐ž ์ู™W๗#Pcพีฬ—g่โ):zHCWไX-f฿ฯLฑฺฝม‹บิ้Sๆฎx@1}18ส’์๋ฉF—่pจOƒฑDw๚์ฉ'_C„c5ด๊อ3w๖Sต7ม|ืฎcMฅjปO™ฝ๒มธึชใ–†ฎXีชำ{#ƒP”i‹9 ฅบฯ“็๋mมชำg๚gง ็k<$Hgผ/€แ‚h9 ^fฮqตLจ3HZมฉg3อxoš‘ั`๚ข™/-ˆL<ศว๊”บ ฯูR๋(ึ/†ฯืA|V&Wi‰หภHจ8ฝง๘(‚ฦภp=Kงฺ๎-‘Rp’;a(u4ˆ่€ดR๖ำvwiฅC๑5:นBW˜!ž“= ๋$ขdz; œุษWKQ‚F,›ืo1๏†~šOt ]ตgเMไขี$h๎T3ห ๗ฤyW>ƒ้-m9ซ˜ˆีE@z]˜Fไ9ึBG+HDฮ”—ฃ+uŽ์ืนาl1 U‹fผึH—6~๖u(9zDธŽ|Z(่๋ใ์าg€๐k๋ณŠศ(ฦฬผ{BDขI|าะ›œ.<`ยึทZี่[ญŒแฐ/๙๑ว™IŸ`Z“!้4๓ˆ้tพt…NQัาK`อโƒฆ๚F๗8UฏrSjทƒ๑M3๏_rๅำ$M{้ำ)W๎q๘xฆํfโhb”†d•ซ…์\&Zิค๙‹อึื&pฯ4ซ;น๑ซฑแ8€]5hซI0๎๑ว๗ฬu๏RkxแัใVOชœ]‚uอืื.ซ็ln :]kU!;=œ ข`K6}ษ้๒ใ•ไๅถ{tfฃ4๔&kแฌ+@8ฑI๓šš9๗˜—๋x…0๑9‘ุอ‹ฐฐF|ฮด9ๅ)็+J*…๑vTR3~’ˆพ™ข๎๏o;gIUMJฌว"๐๏mฅใ“ ์;ึุถœq™™wฯh‹oq๚าะ›œžc=ชใชO8ผ'็ผฅ0<'{`|>K^˜~‚ฎกฯ›ณWนหง…ชNoŽม_ฐO พU…ˆฯM็ส฿kc ‰u]-Z63.†7ฐ‰า็!ำvƒ๓ไำfาะ›(@ตุทณOฆ/7Ycฺ๎ิzt*๋p"k>฿Tฯ๘Pว0พ—bใ"ู˜&Tสรโ^1{<˜NฐW๘ถ์!8 6ธแBูฟปUุไ{๓œ๎าด้V+๛ฆs+๎m?dG ฝ‰๚W ึูD~กT??0m7xFV๗งZฬพ‡™พ`๛บBศ7แk้\๙ถะ\$Px๓ฆ0'่ไถื™้'์ใฬผาใ ,EdS^ส0q๚,€ญ#่L=/q$:i่M”฿sฌ ุ„้KL ฬ&O,šท๕ธิ๓Ÿ'๐งUcะcฯม>—šv๙z=zษTYoํหŒ3™๑v bH.W]ํฎฤ่ƒwบบเโโธvFส ?e์า[ฺƒ๘~ๅ *a2ี7fง๘ใiตP›ถซ๔bำฆ๕ซตo4KC<:$ž๐qบb’}๋€ม$hT์๛˜่`ผ3๙ฎaโำe็ภTขVฒo๒‰~ ใผ๓พd( ฅvIถ‘fํลN€\ก7ศฐR์}1ชAณ†ำฅฆ]๚”บPqฒG(ุ!L้ ‚ŽXซAืpUฺ.๕๊v†ฺ๊B๏Œ๘"ฬ•uผN๙๘i๙๒SA9yYT‹ู™้+๊z7G'!&œ–ษน฿ิฉ)Z†/xN๖2`๘%%ฅOl๕ไสe%‘฿†7ž}F—ฆ&ี9๛ฦตๆาš4c)ใญูฬcFฐiH”NแZCฤงฆsๅซc V‚~ ตห๛ฬมEญพ <ฆI็สQ>ีฏใgŠ4๔K์ญ?€๑ึอ^>|iปกผศ2Pด๖HฟหL–bŒa˜?ฆ~€ำธิ๒N฿^6xนmจถั‚๙`ไžมค0 *jะ]ใOLžWฎ(๊ˆyฤlฺu฿oๅ2ทฺžl’1‰ GzฅึuบZpyฦ.ึ€๋†‡nZฟN—E๋6+าธภR6๘<~๗?kœW๛ฬog`ู’2าy€งฆsฎำ๐ไƒX๐œำb+N฿#-ฺู7=คก7ภ+d็€H๙™03Ÿ’ษ—/oภuSCtณ[mปฮ8—ม็4%ะr#๚3ภแทLtท_๓‰๊๓ง–:-ๅo3ใฟƒyxีCTž…Vนu >?c—ƒใUๅ“ร[ษ’qYุ{ฤ3pQฦv#ฒ๚&!ล}Qšาะจyตh]ภŒณ0qhอเืถrีเสฑ^VฑUco‡=bเa˜e‚g0{lภcีฎ<ฌUงพซผFW|มsศก๕ตํู0v`๒ท7`lฯฬฏแ๕`ผก/ฉๆฦภ5l 
}ชgฮญมj๙$ภjว:ฤ฿tuอฅ?ึ๎{ำ.๏฿œถXฉ†AฯฑnpP&# }ภดW+j4e>|  ่› t๒6kX ๐jญ†ีd ๊๛La"ƒถูtJk˜ฤรŸ“›Y#๚›๏ำษ=sWธ‘ Qk)อทแฟ`Šnว๒]7ั๚๕คกืฯ žcญฐU&ฏสภw3ถ๛  U[ฯฑ> ฿i˜ ช%๖‘&ฐš d)Qคkิถเึ.สfปŒฏ€Y๋yๆฤฏชฃห~ เํษO}ž@ืฅ):m#ฐ หปวัW&ฯ\แต- qK›€ žฏซ๚Bธหฬน๛ลBฬƒ–†^gซŽ๕MNญs๘ˆร๘cฺvUืฐซ„ฐE๊อ3wๆTํ๓ |@W(ND4 l ฦ๗ปบฯŸ>{้ชPœˆh"่๘;ภฏMเmใพ 5Ž—†^gีtศB„หำ9๗”:]ถeุช์kRพ๑ฅฐ—ทด%นNtJ๔Cย9 ญ‹๚œชลc™๙Uฯd`nzŽ[Pี๛ฦHCฏƒ/X`T๗ฟ%ุH้%2>”ฑซ๊pู๖!ซ ฝป๘เ“Aรง8Ei›าถณ‰@O€้5๘WDu~IMุผฤ๕_M˜พิ„้‹fพด@YG" ฝ\žำท'เตŽกฃฉีjฏ›6oๅช๊ดฺ~๓๛มหsooตo๑๗_ม^ู>Œoerฅ…‹‹@ลฑ‚ว6=jอo4ํ๒‘aล(บ#PซYBจzk>ชg}ฏ5mw8#>ขt วe/๚8๓ )๖gภ๘iอฏ};Ž_Cb"ฒ!๐ ึb๒Š.ฺถ฿†bฑ6—†^G๙<ว๚<€`/d…Mปหฺ^ž๔๐™๐[ัGผOรA5 L;ุ”๙V๑ณž[w๕N๓์ !ะ•b๏นฤ%UgQ[ัฃšO์ฅกืQ%ฯ้ฝV๙%ฑ}ฆด้%::qฐ%ช|š%@ธ ฬื๚]ื๖บ์‰feฤNจ๐ }ณ@Ra[ŸL๚‘ษส †^0ฏ` ยพu ๒ๆœ™/๗+iDxuฟตoองc œ๗ฝKฤรHx๔7€ฏญ|m+๗๗H๒F T—อ–kส็"ำI้|้Šฆุฑ!ICฏฃดžcฝ ๚†ปฑง๊<<คŽฐ:ฤ[๛N8’^ฟนญมDฯ๙oมผHั/ฆฮq๏‰^xQา xŽ๕wีใฃฐอuา๊( }ŒŠฏY2cทZ-๕€โฤxาด5bkL์๔ oฬ@/3,{ฦ6™ฦ๗ ธ›ม%"r‡ถโ•ฒแFใลขต*N๖fอS๒Jธรฬนมฑย๒ii่c€๖Šึl0”n•หานro‹jy7มYโใ†g2! ฦส3ข•๑zใ6จœZ๏ปIบ3ญRl9oษŒม๎ืคศ฿ž‰ฆ1mฦ*†?ภŒวz๒ๅ?ฤ%—0โ๔ ึE œฅจฌiปvrก"‘อฅกธ๊๔žฤเ๏จิมWd์๒I*nฌ๕g๒_O>ํยเ_ด_ฒcะŸ1ˆb๗MฑK๗wzญโ˜_ีฑdฆYLœ%เ-&ี‘ว?๘ฝAผธซ;ต,I๛ใ{ล์๑`๚aŒF’JีvŸ2{ๅƒช:b_i่cpชฒ_!ขฯิ‡s ฃgšy๗kJ 5๔ข๏;ผ#1vแUvฐkˆXึ๔ภ๐8€วภ๔(ื˜๎๏ษฏ๘gˆพEZ‘ภภMN2บฦ๐\fPฟR$EŒ%5ƒ~ู3ง๔Gล#m>ธM5H6xVfNyนชŽุืG@๚X ฑฎฆ`ฝตยG๖5V€7†้ชBvบaคv"Sต๑ Oภ‹ืODใHฐม'ฌ7ภ|`ƒcƒ`6ิ˜Ÿ๏๒ฑ&5>๕h’ฎฦยซN๋•7ฟก}&6ค4%ฤ–ผภดหฟ ัGค‡WNั€rฬ5๓eๅ+}ๅ8" }ŒB{Žu+€ƒTๆC\ท|UษYl…@ซ TŠูำ%ญ<{€kฦี๘ดษ๓ส•V็ถ?ฯฑžQพณมe3_\ุฑŠ&าะวn่ฉฎฉ6mW8หoœ‰@p—ฆหภ5ฬฌ hวg ˆ>fๆJ7ดรyX>='{@จ่แg้œ๛A ฑญŸ€4šQX1ƒชEซฆ๘ล็ฆํพพ’ศH! ๊%์wภš€žzmยG—ฆŸ3?M๓ofฤใzฏ๑‡•aพีฬ—g(iˆqคก‚ชฒจ๗U”โว๊ฆ9ยภฟQ{sฺvWั[! ^I ZฬžศL‘ฺ‰Œ€2cซรMปธ6๎5ำs†6m7ฬ—WใŽYkาะGม9ะ฿๛ร็ปTˆ๘iปผ #! 
4พsๆX—‚๐4ศ…!๑OJอJถ‘0ฤ[ฅ้9ฝ๘๛Š8sSD`E1ฏƒ€4๔ัฎะ๛ณ3ษงeupโŸ›ฑห็ซhˆญ%เ9ูห:=โLํ2ฦฝi9Kชs‹แU ูนLดH5~ƒiืฉ๙ารช:b?6i่ฃ0๒œ์1]76ฦQFศฒ %|b,^L ZฬพŸ™~*ทฅŸหdใ๚L}`ฑ๕Vร€๒Žy#ฯ• IDATำ!S๓ฅ•1ฉYฌร”†>J๙t์'kะc๛!มGˆ@ฅฟw๒๙ฮ…4v(Œหฬผ{ฦุฃ7"X="zR52bผ/wฏQี๛ฑ HC๕ ึOž76ฦ-`2๖ฯไV^ECl…@า ๏ึ=๎w s\ฟิkZๅƒฟ๒๐ษŒ]๚V(pE๔%คกฺะu<ซใLปฌ๔ฆผฬY!t^มบ„ccสaอะ ๑๚ํ_๑tโ๗๋ ซฤอDŸหไJ_Vั๚HC…Sตh]ลŒิ‡rไQ้ikวั~w ชhˆญH2ฏะ7 ไ/7บฮดKฑ๛BRuฌปxำa6 พฤดหช'ท)๘OŽฉ4๔ั๚ฏ™กฒ†ผjฺฎ™œ้$™ ฝžZr่6ตมร๒ฤ๛C˜cๆ%qJยsฌ[1ๆ˜ถ๛1E 1ฏƒ€4๔Q yŽ๒Zว- กฟ™viฯๆํลR$›@ีษ~šA_ํ 1mw๏8ๅโญ๋ม˜ฏ3aก™sQาใบHCญกฒ+Atp]$Gฤฟ1ํ๒;›ทK!\.<`ย„‰[=โถฎแn฿วใ†เ ’มุ‰lhิ™sfพšพfaฯฑ~เ8%YBฟ™ssJb\i่ฃ_กoง๏Wษฑ›ฮ•{›ต;!d^ั๚฿ำอ ุฝัgบ:“wG\ท๙ึw๔)€฿ ื?/3ํฒย]?ฝัŒฅVuฒ฿bะ)c็raฃฦฏ~ki่ฃ7๔ฟxc8_>’‹ฆ]V}ิผ{ฑ1&เ)฿!{i๒มQงตAใŒF6ฏณ๏aฆเKลd](SMŸฝt•.ฝ0u*Žu!ฃๆC=ช๑ซ฿Z๚h ฝ`=ยn๕ใ|้H:m—ีฌฝุ ค๐œ์Ž{ก๋๘ล œ›ฑ šแ9P่{A~ฐ๔ฮอุฟโk~Œึe{ล์ภด@1๏'MAQCฬ๋  ใ—ฅ7๑ขพ3žKUโY-‰บ“T‹ึ'˜qนŽœ™ด]พPEห[2c'ิR๗˜ชขณูvฉiปณ5่„.แ9ฝมc‡‹ญ1mW7ล0:฿\๚hW่Žl1ญ้iภ๔3_๚Hำ๖b(Jภ+XฟแH๕๔๕}ฉฎ๖๗ฬพฏcO๒uฆํnญž[๘ :ถฟ๐ŒiปSยVSo1dœ@ี้อ1ุQeแC;๔ฬนU๙ดฐ—วแญ"sTโ#เ[iคŠF+l+…๑UŠพคก+ฌื\๚่ =8Tฅ้-' t~ฺ.[o1dœ@ล้=…ภjงs๎2snำ{HŒV=/์ลcIซW่ˆฌ8/e lE€๕šKCฝกหfvชๆ+วัฅฆ]๚T๓๖b)’G ZดฮbฦEŠ™ุด]ๅg#ล ๅ\vฦ=f}“bŽก›{N๏Gพข#yห]`ฝๆาะGo่ซฌ= vคJๅS๋-†ŒBจฌณ‰ะิš๑๐c\lๆ]ล QFฎ†ฆ็่O™ถซt,i+ๆŠžปXmฺnบ๑&‡4๔Qf@ลฑEภซ&‰œ2คOL“I โXŸ%เ|•์|nฦ.+iŒๆ฿s,V‰/ฐ5m7๒+N๏ฉฆbฎrห]`ฝๆ‘ŸP๕&ฦ8ฯฑ‚$๖iV›?K็6k/vB ‰ผB๖4}]-๗๐V˜ฌ]gn๔+//่๐[ƒเฅ_ž5mWvฒjฌถlํฌ3@ธ๔ล#Fอsd)i่a่eบาะGํญมhด496ฐEำXtฏ˜=L?TหI฿†2/ฃZ˜ฑ7S๊^ต๘๐„iปMฟpซ่ปn๓ชcJป์๑ศตn(( }ด†๎Xม1‡Moั({นGxๆKh‘%เฌwƒ๐ ฅšyww%-{…์ ๚ขv,ฮFื๑๘@,rUฌg$ฬฅกึะีทŸ\bฺฎาzีHฬ Bดภ๊~k_฿วช.}ๆท๕ไหPีyนฝ็X+(‹—/๛^มบ„ณ”๎0s๎Jb\i่ฃ`ช:ึOh๚8ๅดํZuUB !0L€๘*ึฏWฦม๘‘™wOPึy‘@uqv/6่ฯชšD๘J:็~VU'l{ฯฑ~เ8E?ŽiปyE 1ฏƒ€4๔ัzั๚63Nฎƒใ–†iฺ๎;์ลT$’€็Xฐ‡j๒\ฃ3๓Jซ๊ŸฝŽซ๓a-ข๙fฎtƒฎธยาฉ:ฝ72๘}fบ:“/}@ECl๋# }ด†๎X็3ะ๔ทh๎ฯุ๎k๊+…Œ‰ภ*7;1๕ผฑRผปฒOƒxภD Ÿ๙ ƒŒG@Ÿk๏้9๖g…d x๋ NTฮDใ‹ฉีb๖ฬ๔3ๅ˜t32:ัำHžc`†Šพ์วกBฏ1[i่ฃ๐าฐ|ๆำvทiฌ$2บRด๚ศวl"๔1๐–†ˆ0๎ัr0-1๓+–5d+ƒ#Cภ+X๓Aธ^G@Dtv:W๚ชŠึš%3vซี†฿lŸจข3l“]โ‚P=ว๚ €7*ๅฬe3_œ’†ืE@๚hW่ลc™๙šบHnaPmOšf•ŸSัH‚ํ๐!เc@9ะuV๔:.3]—ษ—ด\Y%กQศq๕ู)xZญ-ฦef=ฃฝb6k0]งr”๒ห~ษดฯ7Kซm<ว ท™ฎๆ—ฮ2ํา%jb]i่ฃPช8ฝฝhm๚“๒WO™ปโฆ:ฐZ์=ฬ็1๒ฃ 
๚_"ผtฎ|u‡#ํ˜๔ชE๋ืฬ8\cB๗๚ฬวื๛ๆ๛‹ๆm=>๕์๙ฎ1๘„ื๗ไ่ิ KKวŽx >มฬ•ƒ—๋ไ2i่ฃึ๒F+๏4s+~rc'?P่{ฺAtp‹ƒ_YซีN˜6oๅถุฏธk€ฮ๎/vอเ+@ฉgr+~?RHk4ต–๊>๚฿;ป- wฟ๕Ok๏˜รn:p’ั=nํ˜วภ„wgr๎ฏTuฤ~lาะGa๔ฌspฯFtญใ–G0๘]ปkNณi;ษV็ศ S3v้ญ๖+#เ9–าIcx{€@๗‚8x ~ญ๏ำN—0ฟdั{ำนาตQhฯ่Uูืค|Rโหd์ฟฅ/OํษฌsฝJCฅถฬ jัชถE๓XSƒ˜NJ็KWŒ5. ?tแถž8แ*€ŠBพม^๛Syซ’]…x$†Wจฒ'QG|๑"เ_Swศด`‡Z{…พY ฉjฌCƒฦ๔ํ_๑ดชŽุM@๚Œ4ผ๒ำvฯป=โ้›๚ถ๋๎๖†฿Zห5ž;y^๙‡m„๏Z<ิCภsฌG์Xฯุ(‰ำีyภQำY่ฑ8U.ส๓ฆ‘ุคกะ เํ@}๑ุเy]ฦ.Ÿิฌ}'ุm~a €"šฯฃ>3{๒+ั๘–็d†฿2๑‡oฺๅใ”@UqŽอนรดืว)๏8ว* }์†!9Fกศ+Lฉ`kำU…์๔QฐŸvิO–z(ๅพeส๔-•Šuๅขผ๒ษ‡mN'Žฯ‘=ง๗Z€฿ฃˆNฮณPุˆน4๔1hU๋Bงจ/๛ธiปฑฟ]ุL›๖ไ^w+@okฦพ 6ทฅŸหdi ม{๒‰ƒท'ฟ๋z"V}กฮ0s๎e๕ Žฮ(ฏ` ‚๊]…˜ถ๛ฑ่dีู‘HCฃพUง๗$Ge ฆบ'NŸฝ๔y8ฺVœ์w ๔๑8ล—C3โฤTWฌ^ฑ๏ฐ‡.ฝ้\iฺฎ๊แ&- ๕ฅn<ว ^d›ฆโœs2ถ{Š†ุึO@๚X ฝ฿สณล๕#}ๅHตทLตWIE#nถ•‚u8^ฒ\U– ดLูฉ๙าสึนO๕จณ"ฆŸิ;พอใnOO[kั~w ถ9ކ๋:๑Žˆ? ›95Œฟii่c ซ,ž๑2R๗5Mxุ฿cฺe-๛Rซลั๋อฯอƒฐ&ทฦฃv/O๚ƒ๗ƒ^ดsี"่ฌฏ๐ดˆ…'๒hw฿ืีžำท'เU ƒงsๅิtฤบ^าะว ตi๛ว็To—'j้šŽ#๋ภa“ี a‘ีฃ๋ฌo€๐I=jฺU๎ฃกT.}ุ๒Gด+ทHPื.}q9UฎEXCw# ฝฤžc=`—:†Ž8„Ÿgl๗อฺวษฎZศฮeขEqŠyKฑฦ๑อไNเ^oีข๕ f\^๏๘–Œc,งqฉฃาณ–?ำ!9ฉญ ˜qถขSฆํ๊:W1 N7—†^G…='๋”ซc่ศCbดsำ9n6๔+ธMทงชND์ำv๓‰Eย@ตฟ๏`๖˜ํฤ—ค—t\v‚—๒฿ผ@œฑฬปณฺ_—ไD ฝŽZ{๋"ฮชc่–†”ฑซฺ’DBJCฏฃ๐…์~ัˆG-ึa><คำ—oฌ]”อ ฆh ^uŒ๓8b,Wegxำิ9๎=ช:b_?i่uฐzjษกtืŸซc่‡แ๒tฮ=EE#สถ:6เyQ~คกิฌf฿พหด -{HศNฅb”๙Kl/%0ุ7ึN`ฆ๋šžaเš”๏urฃ๒œ์™]ข:งL•ข ฑA{^'0ฏ`=ยnuiุฆํพCม>าฆ'{3ๆirม๔ฆฉ๙RpvำŸี…]|โ`3ŸฉM‹Ÿ!ใ›f=MYGฺB า฿ป?๙œx@4๗@๐R์ฒด]๊ˆีcๅ_)๔ŒˆUWๅีดฝฦ๒%?ืK@z<=วZเ่:‡8ฌ“ฟฑzŽตภ$>-ƒ๚2vฉคชุWœ^ฏะ ๕ำv๗ึ # PYิ๛ชTjhZอ0Lƒ1A=ฬ˜@ฬซ|ƒV‘งปบjซฆฬ^๙`ยmy^ม๚›๚] บฮดKวถ<๘„;”†^็จ:V๐ยี…uqXงฎk|tง ›a[ฦฏฬผ๛ne ฺ่ไ&ๅฆๅ$6•ญ(xึ9ธg#บVฉฦFเฯคํฒา฿Kี’h/ ฝฮช,๎ณ รWบr$ยษ้œซtะKแถt˜็Xมแ?RuฦJ=[Xฟิ([XชX์#O@ืqิ่ ญ‘‡ƒฅกืYค๊ฒ™๒`mMรGFภUiŠFm='{1@ŸRŒ-ดฺžc๛สฟN)>ๆš๙๒•4ฤXDœ@ลฑพCภIŠaฎOOเId•‡uฤผAาะฆl‰๑w3๏พก—ฑ๊9ึuŽฉ7ุ‘N]#ะ๙iปtnฝŒ๓œK>ณ›—•cUU่‰m\xŽ๕oT‰—ˆtฎซข!ถอ†ทŠc]Mภ๛0yลPJํา์r,ฟaฺjyNM|‚™++฿ถ)Oฏ˜=LชWืฑ=ื:ฬฺ‹v็xฆvzศ฿่iศ่Kฆํ~^ƒŽH4H@zภ*N๏ฉf&ฏฺทn+N๏"ฯUแยŒ#2y๗&-ูV๚ณ3ษงeJฺŒ‚™w•rT๒/ฦB dž“=  Tฐมณ2sสสำจฦ‘D{i่ 
TฝZ˜ฑ7S๊LFJฟ0ํ’า๒75๚ญซE๋ืฬ8\E9ฬอ[*ลพท๛ฟS‰ภJำvQิs!Yีข๕mfœฌเ†ฺฤmv˜ท่Uฑoœ€4๔™yŽ์qฌฒ่Zำvทmะmค‡W๋็ผW%H&H&W‰Šฦ–lซN๏<ม*|ธhฺeญ๛ิ+#ฆB@;M'%v๔Zฺกk”† Pฯฑ~@mญดOšsKw4่:ฒรฝ‚uJ˜0ใณ™ผ๛•0’ฌz{g&GUํ๑ฉž™,d›ฎž@ล ~Šฌฒ#aาีูฆซ"K* ่ำ‡ ่c…'‘EADA|\@@ฒ™้๊$0]M FQAT“™๎ž“mfบฮฃ&อ Iฆง๋Vwu๗ฉ๏ห๘ๆ๙Ÿ฿ํฬ้ฎพuฯyL|ปGํtำvปบษ%jŽ@w*๚n‡ษ๓A: 1ํ/ื *IH ๚0Jั™ๅ5ตi$oลฎa๐ๅรDนใ๐ดnฺำ9ใ้ ๑•NlY๚ดgQš d“ั๋ˆ่2ฏd็ฎW‚2?จ฿๔บC=.ัMปอฃ†L๗@@ z ๐Tlซฅ่ฏฺั‘ใ6‘’wๅ๑ไๆxๆo%,ห;ฆธM8(ฤ/้Uฯ้๋ื2๗ฉทผ๊ศ|!4๙T์ฃฬ|ŸW_ฬ…H"s›W™_:)่%ฐห&cg๑%LืตGฬดŠvฃ^l(››ณŒ'ด*|:ผj๊ ด`ใE‹”OEž~ฃ›๖ั^ศ\!Tร=้qWy8Z-mหิ<๋ม—๔Vyํ’™ }žฟ๋ญฅO}นT๔J0-(็;ฆจ86›Š]AฬWซ๐รฬ฿Œ$2_Qก%B hr–แyใั—oฝ<๚ชซ้RะK\๎œeภQ%N˜ๆ็ณื^|•27฿=˜5zพ”นป˜๓0๒3บ™r๛ฌ} ~๘ฟร9[~(qวมQ-ณํg†'?ีF@ษ)Š๎๏2yำˆฅ—‚^โ2ไ’ัซAไญ™aฑทใ%Zดผe…๗*4๖:i8/f'‹ัฬฆŒSˆq €}Š_ไ˜Wtำ~O‘ce˜จ*นd์ว >วซii/์• š๙RะKไ˜k}?Uโ๔Mkะš๔๑mKฆUๅ-ใRฎ๗!‘4˜ตpวO์mm{๛็่,E฿—?๛ \1ํo๘“H Šฬฺ `ฌG#yดฝœž้1ผL฿F@ บ‡ืBฮ2วฮผใZCอZ{ฦฟฎเ๛ธญŠ๛ h ภ}ํ`ข‡%j๊m ๏S+•ฌผพไSั3™้ง^ณfฆ{"‰ด๛†Zฎ ‚๎aT<พฦzยžแมF ฆ*:5.9๐ฐi{=/นˆ !ฐ#œeธั<ŸฮH “ยfz‘ฎ<)่ึ kE?B Gู‹FะๆๆO;‘gYะ|ํฮ“vL$แ$—จIyหธ‹ณฝ'Gฅ›iฯ-Wฝ๛…mค +x-d“ฑŸ๑™ฅœ๓ป&&2k=๊jz61$Pฆve†hพO?X^ลค(ภ‹g…๛^๗ุ๊†ฆo?ลiB^cอๅ]udบBRะภฬ%งอ9KH]ฅ›๖• t%‘ณb฿๘‹2ตƒyD-ศซ#TPีใ€…ใ๖GT๙5ค ซแˆœeธ-?ƒNJพG@"ฆํ็A)%{๓:1—Œ-q ฿ฌแาp–ืeพ:œeฌ0ษซOŸ13zี‘๙j HAWฤS]ท1žง›™Ÿ+ฒ(U'S)Lj#3>Iุ)ิ)!H9+z:@๗+0ทZ7m•X —€tEฏƒœ ืถสฑดหtำžชศVเdบฺ#5 n}W…อ ะบู๑b…}Hx!Pน”๑๖Œ_$์๋ผ๊ศ|๕ค +dšK)0ฺ;Kฯ:xsQ4าข๏(x~ฟด ‰๎ธhธญYK &ณ„@ๅ t[ญr0๐ฉ็ซฑI‹Œ›‘๓,$ส HAWˆ4—4Nม๛ํ๒kซบ+ฤy+g๐ํ^๗ น„๎ซฝiผ@ฤ็…ใ™Cฮ‘B †ไ,ร}าไ R๚‘nฺŸR #>‚ฎjึ2: h๑*๋88ชeถŒWj˜ŸOE“™&(๛๚ไ๗ฯฤ๘z8a฿็“พศ ภ่NE฿ํ0ฝชย ฦกๆDๆYZขกž€tลL๓)ใฬ๘ŠWYตGฬ๔ฏ:ี2Ÿฮ uหถฑƒsธy7y๔พ€‡ ฤ?Žดež$Œ๎QTฆ j#Kw‚เS5๓r=‘iญถ๋ษฏtลซล๔ธก๐wฒ๕๚nธำŽŽกอ#Fh๏มพ็ปj‚ำรภ:b^าViษ›ฃ ห'™อ*ึA4„@ต่Nฦ๖wˆGj\ต๛Ž8‚๎ร2ไSฦฃฬ˜ซ@๚!ดOS Snกo๊uZ๚ฦˆ)TภบๆDZษ›งš$I(๛tศฃjU๐๊’‚๎ร"ๅRฦ,0ซ‘ึ’Gซิ!POT~wNเหยfๆ๚zโWนJA๗iีฒ–๑2๏๑.O๗๋f๚ฃuDAz" ๐ำywa๏7ัศ๔ิฟjฬU บOซ–ณขtฃ yr๘๐์ฬTh‰†ตO`p/ฯ_4aƒฏˆ˜™kkŸZ๕g(ง5\฿>ฅน 5บ'วํแ5แx&ๆUGๆืuํำhะpจp3@เI€6pF‘รฬš{@ศ๋็e@{^ๅฌj62๋๋‹Tํe›ณŒOŒxฝบพ[ๆ>๕–W!™๏?)่>2ฮ&ฃืัe*B!Ž– -ัจ]๙”aฒƒ“ˆ0ƒJส”ฑ „ลh.๛]I2ฉbบ“ัร"๗T8ฟ฿ฟฆ›๖ีKF‹€ŠVภz์qฺขุีใVรม๑็๐ชฉา‚ฮp&ษฺุ'เ๖`h็๙"ๅgไ? 
~จทฅ๏ฌ}’ต‘aฮ2žpฌ‚lึF๑พ๒น’e’‚๎3่œปเ‹”„aดžศศ/V%0ซ_ค'k‹F_ใ‚ฯb[ท๕็bƒˆพ6ำท๚@TUPุQ ^ ว3_WแK4สC@ บฯœ?ฅฏVp๒™๋ดณ/ิxภ^ณ–n๐ูถศœภเฆK๗VจŠป?Egห ฿iภๅa3*z’ , ถโ#๒ุ์๑๊น฿9€žย(ž$Ÿฮหฒtส‚HAW†rืBนคq ( %๏š•`ฌV‘l{๋ค…พ`Zes ๏่f๚ฟ*๋AขoO g_{๛{sUŸจฏาM๛J!\]ค —aฝบŸ8Isึ( ตฑฟO;`ฯน๋้‰L•่JEฯะ˜~`t ,~้8ฺน-‰Ž?ยO›ศ.Šฝ‹Bบ  tq_๏{dgป šๅี‚^&JฟKะM๛Œ2Y—0 O—0ใ†XูัBงใ8gดฬ~า ทบฑ”K๗ ไ*b๚l8‘AภซกDฅ —i1๓Oฯ}M[ฦซษ„S#q๛aZขl9หpoฃบทSƒzˆp’U‰–ˆŽ@ฮŠ}เ œฑ]" โวอสผT-žซg>eสŒฯ+สราM;กHKd*@@ z™ก็Sฦ็˜q›ชฐDัpทฟฏDLD*J@ z…๐็ฃณFฯ+ VศัŸ0ปใ…š"Uf9หX`f™ร๚Ž.ัอ๔>‰ืฅ์เฦฺgผ_€งuำ>^‘–ศT˜€๔ .@.i„ThแูpOไXš`ฏBM‘*ึงŒd™ย๙†Nฝ'ฒ7อฐเ{ฐ: Kw‚๐)E้๖Cรแz›‚"=‘ฉ0)่\€๕ํSš ZฃปAnœ:t‹nฆฟคNO”สE gE-€โ~ฤ#b›™V†—ธภyhi“ฮกŠ๘ q™๑ลHยฎฺ๕ฆ™MลN&fe‡Iฺ่ฐ™พข8ึrพRะ+ผบนคq!7ฉดA„„œฺฅ’จZ๙d๋!Lก็TGbเ†>โ'ล3๎ว]^]–1Us7โ1NQ์แ9ดSฌYwrnฯ{€^0VM๒๔—p~ฐอSC3(*Rะ+ผlG๒›ศNLๅ#J๋>D73Jš5TQ]„ฯงŒo0ใ+ช’e Mะฮือท}ๅ๓ษก.zา‰๘ฤp<ฃ๊yiUถชFgเwฤFz „cT™fฦq‘„+Uzข RะฐV๋‡„~ คฬcU๘ืSง eš"ไฌeผDŠ6:1pWฤดฯ)ีlถ}๚*< ฦJีุ~žบฉูL_ฌBซ5Tฟูัz<้zdY๋9KAศ +๎ฦถ5+ๆk๔Dๆฟ’ขุุbใ 8๘ƒ@ŠŽฮ.Š~€4ZBDฏuำVyJฅ๊|Œั=1Pี๏๊uิzxฦoTq9ช^$ร‰)cwB`เไง1#ฃ๏V ˆะฆวm๗Q(นJ@ีqภฬXภ}‡จ: $›2N!E m้ํฟ๗IO# KH[ษ่^!ขgT5^q“ts[ไภŸ@ฎท SRะUPTคัŒต:ฤO*’&๓†C8ถ%nIฑฎศ)"ทŒป8ซŸ‰g~โUg๛๙๊๚l๓<ฬ\ฅทZึb+>"อซช0ฯ;tำŒB=‘ )่[œeภนŠmฝBก#ไ6›bชŠไ๒–๑;๗$วxIOุzาุษไ์ขุ1b›งฎาM๛JีjU/g๎›uฏพ9š?8ูศlฎUf’—บ๏e„ฅ"9+>ุ์ถŸœคHr›LGxีิ™ฒIN1UrYหุ@ภhRW๊ฆ}•GNฯYัUํM›๏ือฬGฝiิว์l*v1_ญ0~ …cšอeฟSจ)R$ Ÿะธ(]ฉhTcฒ•[cฌ'lU ”ซGม5‹ข‘!Rะ)ือŒฒ๓ฝท_‹lส๘yคn…nฺ'ึใ'็|2:›‰ฆ6ส~7๘ฒฐ™น~8>dluP๖ขฉฮ๔ƒ๋:Ÿ2neฦ็U;dโs"๑ฬ]ชuEฏ4])ใรฝ#ใ้ cไH2S[<‰์bฒ๛l:<เIงฏ)่_#^8/”“uฯม>NนUฦซNจ„–ถๅTฎ-‚Eศ-i…็W4โษอC4`)สะNe“ฦDธฅิ๙๎<~6mฅEห‹Ÿ ฬๅ…๓š๒cฒhU์็้pOdŠดญULต ไค Wม"ญ]2sbcกฯ}ไฤ‡ช๔—&๔0ึ\ฎ`งuภ ˜Eอ‘๙ฮqž๛ืig…ใ๗๘‘^.e<ฦ|ฺ–nฺ 53}ฐ)ำcLลIu๖…ูkึาNลบ"Wค Wม"นปวŽะvwภ6ชทLlะงŒo[’Wฏ-ŠCศYฦ฿x0ฆ่ ๗ฝ๒/•cdŒ‘CๅฑปŸ่ึฐ™>฿‹Fญฬๅ ด๎c–=ฤเ(ฮฉ๐ฟ_T์Uไ| จ~Iๆ’ฑsA์ž$็ว๕5†Zๅ49?ะ๎^3—4R ดyฌ4vฉCลฬ% AธiจqCœ@Ÿ ›้‡W๋?gu/6๎bฦYชseะ๙3}ซj]ัซRะซgญœfญ่ํ:ฯ„gจ!4MŠบ/tw)ชnใฃฺำุX<+๏๔บ=™xV7mฅผU3็z๖^ไฟศzFผก๘ต๒VซReพัแ้ใๆdฒมหพ๖ nŒ j„F˜์ฑEf>ปŽ™/SC›ัอฬ)jดชS%g ฬ๓ม}^#>าฏ'|๐+’>‚๎#\?ฅณํญ’r‹๚>ล๙SŸ6uฯน๋|าูํ(๏ฎๅ๐่ณ3?,r6iF„ฯ•2wงs˜>ฉ'า?VฆWEB[฿ฌ}  ธถ7h(œ(MW| [ฅ’Rะซtแ\นได 
g1อง4^แตFๆคW๛ค/ฒƒrIc>ศใy้;ะ$เn tี3ืb@็ญXœ‰ฏV|ศIa7O42=ลxจฅ1ฏ-<~ิ่1#˜ๆC^ š1ำiดEฒJ HAฏา…f;kลพ@เ๏๙˜ฦ฿฿๎6ต9‘vญ’หGูค๑:ฅ:ใoDห๖9ฅ#ทฝ~็โ่{5G›Aเ3|8เ บSง?ญ:ง ๋ญ]2sฦB฿พ0ˆNืใ้ƒฮA•—€๔๒๒๖%š๒[ค๏t๙z-๖“ž/IึhŠ^ฮ k|Nีmิ‘g ถ6็g<‡๙่–Dฦ๋Y๕~ZTฎ=ฐวฅ+พำ๑oŸฬ_าO'๗)OZA@ z –มป‰ฌ[Dเู•vฉะI\˜N,s;BษๅN;:&ด‰ณ๕'๘ _~IยB=nŸ^ภ•‹8ุื=ฮu__\0พฅ'์K}ััช' ฝ๊—pk๎๗u{Œน‚#|Li#3>Iุ๎‘•r๙@ Ÿ2.aฦ >H—]ฒ€ยอe๎‘ลuq vฅ[`ผO ?ŽงKosŸ่ึ€ฌ๔Xฤm)ธท๚๚Cด’๗๚™ฎ ›้+ŒQฯฺy+๖[VJณ<๙Fฬ\Rธ•‰˜Kง‚๐s฿ข๋q๒พYแ๒‚^~ๆพF์Nฦ๖wˆ๏,=Ÿ๎ต;ฃDxls˜ํ=gัF_ชC๑๕)cZแnจชึ๋ล๐*๛ZงZŽ๏ผe\สภ7‹?ig8๊cำแžHœๆ?่น‰ฯฐ#ห„ช" ฝช–ซ8ณƒ\–๙๘Œ๚6#ะˆ็ศกลญหpFๅ,ใ๋พ6œ9Aห i๕๐8•d%ฬ“?เ็.๔›ฃ81ูศlส๚Šเ‚ต๑ไl๐๕ฅžDŠ›ผ!>น%žษ7\FK gE่ไbว—2nธ'OธP77ธzGu->qRศiธจYtผ9ŠgK1๗‘pIKAฏฑ>lส˜FๅบuหธHOุ฿ฎaœeOmอข9ฃG4๔ุ`S๖เฅ$ฌวํ K™ZMs๒)รdฦt}ง7๖lžฝ๏•›|Œ!า5F@ z-่Ž้tฅขQษm1สT9ี 8S๚ชซ#ณข๛€ษmฏzฐ:U”˜๏ิ?o=๛`zx’วธฎw# oๆฐGฏุR3K๖ง ›[O‚^/\{์ระุ}œfLา]อiKฏ*Cฌบ1Pิก= ๐ัAL˜฿'์ฯั›*O๋ญุ{ ฬp˜*อ]่H1๗p-หKAฏๅี.ทฎvใHMƒ  ,m, ๘rุดkโy๊ ผD:Fว4Œฅ{˜17~ถ๓pฅnฺWฬ“R;นd์\ะภ๑ส~5Bฺๆ7jldี;ก;=!๊.Or ^5็ผe\หภWU๋– —ใณzยv‚ึไีHt‚3Bปเำ|O๑ฐžฐO๕=ŽจiRะkzy฿™œŒ#ไกษŸฃ)฿r5ŸŽgVิj฿าอ-Žถมกไ[ ‚๙"ฬผ^‘๘ešตb1ป฿”7หู‰;tำLา’5N@ z/๐ฮา|ไf™฿'สm}:๊Nm _ฺ|rf}"๗%ๅl*v˜/'`ไ๐”๖ฐฯ1ั5ตๅ‹ฮkส้บ /๚xPฬฟ–‹AืGฬ๔eร[?-vN@ zพ2|bšหSyํ$โ‹ย๑ฬ=uŠ]yฺ[๛ีพฤเฯ๚๕U ฟeเ6ดค< ๆ“ญ‡05<๐eฐล บ bฆo-C, Q'ค ืษB๏,Mทgs“ำปˆ™Œ2cX}Rฺฑชฃ๎žZ–;vููไ๐้ ด)P๎๐;ณศ์L5C;$ ทฒๆSฑ‹| #†œเ}€Cฤว3๗{—!๐oRะๅี€œeธฟXส฿ๆ’xฯธGœสฅภ๚๖)อ…Pำt€ง8 <๐]๛๎;€1Vƒ๘9ฐถJ ั“อm๎5ๅฌ่qบอ็.…sฬŒ3คcaอฟด*’ ๔Š`^ะผ2ƒๅพ^่ณบ™.ว1ตๅฮ-0๑ฒNl้ฃฆIOF0˜ mtBœใทx๕ฤ๙™žภ˜-ƒ‘ม}$฿dเ์2„โoDฺpผใน2ฦ”PuD@ z-๖Pฉ๎์}ภ„กฦช9ง8„ #ณ2/ฉืE!ฐ•[๑9ฺr 1ปั~ฎ|;์œr๚๚No™๛ิ[ฒBภ/Rะ"[ฅบํWม•แDฌ๊'๐Bฺˆ+ๅ๘ุ*}ุvฮŠžะทผปŒ6-bfฎ-cL Uงค ื้ย•v6๛)Ÿ9ิ8Ÿ~พ„ซ๊กk—ODv;]ษ่QัMo๗o-3˜nVญdหฬUยํ‚€tyi์’@6<นิš*ƒ‰โ_ทQ™๘ตš ไฌid๐๕ž]๎<๔;0'2'ฝบฑ%^‚^ฟk_TๆƒŸn.ใษr๏๔Ex†\-;ƒ‹Zฒบด~I๋dงZPๆ oโNเ[รfๆบ_PvRะหŽผ๚ๆŸ>ž๛๚Pผ’๎N๐ี3๓h%}H์`Xปdๆฤ†B๏ื ๔)pูรเณไ๕Y๒r€€ty!M o—2p}ั|Hฯ๓7ร ๛>฿Bˆpีp7r2๑E ธุ‡y ฎข4 ฯhเำšใ™ฟ)R!0lRะ‡ฌพ'ไO;‘ว=ˆf๏J“`เฏฎŠ˜ถDCฎ:#0๘uะฅๆU4uฦอzยพฐข$ธO่๒(…@ฮŠ#l^ฅพฃ‰็0ใM฿1nN&[JN2ง:ธGๆ~๒d_ะ๑vi๘xธอNVุ‡„ไบผJ&MลN&ๆ;่%‹(žศภ}๐ฐi?ฅXZไ*H ำŽŽiุLgณƒ‹A˜\A+ƒก๙ืิ฿pZ๘ค'Qy/โ@l% ]^ žฌ{lฺž ฮAHxR=™๑{"๚Ÿ 
=›~ฒ๏•›Tห‹^yไฌุL‚€•'โะQ˜่ฟ#๑๔5C”B ผค ——wอFห'c็1ฑปan\ภ’|‹€Ÿ8Œ{# ๛W๓&vvB sQ๋๛B Ÿ`๐ุ3(ˆุึX๛ดt สŠˆ HA—ื„2[?ญ๓7A|Ž2Q…B๎&: tฟใ๔฿™ฝ์ ฅEส#G#{S@์>rv‚G9ีำณDtA8ž™jaั* HAWISดtตGj!Gษภ-yฑvs"๗ภ๚ฌac๎ๆJฆMงc>@ำ+๔์๘๎3?าz๙โๆ“3๋kx)$ต! ฝF2ˆiไRัO1ำuดั฿ฟ=๑J=ศงค›ฟ+ๅnn mข๙`œฒป}\๙ >&โO…ใ™u! Ž€tu,Ei'Oaภฆ่โ*๔7"$ ภา‚ึุฑืฌฅชฤw`mv-ฑทV่Ÿอฬณ 4'ฐFท๋xnff.r ช" ฝช–ซzอฎoŸv@Asnpjeั `9ภ) ”าอŽซศ{ลฌฎ]2sฆB เฬ˜ย*fฆ๘ภฮ ~ุXเฏสYลC“‘ม" =X๋Q๓n๒–แnx๚.GTaฒY"<ๆ•ฤฺส๕ฃU“ฬๆ*ฬCฉๅWํ่ศ๑›q3ลภ<D'* เณ˜ป{Ž๓ลpbู๓>‡y!เ+)่พโ๑`ๅฌุ™D|€wU1ฅ~~OŒ_‚h%มYY๋gy๓ยyก๕cึ๊P่h€Ž†รGƒp€†j[G^vภ—H3•j[9๑ป+RะๅตQ1kอ=ฒaรฬ์žว\1#jฟเ ๚#/€๘`~A73ฏซ ใฏฺ›‹ข‘Bศูทภก@ด1ฟ„c็)ฒ๛ว๑VูซŒ+๕„ํสฺ่B@-)่jyŠZ w>ปอ-?ใKจ†)opฟƒ‘—‰eP—งซ๊jขฆฎ๑mK๒~'าHtBShbH+Ld‡๖$ข‰ ny๛๑ฌ@ผ@๛ฝ]๒พF๛ํฅ๚=oฟพnึถ๐ทๅ1ด ะ—พ‚๎;b P,ญ}ื ๘€ฑลฮซฑq๎ฃ d ุย ^oa`‹๛`ธ๕ถlC฿>กo๑@ะ ๗oAภH†3ยo5xb•ฝแe™฿|๛ะ-Rศฝ ”นี@@ z5ฌRy\฿>ฅน_k๚2?Wว…ฝฮV—tป‰่†B๏–Zๆ>ๅ!‘Kิ4)่5ฝผีk 5z์ศณภ๔€ฉ๎lฤ} ไ|3๗๕}W yฉKจА‚^๑%ล่NฦZยg>ฃ˜๑2ฆ 0^๘–พ†ฆ;ๅ@ :\IYฺงสk บ œ:ึ฿ท‰ว•s€รI+—|ะ"[`žžศ,š1๑#สI@>ก—“ถฤRF€,ะ๒ว>9L็``S˜\พ˜๕`X"อu|[tฎ2Rะซlมฤ๎; t=vยXญqฤm[FตL€ž'rn๐ึ–ป๗ฟrS-g*น แ‚>\b2>ะึ/i\่ื>IDg0๐@›sลศq7‡่H[zUฑ“dœจ7Rะ๋mล๋(฿bใ 0Ÿฆyภภ๑คrUfz”เะ๒8อฐP=ึลฉจ )่•แ.QหL sQ๋๛ด††ำษแำ@8ฌฬแ%\q๚,%ฦฝ6l~DnฉMF mค หkก๎tงข๏.ฐ6›˜@ˆUw‚“p'ฌ๑าF๑‹rœิล‰PK@ บZžขVexแผฆ๙ฉ ‚ Pภ๛ซ)…ภํ=ฦท฿D-๓Dกวร๑Ž็†ž"#„€(†€๔b(ษ˜บ!ฐพ}ฺโฉ >Ž€ฃชดoปโ๕๒๚ถWศ.8ฺ-ณ;lลๆDNARะๅฅ vCภ=~vฬฃŽfโใ˜q,h }hด rนgฆฏฐย!^3OO62›๏Z  ฝQR(/uMณก‡‚๘0">”๗ฟq €ฆ๒: Dดบล›ษC+"Oท>K 8p&&„@‚^g .้๚G Ÿl=šv;x?@€[|—A™ฟ๐'0^€†—5ฦŸ๚˜41‘Y[†๐B"HA/’ ^ ไฌ่>pด(ไ์ฆ<‰A‰แ๖)o =ษk๓_๘ŸDดฦaฌใ5Œ^า์<่สT! สD@ z™@K!P 7ฯ ๗ั–‰šƒLh4๋อŒ1ถฟ๛7ow†={มิ ` ศ›ท่%๐&๊%`‹ฺHŽณ–H[M่_ืธfฯน๋Š๑&c„€6)่ม^q'„€B (Rะ‹ย$ƒ„€B@›€๔`ฏธB@!P)่Ea’AB@! ‚M@ zฐืG ! „€(Š€๔ข0ษ ! „€ม& =ุ๋#๎„€B@E@ zQ˜dB@`‚์๕wB@! Š" ฝ(L2H! „@ฐ HA๖๚ˆ;! 
„€E‚^&$„€B ุFฤ Yช@IENDฎB`‚python-advanced-alchemy-1.0.1/docs/_static/theme.js000066400000000000000000000027551476663714600222700ustar00rootroot00000000000000function initDropdowns() { const dropdownToggles = document.querySelectorAll(".st-dropdown-toggle") const dropdowns = [...dropdownToggles].map(toggleEl => ({ toggleEl, contentEL: toggleEl.parentElement.querySelector(".st-dropdown-menu") })) const close = (dropdown) => { const {toggleEl, contentEL} = dropdown toggleEl.setAttribute("aria-expanded", "false") contentEL.classList.toggle("hidden", true) } const closeAll = () => dropdowns.forEach(close) const open = (dropdown) => { closeAll() dropdown.toggleEl.setAttribute("aria-expanded", "true") dropdown.contentEL.classList.toggle("hidden", false) const boundaries = [dropdown.contentEL, ...dropdownToggles] const clickOutsideListener = (event) => { const target = event.target if (!target) return if (!boundaries.some(b => b.contains(target))) { closeAll() document.removeEventListener("click", clickOutsideListener) } } document.addEventListener("click", clickOutsideListener) } dropdowns.forEach(dropdown => { dropdown.toggleEl.addEventListener("click", () => { if (dropdown.toggleEl.getAttribute("aria-expanded") === "true") { close(dropdown) } else { open(dropdown) } }) }) } window.addEventListener("DOMContentLoaded", () => { initDropdowns() }) python-advanced-alchemy-1.0.1/docs/_static/versioning.js000066400000000000000000000057041476663714600233460ustar00rootroot00000000000000const loadVersions = async () => { const res = await fetch( DOCUMENTATION_OPTIONS.URL_ROOT + "_static/versions.json", ); if (res.status !== 200) { return null; } return await res.json(); }; const addVersionWarning = (currentVersion, latestVersion) => { if (currentVersion === latestVersion) { return; } const header = document.querySelector(".bd-header__inner")?.parentElement; if (!header) { return; } const container = document.createElement("div"); container.id = "version-warning"; const 
warningText = document.createElement("span"); warningText.textContent = `You are viewing the documentation for ${ currentVersion === "dev" || parseInt(currentVersion) > parseInt(latestVersion) ? "a preview" : "an outdated" } version of Litestar.`; container.appendChild(warningText); const latestLink = document.createElement("a"); latestLink.textContent = "Click here to go to the latest version"; latestLink.href = DOCUMENTATION_OPTIONS.URL_ROOT + "../latest"; container.appendChild(latestLink); header.before(container); }; const formatVersionName = (version, isLatest) => version + (isLatest ? " (latest)" : ""); const addVersionSelect = (currentVersion, versionSpec) => { const navEnd = document.querySelector(".navbar-header-items__end"); if (!navEnd) { return; } const container = document.createElement("div"); container.classList.add("navbar-nav"); const dropdown = document.createElement("div"); dropdown.classList.add("dropdown"); container.appendChild(dropdown); const dropdownToggle = document.createElement("button"); dropdownToggle.classList.add("btn", "dropdown-toggle", "nav-item"); dropdownToggle.setAttribute("data-bs-toggle", "dropdown"); dropdownToggle.setAttribute("type", "button"); dropdownToggle.textContent = `Version: ${formatVersionName( currentVersion, currentVersion === versionSpec.latest, )}`; dropdown.appendChild(dropdownToggle); const dropdownContent = document.createElement("div"); dropdownContent.classList.add("dropdown-menu"); dropdown.appendChild(dropdownContent); for (const version of versionSpec.versions) { const navItem = document.createElement("li"); navItem.classList.add("nav-item"); const navLink = document.createElement("a"); navLink.classList.add("nav-link", "nav-internal"); navLink.href = DOCUMENTATION_OPTIONS.URL_ROOT + `../${version}`; navLink.textContent = formatVersionName( version, version === versionSpec.latest, ); navItem.appendChild(navLink); dropdownContent.appendChild(navItem); } navEnd.prepend(container); }; const 
setupVersioning = (versions) => { if (versions === null) { return; } const currentVersion = DOCUMENTATION_OPTIONS.VERSION; addVersionWarning(currentVersion, versions.latest); addVersionSelect(currentVersion, versions); }; window.addEventListener("DOMContentLoaded", () => { loadVersions().then(setupVersioning); }); python-advanced-alchemy-1.0.1/docs/_static/versions.json000066400000000000000000000000571476663714600233640ustar00rootroot00000000000000{ "versions": ["1", "latest"], "latest": "1" } python-advanced-alchemy-1.0.1/docs/changelog.rst000066400000000000000000000431611476663714600216570ustar00rootroot00000000000000:orphan: 0.x Changelog ============= .. changelog:: 1.0.1 :date: 2025-03-19 .. change:: properly serialize `Relationship` type hints :type: bugfix :pr: 422 Adds `sqlalchemy.orm.Relationship` to the supported type hints for the `SQLAlchemyDTO` .. changelog:: 1.0.0 :date: 2025-03-18 .. change:: remove deprecated packages removed in `v1.0.0` :type: misc :pr: 419 Removes deprecated packages and prepares for 1.0 release. .. change:: logic correction for window function :type: bugfix :pr: 421 Corrects the logic for using a count with a window function. .. changelog:: 0.34.0 :date: 2025-03-10 .. change:: allow custom `not_found` error messages :type: feature :pr: 417 :issue: 391 Enhance the SQLAlchemy exception wrapper to handle NotFoundError with custom error messages and improved error handling. This includes: - Adding a 'not_found' key to ErrorMessages type - Extending wrap_sqlalchemy_exception to catch and handle NotFoundError - Updating default error message templates with a not_found message - Adding unit tests for custom NotFoundError handling .. 
change:: Refactor Sanic extension for multi-config support :type: feature :pr: 415 :issue: 375 This commit refactors the Sanic extension for Advanced Alchemy: - Refactored configuration handling with support for multiple database configurations - Added methods for retrieving async and sync sessions, engines, and configs - Improved dependency injection with new provider methods - Simplified extension initialization and registration - Updated example and test files to reflect new extension structure - Removed deprecated methods and simplified the extension interface .. changelog:: 0.33.2 :date: 2025-03-09 .. change:: simplify session type hints in service providers :type: bugfix :pr: 414 Remove unnecessary scoped session type hints from service provider functions. Prevents the following exception from being incorrectly raised: `TypeError: Type unions may not contain more than one custom type - type typing.Union[sqlalchemy.ext.asyncio.session.AsyncSession, sqlalchemy.ext.asyncio.scoping.async_scoped_session[sqlalchemy.ext.asyncio.session.AsyncSession], NoneType] is not supported.` .. changelog:: 0.33.1 :date: 2025-03-07 .. change:: add session to namespace signature :type: feature :pr: 412 The new filter providers expect that the sessions are in the signature namespace. This ensures there are no issues when configuring the plugin. .. changelog:: 0.33.0 :date: 2025-03-07 .. change:: Add dependency factory utilities :type: feature :pr: 405 Introduces a new module `advanced_alchemy.extensions.litestar.providers` with comprehensive dependency injection utilities for SQLAlchemy services in Litestar. The module provides: - Dynamic filter configuration generation - Dependency caching mechanism - Flexible filter and pagination support - Singleton metaclass for dependency management - Configurable filter and search dependencies .. changelog:: 0.32.2 :date: 2025-02-26 .. 
change:: Litestar extension: Use ``SerializationPlugin`` instead of ``SerializationPluginProtocol`` :type: misc :pr: 401 Use ``SerializationPlugin`` instead of ``SerializationPluginProtocol`` .. changelog:: 0.32.1 :date: 2025-02-26 .. change:: Litestar extension: Use ``CLIPlugin`` instead of ``CLIPluginProtocol`` :type: misc :pr: 399 Internal change migrating from using Litestar's ``CLIPluginProtocol`` to ``CLIPlugin``. .. changelog:: 0.32.0 :date: 2025-02-23 .. change:: remove `limit` and `offset` from count statement :type: bugfix :pr: 395 Remove `limit` and `offset` from count statement .. change:: rename `force_basic_query_mode` :type: misc :pr: 396 Renames `force_basic_query_mode` to `count_with_window_function`. This is also exposed as a class/init parameter for the service and repository. .. change:: add Enum to default type decoders :type: feature :pr: 397 Extends the default `msgspec` type decoders to handle Enum types by converting them to their underlying value during serialization .. changelog:: 0.31.0 :date: 2025-02-18 .. change:: Fix reference in `changelog.py` :type: bugfix :pr: 383 Should link to the AA repo, not litestar :) .. change:: Query repository list method for custom queries :type: bugfix :pr: 379 :issue: 338 Fix query repositories list method according to [documentation](https://docs.advanced-alchemy.litestar.dev/latest/usage/repositories.html#query-repository). Now its return a list of tuples with values instead of first column of the query. .. change:: remove 3.8 support :type: misc :pr: 386 Removes 3.8 support and removes future annotations in a few places for better compatibility .. change:: remove future annotations :type: feature :pr: 387 This removes the usage of future annotations. .. change:: add `uniquify` to service and repo :type: feature :pr: 389 Exposes the `uniquify` flag in all functions on the repository and add to the service .. 
change:: improved default serializer :type: feature :pr: 390 Improves the default serializer so that it handles various types a bit better .. changelog:: 0.30.3 :date: 2025-01-26 .. change:: add `wrap_exceptions` option to exception handler. :type: feature :pr: 363 :issue: 356 When `wrap_exceptions` is `False`, the original SQLAlchemy error message will be raised instead of the wrapped Repository error Fixes #356 (Bug: `wrap_sqlalchemy_exception` masks db errors) .. change:: simplify configuration hash :type: feature :pr: 366 The hashing method on the SQLAlchemy configs can be simplified. This should be enough to define a unique configuration. .. change:: use `lifespan` context manager in Starlette and FastAPI :type: bugfix :pr: 368 :issue: 367 Modifies the Starlette and FastAPI integrations to use the `lifespan` context manager instead of the `startup`\`shutdown` hooks. If the application already has a lifespan set, it is wrapped so that both execute. .. changelog:: 0.30.2 :date: 2025-01-21 .. change:: add hash to config classes :type: feature :pr: 358 :issue: 357 Adds hash function to `SQLAlchemySyncConfig` and `SQLAlchemyAsyncConfig` classes. .. changelog:: 0.30.1 :date: 2025-01-20 .. change:: Using init db CLI command creates migrations directory in unexpected place :type: bugfix :pr: 354 :issue: 351 When initializing migrations with the CLI, if no directory is specified, the directory from the configuration will be used. .. changelog:: 0.30.0 :date: 2025-01-19 .. change:: standardize on `autocommit_include_redirect` :type: bugfix :pr: 349 The flask plugin incorrectly used the term `autocommit_with_redirect` instead of the existing `autocommit_include_redirect`. This changes makes the name consistent before we bump to a `1.x` release .. change:: implement default schema serializer :type: bugfix :pr: 350 This corrects an issue that caused the Flask extension to use the incorrect serializer for encoding JSON .. 
change:: refactored integration with CLI support :type: feature :pr: 352 Refactored the Starlette and FastAPI integration to support multiple configurations and sessions. Additionally, FastAPI will now have the database commands automatically registered with the FastAPI CLI. .. change:: reorganize Sanic extension :type: feature :pr: 353 The Sanic integration now aligns with the structure and idioms used in the other integrations. .. changelog:: 0.29.1 :date: 2025-01-17 .. change:: add convenience hooks for `to_model` operations :type: feature :pr: 347 The service layer has always has a `to_model` function that accepts data and optionally an operation name. It would return a SQLAlchemy model no matter the input you gave it. It is possible to move business logic into this `to_model` layer for populating fields on insert. (i.e. slug fields or tags, etc.). When having logic for `insert`, `update`, `delete`, and `upsert`, that function can be a bit overwhelcoming. Now, there are helper functions that you can use that is specific to each DML hook: * `to_model_on_create` * `to_model_on_update` * `to_model_on_delete` * `to_model_on_upsert` .. changelog:: 0.29.0 :date: 2025-01-17 .. change:: fully qualify all `datetime` module references :type: bugfix :pr: 341 All date time references are now full qualified to prevent any forward resolution issues with `from datetime import datetime` and `import datetime` .. change:: disabled `timezone` in alembic.ini :type: bugfix :pr: 344 Disabled `timezone` in alembic.ini to fix `alembic.util.exc.CommandError: Can't locate timezone: UTC` error while applying migrations Reference: https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file .. change:: various typing improvements for services :type: feature :pr: 342 :issue: 261 Improved typing in the service layer and adds a additional type guards. .. 
change:: Auto extend Flask CLI and add session integration :type: feature :pr: 111 The Advanced Alchemy alembic CLI is now auto-extended to your Flask application. The Flask extension now also has a session handling middleware for handling auto-commits. Last, but not least, there's an experimental async portal that integrates a long running asyncio loop for running async operations in Flask. Using `foo = portal.call()` you can get the result of an asynchronous function from a sync context. .. changelog:: 0.28.0 :date: 2025-01-13 .. change:: add `bind-key` option to CLI :type: feature :pr: 339 Adds a `bind-key` option to the Advance Alchemy CLI groups. When present, the Alembic configs will be injected with the corresponding key. .. changelog:: 0.27.1 :date: 2025-01-11 .. change:: correction for `3.8` and `3.9` type hints :type: bugfix :pr: 330 Makes a few corrections to type hints in examples and tests to ensure 3.8 and 3.9 support .. changelog:: 0.27.0 :date: 2025-01-11 .. change:: add `error_messages` as class level configuration :type: feature :pr: 315 Exposes ``error_messages`` as a class level configuration in the repository and service classes. .. change:: implement reusable CLI :type: feature :pr: 320 Exposes a reusable CLI for creating and updating releases. This can be used to extend any existing Click or Typer CLI. .. change:: adds additional type guard helpers :type: feature :pr: 322 Addition typing utilities to help with type checking and validation. .. changelog:: 0.26.0 :date: 2025-01-11 .. change:: `AsyncAttrs` & remove `noload` default :type: feature :pr: 305 This PR adds the `AsyncAttrs` to the default declarative bases for convenience. It also changes the `inherit_lazy_relationships == False` behavior to use `lazyload`. SQLAlchemy will be deprecating `noload` in version 2.1 .. 
change:: `litestar` DTO enhancements :type: feature :pr: 310 :issue: 306 The Litestar DTO has been enhanced with: - The SQLAlchemyDTOConfig's `exclude`, `include`, and `rename_fields` fields will now accept string or `InstrumentedAttributes` - DTO supports `WriteOnlyMapped` and `DynamicMapped` .. change:: add default exception handler for `litestar` integration :type: feature :pr: 308 :issue: 275 This adds a configuration option to automatically enable an exception handler for Repository errors. This will update the exception handler if you do not have one already configured for the RepositoryException class .. changelog:: 0.25.0 :date: 2025-01-11 .. change:: add max length for encrypted string :type: feature :pr: 290 The EncryptedString field now has the ability to validate against a set length. .. change:: `AsyncAttrs` & remove `noload` default :type: feature :pr: 305 This PR adds the `AsyncAttrs` to the default declarative bases for convenience. It also changes the `inherit_lazy_relationships == False` behavior to use `lazyload`. SQLAlchemy will be deprecating `noload` in version 2.1 .. changelog:: 0.24.0 :date: 2025-01-11 .. change:: remove lambda statement usage :type: feature :pr: 288 :issue: 286, 287 Removes the use of lambda statements in the repository and service classes. This has no change on the end user API, however, it should remove strange queries errors seen. .. changelog:: 0.23.0 :date: 2025-01-11 .. change:: regression caused by conditional import Sequence for pagination.py :type: bugfix :pr: 274 :issue: 272 Import Sequence directly from collections.abc Remove conditional import using TYPE_CHECKING Add noqa comment to suppress potential linter warnings .. change:: make sure `anyio` is optional :type: bugfix :pr: 278 When running standalone or with a synchronous web framework, `anyio` is not required. This PR ensures that there are no module loading failures due to the missing import. .. 
change:: Improved typing of `ModelDictT` :type: feature :pr: 277 Fixes typing issues in service https://github.com/litestar-org/advanced-alchemy/issues/265 This still doesn't solve the problem of UnknownVariableType if the subtypes of ModelDictT are not installed (eg: Pydantic) But at least it solves the problem of incompatibilities when they are installed .. changelog:: 0.22.0 :date: 2025-01-11 .. change:: CLI argument adjustment :type: bugfix :pr: 270 Changes the argument name so that it matches the name given in `click.option`. .. changelog:: 0.21.0 :date: 2025-01-11 .. change:: bind session to session class instead of to the session maker :type: bugfix :pr: 268 :issue: 267 binds session into sanic extension as expected in the original code, session maker was defined and then the dependency for session overwrites it with a session maker as the type. this seems non-ideal -- you can't get the session maker and when you ask for the session maker you get a session object instead, this looks at the sessionmaker `class_` property for adding the sanic dependency .. change:: correct regex mappings for duplicate and foreign key errors :type: bugfix :pr: 266 :issue: 262 Swap the variable names for DUPLICATE_KEY_REGEXES and FOREIGN_KEY_REGEXES to correctly match their contents. This ensures that the error detection for duplicate keys and foreign key violations works as intended across different database backends. .. change:: Dump all tables as JSON :type: feature :pr: 259 Adds a new CLI command to export tables to JSON. Similar to a Django dumpdata command. .. changelog:: <=0.20.0 :date: 2025-01-11 .. change:: CollectionFilter returns all entries if values is empty :type: bugfix :pr: 52 :issue: 51 Fixes #51 Bug: CollectionFilter returns all entries if values is empty a simple `1=-1` is appended into the `where` clause when an empty list is passed into the `in` statement. .. 
change:: better handle empty collection filters :type: bugfix :pr: 62 Currently, [this](https://github.com/cofin/litestar-fullstack/blob/main/src/app/lib/dependencies.py#L169) is how you can inject these filters in your app. When using the `id_filter` dependency on it's own, you have to have an additional not-null check before passing it into the repository. This change handles that and allows you to pass in all filters into the repository function without checking their nullability. .. change:: service `exists` should use `exists` from repository :type: bugfix :pr: 68 The service should use the repository's implementation of `exists` instead of a new one with a `count`. .. change:: do not set `id` with `item_id` when `None` :type: bugfix :pr: 67 This PR prevents the primary key from being overrwitten with `None` when using the service without the `item_id` parameter. .. change:: sqlalchemy dto for models non `Column` fields :type: bugfix :pr: 75 Examples of such fields are `ColumnClause` and `Label`, these are generated when using `sqlalchemy.func` - Fix SQLAlchemy dto generation for litestar when using models that have fields that are not instances of `Column`. Such fields arise from using expressions such as `func`. python-advanced-alchemy-1.0.1/docs/conf.py000066400000000000000000000274211476663714600204760ustar00rootroot00000000000000# Configuration file for the Sphinx documentation builder. 
# ruff: noqa: FIX002 PLR0911 ARG001 ERA001 from __future__ import annotations import datetime import os import warnings from functools import partial from typing import TYPE_CHECKING, Any from sqlalchemy.exc import SAWarning from advanced_alchemy.__metadata__ import __project__, __version__ if TYPE_CHECKING: from typing import Any from sphinx.addnodes import document # type: ignore[attr-defined,unused-ignore] from sphinx.application import Sphinx # -- Environmental Data ------------------------------------------------------ warnings.filterwarnings("ignore", category=SAWarning) # -- Project information ----------------------------------------------------- current_year = datetime.datetime.now().year # noqa: DTZ005 project = __project__ copyright = f"{current_year}, Litestar Organization" # noqa: A001 release = os.getenv("_ADVANCED-ALCHEMY_DOCS_BUILD_VERSION", __version__.rsplit(".")[0]) suppress_warnings = [ "autosectionlabel.*", "ref.python", # TODO: remove when https://github.com/sphinx-doc/sphinx/issues/4961 is fixed ] # -- General configuration --------------------------------------------------- extensions = [ "sphinx.ext.intersphinx", "sphinx.ext.autodoc", "sphinx.ext.napoleon", "sphinx.ext.autosectionlabel", "sphinx.ext.githubpages", "sphinx.ext.viewcode", "tools.sphinx_ext.missing_references", "tools.sphinx_ext.changelog", "sphinx_autodoc_typehints", "myst_parser", "auto_pytabs.sphinx_ext", "sphinx_copybutton", "sphinx.ext.todo", "sphinx.ext.viewcode", "sphinx_click", "sphinx_toolbox.collapse", "sphinx_design", "sphinx_togglebutton", "sphinx_paramlinks", ] exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] intersphinx_mapping = { "python": ("https://docs.python.org/3", None), "msgspec": ("https://jcristharif.com/msgspec/", None), "sqlalchemy": ("https://docs.sqlalchemy.org/en/20/", None), "alembic": ("https://alembic.sqlalchemy.org/en/latest/", None), "litestar": ("https://docs.litestar.dev/latest/", None), "click": 
("https://click.palletsprojects.com/en/stable/", None), "anyio": ("https://anyio.readthedocs.io/en/stable/", None), "multidict": ("https://multidict.aio-libs.org/en/stable/", None), "cryptography": ("https://cryptography.io/en/latest/", None), "pydantic": ("https://docs.pydantic.dev/latest/", None), "sanic": ("https://sanic.readthedocs.io/en/latest/", None), "flask": ("https://flask.palletsprojects.com/en/stable/", None), "typing_extensions": ("https://typing-extensions.readthedocs.io/en/stable/", None), } PY_CLASS = "py:class" PY_EXC = "py:exc" PY_RE = r"py:.*" PY_METH = "py:meth" PY_ATTR = "py:attr" PY_OBJ = "py:obj" PY_FUNC = "py:func" nitpicky = True nitpick_ignore: list[str] = [] nitpick_ignore_regex: list[str] = [] auto_pytabs_min_version = (3, 9) auto_pytabs_max_version = (3, 13) napoleon_google_docstring = True napoleon_include_special_with_doc = True napoleon_use_admonition_for_examples = True napoleon_use_admonition_for_notes = True napoleon_use_admonition_for_references = False napoleon_attr_annotations = True autoclass_content = "class" autodoc_class_signature = "separated" autodoc_default_options = {"special-members": "__init__", "show-inheritance": True, "members": True} autodoc_member_order = "bysource" autodoc_typehints_format = "short" autodoc_type_aliases = { "ModelT": "advanced_alchemy.repository.typing.ModelT", "FilterTypeT": "advanced_alchemy.filters.FilterTypeT", "StatementTypeT": "advanced_alchemy.filters.StatementTypeT", "EngineT": "sqlalchemy.engine.Engine", "AsyncEngineT": "sqlalchemy.ext.asyncio.AsyncEngine", "SessionT": "sqlalchemy.orm.Session", "AsyncSessionT": "sqlalchemy.ext.asyncio.AsyncSession", "ConnectionT": "sqlalchemy.engine.Connection", "AsyncConnectionT": "sqlalchemy.ext.asyncio.AsyncConnection", "Mapper": "sqlalchemy.orm.Mapper", "Registry": "sqlalchemy.orm.registry", "RegistryType": "sqlalchemy.orm.registry", "Table": "sqlalchemy.schema.Table", "MetaData": "sqlalchemy.schema.MetaData", "FilterableRepository": 
"advanced_alchemy.repository._util.FilterableRepository", "SQLAlchemyAsyncRepositoryProtocol": "advanced_alchemy.repository._async.SQLAlchemyAsyncRepositoryProtocol", "SQLAlchemyAsyncRepository": "advanced_alchemy.repository.SQLAlchemyAsyncRepository", "SQLAlchemySyncRepositoryProtocol": "advanced_alchemy.repository._sync.SQLAlchemySyncRepositoryProtocol", "SQLAlchemySyncRepository": "advanced_alchemy.repository.SQLAlchemySyncRepository", "SQLAlchemyAsyncSlugRepositoryProtocol": "advanced_alchemy.repository._async.SQLAlchemyAsyncSlugRepositoryProtocol", "SQLAlchemySyncSlugRepositoryProtocol": "advanced_alchemy.repository._sync.SQLAlchemySyncSlugRepositoryProtocol", "ModelOrRowMappingT": "advanced_alchemy.repository.ModelOrRowMappingT", "ModelDTOT": "advanced_alchemy.service.ModelDTOT", "DTOData": "litestar.dto.data_structures.DTOData", "InstrumentedAttribute": "sqlalchemy.orm.attributes.InstrumentedAttribute", "BaseModel": "pydantic.BaseModel", "Struct": "msgspec.Struct", "TableArgsType": "sqlalchemy.orm.decl_base._TableArgsType", "DateTimeUTC": "advanced_alchemy.types.DateTimeUTC", "TypeEngine": "sqlalchemy.types.TypeEngine", "DeclarativeBase": "sqlalchemy.orm.DeclarativeBase", "UUIDBase": "advanced_alchemy.base.UUIDBase", "NanoIDBase": "advanced_alchemy.base.NanoIDBase", "BigIntBase": "advanced_alchemy.base.BigIntBase", "BigIntAuditBase": "advanced_alchemy.base.BigIntAuditBase", "DefaultBase": "advanced_alchemy.base.DefaultBase", "SQLQuery": "advanced_alchemy.base.SQLQuery", "UUIDv6PrimaryKey": "advanced_alchemy.base.UUIDv6PrimaryKey", "UUIDv7PrimaryKey": "advanced_alchemy.base.UUIDv7PrimaryKey", "NanoIDPrimaryKey": "advanced_alchemy.base.NanoIDPrimaryKey", "BigIntPrimaryKey": "advanced_alchemy.base.BigIntPrimaryKey", "CommonTableAttributes": "advanced_alchemy.base.CommonTableAttributes", "AuditColumns": "advanced_alchemy.base.AuditColumns", "UUIDPrimaryKey": "advanced_alchemy.base.UUIDPrimaryKey", "EngineConfig": "advanced_alchemy.config.EngineConfig", 
"AsyncSessionConfig": "advanced_alchemy.config.AsyncSessionConfig", "SyncSessionConfig": "advanced_alchemy.config.SyncSessionConfig", "EmptyType": "advanced_alchemy.utils.dataclass.EmptyType", "async_sessionmaker": "sqlalchemy.ext.asyncio.async_sessionmaker", "sessionmaker": "sqlalchemy.orm.sessionmaker", "SlugMixin": "advanced_alchemy.mixins.slug.SlugKey", "UniqueMixin": "advanced_alchemy.mixins.unique.UniqueMixin", "AsyncEngine": "sqlalchemy.ext.asyncio.AsyncEngine", "Engine": "sqlalchemy.engine.Engine", "sqlalchemy": "sqlalchemy", "RenameStrategy": "litestar.dto.types.RenameStrategy", "Union": "typing.Union", "Callable": "typing.Callable", "Any": "typing.Any", "Optional": "typing.Optional", "_EchoFlagType": "advanced_alchemy.config._EchoFlagType", } autodoc_mock_imports = [ "alembic", "sanic_ext.Extend", "sanic", "sqlalchemy.ext.asyncio.engine.create_async_engine", "_sa.create_engine._sphinx_paramlinks_creator", "sqlalchemy.Dialect", "sqlalchemy.orm.MetaData", # Add these new entries: "advanced_alchemy.config.engine.EngineConfig", "advanced_alchemy.config.asyncio.AsyncSessionConfig", "advanced_alchemy.config.sync.SyncSessionConfig", "advanced_alchemy.utils.dataclass.EmptyType", "advanced_alchemy.extensions.litestar.plugins.init.config.engine.EngineConfig", "sqlalchemy.ext.asyncio", "sqlalchemy.engine", "sqlalchemy.orm", ] autosectionlabel_prefix_document = True # Strip the dollar prompt when copying code # https://sphinx-copybutton.readthedocs.io/en/latest/use.html#strip-and-configure-input-prompts-for-code-cells copybutton_prompt_text = "$ " # -- Style configuration ----------------------------------------------------- html_theme = "shibuya" html_title = "Advanced Alchemy" html_short_title = "AA" pygments_style = "dracula" todo_include_todos = True html_static_path = ["_static"] html_favicon = "_static/favicon.png" templates_path = ["_templates"] html_js_files = ["versioning.js"] html_css_files = ["custom.css"] exclude_patterns = ["_build", "Thumbs.db", 
".DS_Store", "PYPI_README.md"] html_show_sourcelink = True html_copy_source = True html_context = { "source_type": "github", "source_user": "litestar-org", "source_repo": "advanced-alchemy", "current_version": "latest", "version": release, } html_theme_options = { "logo_target": "/", "accent_color": "amber", "github_url": "https://github.com/litestar-org/advanced-alchemy", "discord_url": "https://discord.gg/dSDXd4mKhp", "navigation_with_keys": True, "globaltoc_expand_depth": 2, "light_logo": "_static/logo-default.png", "dark_logo": "_static/logo-default.png", "discussion_url": "https://discord.gg/dSDXd4mKhp", "nav_links": [ {"title": "Home", "url": "index"}, { "title": "About", "children": [ { "title": "Changelog", "url": "changelog", "summary": "All changes for Advanced Alchemy", }, { "title": "Litestar Organization", "summary": "Details about the Litestar organization, the team behind Advanced Alchemy", "url": "https://litestar.dev/about/organization", "icon": "org", }, { "title": "Releases", "summary": "Explore the release process, versioning, and deprecation policy for Advanced Alchemy", "url": "releases", "icon": "releases", }, { "title": "Contributing", "summary": "Learn how to contribute to the Advanced Alchemy project", "url": "contribution-guide", "icon": "contributing", }, { "title": "Code of Conduct", "summary": "Review the etiquette for interacting with the Advanced Alchemy community", "url": "https://github.com/litestar-org/.github?tab=coc-ov-file", "icon": "coc", }, { "title": "Security", "summary": "Overview of Advanced Alchemy's security protocols", "url": "https://github.com/litestar-org/.github?tab=coc-ov-file#security-ov-file", "icon": "coc", }, {"title": "Sponsor", "url": "https://github.com/sponsors/Litestar-Org", "icon": "heart"}, ], }, { "title": "Help", "children": [ { "title": "Discord Help Forum", "summary": "Dedicated Discord help forum", "url": "https://discord.gg/dSDXd4mKhp", "icon": "coc", }, { "title": "GitHub Discussions", "summary": 
"GitHub Discussions", "url": "https://github.com/litestar-org/advanced-alchemy/discussions", "icon": "coc", }, ], }, ], } def update_html_context( _app: Sphinx, _pagename: str, _templatename: str, context: dict[str, Any], _doctree: document, ) -> None: context["generate_toctree_html"] = partial(context["generate_toctree_html"], startdepth=0) def setup(app: Sphinx) -> dict[str, bool]: app.setup_extension("shibuya") return {"parallel_read_safe": True, "parallel_write_safe": True} python-advanced-alchemy-1.0.1/docs/contribution-guide.rst000066400000000000000000000000531476663714600235330ustar00rootroot00000000000000:orphan: .. include:: ../CONTRIBUTING.rst python-advanced-alchemy-1.0.1/docs/getting-started.rst000066400000000000000000000043331476663714600230330ustar00rootroot00000000000000=============== Getting Started =============== Advanced Alchemy is a carefully crafted, thoroughly tested, optimized companion library for :doc:`SQLAlchemy `. It provides :doc:`base classes `, :doc:`mixins `, :doc:`custom column types `, and implementations of the :doc:`repository ` and :doc:`service layer ` patterns to simplify your database operations. .. seealso:: It is built on: * `SQLAlchemy `_ * `Alembic `_ * `Typing Extensions `_ It's designed to work on its own or with your favorite web framework. We've built extensions for some of the most popular frameworks, so you can get the most out of Advanced Alchemy with minimal effort. * `Litestar `_ * `FastAPI `_ * `Starlette `_ * `Flask `_ * `Sanic `_ If your framework is not listed, don't worry! Advanced Alchemy is designed to be modular and easily integrated with any Python web framework. `Join our Discord `_ and we'll help you get started. Installation ------------ Install ``advanced-alchemy`` with your favorite Python package manager: .. tab-set:: .. tab-item:: pip :sync: key1 .. code-block:: bash :caption: Using pip python3 -m pip install advanced-alchemy .. tab-item:: uv .. 
code-block:: bash :caption: Using `UV `_ uv add advanced-alchemy .. tab-item:: pipx :sync: key2 .. code-block:: bash :caption: Using `pipx `_ pipx install advanced-alchemy .. tab-item:: pdm .. code-block:: bash :caption: Using `PDM `_ pdm add advanced-alchemy .. tab-item:: Poetry .. code-block:: bash :caption: Using `Poetry `_ poetry add advanced-alchemy python-advanced-alchemy-1.0.1/docs/index.rst000066400000000000000000000074421476663714600210410ustar00rootroot00000000000000:layout: landing :description: Advanced Alchemy is a carefully crafted, thoroughly tested, optimized companion library for SQLAlchemy. .. container:: :name: home-head .. container:: .. raw:: html .. container:: badges :name: badges .. image:: https://img.shields.io/github/actions/workflow/status/litestar-org/advanced-alchemy/publish.yml?labelColor=202235&logo=github&logoColor=edb641&label=Release :alt: GitHub Actions Latest Release Workflow Status .. image:: https://img.shields.io/github/actions/workflow/status/litestar-org/advanced-alchemy/ci.yml?labelColor=202235&logo=github&logoColor=edb641&label=Tests%20And%20Linting :alt: GitHub Actions CI Workflow Status .. image:: https://img.shields.io/github/actions/workflow/status/litestar-org/advanced-alchemy/docs.yml?labelColor=202235&logo=github&logoColor=edb641&label=Docs%20Build :alt: GitHub Actions Docs Build Workflow Status .. image:: https://img.shields.io/codecov/c/github/litestar-org/advanced-alchemy?labelColor=202235&logo=codecov&logoColor=edb641&label=Coverage :alt: Coverage .. image:: https://img.shields.io/pypi/v/advanced-alchemy?labelColor=202235&color=edb641&logo=python&logoColor=edb641 :alt: PyPI Version .. image:: https://img.shields.io/pypi/dm/advanced-alchemy?logo=python&label=advanced-alchemy%20downloads&labelColor=202235&color=edb641&logoColor=edb641 :alt: PyPI Downloads .. 
image:: https://img.shields.io/pypi/pyversions/advanced-alchemy?labelColor=202235&color=edb641&logo=python&logoColor=edb641 :alt: Supported Python Versions .. rst-class:: lead Advanced Alchemy is a carefully crafted, thoroughly tested, optimized companion library for :doc:`SQLAlchemy `. It provides :doc:`base classes `, :doc:`mixins `, :doc:`custom column types `, and implementations of the :doc:`repository ` and :doc:`service layer ` patterns to simplify your database operations. .. container:: buttons :doc:`Get Started ` `Usage Docs `_ `API Docs `_ .. grid:: 1 1 2 2 :padding: 0 :gutter: 2 .. grid-item-card:: :octicon:`versions` Changelog :link: changelog :link-type: doc The latest updates and enhancements to Advanced-Alchemy .. grid-item-card:: :octicon:`comment-discussion` Discussions :link: https://github.com/litestar-org/advanced-alchemy/discussions Join discussions, pose questions, or share insights. .. grid-item-card:: :octicon:`issue-opened` Issues :link: https://github.com/litestar-org/advanced-alchemy/issues Report issues or suggest new features. .. grid-item-card:: :octicon:`beaker` Contributing :link: contribution-guide :link-type: doc Contribute to Advanced Alchemy's growth with code, docs, and more. .. _sponsor-github: https://github.com/sponsors/litestar-org .. _sponsor-oc: https://opencollective.com/litestar .. _sponsor-polar: https://polar.sh/litestar-org .. toctree:: :titlesonly: :caption: Documentation :hidden: getting-started usage/index reference/index .. 
toctree:: :titlesonly: :caption: Contributing :hidden: changelog contribution-guide Available Issues Code of Conduct python-advanced-alchemy-1.0.1/docs/reference/000077500000000000000000000000001476663714600211275ustar00rootroot00000000000000python-advanced-alchemy-1.0.1/docs/reference/alembic/000077500000000000000000000000001476663714600225235ustar00rootroot00000000000000python-advanced-alchemy-1.0.1/docs/reference/alembic/commands.rst000066400000000000000000000001341476663714600250540ustar00rootroot00000000000000======== commands ======== .. automodule:: advanced_alchemy.alembic.commands :members: python-advanced-alchemy-1.0.1/docs/reference/alembic/index.rst000066400000000000000000000003751476663714600243710ustar00rootroot00000000000000======= alembic ======= API Reference for the ``Alembic`` module .. note:: Private methods and attributes are not included in the API reference. Available API References ------------------------ .. toctree:: :titlesonly: commands utils python-advanced-alchemy-1.0.1/docs/reference/alembic/utils.rst000066400000000000000000000001201476663714600244060ustar00rootroot00000000000000===== utils ===== .. automodule:: advanced_alchemy.alembic.utils :members: python-advanced-alchemy-1.0.1/docs/reference/base.rst000066400000000000000000000003721476663714600225750ustar00rootroot00000000000000==== base ==== .. automodule:: advanced_alchemy.base :members: :imported-members: :undoc-members: :show-inheritance: :no-index: advanced_alchemy.base.AdvancedDeclarativeBase.registry advanced_alchemy.base.BasicAttributes.to_dict python-advanced-alchemy-1.0.1/docs/reference/config/000077500000000000000000000000001476663714600223745ustar00rootroot00000000000000python-advanced-alchemy-1.0.1/docs/reference/config/asyncio.rst000066400000000000000000000002311476663714600245670ustar00rootroot00000000000000======= asyncio ======= .. 
automodule:: advanced_alchemy.config.asyncio :members: :imported-members: :undoc-members: :show-inheritance: python-advanced-alchemy-1.0.1/docs/reference/config/common.rst000066400000000000000000000002251476663714600244150ustar00rootroot00000000000000====== common ====== .. automodule:: advanced_alchemy.config.common :members: :imported-members: :undoc-members: :show-inheritance: python-advanced-alchemy-1.0.1/docs/reference/config/engine.rst000066400000000000000000000002251476663714600243720ustar00rootroot00000000000000====== engine ====== .. automodule:: advanced_alchemy.config.engine :members: :imported-members: :undoc-members: :show-inheritance: python-advanced-alchemy-1.0.1/docs/reference/config/index.rst000066400000000000000000000004271476663714600242400ustar00rootroot00000000000000====== config ====== API Reference for the ``Config`` module .. note:: Private methods and attributes are not included in the API reference. Available API References ------------------------ .. toctree:: :titlesonly: asyncio common engine sync types python-advanced-alchemy-1.0.1/docs/reference/config/sync.rst000066400000000000000000000002151476663714600241000ustar00rootroot00000000000000==== sync ==== .. automodule:: advanced_alchemy.config.sync :members: :imported-members: :undoc-members: :show-inheritance: python-advanced-alchemy-1.0.1/docs/reference/config/types.rst000066400000000000000000000002211476663714600242650ustar00rootroot00000000000000===== types ===== .. automodule:: advanced_alchemy.config.types :members: :imported-members: :undoc-members: :show-inheritance: python-advanced-alchemy-1.0.1/docs/reference/exceptions.rst000066400000000000000000000002201476663714600240340ustar00rootroot00000000000000========== exceptions ========== .. 
automodule:: advanced_alchemy.exceptions :members: :no-index: ErrorMessages :show-inheritance: python-advanced-alchemy-1.0.1/docs/reference/extensions/000077500000000000000000000000001476663714600233265ustar00rootroot00000000000000python-advanced-alchemy-1.0.1/docs/reference/extensions/fastapi/000077500000000000000000000000001476663714600247555ustar00rootroot00000000000000python-advanced-alchemy-1.0.1/docs/reference/extensions/fastapi/index.rst000066400000000000000000000015061476663714600266200ustar00rootroot00000000000000======= fastapi ======= API Reference for the ``FastAPI`` extensions module .. note:: Private methods and attributes are not included in the API reference. ------------------------ .. automodule:: advanced_alchemy.extensions.fastapi :imported-members: advanced_alchemy.utils advanced_alchemy.base advanced_alchemy.exceptions advanced_alchemy.filters advanced_alchemy.mixins advanced_alchemy.operations advanced_alchemy.repository advanced_alchemy.service advanced_alchemy.types advanced_alchemy.alembic.commands.AlembicCommands advanced_alchemy.extensions.starlette.EngineConfig advanced_alchemy.extensions.starlette.SQLAlchemyAsyncConfig advanced_alchemy.extensions.starlette.SQLAlchemySyncConfig :members: :noindex: python-advanced-alchemy-1.0.1/docs/reference/extensions/flask/000077500000000000000000000000001476663714600244265ustar00rootroot00000000000000python-advanced-alchemy-1.0.1/docs/reference/extensions/flask/index.rst000066400000000000000000000014641476663714600262740ustar00rootroot00000000000000===== flask ===== API Reference for the ``Flask`` extensions module .. note:: Private methods and attributes are not included in the API reference. ------------------------ .. 
automodule:: advanced_alchemy.extensions.flask :imported-members: advanced_alchemy.utils advanced_alchemy.base advanced_alchemy.exceptions advanced_alchemy.filters advanced_alchemy.mixins advanced_alchemy.operations advanced_alchemy.repository advanced_alchemy.service advanced_alchemy.types advanced_alchemy.alembic.commands.AlembicCommands :members: AlembicAsyncConfig AlembicSyncConfig AsyncSessionConfig EngineConfig SQLAlchemyAsyncConfig SQLAlchemySyncConfig SyncSessionConfig :noindex: python-advanced-alchemy-1.0.1/docs/reference/extensions/index.rst000066400000000000000000000005131476663714600251660ustar00rootroot00000000000000========== Extensions ========== API Reference for the ``Extensions`` module .. note:: Private methods and attributes are not included in the API reference. Available API References ------------------------ .. toctree:: :titlesonly: litestar/index flask/index sanic/index starlette/index fastapi/index python-advanced-alchemy-1.0.1/docs/reference/extensions/litestar/000077500000000000000000000000001476663714600251555ustar00rootroot00000000000000python-advanced-alchemy-1.0.1/docs/reference/extensions/litestar/cli.rst000066400000000000000000000001241476663714600264530ustar00rootroot00000000000000=== cli === .. automodule:: advanced_alchemy.extensions.litestar.cli :members: python-advanced-alchemy-1.0.1/docs/reference/extensions/litestar/dto.rst000066400000000000000000000002751476663714600265010ustar00rootroot00000000000000=== dto === .. automodule:: advanced_alchemy.extensions.litestar.dto :members: :exclude-members: ModelDTOT ImproperConfigurationError DTOFieldDefinition ImproperConfigurationError python-advanced-alchemy-1.0.1/docs/reference/extensions/litestar/index.rst000066400000000000000000000031711476663714600270200ustar00rootroot00000000000000======== litestar ======== API Reference for the ``Litestar`` extensions module .. note:: Private methods and attributes are not included in the API reference. ------------------------ .. 
automodule:: advanced_alchemy.extensions.litestar :imported-members: advanced_alchemy.utils advanced_alchemy.base advanced_alchemy.exceptions advanced_alchemy.filters advanced_alchemy.mixins advanced_alchemy.operations advanced_alchemy.repository advanced_alchemy.service advanced_alchemy.types advanced_alchemy.alembic.commands.AlembicCommands advanced_alchemy.extensions.litestar.plugins.init.plugin.SQLAlchemyPlugin advanced_alchemy.extensions.litestar.plugins.init.plugin.SQLAlchemySerializationPlugin advanced_alchemy.extensions.litestar.dto.SQLAlchemyDTO advanced_alchemy.extensions.litestar.dto.SQLAlchemyDTOConfig :members: AlembicAsyncConfig AlembicSyncConfig AsyncSessionConfig EngineConfig SQLAlchemyAsyncConfig SQLAlchemyInitPlugin SQLAlchemyPlugin SQLAlchemySerializationPlugin SQLAlchemySyncConfig SyncSessionConfig async_autocommit_before_send_handler async_autocommit_handler_maker async_default_before_send_handler async_default_handler_maker sync_autocommit_before_send_handler sync_autocommit_handler_maker sync_default_before_send_handler sync_default_handler_maker Additional API References ------------------------- .. toctree:: :titlesonly: dto plugins cli python-advanced-alchemy-1.0.1/docs/reference/extensions/litestar/plugins.rst000066400000000000000000000003001476663714600273610ustar00rootroot00000000000000====== plugin ====== .. automodule:: advanced_alchemy.extensions.litestar.plugins :members: :exclude-members: :no-index: EngineConfig, SQLAlchemyAsyncConfig, SQLAlchemySyncConfig python-advanced-alchemy-1.0.1/docs/reference/extensions/sanic/000077500000000000000000000000001476663714600244235ustar00rootroot00000000000000python-advanced-alchemy-1.0.1/docs/reference/extensions/sanic/index.rst000066400000000000000000000014641476663714600262710ustar00rootroot00000000000000===== sanic ===== API Reference for the ``Sanic`` extensions module .. note:: Private methods and attributes are not included in the API reference. ------------------------ .. 
automodule:: advanced_alchemy.extensions.sanic :imported-members: advanced_alchemy.utils advanced_alchemy.base advanced_alchemy.exceptions advanced_alchemy.filters advanced_alchemy.mixins advanced_alchemy.operations advanced_alchemy.repository advanced_alchemy.service advanced_alchemy.types advanced_alchemy.alembic.commands.AlembicCommands :members: AlembicAsyncConfig AlembicSyncConfig AsyncSessionConfig EngineConfig SQLAlchemyAsyncConfig SQLAlchemySyncConfig SyncSessionConfig :noindex: python-advanced-alchemy-1.0.1/docs/reference/extensions/starlette/000077500000000000000000000000001476663714600253355ustar00rootroot00000000000000python-advanced-alchemy-1.0.1/docs/reference/extensions/starlette/index.rst000066400000000000000000000015101476663714600271730ustar00rootroot00000000000000========= starlette ========= API Reference for the ``Starlette`` extensions module .. note:: Private methods and attributes are not included in the API reference. ------------------------ .. automodule:: advanced_alchemy.extensions.starlette :imported-members: advanced_alchemy.utils advanced_alchemy.base advanced_alchemy.exceptions advanced_alchemy.filters advanced_alchemy.mixins advanced_alchemy.operations advanced_alchemy.repository advanced_alchemy.service advanced_alchemy.types advanced_alchemy.alembic.commands.AlembicCommands :members: AlembicAsyncConfig AlembicSyncConfig AsyncSessionConfig EngineConfig SQLAlchemyAsyncConfig SQLAlchemySyncConfig SyncSessionConfig :noindex: python-advanced-alchemy-1.0.1/docs/reference/filters.rst000066400000000000000000000002071476663714600233300ustar00rootroot00000000000000======= filters ======= .. 
automodule:: advanced_alchemy.filters :members: :undoc-members: FilterTypes :show-inheritance: python-advanced-alchemy-1.0.1/docs/reference/index.rst000066400000000000000000000013471476663714600227750ustar00rootroot00000000000000============= API Reference ============= The API reference provides detailed documentation for all public classes, functions, and modules in Advanced Alchemy. Each section includes complete type information, usage examples, and links to related documentation. Core Components --------------- .. toctree:: :maxdepth: 2 :caption: Core API base mixins/index config/index operations types exceptions utils Repository & Services --------------------- .. toctree:: :maxdepth: 2 :caption: Repository & Services repository filters service Framework Integration --------------------- .. toctree:: :maxdepth: 2 :caption: Framework Support extensions/index alembic/index python-advanced-alchemy-1.0.1/docs/reference/mixins/000077500000000000000000000000001476663714600224365ustar00rootroot00000000000000python-advanced-alchemy-1.0.1/docs/reference/mixins/audit.rst000066400000000000000000000001211476663714600242700ustar00rootroot00000000000000====== audit ====== .. automodule:: advanced_alchemy.mixins.audit :members: python-advanced-alchemy-1.0.1/docs/reference/mixins/bigint.rst000066400000000000000000000001231476663714600244400ustar00rootroot00000000000000====== bigint ====== .. automodule:: advanced_alchemy.mixins.bigint :members: python-advanced-alchemy-1.0.1/docs/reference/mixins/index.rst000066400000000000000000000004301476663714600242740ustar00rootroot00000000000000====== mixins ====== Mixins that provide common columns and behavior for SQLAlchemy models. .. note:: Private methods and attributes are not included in the API reference. .. 
toctree:: :titlesonly: unique nanoid uuid sentinel bigint audit slug python-advanced-alchemy-1.0.1/docs/reference/mixins/nanoid.rst000066400000000000000000000001231476663714600244340ustar00rootroot00000000000000====== nanoid ====== .. automodule:: advanced_alchemy.mixins.nanoid :members: python-advanced-alchemy-1.0.1/docs/reference/mixins/sentinel.rst000066400000000000000000000001331476663714600250060ustar00rootroot00000000000000======== sentinel ======== .. automodule:: advanced_alchemy.mixins.sentinel :members: python-advanced-alchemy-1.0.1/docs/reference/mixins/slug.rst000066400000000000000000000001131476663714600241350ustar00rootroot00000000000000==== slug ==== .. automodule:: advanced_alchemy.mixins.slug :members: python-advanced-alchemy-1.0.1/docs/reference/mixins/unique.rst000066400000000000000000000001231476663714600244720ustar00rootroot00000000000000====== unique ====== .. automodule:: advanced_alchemy.mixins.unique :members: python-advanced-alchemy-1.0.1/docs/reference/mixins/uuid.rst000066400000000000000000000001131476663714600241310ustar00rootroot00000000000000==== uuid ==== .. automodule:: advanced_alchemy.mixins.uuid :members: python-advanced-alchemy-1.0.1/docs/reference/operations.rst000066400000000000000000000002361476663714600240450ustar00rootroot00000000000000========== operations ========== .. automodule:: advanced_alchemy.operations :members: :imported-members: :undoc-members: :show-inheritance: python-advanced-alchemy-1.0.1/docs/reference/repository.rst000066400000000000000000000002441476663714600241000ustar00rootroot00000000000000============ repositories ============ .. automodule:: advanced_alchemy.repository :members: :imported-members: :undoc-members: :show-inheritance: python-advanced-alchemy-1.0.1/docs/reference/service.rst000066400000000000000000000002261476663714600233210ustar00rootroot00000000000000======== services ======== .. 
automodule:: advanced_alchemy.service :members: :imported-members: :undoc-members: :show-inheritance: python-advanced-alchemy-1.0.1/docs/reference/types.rst000066400000000000000000000002121476663714600230200ustar00rootroot00000000000000===== types ===== .. automodule:: advanced_alchemy.types :members: :imported-members: :undoc-members: :show-inheritance: python-advanced-alchemy-1.0.1/docs/reference/utils.rst000066400000000000000000000003011476663714600230130ustar00rootroot00000000000000===== utils ===== .. automodule:: advanced_alchemy.utils :members: :imported-members: :undoc-members: :show-inheritance: :noindex: FilterableRepositoryProtocol.model_type python-advanced-alchemy-1.0.1/docs/releases.rst000066400000000000000000000053421476663714600215320ustar00rootroot00000000000000:orphan: ========================= Advanced Alchemy Releases ========================= Version Numbering ----------------- This library follows the `Semantic Versioning standard `_, using the ``..`` schema: **Major** Backwards incompatible changes have been made **Minor** Functionality was added in a backwards compatible manner **Patch** Bugfixes were applied in a backwards compatible manner Pre-release Versions ++++++++++++++++++++ Before a new major release, we will make ``alpha``, ``beta``, and release candidate (``rc``) releases, numbered as ``..``. For example, ``2.0.0alpha1``, ``2.0.0beta1``, ``2.0.0rc1``. - ``alpha`` Early developer preview. Features may not be complete and breaking changes can occur. - ``beta`` More stable preview release. Feature complete, no major breaking changes expected. - ``rc`` Release candidate. Feature freeze, only bugfixes until final release. Suitable for testing migration to the upcoming major release. Long-term Support Releases (LTS) -------------------------------- Major releases are designated as LTS releases for the life of that major release series. 
These releases will receive bugfixes for a guaranteed period of time as defined in `Supported Versions <#supported-versions>`_. Deprecation Policy ------------------ When a feature is going to be removed, a deprecation warning will be added in a **minor** release. The feature will continue to work for all releases in that major series, and will be removed in the next major release. For example, if a deprecation warning is added in ``1.1``, the feature will work throughout all ``1.x`` releases, and be removed in ``2.0``. Supported Versions ------------------ At any time, the Litestar organization will actively support: - The current major release series - The previous major release series - Any other designated LTS releases (Special cases) For example, if the current release is ``2.0``, we will actively support ``2.x`` and ``1.x``. When ``3.0`` is released, we will drop support for ``1.x``. Bugfixes will be applied to the current major release, and selectively backported to older supported versions based on severity and feasibility. Release Process --------------- Each major release cycle consists of a few phases: #. **Planning**: Define roadmap, spec out major features. Work should begin on implementation. #. **Development**: Active development on planned features. Ends with an alpha release and branch of ``A.B.x`` branch from `main`. #. **Bugfixes**: Only bugfixes, no new features. Progressively release beta, release candidates. Feature freeze at RC. Become more selective with backports to avoid regressions. python-advanced-alchemy-1.0.1/docs/usage/000077500000000000000000000000001476663714600202755ustar00rootroot00000000000000python-advanced-alchemy-1.0.1/docs/usage/cli.rst000066400000000000000000000200511476663714600215740ustar00rootroot00000000000000================= Command Line Tool ================= Advanced Alchemy provides a command-line interface (CLI) for common database operations and project management tasks. 
Installation ------------ The CLI is installed with Advanced Alchemy with the extra ``cli``: .. tab-set:: .. tab-item:: pip :sync: key1 .. code-block:: bash :caption: Using pip python3 -m pip install advanced-alchemy[cli] .. tab-item:: uv .. code-block:: bash :caption: Using `UV `_ uv add advanced-alchemy[cli] .. tab-item:: pipx :sync: key2 .. code-block:: bash :caption: Using `pipx `_ pipx install advanced-alchemy[cli] .. tab-item:: pdm .. code-block:: bash :caption: Using `PDM `_ pdm add advanced-alchemy[cli] .. tab-item:: Poetry .. code-block:: bash :caption: Using `Poetry `_ poetry add advanced-alchemy[cli] Basic Usage ----------- The CLI can be invoked using the ``alchemy`` command: .. code-block:: bash alchemy --help Global Options -------------- The following options are available for all commands: .. list-table:: Global options :header-rows: 1 :widths: 20 80 * - Option - Explanation * - ``--config`` TEXT - **Required**. Dotted path to SQLAlchemy config(s), it's an instance of ``SQLAlchemyConfig`` (sync or async). Example: ``--config path.to.alchemy-config.config`` * - ``--bind-key`` TEXT - Optional. Specify which SQLAlchemy config to use * - ``--no-prompt`` - Optional. Skip confirmation prompts * - ``--verbose`` - Optional. Enable verbose output Config ------ Here is an example of what **config** looks like. If the file is named ``alchemy-config.py``, you would need to use it like this ``--config path.to.alchemy-config.config`` .. code-block:: python :caption: alchemy-config.py from sqlalchemy import create_engine from advanced_alchemy.config import SQLAlchemyConfig # Create a test config using SQLite config = SQLAlchemyConfig( connection_url="sqlite:///test.db" ) Available Commands ------------------ show-current-revision ~~~~~~~~~~~~~~~~~~~~~ Show the current revision of the database: .. code-block:: bash alchemy show-current-revision --config path.to.alchemy-config.config downgrade ~~~~~~~~~ Downgrade database to a specific revision: .. 
code-block:: bash alchemy downgrade --config path.to.alchemy-config.config [REVISION] .. list-table:: Options :header-rows: 1 :widths: 20 80 * - Option - Explanation * - ``--sql`` - Generate SQL output for offline migrations * - ``--tag`` TEXT - Arbitrary tag for custom env.py scripts * - ``REVISION`` - Target revision (default: "-1") upgrade ~~~~~~~ Upgrade database to a specific revision: .. code-block:: bash alchemy upgrade --config path.to.alchemy-config.config [REVISION] .. list-table:: Options :header-rows: 1 :widths: 20 80 * - Option - Explanation * - ``--sql`` - Generate SQL output for offline migrations * - ``--tag`` TEXT - Arbitrary tag for custom env.py scripts * - ``REVISION`` - Target revision (default: "head") init ~~~~ Initialize migrations for the project: .. code-block:: bash alchemy init --config path.to.alchemy-config.config [DIRECTORY] .. list-table:: Options :header-rows: 1 :widths: 20 80 * - Option - Explanation * - ``--multidb`` - Support multiple databases * - ``--package`` - Create __init__.py for created folder (default: True) * - ``DIRECTORY`` - Directory for migration files (optional) make-migrations ~~~~~~~~~~~~~~~ Create a new migration revision: .. code-block:: bash alchemy make-migrations --config path.to.alchemy-config.config .. list-table:: Options :header-rows: 1 :widths: 30 70 * - Option - Explanation * - ``-m``, ``--message`` TEXT - Revision message * - ``--autogenerate``/ ``--no-autogenerate`` - Automatically detect changes (default: True) * - ``--sql`` - Export to .sql instead of writing to database * - ``--head`` TEXT - Base revision for new revision (default: "head") * - ``--splice`` - Allow non-head revision as the "head" * - ``--branch-label`` TEXT - Branch label for new revision * - ``--version-path`` TEXT - Specific path for version file * - ``--rev-id`` TEXT - Specific revision ID drop-all ~~~~~~~~ Drop all tables from the database: .. 
code-block:: bash alchemy drop-all --config path.to.alchemy-config.config dump-data ~~~~~~~~~ Dump specified tables from the database to JSON files: .. code-block:: bash alchemy dump-data --config path.to.alchemy-config.config --table TABLE_NAME .. list-table:: Options :header-rows: 1 :widths: 20 80 * - Option - Explanation * - ``--table`` TEXT - Name of table to dump (use '*' for all tables) * - ``--dir`` PATH - Directory to save JSON files (default: ./fixtures) Extending the CLI ----------------- If you're using Click in your project, you can extend Advanced Alchemy's CLI with your own commands. The CLI provides two main functions for integration: - ``get_alchemy_group()``: Get the base CLI group - ``add_migration_commands()``: Add migration-related commands to a group Basic Extension ~~~~~~~~~~~~~~~ Here's how to extend the CLI with your own commands: .. code-block:: python from advanced_alchemy.cli import get_alchemy_group, add_migration_commands import click # Get the base group alchemy_group = get_alchemy_group() # Add your custom commands @alchemy_group.command(name="my-command") @click.option("--my-option", help="Custom option") def my_command(my_option): """My custom command.""" click.echo(f"Running my command with option: {my_option}") # Add migration commands to your group add_migration_commands(alchemy_group) Custom Group Integration ~~~~~~~~~~~~~~~~~~~~~~~~ You can also integrate Advanced Alchemy's commands into your existing Click group: .. code-block:: python import click from advanced_alchemy.cli import add_migration_commands @click.group() def cli(): """My application CLI.""" pass # Add migration commands to your CLI group add_migration_commands(cli) @cli.command() def my_command(): """Custom command in your CLI.""" pass if __name__ == "__main__": cli() Typer integration ----------------- You can integrate Advanced Alchemy's CLI commands into your existing ``Typer`` application. Here's how: .. 
code-block:: python :caption: cli.py import typer from advanced_alchemy.cli import get_alchemy_group, add_migration_commands app = typer.Typer() @app.command() def hello(name: str) -> None: """Says hello to the world.""" typer.echo(f"Hello {name}") @app.callback() def callback(): """ Typer app, including Click subapp """ pass def create_cli() -> typer.Typer: """Create the CLI application with both Typer and Click commands.""" # Get the Click group from advanced_alchemy alchemy_group = get_alchemy_group() # Convert our Typer app to a Click command object typer_click_object = typer.main.get_command(app) # Add all migration commands from the alchemy group to our CLI typer_click_object.add_command(add_migration_commands(alchemy_group)) return typer_click_object if __name__ == "__main__": cli = create_cli() cli() After setting up the integration, you can use both your ``Typer`` commands and Advanced Alchemy commands: .. code-block:: bash # Use your Typer commands python cli.py hello Cody # Use Advanced Alchemy commands python cli.py alchemy upgrade --config path.to.config python cli.py alchemy make-migrations --config path.to.config python-advanced-alchemy-1.0.1/docs/usage/database_seeding.rst000066400000000000000000000415751476663714600243050ustar00rootroot00000000000000==================================== Database Seeding and Fixture Loading ==================================== Advanced Alchemy provides utilities for seeding your database with initial data through JSON fixtures. This documentation will show you how to create and load fixtures in both synchronous and asynchronous applications. Creating Fixtures ----------------- Fixtures in Advanced Alchemy are simple JSON files that contain the data you want to seed. Each fixture file should: 1. Contain a JSON object or a JSON array of objects, where each object represents a row in your database table 2. Include all required fields for your model **Example Fixture:** .. 
code-block:: json :caption: fixtures/product.json [ { "name": "Laptop", "description": "High-performance laptop with 16GB RAM and 1TB SSD", "price": 999.99, "in_stock": true }, { "name": "Smartphone", "description": "Latest smartphone model with 5G and advanced camera", "price": 699.99, "in_stock": true }, { "name": "Headphones", "description": "Noise-cancelling wireless headphones with 30-hour battery life", "price": 199.99, "in_stock": true }, { "name": "Smartwatch", "description": "Fitness tracker with heart rate monitor and GPS", "price": 149.99, "in_stock": false }, { "name": "Tablet", "description": "10-inch tablet with high-resolution display", "price": 349.99, "in_stock": true } ] Loading Fixtures ---------------- Advanced Alchemy provides both synchronous and asynchronous functions for loading fixtures: Synchronous Loading ~~~~~~~~~~~~~~~~~~~ .. code-block:: python from pathlib import Path from sqlalchemy import String, create_engine from sqlalchemy.orm import Mapped, mapped_column from advanced_alchemy.base import UUIDBase from advanced_alchemy.config import SQLAlchemySyncConfig, SyncSessionConfig from advanced_alchemy.repository import SQLAlchemySyncRepository from advanced_alchemy.utils.fixtures import open_fixture # Database connection string DATABASE_URL = "sqlite:///db.sqlite3" config = SQLAlchemySyncConfig( engine_instance=create_engine(DATABASE_URL), session_config=SyncSessionConfig(expire_on_commit=False) ) class Product(UUIDBase): """Product model.""" __tablename__ = "products" name: Mapped[str] = mapped_column(String(length=100)) description: Mapped[str | None] = mapped_column(String(length=500)) price: Mapped[float] in_stock: Mapped[bool] = mapped_column(default=True) # Repository class ProductRepository(SQLAlchemySyncRepository[Product]): """Product repository.""" model_type = Product # Set up fixtures path fixtures_path = Path(__file__).parent / "fixtures" def initialize_database(): """Initialize the database and create tables."""
print("Creating database tables...") with config.get_engine().begin() as conn: UUIDBase.metadata.create_all(conn) print("Tables created successfully") def seed_database(): """Seed the database with fixture data.""" print("Seeding database...") # Create a session with config.get_session() as session: # Create repository for product model product_repo = ProductRepository(session=session) # Load and add product data try: print(f"Attempting to load fixtures from {fixtures_path}/product.json") product_data = open_fixture(fixtures_path, "product") print(f"Loaded {len(product_data)} products from fixture") product_repo.add_many([Product(**item) for item in product_data]) session.commit() except FileNotFoundError: print(f"Could not find fixture file at {fixtures_path}/product.json") if __name__ == "__main__": # Initialize the database initialize_database() # Seed the database seed_database() Asynchronous Loading ~~~~~~~~~~~~~~~~~~~~ .. code-block:: python import asyncio from pathlib import Path from typing import Optional from sqlalchemy import String from sqlalchemy.ext.asyncio import create_async_engine from sqlalchemy.orm import Mapped, mapped_column from advanced_alchemy.base import UUIDBase from advanced_alchemy.config import AsyncSessionConfig, SQLAlchemyAsyncConfig from advanced_alchemy.repository import SQLAlchemyAsyncRepository from advanced_alchemy.utils.fixtures import open_fixture_async # Database connection string DATABASE_URL = "sqlite+aiosqlite:///db.sqlite3" config = SQLAlchemyAsyncConfig( engine_instance=create_async_engine(DATABASE_URL), session_config=AsyncSessionConfig(expire_on_commit=False) ) class Product(UUIDBase): """Product model.""" __tablename__ = "products" name: Mapped[str] = mapped_column(String(length=100)) description: Mapped[Optional[str]] = mapped_column(String(length=500)) price: Mapped[float] in_stock: Mapped[bool] = mapped_column(default=True) # Repository class ProductRepository(SQLAlchemyAsyncRepository[Product]): """Product 
repository.""" model_type = Product # Set up fixtures path fixtures_path = Path(__file__).parent / "fixtures" async def initialize_database(): """Initialize the database and create tables.""" print("Creating database tables...") async with config.get_engine().begin() as conn: await conn.run_sync(UUIDBase.metadata.create_all) print("Tables created successfully") async def seed_database(): """Seed the database with fixture data.""" print("Seeding database...") # Create a session async with config.get_session() as session: # Create repository for product model product_repo = ProductRepository(session=session) # Load and add product data try: print(f"Attempting to load fixtures from {fixtures_path}/product.json") product_data = await open_fixture_async(fixtures_path, "product") print(f"Loaded {len(product_data)} products from fixture") await product_repo.add_many([Product(**item) for item in product_data]) await session.commit() except FileNotFoundError: print(f"Could not find fixture file at {fixtures_path}/product.json") async def main(): """Main async function to run the example.""" # Initialize the database await initialize_database() # Seed the database await seed_database() if __name__ == "__main__": # Run the async main function asyncio.run(main()) Integration with Web Frameworks ------------------------------- Litestar ~~~~~~~~ .. 
code-block:: python from pathlib import Path from typing import Optional import uvicorn from litestar import Litestar from sqlalchemy import String from sqlalchemy.orm import Mapped, mapped_column from advanced_alchemy.base import UUIDBase from advanced_alchemy.extensions.litestar import ( AsyncSessionConfig, SQLAlchemyAsyncConfig, SQLAlchemyPlugin, ) from advanced_alchemy.repository import SQLAlchemyAsyncRepository from advanced_alchemy.utils.fixtures import open_fixture_async # Database connection string DATABASE_URL = "sqlite+aiosqlite:///db.sqlite3" # Set up fixtures path fixtures_path = Path(__file__).parent / "fixtures" session_config = AsyncSessionConfig(expire_on_commit=False) sqlalchemy_config = SQLAlchemyAsyncConfig( connection_string=DATABASE_URL, before_send_handler="autocommit", session_config=session_config, create_all=True, ) alchemy = SQLAlchemyPlugin(config=sqlalchemy_config) class Product(UUIDBase): """Product model.""" __tablename__ = "products" name: Mapped[str] = mapped_column(String(length=100)) description: Mapped[Optional[str]] = mapped_column(String(length=500)) price: Mapped[float] in_stock: Mapped[bool] = mapped_column(default=True) # Repository class ProductRepository(SQLAlchemyAsyncRepository[Product]): """Product repository.""" model_type = Product # Startup function to seed the database async def on_startup() -> None: """Seed the database during application startup.""" print("Running startup routine...") # Create a session and seed data async with sqlalchemy_config.get_session() as session: # Create repository for product model product_repo = ProductRepository(session=session) # Load and add product data try: print(f"Attempting to load fixtures from {fixtures_path}/product.json") product_data = await open_fixture_async(fixtures_path, "product") print(f"Loaded {len(product_data)} products from fixture") await product_repo.add_many([Product(**item) for item in product_data]) await session.commit() except FileNotFoundError: print(f"Could 
not find fixture file at {fixtures_path}/product.json") # Verify data was added products = await product_repo.list() print(f"Database seeded with {len(products)} products") # Create the Litestar application app = Litestar( on_startup=[on_startup], plugins=[alchemy], ) if __name__ == "__main__": uvicorn.run(app, host="0.0.0.0", port=8000) FastAPI ~~~~~~~ .. code-block:: python from contextlib import asynccontextmanager from pathlib import Path from typing import Optional import uvicorn from fastapi import FastAPI from sqlalchemy import String from sqlalchemy.orm import Mapped, mapped_column from advanced_alchemy.base import UUIDBase from advanced_alchemy.extensions.fastapi import ( AdvancedAlchemy, AsyncSessionConfig, SQLAlchemyAsyncConfig, ) from advanced_alchemy.repository import SQLAlchemyAsyncRepository from advanced_alchemy.utils.fixtures import open_fixture_async # Database connection string DATABASE_URL = "sqlite+aiosqlite:///db.sqlite3" # Set up fixtures path fixtures_path = Path(__file__).parent / "fixtures" class Product(UUIDBase): """Product model.""" __tablename__ = "products" name: Mapped[str] = mapped_column(String(length=100)) description: Mapped[Optional[str]] = mapped_column(String(length=500)) price: Mapped[float] in_stock: Mapped[bool] = mapped_column(default=True) # Repository class ProductRepository(SQLAlchemyAsyncRepository[Product]): """Product repository.""" model_type = Product # Lifespan context manager @asynccontextmanager async def lifespan(app: FastAPI): """Handle startup and shutdown events.""" # Startup: Initialize database and seed data print("Running startup routine...") # Create a session and seed data async with sqlalchemy_config.get_session() as session: # Create repository for product model product_repo = ProductRepository(session=session) # Load and add product data try: print(f"Attempting to load fixtures from {fixtures_path}/product.json") product_data = await open_fixture_async(fixtures_path, "product") print(f"Loaded 
{len(product_data)} products from fixture") await product_repo.add_many([Product(**item) for item in product_data]) await session.commit() except FileNotFoundError: print(f"Could not find fixture file at {fixtures_path}/product.json") # Verify data was added products = await product_repo.list() print(f"Database seeded with {len(products)} products") # Yield control back to FastAPI yield # Shutdown: Clean up resources if needed # This section runs when the application is shutting down print("Shutting down...") session_config = AsyncSessionConfig(expire_on_commit=False) sqlalchemy_config = SQLAlchemyAsyncConfig( connection_string=DATABASE_URL, commit_mode="autocommit", session_config=session_config, create_all=True, ) # Create the FastAPI application with lifespan app = FastAPI(lifespan=lifespan) alchemy = AdvancedAlchemy(config=sqlalchemy_config, app=app) if __name__ == "__main__": uvicorn.run(app, host="0.0.0.0", port=8000) Flask ~~~~~ .. code-block:: python from pathlib import Path from typing import Optional from flask import Flask from sqlalchemy import String from sqlalchemy.orm import Mapped, mapped_column from advanced_alchemy.base import UUIDBase from advanced_alchemy.extensions.flask import ( AdvancedAlchemy, SQLAlchemySyncConfig, SyncSessionConfig, ) from advanced_alchemy.repository import SQLAlchemySyncRepository from advanced_alchemy.utils.fixtures import open_fixture # Database connection string DATABASE_URL = "sqlite:///db.sqlite3" # Set up fixtures path fixtures_path = Path(__file__).parent / "fixtures" class Product(UUIDBase): """Product model.""" __tablename__ = "products" name: Mapped[str] = mapped_column(String(length=100)) description: Mapped[Optional[str]] = mapped_column(String(length=500)) price: Mapped[float] in_stock: Mapped[bool] = mapped_column(default=True) # Repository class ProductRepository(SQLAlchemySyncRepository[Product]): """Product repository.""" model_type = Product app = Flask(__name__) sqlalchemy_config = SQLAlchemySyncConfig( 
connection_string=DATABASE_URL, commit_mode="autocommit", session_config=SyncSessionConfig( expire_on_commit=False, ), create_all=True ) db = AdvancedAlchemy(config=sqlalchemy_config) db.init_app(app) with app.app_context(): # noqa: SIM117 # Seed data with db.get_session() as session: product_repo = ProductRepository(session=session) # Load and add product data try: print(f"Attempting to load fixtures from {fixtures_path}/product.json") product_data = open_fixture(fixtures_path, "product") print(f"Loaded {len(product_data)} products from fixture") product_repo.add_many([Product(**item) for item in product_data]) session.commit() except FileNotFoundError: print(f"Could not find fixture file at {fixtures_path}/product.json") # Verify data was added products = product_repo.list() print(f"Database seeded with {len(products)} products") if __name__ == "__main__": app.run(host="0.0.0.0", port=5000) Best Practices -------------- 1. **Directory Structure**: Keep your fixtures in a dedicated directory (e.g., ``fixtures/``). 2. **Naming Convention**: Name your fixture files after the corresponding table names. 3. **Idempotent Seeding**: Always check if data exists before seeding to avoid duplicates or update records. 4. **Dependencies**: Seed tables in the correct order to respect foreign key constraints. 5. **Data Validation**: Ensure your fixture data meets your model's constraints. 6. **Environment Separation**: Consider having different fixtures for development, testing, and production. 7. **Version Control**: Keep your fixtures under version control with your application code. Tips for Efficient Seeding -------------------------- - Use :func:`add_many (async) ` / :func:`add_many (sync) ` instead of adding objects one by one for better performance. - Use :func:`upsert_many (async) ` / :func:`upsert_many (sync) ` to update your data if you are updating prices for example. - You can use the database seeding from your cli, app startup or any route. 
- For large datasets, consider chunking the data into smaller batches. - When dealing with relationships, seed parent records before child records. - Consider using factory libraries like `Polyfactory `__ for generating test data. python-advanced-alchemy-1.0.1/docs/usage/frameworks/000077500000000000000000000000001476663714600224555ustar00rootroot00000000000000python-advanced-alchemy-1.0.1/docs/usage/frameworks/fastapi.rst000066400000000000000000000124561476663714600246460ustar00rootroot00000000000000=================== FastAPI Integration =================== Advanced Alchemy's repository and service patterns work well within FastAPI applications. Basic Setup ----------- Configure SQLAlchemy with FastAPI: .. code-block:: python from typing import AsyncGenerator from fastapi import FastAPI from advanced_alchemy.extensions.fastapi import AdvancedAlchemy, AsyncSessionConfig, SQLAlchemyAsyncConfig sqlalchemy_config = SQLAlchemyAsyncConfig( connection_string="sqlite+aiosqlite:///test.sqlite", session_config=AsyncSessionConfig(expire_on_commit=False), create_all=True, commit_mode="autocommit", ) app = FastAPI() alchemy = AdvancedAlchemy(config=sqlalchemy_config, app=app) Models and Schemas ------------------ Define your SQLAlchemy models and Pydantic schemas: .. 
code-block:: python import datetime from uuid import UUID from sqlalchemy import ForeignKey from sqlalchemy.orm import Mapped, mapped_column, relationship from pydantic import BaseModel as _BaseModel from advanced_alchemy.base import UUIDAuditBase, UUIDBase class BaseModel(_BaseModel): """Extend Pydantic's BaseModel to enable ORM mode""" model_config = {"from_attributes": True} class AuthorModel(UUIDBase): __tablename__ = "author" name: Mapped[str] dob: Mapped[datetime.date | None] books: Mapped[list["BookModel"]] = relationship(back_populates="author", lazy="noload") class BookModel(UUIDAuditBase): __tablename__ = "book" title: Mapped[str] author_id: Mapped[UUID] = mapped_column(ForeignKey("author.id")) author: Mapped[AuthorModel] = relationship(lazy="joined", innerjoin=True, viewonly=True) class Author(BaseModel): id: UUID | None name: str dob: datetime.date | None = None class AuthorCreate(BaseModel): name: str dob: datetime.date | None = None class AuthorUpdate(BaseModel): name: str | None = None dob: datetime.date | None = None Repository and Service ---------------------- Create repository and service classes: .. code-block:: python from typing import Annotated, AsyncGenerator, Optional from advanced_alchemy.extensions.fastapi import repository, service from fastapi import Depends from sqlalchemy.ext.asyncio import AsyncSession class AuthorService(service.SQLAlchemyAsyncRepositoryService[AuthorModel]): """Author service.""" class Repo(repository.SQLAlchemyAsyncRepository[AuthorModel]): """Author repository.""" model_type = AuthorModel repository_type = Repo Dependency Injection -------------------- Set up dependencies to be injected into the request context. ..
code-block:: python from fastapi import Request DatabaseSession = Annotated[AsyncSession, Depends(alchemy.provide_session())] async def provide_authors_service(db_session: DatabaseSession) -> AsyncGenerator[AuthorService, None]: """This provides the default Authors repository.""" async with AuthorService.new(session=db_session) as service: yield service Authors = Annotated[AuthorService, Depends(provide_authors_service)] Controllers ----------- Create controllers using the service: .. code-block:: python from fastapi import APIRouter, Depends from uuid import UUID from advanced_alchemy.extensions.fastapi import filters author_router = APIRouter() @author_router.get(path="/authors", response_model=filters.OffsetPagination[Author]) async def list_authors( authors_service: Authors, limit_offset: Annotated[filters.LimitOffset, Depends(provide_limit_offset_pagination)], ) -> filters.OffsetPagination[AuthorModel]: """List authors.""" results, total = await authors_service.list_and_count(limit_offset) return authors_service.to_schema(results, total, filters=[limit_offset]) @author_router.post(path="/authors", response_model=Author) async def create_author( authors_service: Authors, data: AuthorCreate, ) -> AuthorModel: """Create a new author.""" obj = await authors_service.create(data) return authors_service.to_schema(obj) @author_router.get(path="/authors/{author_id}", response_model=Author) async def get_author( authors_service: Authors, author_id: UUID, ) -> AuthorModel: """Get an existing author.""" obj = await authors_service.get(author_id) return authors_service.to_schema(obj) @author_router.patch(path="/authors/{author_id}", response_model=Author) async def update_author( authors_service: Authors, data: AuthorUpdate, author_id: UUID, ) -> AuthorModel: """Update an author.""" obj = await authors_service.update(data, item_id=author_id) return authors_service.to_schema(obj) @author_router.delete(path="/authors/{author_id}") async def delete_author( authors_service:
Authors, author_id: UUID, ) -> None: """Delete an author from the system.""" _ = await authors_service.delete(author_id) Application Configuration ------------------------- Finally, configure your FastAPI application with the router: .. code-block:: python app.include_router(author_router) python-advanced-alchemy-1.0.1/docs/usage/frameworks/flask.rst000066400000000000000000000176641476663714600243210ustar00rootroot00000000000000Flask Integration ================= Advanced Alchemy provides seamless integration with Flask applications through its Flask extension. Installation ------------ The Flask extension is included with Advanced Alchemy by default. No additional installation is required. Basic Usage ----------- Here's a basic example of using Advanced Alchemy with Flask: .. code-block:: python from flask import Flask from sqlalchemy import select from advanced_alchemy.extensions.flask import ( AdvancedAlchemy, SQLAlchemySyncConfig, EngineConfig, ) app = Flask(__name__) db_config = SQLAlchemySyncConfig(connection_string="sqlite:///local.db", commit_mode="autocommit", create_all=True) alchemy = AdvancedAlchemy(db_config, app) # Use standard SQLAlchemy session in your routes @app.route("/users") def list_users(): session = alchemy.get_sync_session() users = session.execute(select(User)) return {"users": [user.dict() for user in users.scalars()]} Multiple Databases ------------------ Advanced Alchemy supports multiple database configurations: .. note:: The ``bind_key`` option is used to specify the database to use for a given session. When using multiple databases and you do not have at least one database with a ``bind_key`` of ``default``, an exception will be raised when calling ``db.get_session()`` without a bind key. This only applies when using multiple configurations. If you are using a single configuration, the engine will be returned even if the ``bind_key`` is not ``default``. ..
code-block:: python configs = [ SQLAlchemySyncConfig(connection_string="sqlite:///users.db", bind_key="users"), SQLAlchemySyncConfig(connection_string="sqlite:///products.db", bind_key="products"), ] alchemy = AdvancedAlchemy(configs, app) # Get session for specific database users_session = alchemy.get_sync_session("users") products_session = alchemy.get_sync_session("products") Async Support ------------- Advanced Alchemy supports async SQLAlchemy with Flask: .. code-block:: python from advanced_alchemy.extensions.flask import ( AdvancedAlchemy, SQLAlchemyAsyncConfig, ) from sqlalchemy import select app = Flask(__name__) db_config = SQLAlchemyAsyncConfig(connection_string="postgresql+asyncpg://user:pass@localhost/db", create_all=True) alchemy = AdvancedAlchemy(db_config, app) # Use async session in your routes @app.route("/users") async def list_users(): session = alchemy.get_async_session() users = await session.execute(select(User)) return {"users": [user.dict() for user in users.scalars()]} You can also safely use an AsyncSession in your routes within a sync context. .. warning:: This is experimental and may change in the future. .. code-block:: python @app.route("/users") def list_users(): session = alchemy.get_async_session() users = alchemy.portal.call(session.execute, select(User)) return {"users": [user.dict() for user in users.scalars()]} Configuration ------------- SQLAlchemy Configuration ~~~~~~~~~~~~~~~~~~~~~~~~ Both sync and async configurations support these options: .. 
list-table:: :header-rows: 1 * - Option - Type - Description - Default * - ``engine_config`` - ``EngineConfig`` - SQLAlchemy engine configuration - Required * - ``bind_key`` - ``str`` - Key for multiple database support - "default" * - ``create_all`` - ``bool`` - Create tables on startup - ``False`` * - ``commit_mode`` - ``"autocommit", "autocommit_include_redirect", "manual"`` - Session commit behavior - ``"manual"`` Commit Modes ~~~~~~~~~~~~ The ``commit_mode`` option controls how database sessions are committed: - ``"manual"`` (default): No automatic commits - ``"autocommit"``: Commit on successful responses (2xx status codes) - ``"autocommit_include_redirect"``: Commit on successful responses and redirects (2xx and 3xx status codes) Services -------- The ``FlaskServiceMixin`` adds Flask-specific functionality to services: Here's an example of a service that uses the ``FlaskServiceMixin`` with all CRUD operations, route pagination, and msgspec serialization for JSON .. code-block:: python import datetime from typing import Optional from uuid import UUID from msgspec import Struct from flask import Flask from sqlalchemy.orm import Mapped, mapped_column from advanced_alchemy.extensions.flask import ( AdvancedAlchemy, FlaskServiceMixin, service, repository, SQLAlchemySyncConfig, base, ) class Author(base.UUIDBase): """Author model.""" name: Mapped[str] dob: Mapped[Optional[datetime.date]] class AuthorSchema(Struct): """Author schema.""" name: str id: Optional[UUID] = None dob: Optional[datetime.date] = None class AuthorService(FlaskServiceMixin, service.SQLAlchemySyncRepositoryService[Author]): class Repo(repository.SQLAlchemySyncRepository[Author]): model_type = Author repository_type = Repo app = Flask(__name__) config = SQLAlchemySyncConfig(connection_string="sqlite:///local.db", commit_mode="autocommit", create_all=True) alchemy = AdvancedAlchemy(config, app) @app.route("/authors", methods=["GET"]) def list_authors(): """List authors with pagination.""" page, 
page_size = request.args.get("currentPage", 1, type=int), request.args.get("pageSize", 10, type=int) limit_offset = filters.LimitOffset(limit=page_size, offset=page_size * (page - 1)) service = AuthorService(session=alchemy.get_sync_session()) results, total = service.list_and_count(limit_offset) response = service.to_schema(results, total, filters=[limit_offset], schema_type=AuthorSchema) return service.jsonify(response) @app.route("/authors", methods=["POST"]) def create_author(): """Create a new author.""" service = AuthorService(session=alchemy.get_sync_session()) obj = service.create(**request.get_json()) return service.jsonify(obj) @app.route("/authors/", methods=["GET"]) def get_author(author_id: UUID): """Get an existing author.""" service = AuthorService(session=alchemy.get_sync_session(), load=[Author.books]) obj = service.get(author_id) return service.jsonify(obj) @app.route("/authors/", methods=["PATCH"]) def update_author(author_id: UUID): """Update an author.""" service = AuthorService(session=alchemy.get_sync_session(), load=[Author.books]) obj = service.update(**request.get_json(), item_id=author_id) return service.jsonify(obj) @app.route("/authors/", methods=["DELETE"]) def delete_author(author_id: UUID): """Delete an author.""" service = AuthorService(session=alchemy.get_sync_session()) service.delete(author_id) return "", 204 The ``jsonify`` method is analogous to Flask's ``jsonify`` function. However, this implementation will serialize with the configured Advanced Alchemy serialize (i.e. Msgspec or Orjson based on installation). Database Migrations ------------------- When the extension is configured for Flask, database commands are automatically added to the Flask CLI. These are the same commands available to you when running the ``alchemy`` standalone CLI. Here's an example of the commands available to Flask .. 
code-block:: bash # Initialize migrations flask database init # Create a new migration flask database revision --autogenerate -m "Add users table" # Apply migrations flask database upgrade # Revert migrations flask database downgrade # Show migration history flask database history # Show all commands flask database --help python-advanced-alchemy-1.0.1/docs/usage/frameworks/litestar.rst000066400000000000000000000352671476663714600250530ustar00rootroot00000000000000==================== Litestar Integration ==================== .. seealso:: :external+litestar:doc:`Litestar's documentation for SQLAlchemy integration ` Advanced Alchemy provides first-class integration with Litestar through its SQLAlchemy plugin, which re-exports many of the modules within Advanced Alchemy. This guide demonstrates building a complete CRUD API for a book management system. Key Features ------------ - SQLAlchemy plugin for session and transaction management - Repository pattern for database operations - Service layer for business logic and data transformation - Built-in pagination and filtering - CLI tools for database migrations Basic Setup ----------- First, configure the SQLAlchemy plugin with Litestar. The plugin handles database connection, session management, and dependency injection: .. code-block:: python from litestar import Litestar from litestar.plugins.sqlalchemy import ( AsyncSessionConfig, SQLAlchemyAsyncConfig, SQLAlchemyPlugin, ) session_config = AsyncSessionConfig(expire_on_commit=False) sqlalchemy_config = SQLAlchemyAsyncConfig( connection_string="sqlite+aiosqlite:///test.sqlite", before_send_handler="autocommit", session_config=session_config, create_all=True, ) alchemy = SQLAlchemyPlugin(config=sqlalchemy_config) SQLAlchemy Models ----------------- Define your SQLAlchemy models using Advanced Alchemy's enhanced base classes: .. 
code-block:: python import datetime from uuid import UUID from sqlalchemy import ForeignKey from sqlalchemy.orm import Mapped, mapped_column, relationship from litestar.plugins.sqlalchemy.base import UUIDAuditBase, UUIDBase class AuthorModel(UUIDBase): __tablename__ = "author" name: Mapped[str] dob: Mapped[datetime.date | None] books: Mapped[list[BookModel]] = relationship(back_populates="author", lazy="selectin") class BookModel(UUIDAuditBase): __tablename__ = "book" title: Mapped[str] author_id: Mapped[UUID] = mapped_column(ForeignKey("author.id")) author: Mapped[AuthorModel] = relationship(lazy="joined", innerjoin=True, viewonly=True) Pydantic Schemas ---------------- Define Pydantic schemas for input validation and response serialization: .. code-block:: python import datetime from pydantic import BaseModel, ConfigDict from uuid import UUID from typing import Optional class BaseSchema(BaseModel): """Base Schema with ORM mode enabled.""" model_config = ConfigDict(from_attributes=True) class Author(BaseSchema): """Author response schema.""" id: UUID name: str dob: Optional[datetime.date] = None class AuthorCreate(BaseSchema): """Schema for creating authors.""" name: str dob: Optional[datetime.date] = None class AuthorUpdate(BaseSchema): """Schema for updating authors.""" name: Optional[str] = None dob: Optional[datetime.date] = None class Book(BaseSchema): """Book response schema with author details.""" id: UUID title: str author_id: UUID author: Author class BookCreate(BaseSchema): """Schema for creating books.""" title: str author_id: UUID Repository and Service Layer ---------------------------- Create repository, service classes, and dependency injection provider function: .. 
code-block:: python from typing import AsyncGenerator from litestar.plugins.sqlalchemy.repository import SQLAlchemyAsyncRepository from litestar.plugins.sqlalchemy.service import SQLAlchemyAsyncRepositoryService from sqlalchemy.ext.asyncio import AsyncSession class AuthorRepository(SQLAlchemyAsyncRepository[AuthorModel]): """Author repository.""" model_type = AuthorModel class AuthorService(SQLAlchemyAsyncRepositoryService[AuthorModel]): """Author service.""" repository_type = AuthorRepository async def provide_authors_service(db_session: AsyncSession) -> AsyncGenerator[AuthorService, None]: """This provides the default Authors repository.""" async with AuthorService.new(session=db_session) as service: yield service Controllers ----------- Create a controller class to handle HTTP endpoints. The controller uses dependency injection for services and includes built-in pagination: .. code-block:: python from litestar import Controller, get, post, patch, delete from litestar.di import Provide from litestar.params import Parameter from litestar.pagination import OffsetPagination from litestar.repository.filters import LimitOffset class AuthorController(Controller): """Author CRUD endpoints.""" path = "/authors" dependencies = {"authors_service": Provide(provide_authors_service)} tags = ["Authors"] @get() async def list_authors( self, authors_service: AuthorService, limit_offset: LimitOffset, ) -> OffsetPagination[Author]: """List all authors with pagination.""" results, total = await authors_service.list_and_count(limit_offset) return authors_service.to_schema( data=results, total=total, filters=[limit_offset], schema_type=Author, ) @post() async def create_author( self, authors_service: AuthorService, data: AuthorCreate, ) -> Author: """Create a new author.""" obj = await authors_service.create(data) return authors_service.to_schema(data=obj, schema_type=Author) @get(path="/{author_id:uuid}") async def get_author( self, authors_service: AuthorService, author_id: UUID = 
Parameter( title="Author ID", description="The author to retrieve.", ), ) -> Author: """Get an existing author.""" obj = await authors_service.get(author_id) return authors_service.to_schema(data=obj, schema_type=Author) @patch(path="/{author_id:uuid}") async def update_author( self, authors_service: AuthorService, data: AuthorUpdate, author_id: UUID = Parameter( title="Author ID", description="The author to update.", ), ) -> Author: """Update an author.""" obj = await authors_service.update(data=data, item_id=author_id) return authors_service.to_schema(obj, schema_type=Author) @delete(path="/{author_id:uuid}") async def delete_author( self, authors_service: AuthorService, author_id: UUID = Parameter( title="Author ID", description="The author to delete.", ), ) -> None: """Delete an author from the system.""" _ = await authors_service.delete(author_id) Application Configuration ------------------------- Finally, configure your Litestar application with the plugin and dependencies: .. code-block:: python from litestar import Litestar from litestar.di import Provide from litestar.plugins.sqlalchemy.filters import FilterTypes, LimitOffset from litestar.plugins.sqlalchemy import ( AsyncSessionConfig, SQLAlchemyAsyncConfig, SQLAlchemyPlugin, ) session_config = AsyncSessionConfig(expire_on_commit=False) sqlalchemy_config = SQLAlchemyAsyncConfig( connection_string="sqlite+aiosqlite:///test.sqlite", before_send_handler="autocommit", session_config=session_config, create_all=True, ) alchemy = SQLAlchemyPlugin(config=sqlalchemy_config) def provide_limit_offset_pagination( current_page: int = Parameter(ge=1, query="currentPage", default=1, required=False), page_size: int = Parameter( query="pageSize", ge=1, default=10, required=False, ), ) -> FilterTypes: """Add offset/limit pagination.""" return LimitOffset(page_size, page_size * (current_page - 1)) app = Litestar( route_handlers=[AuthorController], plugins=[alchemy], dependencies={"limit_offset": 
Provide(provide_limit_offset_pagination, sync_to_thread=False)}, ) Database Sessions ----------------- Sessions in Controllers ^^^^^^^^^^^^^^^^^^^^^^^ You can access the database session from the controller by using the `db_session` parameter, which is automatically injected by the SQLAlchemy plugin. The session is automatically committed at the end of the request. If an exception occurs, the session is rolled back: .. code-block:: python from litestar import Litestar, get from litestar.plugins.sqlalchemy import ( AsyncSessionConfig, SQLAlchemyAsyncConfig, SQLAlchemyPlugin, ) session_config = AsyncSessionConfig(expire_on_commit=False) sqlalchemy_config = SQLAlchemyAsyncConfig( connection_string="sqlite+aiosqlite:///test.sqlite", before_send_handler="autocommit", session_config=session_config, create_all=True, ) # Create 'db_session' dependency. alchemy = SQLAlchemyPlugin(config=sqlalchemy_config) @get("/my-endpoint") async def my_controller(db_session: AsyncSession) -> str: # Access the database session here. return "Hello, World!" app = Litestar( route_handlers=[my_controller], plugins=[alchemy], ) Sessions in Middleware ^^^^^^^^^^^^^^^^^^^^^^ Dependency injection is not available in middleware. Instead, you can create a new session using the `provide_session` method: .. code-block:: python from litestar import Litestar from litestar.types import ASGIApp, Scope, Receive, Send from litestar.plugins.sqlalchemy import ( AsyncSessionConfig, SQLAlchemyAsyncConfig, SQLAlchemyPlugin, ) session_config = AsyncSessionConfig(expire_on_commit=False) sqlalchemy_config = SQLAlchemyAsyncConfig( connection_string="sqlite+aiosqlite:///test.sqlite", before_send_handler="autocommit", session_config=session_config, create_all=True, ) alchemy = SQLAlchemyPlugin(config=sqlalchemy_config) def middleware_factory(app: ASGIApp) -> ASGIApp: async def my_middleware(scope: Scope, receive: Receive, send: Send) -> None: # NOTE: You can also access the app state from `ASGIConnection`. 
db_session = await alchemy.provide_session(scope["app"].state, scope) # Access the database session here. await db_session.close() ... await app(scope, receive, send) return my_middleware app = Litestar( route_handlers=[...], middleware=[middleware_factory], plugins=[alchemy] ) Database Migrations ------------------- Advanced Alchemy integrates with Litestar's CLI to provide database migration tools powered by Alembic. All alembic commands are integrated directly into the Litestar CLI. Command List ^^^^^^^^^^^^ To get a listing of available commands, run the following: .. code-block:: bash litestar database .. code-block:: bash Usage: app database [OPTIONS] COMMAND [ARGS]... Manage SQLAlchemy database components. โ•ญโ”€ Options โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ•ฎ โ”‚ --help -h Show this message and exit. โ”‚ โ•ฐโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ•ฏ โ•ญโ”€ Commands โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ•ฎ โ”‚ downgrade Downgrade database to a specific revision. โ”‚ โ”‚ drop-all Drop all tables from the database. โ”‚ โ”‚ dump-data Dump specified tables from the database to JSON โ”‚ โ”‚ files. โ”‚ โ”‚ init Initialize migrations for the project. โ”‚ โ”‚ make-migrations Create a new migration revision. โ”‚ โ”‚ merge-migrations Merge multiple revisions into a single new revision. โ”‚ โ”‚ show-current-revision Shows the current revision for the database. 
โ”‚ โ”‚ stamp-migration Mark (Stamp) a specific revision as current without โ”‚ โ”‚ applying the migrations. โ”‚ โ”‚ upgrade Upgrade database to a specific revision. โ”‚ โ•ฐโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ•ฏ Initializing a new project ^^^^^^^^^^^^^^^^^^^^^^^^^^ If you would like to initial set of alembic migrations, you can easily scaffold out new templates to setup a project. Assuming that you are using the default configuration for the SQLAlchemy configuration, you can run the following to initialize the migrations directory. .. code-block:: shell-session $ litestar database init ./migrations If you use a different path than `./migrations`, be sure to also set this in your SQLAlchemy config. For instance, if you'd like to use `./alembic`: .. code-block:: python config = SQLAlchemyAsyncConfig( alembic_config=AlembicAsyncConfig( script_location="./alembic/", ), ) And then run the following to initialize the migrations directory: .. code-block:: shell-session $ litestar database init ./alembic You will now be configured to use the alternate directory for migrations. Generate New Migrations ^^^^^^^^^^^^^^^^^^^^^^^ Once configured, you can run the following command to auto-generate new alembic migrations: .. code-block:: shell-session $ litestar database make-migrations Upgrading a Database ^^^^^^^^^^^^^^^^^^^^ You can upgrade a database to the latest version by running the following command: .. code-block:: shell-session $ litestar database upgrade python-advanced-alchemy-1.0.1/docs/usage/index.rst000066400000000000000000000024521476663714600221410ustar00rootroot00000000000000===== Usage ===== This guide demonstrates building a complete blog system using Advanced Alchemy's features. 
We'll create a system that supports: - Posts with tags and slugs - Tag management with automatic deduplication - Efficient querying and pagination - Type-safe database operations - Schema validation and transformation .. toctree:: :maxdepth: 2 :caption: Core Features modeling repositories services types cli database_seeding .. toctree:: :maxdepth: 2 :caption: Framework Integration frameworks/litestar frameworks/flask frameworks/fastapi The guide follows a practical approach: 1. **Modeling**: Define SQLAlchemy models with Advanced Alchemy's enhanced base classes 2. **Repositories**: Implement type-safe database operations using repositories 3. **Services**: Build business logic with automatic schema validation 4. **Framework Integration**: Integrate with Litestar and FastAPI Each section includes: - Concepts and usage overview - Complete code examples - Best practices - Performance considerations - Error handling strategies Prerequisites ------------- - Python 3.9+ - SQLAlchemy 2.0+ - Pydantic v2 or Msgspec (for schema validation) - Basic understanding of SQLAlchemy and async programming - Basic understanding of Pydantic or Msgspec python-advanced-alchemy-1.0.1/docs/usage/modeling.rst000066400000000000000000000231571476663714600226350ustar00rootroot00000000000000======== Modeling ======== Advanced Alchemy enhances SQLAlchemy's modeling capabilities with production-ready base classes, mixins, and specialized types. This guide demonstrates modeling for a blog system with posts and tags, showcasing key features and best practices. Base Classes ------------ Advanced Alchemy provides several base classes optimized for different use cases. Any model can utilize these pre-defined declarative bases from sqlchemy. Here's a brief overview of the included classes: .. 
list-table:: Base Classes and Features :header-rows: 1 :widths: 20 80 * - Base Class - Features * - ``BigIntBase`` - BIGINT primary keys for tables * - ``BigIntAuditBase`` - BIGINT primary keys for tables, Automatic created_at/updated_at timestamps * - ``UUIDBase`` - UUID primary keys * - ``UUIDv6Base`` - UUIDv6 primary keys * - ``UUIDv7Base`` - UUIDv7 primary keys * - ``UUIDAuditBase`` - UUID primary keys, Automatic created_at/updated_at timestamps * - ``UUIDv6AuditBase`` - UUIDv6 primary keys, Automatic created_at/updated_at timestamps * - ``UUIDv7AuditBase`` - Time-sortable UUIDv7 primary keys, Automatic created_at/updated_at timestamps * - ``NanoIDBase`` - URL-friendly unique identifiers, Shorter than UUIDs, collision resistant * - ``NanoIDAuditBase`` - URL-friendly IDs with audit timestamps, Combines Nanoid benefits with audit trails Mixins ------- Additionally, Advanced Alchemy provides mixins to enhance model functionality: .. list-table:: Available Mixins :header-rows: 1 :widths: 20 80 * - Mixin - Features * - ``SlugKey`` - | Adds URL-friendly slug field * - ``AuditColumns`` - | Automatic created_at/updated_at timestamps | Tracks record modifications * - ``UniqueMixin`` - | Automatic Select or Create for many-to-many relationships Basic Model Example ------------------- Let's start with a simple blog post model: .. code-block:: python import datetime from typing import Optional from advanced_alchemy.base import BigIntAuditBase from sqlalchemy.orm import Mapped, mapped_column class Post(BigIntAuditBase): """Blog post model with auto-incrementing ID and audit fields. 
Attributes: title: The post title content: The post content published: Publication status created_at: Timestamp of creation (from BigIntAuditBase) updated_at: Timestamp of last update (from BigIntAuditBase) """ title: Mapped[str] = mapped_column(index=True) content: Mapped[str] published: Mapped[bool] = mapped_column(default=False) published_at: Mapped[Optional[datetime.datetime]] = mapped_column(default=None) .. _many_to_many_relationships: Many-to-Many Relationships -------------------------- Let's implement a tagging system using a many-to-many relationship. This example demonstrates: - Association table configuration - Relationship configuration with lazy loading - Slug key mixin - Index creation .. code-block:: python from __future__ import annotations from sqlalchemy import Column, ForeignKey, Table from sqlalchemy.orm import relationship from sqlalchemy.orm import Mapped, mapped_column from advanced_alchemy.base import BigIntAuditBase, orm_registry, SlugKey from typing import List # Association table for post-tag relationship post_tag = Table( "post_tag", orm_registry.metadata, Column("post_id", ForeignKey("post.id", ondelete="CASCADE"), primary_key=True), Column("tag_id", ForeignKey("tag.id", ondelete="CASCADE"), primary_key=True) ) class Post(BigIntAuditBase): title: Mapped[str] = mapped_column(index=True) content: Mapped[str] published: Mapped[bool] = mapped_column(default=False) # Many-to-many relationship with tags tags: Mapped[List["Tag"]] = relationship( secondary=post_tag, back_populates="posts", lazy="selectin" ) class Tag(BigIntAuditBase, SlugKey): """Tag model with automatic slug generation. The SlugKey mixin automatically adds a slug field to the model. """ name: Mapped[str] = mapped_column(unique=True, index=True) posts: Mapped[List[Post]] = relationship( secondary=post_tag, back_populates="tags", viewonly=True ) If we want to interact with the models above, we might use something like the following: .. 
code-block:: python from sqlalchemy.ext.asyncio import AsyncSession async def add_tags_to_post( session: AsyncSession, post: Post, tag_names: list[str] ) -> Post: """Add tags to a post, looking up existing tags and creating new ones if needed.""" existing_tags = await session.scalars( select(Tag).filter(Tag.slug.in_([slugify(name) for name in tag_names])) ) new_tags = [Tag(name=name, slug=slugify(name)) for name in tag_names if name not in {tag.name for tag in existing_tags}] post.tags.extend(new_tags + list(existing_tags)) session.merge(post) await session.flush() return post Fortunately, we can remove some of this logic thanks to :class:`UniqueMixin`. .. _using_unique_mixin: Using :class:`UniqueMixin` -------------------------- :class:`UniqueMixin` provides automatic handling of unique constraints and merging of duplicate records. When using the mixin, you must implement two classmethods: :meth:`unique_hash ` and :meth:`unique_filter `. These methods enable: - Automatic lookup of existing records - Safe merging of duplicates - Atomic get-or-create operations - Configurable uniqueness criteria Let's enhance our Tag model with :class:`UniqueMixin`: .. code-block:: python from advanced_alchemy.base import BigIntAuditBase, SlugKey from advanced_alchemy.mixins import UniqueMixin from advanced_alchemy.utils.text import slugify from sqlalchemy.sql.elements import ColumnElement from typing import Hashable class Tag(BigIntAuditBase, SlugKey, UniqueMixin): """Tag model with unique name constraint and automatic slug generation. 
The UniqueMixin provides: - Automatic lookup of existing records - Safe merging of duplicates - Consistent slug generation """ name: Mapped[str] = mapped_column(unique=True, index=True) posts: Mapped[list[Post]] = relationship( secondary=post_tag, back_populates="tags", viewonly=True ) @classmethod def unique_hash(cls, name: str, slug: str | None = None) -> Hashable: """Generate a unique hash for deduplication.""" return slugify(name) @classmethod def unique_filter( cls, name: str, slug: str | None = None, ) -> ColumnElement[bool]: """SQL filter for finding existing records.""" return cls.slug == slugify(name) We can now take advantage of :meth:`UniqueMixin.as_unique_async` to simplify the logic. .. code-block:: python from sqlalchemy.ext.asyncio import AsyncSession async def add_tags_to_post( session: AsyncSession, post: Post, tag_names: list[str] ) -> Post: """Add tags to a post, creating new tags if needed.""" # The UniqueMixin automatically handles: # 1. Looking up existing tags # 2. Creating new tags if needed # 3. Merging duplicates post.tags = [ await Tag.as_unique_async(session, name=tag_text, slug=slugify(tag_text)) for tag_text in tag_names ] session.merge(post) await session.flush() return post Customizing Declarative Base ----------------------------- In case one of the built in declarative bases do not meet your needs (or you already have your own), Advanced Alchemy already supports customizing the ``DeclarativeBase`` class. Here's an example showing a class to generate a server-side UUID primary key for `postgres`: .. 
code-block:: python import datetime from uuid import UUID, uuid4 from advanced_alchemy.base import CommonTableAttributes, orm_registry from sqlalchemy import text from sqlalchemy.orm import ( DeclarativeBase, Mapped, declared_attr, mapped_column, orm_insert_sentinel, ) class ServerSideUUIDPrimaryKey: """UUID Primary Key Field Mixin.""" id: Mapped[UUID] = mapped_column(default=uuid4, primary_key=True, server_default=text("gen_random_uuid()")) """UUID Primary key column.""" # noinspection PyMethodParameters @declared_attr def _sentinel(cls) -> Mapped[int]: """Sentinel value required for SQLAlchemy bulk DML with UUIDs.""" return orm_insert_sentinel(name="sa_orm_sentinel") class ServerSideUUIDBase(ServerSideUUIDPrimaryKey, CommonTableAttributes, DeclarativeBase): """Base for all SQLAlchemy declarative models with the custom UUID primary key .""" registry = orm_registry # Using ServerSideUUIDBase class User(ServerSideUUIDBase): """User model with ServerSideUUIDBase.""" username: Mapped[str] = mapped_column(unique=True, index=True) email: Mapped[str] = mapped_column(unique=True) full_name: Mapped[str] is_active: Mapped[bool] = mapped_column(default=True) last_login: Mapped[datetime.datetime | None] = mapped_column(default=None) With this foundation in place, let's look at the repository pattern. python-advanced-alchemy-1.0.1/docs/usage/repositories.rst000066400000000000000000000167651476663714600235750ustar00rootroot00000000000000============ Repositories ============ Advanced Alchemy's repository pattern provides a clean, consistent interface for database operations. This pattern abstracts away the complexity of SQLAlchemy sessions and query-building while providing type-safe operations. 
Understanding Repositories -------------------------- A repository acts as a collection-like interface to your database models, providing: - Type-safe CRUD operations - Filtering and pagination - Bulk operations - Transaction management - Specialized repository types for common patterns Base Repository Types --------------------- .. list-table:: Repository Types :header-rows: 1 :widths: 30 70 * - Repository Class - Features * - ``SQLAlchemyAsyncRepository`` - | - Async session support | - Basic CRUD operations | - Filtering and pagination | - Bulk operations * - ``SQLAlchemyAsyncSlugRepository`` - | - Async session support | - All base repository features | - Slug-based lookups | - URL-friendly operations * - ``SQLAlchemyAsyncQueryRepository`` - | - Async session support | - Custom query execution | - Complex aggregations | - Raw SQL support * - ``SQLAlchemySyncRepository`` - | - Sync session support | - Basic CRUD operations | - Filtering and pagination | - Bulk operations * - ``SQLAlchemySyncSlugRepository`` - | - Sync session support | - All base repository features | - Slug-based lookups | - URL-friendly operations * - ``SQLAlchemySyncQueryRepository`` - | - Sync session support | - Custom query execution | - Complex aggregations | - Raw SQL support Basic Repository Usage ---------------------- .. note:: The following examples assumes the existence of the ``Post`` model defined in :ref:`many_to_many_relationships` and the ``Tag`` model defined in :ref:`using_unique_mixin`. Let's implement a basic repository for our blog post model: .. 
code-block:: python from advanced_alchemy.repository import SQLAlchemyAsyncRepository from sqlalchemy.ext.asyncio import AsyncSession class PostRepository(SQLAlchemyAsyncRepository[Post]): """Repository for managing blog posts.""" model_type = Post async def create_post(session: AsyncSession, title: str, content: str, author_id: UUID) -> Post: repository = PostRepository(session=session) return await repository.add( Post(title=title, content=content, author_id=author_id), auto_commit=True ) Filtering and Querying ---------------------- Advanced Alchemy provides powerful filtering capabilities: .. code-block:: python import datetime async def get_recent_posts(session: AsyncSession) -> list[Post]: repository = PostRepository(session=session) # Create filter for posts from last week return await repository.list( Post.published == True, Post.created_at > (datetime.datetime.utcnow() - timedelta(days=7)) ) Pagination ---------- `list_and_count` enables us to quickly create paginated queries that include a total count of rows. .. code-block:: python from advanced_alchemy.filters import LimitOffset async def get_paginated_posts( session: AsyncSession, page: int = 1, page_size: int = 20 ) -> tuple[list[Post], int]: repository = PostRepository(session=session) # Get page of results and total count results, total = await repository.list_and_count( LimitOffset(offset=page, limit=page_size) ) return results, total Bulk Operations --------------- Repositories support efficient bulk operations: Create Many ----------- .. code-block:: python async def create_posts(session: AsyncSession, data: list[tuple[str, str, UUID]]) -> list[Post]: repository = PostRepository(session=session) # Create posts return await repository.create_many( [Post(title=title, content=content, author_id=author_id) for title, content, author_id in data], auto_commit=True ) Update Many ----------- .. 
code-block:: python async def publish_posts(session: AsyncSession, post_ids: list[int]) -> list[Post]: repository = PostRepository(session=session) # Fetch posts to update posts = await repository.list(Post.id.in_(post_ids), published =False) # Update all posts for post in posts: post.published = True return await repository.update_many(posts) Delete Many ----------- .. code-block:: python async def delete_posts(session: AsyncSession, post_ids: list[int]) -> list[Post]: repository = PostRepository(session=session) return await repository.delete_many(Post.id.in_(post_ids)) Delete Where ------------- .. code-block:: python async def delete_unpublished_posts (session: AsyncSession) -> list[Post]: repository = PostRepository(session=session) return await repository.delete_where(Post.published == False) Transaction Management ---------------------- .. code-block:: python async def create_post_with_tags( session: AsyncSession, title: str, content: str, tag_names: list[str] ) -> Post: # Both repositories share the same transaction post_repo = PostRepository(session=session) tag_repo = TagRepository(session=session) async with session.begin(): # Create or get existing tags tags = [] for name in tag_names: tag = await tag_repo.get_one_or_none(name=name) if not tag: tag = await tag_repo.add(Tag(name=name, slug=slugify(name))) tags.append(tag) # Create post with tags post = await post_repo.add( Post(title=title, content=content, tags=tags), auto_commit=True ) return post .. seealso:: This is just to illustrate the concept. In practice, :class:`UniqueMixin` should be used to handle this lookup even more easily. See :ref:`using_unique_mixin`. Specialized Repositories ------------------------ Advanced Alchemy provides specialized repositories for common patterns. Slug Repository ~~~~~~~~~~~~~~~ For models using the :class:`SlugKey` mixin, there is a specialized Slug repository that adds a ``get_by_slug`` method: .. 
code-block:: python from advanced_alchemy.repository import SQLAlchemyAsyncSlugRepository class ArticleRepository(SQLAlchemyAsyncSlugRepository[Article]): """Repository for articles with slug-based lookups.""" model_type = Article async def get_article_by_slug(session: AsyncSession, slug: str) -> Article: repository = ArticleRepository(session=session) return await repository.get_by_slug(slug) Query Repository ---------------- For complex custom queries: .. code-block:: python from advanced_alchemy.repository import SQLAlchemyAsyncQueryRepository from sqlalchemy import select, func async def get_posts_per_author(db_session: AsyncSession) -> list[tuple[UUID, int]]: repository = SQLAlchemyAsyncQueryRepository(session=db_session) return await repository.list(select(Post.author_id, func.count(Post.id)).group_by(Post.author_id)) This covers the core functionality of repositories. The next section will explore services, which build upon repositories to provide higher-level business logic and data transformation. python-advanced-alchemy-1.0.1/docs/usage/services.rst000066400000000000000000000170121476663714600226530ustar00rootroot00000000000000======== Services ======== Services in Advanced Alchemy build upon repositories to provide higher-level business logic, data transformation, and schema validation. While repositories handle raw database operations, services handle the application's business rules and data transformation needs. Understanding Services ---------------------- Services provide: - Business logic abstraction - Data transformation using Pydantic or Msgspec models - Input validation - Complex operations involving multiple repositories - Consistent error handling - Automatic schema validation and transformation Basic Service Usage ------------------- Let's build upon our blog example by creating services for posts and tags: .. 
code-block:: python import datetime from typing import Optional, List from uuid import UUID from advanced_alchemy.service import SQLAlchemyAsyncRepositoryService from pydantic import BaseModel # Pydantic schemas for validation class PostCreate(BaseModel): title: str content: str tag_names: List[str] class PostUpdate(BaseModel): title: Optional[str] = None content: Optional[str] = None published: Optional[bool] = None class PostResponse(BaseModel): id: int title: str content: str published: bool published_at: Optional[datetime.datetime] created_at: datetime.datetime updated_at: datetime.datetime tags: List["TagResponse"] model_config = {"from_attributes": True} class PostService(SQLAlchemyAsyncRepositoryService[Post]): """Service for managing blog posts with automatic schema validation.""" repository_type = PostRepository Service Operations ------------------ Services provide high-level methods for common operations: .. code-block:: python async def create_post( post_service: PostService, data: PostCreate, ) -> PostResponse: """Create a post with associated tags.""" post = await post_service.create( data, auto_commit=True, ) return post_service.to_schema(post, schema_type=PostResponse) async def update_post( post_service: PostService, post_id: int, data: PostUpdate, ) -> PostResponse: """Update a post.""" post = await post_service.update( item_id=post_id, data=data, auto_commit=True, ) return post_service.to_schema(post, schema_type=PostResponse) Complex Operations ------------------- Services can handle complex business logic involving multiple models. The code below shows a service coordinating posts and tags. .. note:: The following example assumes the existence of the ``Post`` model defined in :ref:`many_to_many_relationships` and the ``Tag`` model defined in :ref:`using_unique_mixin`. .. 
code-block:: python from typing import List from advanced_alchemy.exceptions import ErrorMessages from advanced_alchemy.service import SQLAlchemyAsyncRepositoryService from advanced_alchemy.service.typing import ModelDictT from .models import Post, Tag class PostService(SQLAlchemyAsyncRepositoryService[Post, PostRepository]): default_load_options = [Post.tags] repository_type = PostRepository match_fields = ["name"] # Override creation behavior to handle tags async def create(self, data: ModelDictT[Post], **kwargs) -> Post: """Create a new post with tags, if provided.""" tags_added: list[str] = [] if isinstance(data, dict): data["id"] = data.get("id", uuid4()) tags_added = data.pop("tags", []) data = await self.to_model(data, "create") if tags_added: data.tags.extend( [ await Tag.as_unique_async(self.repository.session, name=tag_text, slug=slugify(tag_text)) for tag_text in tags_added ], ) return await super().create(data=data, **kwargs) # Override update behavior to handle tags async def update( self, data: ModelDictT[Post], item_id: Any | None = None, **kwargs, ) -> Post: """Update a post with tags, if provided.""" tags_updated: list[str] = [] if isinstance(data, dict): tags_updated.extend(data.pop("tags", None) or []) data["id"] = item_id data = await self.to_model(data, "update") existing_tags = [tag.name for tag in data.tags] tags_to_remove = [tag for tag in data.tags if tag.name not in tags_updated] tags_to_add = [tag for tag in tags_updated if tag not in existing_tags] for tag_rm in tags_to_remove: data.tags.remove(tag_rm) data.tags.extend( [ await Tag.as_unique_async(self.repository.session, name=tag_text, slug=slugify(tag_text)) for tag_text in tags_to_add ], ) return await super().update( data=data, item_id=item_id, **kwargs, ) # A custom write operation async def publish_post( self, post_id: int, publish: bool = True, ) -> PostResponse: """Publish or unpublish a post with timestamp.""" data = PostUpdate( published=publish, 
published_at=datetime.datetime.utcnow() if publish else None, ) post = await self.repository.update( item_id=post_id, data=data, auto_commit=True, ) return self.to_schema(post, schema_type=PostResponse) # A custom read operation async def get_trending_posts( self, days: int = 7, min_views: int = 100, ) -> List[PostResponse]: """Get trending posts based on view count and recency.""" posts = await self.post_service.list( Post.published == True, Post.created_at > (datetime.datetime.utcnow() - timedelta(days=days)), Post.view_count >= min_views, order_by=[Post.view_count.desc()], ) return self.post_service.to_schema(posts, schema_type=PostResponse) # Override the default `to_model` to handle slugs async def to_model(self, data: ModelDictT[Post], operation: str | None = None) -> Post: """Convert a dictionary, msgspec Struct, or Pydantic model to a Post model. """ if (is_msgspec_struct(data) or is_pydantic_model(data)) and operation in {"create", "update"} and data.slug is None: data.slug = await self.repository.get_available_slug(data.name) if is_dict(data) and "slug" not in data and operation == "create": data["slug"] = await self.repository.get_available_slug(data["name"]) if is_dict(data) and "slug" not in data and "name" in data and operation == "update": data["slug"] = await self.repository.get_available_slug(data["name"]) return await super().to_model(data, operation) Framework Integration --------------------- Services integrate seamlessly with both Litestar and FastAPI. - :doc:`frameworks/litestar` - :doc:`frameworks/fastapi` python-advanced-alchemy-1.0.1/docs/usage/types.rst000066400000000000000000000107231476663714600221760ustar00rootroot00000000000000===== Types ===== Advanced Alchemy provides several custom SQLAlchemy types. 
All types include: - Proper Python type annotations for modern IDE support - Automatic dialect-specific implementations - Consistent behavior across different database backends - Integration with SQLAlchemy's type system Here's a short example using multiple types: .. code-block:: python from sqlalchemy import Column from advanced_alchemy.types import ( DateTimeUTC, EncryptedString, GUID, JsonB, ) class User: id = Column(GUID, primary_key=True) created_at = Column(DateTimeUTC) password = Column(EncryptedString(key="secret-key")) preferences = Column(JsonB) DateTimeUTC ----------- - Ensures all datetime values are stored in UTC - Requires timezone information for input values - Automatically converts stored values to UTC timezone - Returns timezone-aware datetime objects .. code-block:: python from advanced_alchemy.types import DateTimeUTC class MyModel: created_at = Column(DateTimeUTC) Encrypted Types --------------- Two types for storing encrypted data with support for multiple encryption backends: EncryptedString ~~~~~~~~~~~~~~~ For storing encrypted string values with configurable length. .. code-block:: python from advanced_alchemy.types import EncryptedString class MyModel: secret = Column(EncryptedString(key="my-secret-key")) EncryptedText ~~~~~~~~~~~~~ For storing larger encrypted text content (CLOB). .. 
code-block:: python from advanced_alchemy.types import EncryptedText class MyModel: large_secret = Column(EncryptedText(key="my-secret-key")) Encryption Backends ~~~~~~~~~~~~~~~~~~~ Two encryption backends are available: - :class:`advanced_alchemy.types.FernetBackend`: Uses Python's cryptography library with Fernet encryption - :class:`advanced_alchemy.types.PGCryptoBackend`: Uses PostgreSQL's pgcrypto extension (PostgreSQL only) GUID ---- A platform-independent GUID/UUID type that adapts to different database backends: - PostgreSQL/DuckDB/CockroachDB: Uses native UUID type - MSSQL: Uses UNIQUEIDENTIFIER - Oracle: Uses RAW(16) - Others: Uses BINARY(16) or CHAR(32) .. code-block:: python from advanced_alchemy.types import GUID class MyModel: id = Column(GUID, primary_key=True) BigIntIdentity -------------- A BigInteger type that automatically falls back to Integer for SQLite: .. code-block:: python from advanced_alchemy.types import BigIntIdentity class MyModel: id = Column(BigIntIdentity, primary_key=True) JsonB ----- A JSON type that uses the most efficient JSON storage for each database: - PostgreSQL/CockroachDB: Uses native JSONB - Oracle: Uses Binary JSON (BLOB with JSON constraint) - Others: Uses standard JSON type .. code-block:: python from advanced_alchemy.types import JsonB class MyModel: data = Column(JsonB) Using Types with Alembic ------------------------ If you are not using Advanced Alchemy's built-in `alembic` templates, you need to properly configure your ``script.py.mako`` template. The key is to make the custom types available through the ``sa`` namespace that Alembic uses. Type Aliasing ~~~~~~~~~~~~~ In your ``script.py.mako``, you'll need both the imports and the type aliasing: .. code-block:: python :caption: script.py.mako """${message} Revision ID: ${up_revision} Revises: ${down_revision | comma,n} Create Date: ${create_date} """ import sqlalchemy as sa # ... 
# Import the types from advanced_alchemy.types import ( EncryptedString, EncryptedText, GUID, ORA_JSONB, DateTimeUTC ) # Create aliases in the sa namespace sa.GUID = GUID sa.DateTimeUTC = DateTimeUTC sa.ORA_JSONB = ORA_JSONB sa.EncryptedString = EncryptedString sa.EncryptedText = EncryptedText # ... .. note:: These assignments are necessary because alembic uses the ``sa`` namespace when generating migrations. Without these aliases, Alembic might not properly reference the custom types. This allows you to use the types in migrations like this: .. code-block:: python # In generated migration file def upgrade(): op.create_table( 'users', sa.Column('id', sa.GUID(), primary_key=True), sa.Column('created_at', sa.DateTimeUTC(), nullable=False), sa.Column('secret', sa.EncryptedString(), nullable=True), ) python-advanced-alchemy-1.0.1/examples/000077500000000000000000000000001476663714600200575ustar00rootroot00000000000000python-advanced-alchemy-1.0.1/examples/__init__.py000066400000000000000000000000001476663714600221560ustar00rootroot00000000000000python-advanced-alchemy-1.0.1/examples/fastapi_service.py000066400000000000000000000145131476663714600236040ustar00rootroot00000000000000# ruff: noqa: FA100 """This example demonstrates how to use the FastAPI CLI to manage the database.""" # /// script # dependencies = [ # "advanced_alchemy", # "fastapi[standard]", # "orjson" # ] # /// import datetime from collections.abc import AsyncGenerator from typing import Annotated, Optional from uuid import UUID from fastapi import APIRouter, Depends, FastAPI from pydantic import BaseModel from sqlalchemy import ForeignKey from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.orm import Mapped, mapped_column, relationship from advanced_alchemy.extensions.fastapi import ( AdvancedAlchemy, AsyncSessionConfig, SQLAlchemyAsyncConfig, base, filters, repository, service, ) # Models # ####################### class BookModel(base.UUIDAuditBase): __tablename__ = "book" title: 
Mapped[str] author_id: Mapped[UUID] = mapped_column(ForeignKey("author.id")) author: Mapped["AuthorModel"] = relationship(lazy="joined", innerjoin=True, viewonly=True) # the SQLAlchemy base includes a declarative model for you to use in your models. # The `Base` class includes a `UUID` based primary key (`id`) class AuthorModel(base.UUIDBase): # we can optionally provide the table name instead of auto-generating it __tablename__ = "author" name: Mapped[str] dob: Mapped[Optional[datetime.date]] books: Mapped[list[BookModel]] = relationship(back_populates="author", lazy="selectin") # The `AuditBase` class includes the same UUID` based primary key (`id`) and 2 # additional columns: `created` and `updated`. `created` is a timestamp of when the # record created, and `updated` is the last time the record was modified. # we will explicitly define the schema instead of using DTO objects for clarity. class Author(BaseModel): id: Optional[UUID] name: str dob: Optional[datetime.date] class AuthorCreate(BaseModel): name: str dob: Optional[datetime.date] class AuthorUpdate(BaseModel): name: Optional[str] dob: Optional[datetime.date] class AuthorService(service.SQLAlchemyAsyncRepositoryService[AuthorModel]): """Author repository.""" class Repo(repository.SQLAlchemyAsyncRepository[AuthorModel]): """Author repository.""" model_type = AuthorModel repository_type = Repo # ####################### # Application # ####################### sqlalchemy_config = SQLAlchemyAsyncConfig( connection_string="sqlite+aiosqlite:///test.sqlite", session_config=AsyncSessionConfig(expire_on_commit=False), create_all=True, ) app = FastAPI() alchemy = AdvancedAlchemy(config=sqlalchemy_config, app=app) DatabaseSession = Annotated[AsyncSession, Depends(alchemy.provide_session())] # ####################### # Dependencies # ####################### async def provide_authors_service(db_session: DatabaseSession) -> AsyncGenerator[AuthorService, None]: """This provides the default Authors repository.""" async 
with AuthorService.new(session=db_session) as service: yield service # we can optionally override the default `select` used for the repository to pass in # specific SQL options such as join details async def provide_author_details_service(db_session: DatabaseSession) -> AsyncGenerator[AuthorService, None]: """This provides a simple example demonstrating how to override the join options for the repository.""" async with AuthorService.new(load=[AuthorModel.books], session=db_session) as service: yield service def provide_limit_offset_pagination(current_page: int = 1, page_size: int = 10) -> filters.LimitOffset: """Add offset/limit pagination. Return type consumed by `Repository.apply_limit_offset_pagination()`. Parameters ---------- current_page : int LIMIT to apply to select. page_size : int OFFSET to apply to select. """ return filters.LimitOffset(page_size, page_size * (current_page - 1)) # ####################### # Routes # ####################### author_router = APIRouter() @author_router.get(path="/authors", response_model=service.OffsetPagination[Author]) async def list_authors( authors_service: Annotated[AuthorService, Depends(provide_authors_service)], limit_offset: Annotated[filters.LimitOffset, Depends(provide_limit_offset_pagination)], ) -> service.OffsetPagination[AuthorModel]: """List authors.""" results, total = await authors_service.list_and_count(limit_offset) return authors_service.to_schema(results, total, filters=[limit_offset]) @author_router.post(path="/authors", response_model=Author) async def create_author( authors_service: Annotated[AuthorService, Depends(provide_authors_service)], data: AuthorCreate, ) -> AuthorModel: """Create a new author.""" obj = await authors_service.create(data) return authors_service.to_schema(obj) # we override the authors_repo to use the version that joins the Books in @author_router.get(path="/authors/{author_id}", response_model=Author) async def get_author( authors_service: Annotated[AuthorService, 
Depends(provide_authors_service)], author_id: UUID, ) -> AuthorModel: """Get an existing author.""" obj = await authors_service.get(author_id) return authors_service.to_schema(obj) @author_router.patch( path="/authors/{author_id}", response_model=Author, ) async def update_author( authors_service: Annotated[AuthorService, Depends(provide_authors_service)], data: AuthorUpdate, author_id: UUID, ) -> AuthorModel: """Update an author.""" obj = await authors_service.update(data, item_id=author_id) return authors_service.to_schema(obj) @author_router.delete(path="/authors/{author_id}") async def delete_author( authors_service: Annotated[AuthorService, Depends(provide_authors_service)], author_id: UUID, ) -> None: """Delete a author from the system.""" _ = await authors_service.delete(author_id) app.include_router(author_router) if __name__ == "__main__": """Launches the FastAPI CLI with the database commands registered""" from fastapi_cli.cli import app as fastapi_cli_app # pyright: ignore[reportUnknownVariableType] from typer.main import get_group from advanced_alchemy.extensions.fastapi.cli import register_database_commands click_app = get_group(fastapi_cli_app) # pyright: ignore[reportUnknownArgumentType] click_app.add_command(register_database_commands(app)) click_app() python-advanced-alchemy-1.0.1/examples/flask/000077500000000000000000000000001476663714600211575ustar00rootroot00000000000000python-advanced-alchemy-1.0.1/examples/flask/__init__.py000066400000000000000000000000001476663714600232560ustar00rootroot00000000000000python-advanced-alchemy-1.0.1/examples/flask/flask_services.py000066400000000000000000000061241476663714600245370ustar00rootroot00000000000000from __future__ import annotations import datetime # noqa: TC003 import os from uuid import UUID # noqa: TC003 from flask import Flask, request from msgspec import Struct from sqlalchemy import ForeignKey from sqlalchemy.orm import Mapped, mapped_column, relationship from advanced_alchemy.extensions.flask 
import ( AdvancedAlchemy, FlaskServiceMixin, SQLAlchemySyncConfig, base, filters, repository, service, ) class Author(base.UUIDBase): """Author model.""" name: Mapped[str] dob: Mapped[datetime.date | None] books: Mapped[list[Book]] = relationship(back_populates="author", lazy="noload") class Book(base.UUIDAuditBase): """Book model.""" title: Mapped[str] author_id: Mapped[UUID] = mapped_column(ForeignKey("author.id")) author: Mapped[Author] = relationship(lazy="joined", innerjoin=True, viewonly=True) class AuthorService(service.SQLAlchemySyncRepositoryService[Author], FlaskServiceMixin): """Author service.""" class Repo(repository.SQLAlchemySyncRepository[Author]): """Author repository.""" model_type = Author repository_type = Repo class AuthorSchema(Struct): """Author schema.""" name: str id: UUID | None = None dob: datetime.date | None = None app = Flask(__name__) config = SQLAlchemySyncConfig(connection_string="sqlite:///local.db", commit_mode="autocommit", create_all=True) alchemy = AdvancedAlchemy(config, app) @app.route("/authors", methods=["GET"]) def list_authors(): """List authors with pagination.""" page, page_size = request.args.get("currentPage", 1, type=int), request.args.get("pageSize", 10, type=int) limit_offset = filters.LimitOffset(limit=page_size, offset=page_size * (page - 1)) service = AuthorService(session=alchemy.get_sync_session()) results, total = service.list_and_count(limit_offset) response = service.to_schema(results, total, filters=[limit_offset], schema_type=AuthorSchema) return service.jsonify(response) @app.route("/authors", methods=["POST"]) def create_author(): """Create a new author.""" service = AuthorService(session=alchemy.get_sync_session()) obj = service.create(**request.get_json()) return service.jsonify(obj) @app.route("/authors/", methods=["GET"]) def get_author(author_id: UUID): """Get an existing author.""" service = AuthorService(session=alchemy.get_sync_session(), load=[Author.books]) obj = service.get(author_id) return 
service.jsonify(obj) @app.route("/authors/", methods=["PATCH"]) def update_author(author_id: UUID): """Update an author.""" service = AuthorService(session=alchemy.get_sync_session(), load=[Author.books]) obj = service.update(**request.get_json(), item_id=author_id) return service.jsonify(obj) @app.route("/authors/", methods=["DELETE"]) def delete_author(author_id: UUID): """Delete an author.""" service = AuthorService(session=alchemy.get_sync_session()) service.delete(author_id) return "", 204 if __name__ == "__main__": app.run(debug=os.environ["ENV"] == "dev") python-advanced-alchemy-1.0.1/examples/litestar/000077500000000000000000000000001476663714600217065ustar00rootroot00000000000000python-advanced-alchemy-1.0.1/examples/litestar/__init__.py000066400000000000000000000000001476663714600240050ustar00rootroot00000000000000python-advanced-alchemy-1.0.1/examples/litestar/litestar_repo_only.py000066400000000000000000000155321476663714600262030ustar00rootroot00000000000000from __future__ import annotations import datetime # noqa: TC003 from typing import TYPE_CHECKING, Optional from uuid import UUID # noqa: TC003 from litestar import Litestar from litestar.controller import Controller from litestar.di import Provide from litestar.handlers.http_handlers.decorators import delete, get, patch, post from litestar.pagination import OffsetPagination from litestar.params import Parameter from pydantic import BaseModel as _BaseModel from pydantic import TypeAdapter from sqlalchemy import ForeignKey from sqlalchemy.orm import Mapped, mapped_column, relationship from advanced_alchemy.base import UUIDAuditBase, UUIDBase from advanced_alchemy.config import AsyncSessionConfig from advanced_alchemy.extensions.litestar.plugins import SQLAlchemyAsyncConfig, SQLAlchemyPlugin from advanced_alchemy.filters import LimitOffset from advanced_alchemy.repository import SQLAlchemyAsyncRepository if TYPE_CHECKING: from sqlalchemy.ext.asyncio import AsyncSession class BaseModel(_BaseModel): 
"""Extend Pydantic's BaseModel to enable ORM mode""" model_config = {"from_attributes": True} # the SQLAlchemy base includes a declarative model for you to use in your models. # The `Base` class includes a `UUID` based primary key (`id`) class AuthorModel(UUIDBase): # we can optionally provide the table name instead of auto-generating it __tablename__ = "author" name: Mapped[str] dob: Mapped[Optional[datetime.date]] # noqa: UP007 books: Mapped[list[BookModel]] = relationship(back_populates="author", lazy="noload") # The `AuditBase` class includes the same UUID` based primary key (`id`) and 2 # additional columns: `created` and `updated`. `created` is a timestamp of when the # record created, and `updated` is the last time the record was modified. class BookModel(UUIDAuditBase): __tablename__ = "book" title: Mapped[str] author_id: Mapped[UUID] = mapped_column(ForeignKey("author.id")) author: Mapped[AuthorModel] = relationship(lazy="joined", innerjoin=True, viewonly=True) # we will explicitly define the schema instead of using DTO objects for clarity. 
class Author(BaseModel): id: UUID | None name: str dob: datetime.date | None = None class AuthorCreate(BaseModel): name: str dob: datetime.date | None = None class AuthorUpdate(BaseModel): name: str | None = None dob: datetime.date | None = None class AuthorRepository(SQLAlchemyAsyncRepository[AuthorModel]): """Author repository.""" model_type = AuthorModel async def provide_authors_repo(db_session: AsyncSession) -> AuthorRepository: """This provides the default Authors repository.""" return AuthorRepository(session=db_session) # we can optionally override the default `select` used for the repository to pass in # specific SQL options such as join details async def provide_author_details_repo(db_session: AsyncSession) -> AuthorRepository: """This provides a simple example demonstrating how to override the join options for the repository.""" return AuthorRepository(load=[AuthorModel.books], session=db_session) def provide_limit_offset_pagination( current_page: int = Parameter(ge=1, query="currentPage", default=1, required=False), page_size: int = Parameter( query="pageSize", ge=1, default=10, required=False, ), ) -> LimitOffset: """Add offset/limit pagination. Return type consumed by `Repository.apply_limit_offset_pagination()`. Parameters ---------- current_page : int LIMIT to apply to select. page_size : int OFFSET to apply to select. 
""" return LimitOffset(page_size, page_size * (current_page - 1)) class AuthorController(Controller): """Author CRUD""" dependencies = {"authors_repo": Provide(provide_authors_repo)} @get(path="/authors") async def list_authors( self, authors_repo: AuthorRepository, limit_offset: LimitOffset, ) -> OffsetPagination[Author]: """List authors.""" results, total = await authors_repo.list_and_count(limit_offset) type_adapter = TypeAdapter(list[Author]) return OffsetPagination[Author]( items=type_adapter.validate_python(results), total=total, limit=limit_offset.limit, offset=limit_offset.offset, ) @post(path="/authors") async def create_author( self, authors_repo: AuthorRepository, data: AuthorCreate, ) -> Author: """Create a new author.""" obj = await authors_repo.add( AuthorModel(**data.model_dump(exclude_unset=True, exclude_none=True)), ) await authors_repo.session.commit() return Author.model_validate(obj) # we override the authors_repo to use the version that joins the Books in @get(path="/authors/{author_id:uuid}", dependencies={"authors_repo": Provide(provide_author_details_repo)}) async def get_author( self, authors_repo: AuthorRepository, author_id: UUID = Parameter( # noqa: B008 title="Author ID", description="The author to retrieve.", ), ) -> Author: """Get an existing author.""" obj = await authors_repo.get(author_id) return Author.model_validate(obj) @patch( path="/authors/{author_id:uuid}", dependencies={"authors_repo": Provide(provide_author_details_repo)}, ) async def update_author( self, authors_repo: AuthorRepository, data: AuthorUpdate, author_id: UUID = Parameter( # noqa: B008 title="Author ID", description="The author to update.", ), ) -> Author: """Update an author.""" raw_obj = data.model_dump(exclude_unset=True, exclude_none=True) raw_obj.update({"id": author_id}) obj = await authors_repo.update(AuthorModel(**raw_obj)) await authors_repo.session.commit() return Author.model_validate(obj) @delete(path="/authors/{author_id:uuid}") async def 
delete_author( self, authors_repo: AuthorRepository, author_id: UUID = Parameter( # noqa: B008 title="Author ID", description="The author to delete.", ), ) -> None: """Delete a author from the system.""" _ = await authors_repo.delete(author_id) await authors_repo.session.commit() session_config = AsyncSessionConfig(expire_on_commit=False) sqlalchemy_config = SQLAlchemyAsyncConfig( connection_string="sqlite+aiosqlite:///test.sqlite", session_config=session_config, create_all=True, ) # Create 'db_session' dependency. sqlalchemy_plugin = SQLAlchemyPlugin(config=sqlalchemy_config) app = Litestar( route_handlers=[AuthorController], plugins=[sqlalchemy_plugin], dependencies={"limit_offset": Provide(provide_limit_offset_pagination, sync_to_thread=False)}, ) python-advanced-alchemy-1.0.1/examples/litestar/litestar_service.py000066400000000000000000000152671476663714600256420ustar00rootroot00000000000000from __future__ import annotations import datetime # noqa: TC003 from typing import TYPE_CHECKING, Optional from uuid import UUID # noqa: TC003 from litestar import Litestar from litestar.controller import Controller from litestar.di import Provide from litestar.handlers.http_handlers.decorators import delete, get, patch, post from litestar.params import Parameter from pydantic import BaseModel as _BaseModel from sqlalchemy import ForeignKey from sqlalchemy.orm import Mapped, mapped_column, relationship from advanced_alchemy.base import UUIDAuditBase, UUIDBase from advanced_alchemy.extensions.litestar import ( AsyncSessionConfig, SQLAlchemyAsyncConfig, SQLAlchemyPlugin, ) from advanced_alchemy.filters import FilterTypes, LimitOffset from advanced_alchemy.repository import SQLAlchemyAsyncRepository from advanced_alchemy.service import OffsetPagination, SQLAlchemyAsyncRepositoryService if TYPE_CHECKING: from collections.abc import AsyncGenerator from sqlalchemy.ext.asyncio import AsyncSession class BaseModel(_BaseModel): """Extend Pydantic's BaseModel to enable ORM mode""" 
model_config = {"from_attributes": True} # the SQLAlchemy base includes a declarative model for you to use in your models. # The `Base` class includes a `UUID` based primary key (`id`) class AuthorModel(UUIDBase): # we can optionally provide the table name instead of auto-generating it __tablename__ = "author" name: Mapped[str] dob: Mapped[Optional[datetime.date]] # noqa: UP007 books: Mapped[list[BookModel]] = relationship(back_populates="author", lazy="noload") # The `AuditBase` class includes the same UUID` based primary key (`id`) and 2 # additional columns: `created` and `updated`. `created` is a timestamp of when the # record created, and `updated` is the last time the record was modified. class BookModel(UUIDAuditBase): __tablename__ = "book" title: Mapped[str] author_id: Mapped[UUID] = mapped_column(ForeignKey("author.id")) author: Mapped[AuthorModel] = relationship(lazy="joined", innerjoin=True, viewonly=True) # we will explicitly define the schema instead of using DTO objects for clarity. 
class Author(BaseModel): id: UUID | None name: str dob: datetime.date | None = None class AuthorCreate(BaseModel): name: str dob: datetime.date | None = None class AuthorUpdate(BaseModel): name: str | None = None dob: datetime.date | None = None class AuthorRepository(SQLAlchemyAsyncRepository[AuthorModel]): """Author repository.""" model_type = AuthorModel class AuthorService(SQLAlchemyAsyncRepositoryService[AuthorModel, AuthorRepository]): """Author repository.""" repository_type = AuthorRepository async def provide_authors_service(db_session: AsyncSession) -> AsyncGenerator[AuthorService, None]: """This provides the default Authors repository.""" async with AuthorService.new( session=db_session, ) as service: yield service # we can optionally override the default `select` used for the repository to pass in # specific SQL options such as join details async def provide_author_details_service(db_session: AsyncSession) -> AsyncGenerator[AuthorService, None]: """This provides a simple example demonstrating how to override the join options for the repository.""" async with AuthorService.new( session=db_session, load=[AuthorModel.books], ) as service: yield service def provide_limit_offset_pagination( current_page: int = Parameter(ge=1, query="currentPage", default=1, required=False), page_size: int = Parameter( query="pageSize", ge=1, default=10, required=False, ), ) -> FilterTypes: """Add offset/limit pagination. Parameters ---------- current_page : int LIMIT to apply to select. page_size : int OFFSET to apply to select. 
""" return LimitOffset(page_size, page_size * (current_page - 1)) class AuthorController(Controller): """Author CRUD""" dependencies = {"authors_service": Provide(provide_authors_service)} @get(path="/authors") async def list_authors(self, authors_service: AuthorService, limit_offset: LimitOffset) -> OffsetPagination[Author]: """List authors.""" results, total = await authors_service.list_and_count(limit_offset) return authors_service.to_schema(results, total, filters=[limit_offset], schema_type=Author) @post(path="/authors") async def create_author(self, authors_service: AuthorService, data: AuthorCreate) -> Author: """Create a new author.""" obj = await authors_service.create(data) return authors_service.to_schema(obj, schema_type=Author) # we override the authors_repo to use the version that joins the Books in @get(path="/authors/{author_id:uuid}", dependencies={"authors_service": Provide(provide_author_details_service)}) async def get_author( self, authors_service: AuthorService, author_id: UUID = Parameter( # noqa: B008 title="Author ID", description="The author to retrieve.", ), ) -> Author: """Get an existing author.""" obj = await authors_service.get(author_id) return authors_service.to_schema(obj, schema_type=Author) @patch( path="/authors/{author_id:uuid}", dependencies={"authors_service": Provide(provide_author_details_service)}, ) async def update_author( self, authors_service: AuthorService, data: AuthorUpdate, author_id: UUID = Parameter( # noqa: B008 title="Author ID", description="The author to update.", ), ) -> Author: """Update an author.""" obj = await authors_service.update(data, item_id=author_id, auto_commit=True) return authors_service.to_schema(obj, schema_type=Author) @delete(path="/authors/{author_id:uuid}") async def delete_author( self, authors_service: AuthorService, author_id: UUID = Parameter( # noqa: B008 title="Author ID", description="The author to delete.", ), ) -> None: """Delete a author from the system.""" _ = await 
authors_service.delete(author_id) session_config = AsyncSessionConfig(expire_on_commit=False) sqlalchemy_config = SQLAlchemyAsyncConfig( connection_string="sqlite+aiosqlite:///test.sqlite", before_send_handler="autocommit", session_config=session_config, create_all=True, ) alchemy = SQLAlchemyPlugin(config=sqlalchemy_config) app = Litestar( route_handlers=[AuthorController], plugins=[alchemy], dependencies={"limit_offset": Provide(provide_limit_offset_pagination, sync_to_thread=False)}, ) python-advanced-alchemy-1.0.1/examples/sanic.py000066400000000000000000000062071476663714600215330ustar00rootroot00000000000000from __future__ import annotations import datetime # noqa: TC003 from uuid import UUID # noqa: TC003 from sanic import Sanic from sqlalchemy import ForeignKey from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.orm import Mapped, mapped_column, relationship from advanced_alchemy.extensions.sanic import ( AdvancedAlchemy, AsyncSessionConfig, SQLAlchemyAsyncConfig, base, filters, repository, service, ) # the SQLAlchemy base includes a declarative model for you to use in your models. # The `Base` class includes a `UUID` based primary key (`id`) class AuthorModel(base.UUIDBase): # we can optionally provide the table name instead of auto-generating it __tablename__ = "author" name: Mapped[str] dob: Mapped[datetime.date | None] books: Mapped[list[BookModel]] = relationship(back_populates="author", lazy="noload") # The `AuditBase` class includes the same UUID` based primary key (`id`) and 2 # additional columns: `created` and `updated`. `created` is a timestamp of when the # record created, and `updated` is the last time the record was modified. 
class BookModel(base.UUIDAuditBase): __tablename__ = "book" title: Mapped[str] author_id: Mapped[UUID] = mapped_column(ForeignKey("author.id")) author: Mapped[AuthorModel] = relationship(lazy="joined", innerjoin=True, viewonly=True) class AuthorService(service.SQLAlchemyAsyncRepositoryService[AuthorModel]): """Author service.""" class Repo(repository.SQLAlchemyAsyncRepository[AuthorModel]): """Author repository.""" model_type = AuthorModel repository_type = Repo # ####################### # Dependencies # ####################### async def provide_authors_service(db_session: AsyncSession) -> AuthorService: """This provides the default Authors repository.""" return AuthorService(session=db_session) # we can optionally override the default `select` used for the repository to pass in # specific SQL options such as join details async def provide_author_details_service( db_session: AsyncSession, ) -> AuthorService: """This provides a simple example demonstrating how to override the join options for the repository.""" return AuthorService(load=[AuthorModel.books], session=db_session) def provide_limit_offset_pagination( current_page: int = 1, page_size: int = 10, ) -> filters.LimitOffset: """Add offset/limit pagination. Return type consumed by `Repository.apply_limit_offset_pagination()`. Parameters ---------- current_page : int LIMIT to apply to select. page_size : int OFFSET to apply to select. """ return filters.LimitOffset(page_size, page_size * (current_page - 1)) # ####################### # Application # ####################### session_config = AsyncSessionConfig(expire_on_commit=False) sqlalchemy_config = SQLAlchemyAsyncConfig( connection_string="sqlite+aiosqlite:///test.sqlite", session_config=session_config, ) # Create 'db_session' dependency. 
app = Sanic("AlchemySanicApp") alchemy = AdvancedAlchemy(sqlalchemy_config=sqlalchemy_config) alchemy.register(app) alchemy.add_session_dependency(AsyncSession) python-advanced-alchemy-1.0.1/examples/standalone.py000066400000000000000000000043311476663714600225620ustar00rootroot00000000000000from __future__ import annotations import pprint from pathlib import Path from typing import TYPE_CHECKING from sqlalchemy import create_engine from advanced_alchemy.base import UUIDBase from advanced_alchemy.config import SQLAlchemySyncConfig, SyncSessionConfig from advanced_alchemy.filters import LimitOffset from advanced_alchemy.repository import SQLAlchemySyncRepository from advanced_alchemy.utils.fixtures import open_fixture if TYPE_CHECKING: from sqlalchemy.orm import Mapped here = Path(__file__).parent config = SQLAlchemySyncConfig( engine_instance=create_engine("duckdb:///:memory:"), session_config=SyncSessionConfig(expire_on_commit=False) ) class USState(UUIDBase): # you can optionally override the generated table name by manually setting it. __tablename__ = "us_state_lookup" abbreviation: Mapped[str] name: Mapped[str] class USStateRepository(SQLAlchemySyncRepository[USState]): """US State repository.""" model_type = USState def run_script() -> None: """Load data from a fixture.""" # Initializes the database. with config.get_engine().begin() as conn: USState.metadata.create_all(conn) with config.get_session() as db_session: # 1) Load the JSON data into the US States table. repo = USStateRepository(session=db_session) fixture = open_fixture(here, USStateRepository.model_type.__tablename__) objs = repo.add_many([USStateRepository.model_type(**raw_obj) for raw_obj in fixture]) db_session.commit() pprint.pp(f"Created {len(objs)} new objects.") # 2) Select paginated data and total row count. 
created_objs, total_objs = repo.list_and_count(LimitOffset(limit=10, offset=0)) pprint.pp(f"Selected {len(created_objs)} records out of a total of {total_objs}.") # 3) Let's remove the batch of records selected. deleted_objs = repo.delete_many([new_obj.id for new_obj in created_objs]) pprint.pp(f"Removed {len(deleted_objs)} records out of a total of {total_objs}.") # 4) Let's count the remaining rows remaining_count = repo.count() pprint.pp(f"Found {remaining_count} remaining records after delete.") if __name__ == "__main__": run_script() python-advanced-alchemy-1.0.1/examples/standalone_json.py000066400000000000000000000043731476663714600236210ustar00rootroot00000000000000# ruff: noqa: PLR2004, S101 from __future__ import annotations import asyncio from typing import TYPE_CHECKING, Any from sqlalchemy.ext.asyncio import create_async_engine from advanced_alchemy.base import UUIDBase from advanced_alchemy.config import AsyncSessionConfig, SQLAlchemyAsyncConfig from advanced_alchemy.repository import SQLAlchemyAsyncRepository if TYPE_CHECKING: from sqlalchemy.orm import Mapped class Item(UUIDBase): name: Mapped[str] # using ``Mapped[dict]`` with an AA provided base will map it to ``JSONB`` data: Mapped[dict[str, Any]] class ItemRepository(SQLAlchemyAsyncRepository[Item]): """Item repository.""" model_type = Item config = SQLAlchemyAsyncConfig( engine_instance=create_async_engine("postgresql+psycopg://app:super-secret@localhost:5432/app"), session_config=AsyncSessionConfig(expire_on_commit=False), ) async def run_script() -> None: # Initializes the database. 
async with config.get_engine().begin() as conn: await conn.run_sync(Item.metadata.create_all) async with config.get_session() as db_session: repo = ItemRepository(session=db_session) # Add some data await repo.add_many( [ Item( name="Smartphone", data={"price": 599.99, "brand": "XYZ"}, ), Item( name="Laptop", data={"price": 1299.99, "brand": "ABC"}, ), Item( name="Headphones", data={"not_price": 149.99, "brand": "DEF"}, ), ], auto_commit=True, ) async with config.get_session() as db_session: repo = ItemRepository(session=db_session) # Do some queries with JSON operations assert await repo.exists(Item.data["price"].as_float() == 599.99, Item.data["brand"].as_string() == "XYZ") assert await repo.count(Item.data.op("?")("price")) == 2 products, total_products = await repo.list_and_count(Item.data.op("?")("not_price")) assert len(products) == 1 assert total_products == 1 assert products[0].name == "Headphones" if __name__ == "__main__": asyncio.run(run_script()) python-advanced-alchemy-1.0.1/examples/us_state_lookup.json000066400000000000000000000061351476663714600241770ustar00rootroot00000000000000[ { "name": "Alabama", "abbreviation": "AL" }, { "name": "Alaska", "abbreviation": "AK" }, { "name": "Arizona", "abbreviation": "AZ" }, { "name": "Arkansas", "abbreviation": "AR" }, { "name": "California", "abbreviation": "CA" }, { "name": "Colorado", "abbreviation": "CO" }, { "name": "Connecticut", "abbreviation": "CT" }, { "name": "Delaware", "abbreviation": "DE" }, { "name": "District Of Columbia", "abbreviation": "DC" }, { "name": "Florida", "abbreviation": "FL" }, { "name": "Georgia", "abbreviation": "GA" }, { "name": "Guam", "abbreviation": "GU" }, { "name": "Hawaii", "abbreviation": "HI" }, { "name": "Idaho", "abbreviation": "ID" }, { "name": "Illinois", "abbreviation": "IL" }, { "name": "Indiana", "abbreviation": "IN" }, { "name": "Iowa", "abbreviation": "IA" }, { "name": "Kansas", "abbreviation": "KS" }, { "name": "Kentucky", "abbreviation": "KY" }, { "name": 
"Louisiana", "abbreviation": "LA" }, { "name": "Maine", "abbreviation": "ME" }, { "name": "Maryland", "abbreviation": "MD" }, { "name": "Massachusetts", "abbreviation": "MA" }, { "name": "Michigan", "abbreviation": "MI" }, { "name": "Minnesota", "abbreviation": "MN" }, { "name": "Mississippi", "abbreviation": "MS" }, { "name": "Missouri", "abbreviation": "MO" }, { "name": "Montana", "abbreviation": "MT" }, { "name": "Nebraska", "abbreviation": "NE" }, { "name": "Nevada", "abbreviation": "NV" }, { "name": "New Hampshire", "abbreviation": "NH" }, { "name": "New Jersey", "abbreviation": "NJ" }, { "name": "New Mexico", "abbreviation": "NM" }, { "name": "New York", "abbreviation": "NY" }, { "name": "North Carolina", "abbreviation": "NC" }, { "name": "North Dakota", "abbreviation": "ND" }, { "name": "Ohio", "abbreviation": "OH" }, { "name": "Oklahoma", "abbreviation": "OK" }, { "name": "Oregon", "abbreviation": "OR" }, { "name": "Palau", "abbreviation": "PW" }, { "name": "Pennsylvania", "abbreviation": "PA" }, { "name": "Puerto Rico", "abbreviation": "PR" }, { "name": "Rhode Island", "abbreviation": "RI" }, { "name": "South Carolina", "abbreviation": "SC" }, { "name": "South Dakota", "abbreviation": "SD" }, { "name": "Tennessee", "abbreviation": "TN" }, { "name": "Texas", "abbreviation": "TX" }, { "name": "Utah", "abbreviation": "UT" }, { "name": "Vermont", "abbreviation": "VT" }, { "name": "Virginia", "abbreviation": "VA" }, { "name": "Washington", "abbreviation": "WA" }, { "name": "West Virginia", "abbreviation": "WV" }, { "name": "Wisconsin", "abbreviation": "WI" }, { "name": "Wyoming", "abbreviation": "WY" } ] python-advanced-alchemy-1.0.1/pyproject.toml000066400000000000000000000425431476663714600211650ustar00rootroot00000000000000[project] authors = [ { name = "Cody Fincher", email = "cody.fincher@gmail.com" }, { name = "Peter Schutt", email = "peter.github@proton.me" }, { name = "Janek Nouvertnรฉ", email = "j.a.nouvertne@posteo.de" }, { name = "Jacob Coffee", 
email = "jacob@z7x.org" }, ] classifiers = [ "Development Status :: 3 - Alpha", "Environment :: Web Environment", "License :: OSI Approved :: MIT License", "Natural Language :: English", "Operating System :: OS Independent", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", "Programming Language :: Python", "Topic :: Software Development", "Typing :: Typed", "Intended Audience :: Developers", "Intended Audience :: System Administrators", "Topic :: Database", "Topic :: Database :: Database Engines/Servers", ] dependencies = [ "sqlalchemy>=2.0.20", "alembic>=1.12.0", "typing-extensions>=4.0.0", "greenlet", "eval-type-backport ; python_full_version < '3.10'", ] description = "Ready-to-go SQLAlchemy concoctions." keywords = ["sqlalchemy", "alembic", "litestar", "sanic", "fastapi", "flask"] license = { text = "MIT" } maintainers = [ { name = "Litestar Developers", email = "hello@litestar.dev" }, { name = "Cody Fincher", email = "cody@litestar.dev" }, { name = "Jacob Coffee", email = "jacob@litestar.dev" }, { name = "Janek Nouvertnรฉ", email = "janek@litestar.dev" }, { name = "Julien Courtes", email = "julien@litestar.dev" }, ] name = "advanced_alchemy" readme = "README.md" requires-python = ">=3.9" version = "1.0.1" [project.urls] Changelog = "https://docs.advanced-alchemy.litestar.dev/latest/changelog" Discord = "https://discord.gg/litestar" Documentation = "https://docs.advanced-alchemy.litestar.dev/latest/" Funding = "https://github.com/sponsors/litestar-org" Homepage = "https://docs.advanced-alchemy.litestar.dev/latest/" Issue = "https://github.com/litestar-org/advanced-alchemy/issues/" Source = "https://github.com/litestar-org/advanced-alchemy" [project.optional-dependencies] cli = ["rich-click"] nanoid = ["fastnanoid>=0.4.1"] uuid = ["uuid-utils>=0.6.1"] [project.scripts] alchemy = 
"advanced_alchemy.__main__:run_cli" [dependency-groups] build = ["bump-my-version"] cockroachdb = [ "asyncpg>=0.29.0", "psycopg2-binary>=2.9.10", "psycopg[binary,pool]>=3.2.3", "sqlalchemy-cockroachdb>=2.0.2", ] dev = [ { include-group = "build" }, { include-group = "lint" }, { include-group = "doc" }, { include-group = "test" }, { include-group = "litestar" }, { include-group = "fastapi" }, { include-group = "flask" }, { include-group = "sanic" }, { include-group = "sqlite" }, { include-group = "oracle" }, { include-group = "duckdb" }, { include-group = "mssql" }, { include-group = "mysql" }, { include-group = "spanner" }, { include-group = "cockroachdb" }, { include-group = "postgres" }, ] doc = [ "auto-pytabs[sphinx]>=0.5.0", "shibuya", "sphinx>=7.0.0; python_version <= \"3.9\"", "sphinx>=8.0.0; python_version >= \"3.10\"", "sphinx-autobuild>=2021.3.14", "sphinx-copybutton>=0.5.2", "sphinx-click>=6.0.0", "sphinx-design>=0.5.0", "sphinxcontrib-mermaid>=0.9.2", "sphinx-paramlinks>=0.6.0", "sphinx-togglebutton>=0.3.2", "sphinx-toolbox>=3.8.1", "myst-parser", "sphinx-autodoc-typehints", ] duckdb = ["duckdb>=1.1.2", "duckdb-engine>=0.13.4", "pytz>=2024.2"] fastapi = ["fastapi[all]>=0.115.3", "starlette"] flask = ["flask-sqlalchemy>=3.1.1", "flask[async]"] lint = [ "mypy>=1.13.0", "pre-commit>=3.5.0", "pyright>=1.1.386", "ruff>=0.7.1", "slotscheck>=0.16.5", "asyncpg-stubs", "types-Pillow", "types-PyMySQL", "types-PyYAML", "types-Pygments", "types-aiofiles", "types-colorama", "types-docutils", "types-psycopg2", "types-python-dateutil", "types-pytz", "types-ujson", ] litestar = ["litestar[cli]>=2.15.0"] mssql = ["aioodbc>=0.5.0", "pyodbc>=5.2.0"] mysql = ["asyncmy>=0.2.9"] oracle = ["oracledb>=2.4.1"] postgres = ["asyncpg>=0.29.0", "psycopg2-binary>=2.9.10", "psycopg[binary,pool]>=3.2.3"] sanic = ["sanic", "sanic-testing>=24.6.0", "sanic[ext]>=24.6.0"] spanner = ["sqlalchemy-spanner>=1.7.0"] sqlite = ["aiosqlite>=0.20.0"] test = [ "pydantic-extra-types", "rich-click", 
"coverage>=7.6.1", "pytest>=7.4.4", "pytest-asyncio>=0.23.8", "pytest-cov>=5.0.0", "pytest-databases", "pytest-lazy-fixtures>=1.1.1", "pytest-rerunfailures", "pytest-mock>=3.14.0", "pytest-sugar>=1.0.0", "pytest-xdist>=3.6.1", "pytest-click", "asgi-lifespan", "click", "time-machine>=2.15.0", ] [tool.bumpversion] allow_dirty = true commit = false commit_args = "--no-verify" current_version = "1.0.1" ignore_missing_files = false ignore_missing_version = false message = "chore(release): bump to v{new_version}" parse = "(?P\\d+)\\.(?P\\d+)\\.(?P\\d+)" regex = false replace = "{new_version}" search = "{current_version}" serialize = ["{major}.{minor}.{patch}"] sign_tags = false tag = false tag_message = "chore(release): v{new_version}" tag_name = "v{new_version}" [[tool.bumpversion.files]] filename = "pyproject.toml" replace = 'version = "{new_version}"' search = 'version = "{current_version}"' [build-system] build-backend = "hatchling.build" requires = ["hatchling"] [tool.hatch.build.targets.wheel] packages = [ "advanced_alchemy", "advanced_alchemy.extensions.litestar", "advanced_alchemy.extensions.sanic", "advanced_alchemy.extensions.starlette", ] [tool.pytest.ini_options] addopts = ["-q", "-ra"] asyncio_default_fixture_loop_scope = "function" asyncio_mode = "auto" filterwarnings = [ "ignore::DeprecationWarning:pkg_resources.*", "ignore:pkg_resources is deprecated as an API:DeprecationWarning", "ignore::DeprecationWarning:pkg_resources", "ignore::DeprecationWarning:google.rpc", "ignore::DeprecationWarning:google.gcloud", "ignore::DeprecationWarning:google.iam", "ignore::DeprecationWarning:google", "ignore::DeprecationWarning:websockets.connection", "ignore::DeprecationWarning:websockets.legacy", ] markers = [ "integration: SQLAlchemy integration tests", "asyncmy: SQLAlchemy MySQL (asyncmy) Tests", "asyncpg: SQLAlchemy Postgres (asyncpg) Tests", "psycopg_async: SQLAlchemy Postgres (psycopg async) Tests", "psycopg_sync: SQLAlchemy Postgres (psycopg sync) Tests", 
"aiosqlite: SQLAlchemy SQLite (aiosqlite) Tests", "sqlite: SQLAlchemy SQLite (sqlite) Tests", "oracledb_sync: SQLAlchemy Oracle (oracledb sync) Tests", "oracledb_async: SQLAlchemy Oracle (oracledb async) Tests", "spanner: SQLAlchemy Google Cloud Spanner (sqlalchemy-spanner) Tests", "duckdb: SQLAlchemy DuckDB (duckdb-engine) Tests", "mssql_sync: SQLAlchemy Microsoft SQL Server (pyodbc) Tests", "mssql_async: SQLAlchemy Microsoft SQL Server (aioodbc) Tests", "mock_async: SQLAlchemy async mock Tests", "mock_sync: SQLAlchemy sync mock Tests", "cockroachdb_sync: SQLAlchemy CockroachDB (psycopg2) Tests", "cockroachdb_async: SQLAlchemy CockroachDB (asyncpg) Tests", ] testpaths = ["tests"] [tool.coverage.run] branch = true concurrency = ["multiprocessing"] omit = [ "*/tests/*", "advanced_alchemy/alembic/templates/asyncio/env.py", "advanced_alchemy/alembic/templates/sync/env.py", "advanced_alchemy/extensions/litestar/cli.py", "advanced_alchemy/alembic/commands.py", "advanced_alchemy/types.py", "advanced_alchemy/operations.py", "advanced_alchemy/service/*", ] parallel = true relative_files = true [tool.coverage.report] exclude_lines = [ 'pragma: no cover', 'if TYPE_CHECKING:', 'except ImportError as e:', 'except ImportError:', '\.\.\.', 'raise NotImplementedError', 'if VERSION.startswith("1"):', 'if pydantic.VERSION.startswith("1"):', ] [tool.black] line-length = 120 [tool.ruff] exclude = [".venv", "node_modules"] line-length = 120 src = ["advanced_alchemy", "tests", "docs", "tools"] target-version = "py39" [tool.ruff.format] docstring-code-format = true docstring-code-line-length = 60 [tool.ruff.lint] extend-safe-fixes = ["TC"] fixable = ["ALL"] ignore = [ "A003", # flake8-builtins - class attribute {name} is shadowing a python builtin "A005", # flake8-builtins - module {name} shadows a Python standard-library module "B010", # flake8-bugbear - do not call setattr with a constant attribute value "D100", # pydocstyle - missing docstring in public module "D101", # pydocstyle - 
missing docstring in public class "D102", # pydocstyle - missing docstring in public method "D103", # pydocstyle - missing docstring in public function "D104", # pydocstyle - missing docstring in public package "D105", # pydocstyle - missing docstring in magic method "D106", # pydocstyle - missing docstring in public nested class "D107", # pydocstyle - missing docstring in __init__ "D202", # pydocstyle - no blank lines allowed after function docstring "D205", # pydocstyle - 1 blank line required between summary line and description "D415", # pydocstyle - first line should end with a period, question mark, or exclamation point "E501", # pydocstyle line too long, handled by black "PLW2901", # pylint - for loop variable overwritten by assignment target "RUF012", # Ruff-specific rule - annotated with classvar "ANN401", "FBT", "PLR0913", # too many arguments "PT", "TD", "ARG002", # ignore for now; investigate "ARG003", # ignore for now; investigate "PERF203", # ignore for now; investigate "PD011", # pandas "PLR0912", "ISC001", "COM812", "CPY001", "FA100", ] select = ["ALL"] [tool.ruff.lint.pydocstyle] convention = "google" [tool.ruff.lint.pyupgrade] # Preserve types, even if a file imports `from __future__ import annotations`. # keep-runtime-typing = true [tool.ruff.lint.mccabe] max-complexity = 14 [tool.ruff.lint.pep8-naming] classmethod-decorators = [ "sqlalchemy.ext.declarative.declared_attr", "sqlalchemy.orm.declared_attr.directive", "sqlalchemy.orm.declared_attr", ] [tool.ruff.lint.per-file-ignores] "advanced_alchemy/repository/*.py" = ['C901'] "examples/flask.py" = ["ANN"] "examples/flask/*.py" = ["ANN"] "tests/**/*.*" = [ "A", "ARG", "B", "BLE", "C901", "D", "DTZ", "EM", "FBT", "G", "N", "PGH", "PIE", "PLR", "PLW", "PTH", "RSE", "S", "S101", "SIM", "TC", "TRY", "SLF001", ] [tool.ruff.lint.flake8-tidy-imports] # Disallow all relative imports. 
ban-relative-imports = "all" [tool.ruff.lint.isort] known-first-party = ["advanced_alchemy", "tests"] [tool.slotscheck] exclude-modules = ''' ( (^|\.)test_ |^tests\.* |^tools\.* |^docs\.* |^examples\.* |^sqlalchemy\.( testing |ext\.mypy # see slotscheck/issues/178 ) |^alembic\.testing\.suite.* # Add this line to exclude Alembic test suite ) ''' include-modules = "advanced_alchemy.*" require-superclass = false strict-imports = true [tool.mypy] disallow_any_generics = false disallow_untyped_decorators = true implicit_reexport = false packages = ["advanced_alchemy", "tests", "docs", "examples"] python_version = "3.9" show_error_codes = true strict = true warn_redundant_casts = true warn_return_any = true warn_unreachable = true warn_unused_configs = true warn_unused_ignores = true [[tool.mypy.overrides]] disable_error_code = "attr-defined,type-var,union-attr" disallow_untyped_decorators = false module = "tests.*" warn_unused_ignores = false [[tool.mypy.overrides]] ignore_missing_imports = true module = [ "asyncmy", "pyodbc", "greenlet", "google.auth.*", "google.cloud.*", "google.protobuf.*", "pyarrow.*", "pytest_docker.*", "googleapiclient", "googleapiclient.*", "uuid_utils", "uuid_utils.*", ] [[tool.mypy.overrides]] module = "advanced_alchemy._serialization" warn_unused_ignores = false [[tool.mypy.overrides]] disallow_untyped_decorators = false module = "advanced_alchemy.extensions.litestar.cli" [[tool.mypy.overrides]] disallow_untyped_decorators = false module = "advanced_alchemy.types.json" [[tool.mypy.overrides]] module = "advanced_alchemy.service.typing" warn_unused_ignores = false [[tool.mypy.overrides]] module = "advanced_alchemy.alembic.templates.*.env" warn_unreachable = false [[tool.mypy.overrides]] disable_error_code = "no-untyped-call" disallow_untyped_decorators = false module = "advanced_alchemy.extensions.sanic" warn_unused_ignores = false [[tool.mypy.overrides]] module = "advanced_alchemy.base" warn_unused_ignores = false [[tool.mypy.overrides]] 
disallow_untyped_decorators = false module = "advanced_alchemy.extensions.litestar.cli" [[tool.mypy.overrides]] disable_error_code = "arg-type,no-any-return,no-untyped-def" disallow_untyped_decorators = false disallow_untyped_defs = false module = "examples.flask.*" [[tool.mypy.overrides]] disable_error_code = "unreachable" module = "tests.integration.test_repository" [tool.pyright] disableBytesTypePromotions = true exclude = [ "docs", "tests/unit/test_extensions", "tests/unit/test_repository.py", "tests/helpers.py", "tests/docker_service_fixtures.py", "examples/flask/flask_services.py", ] include = ["advanced_alchemy"] pythonVersion = "3.9" reportUnnecessaryTypeIgnoreComments = true reportUnusedFunction = false strict = ["advanced_alchemy/**/*"] venv = ".venv" venvPath = "." [tool.unasyncd] add_editors_note = true cache = true ruff_fix = true ruff_format = true update_docstrings = true [tool.unasyncd.files] "advanced_alchemy/repository/_async.py" = "advanced_alchemy/repository/_sync.py" "advanced_alchemy/repository/memory/_async.py" = "advanced_alchemy/repository/memory/_sync.py" "advanced_alchemy/service/_async.py" = "advanced_alchemy/service/_sync.py" [tool.unasyncd.per_file_add_replacements."advanced_alchemy/repository/_async.py"] SQLAlchemyAsyncMockRepository = "SQLAlchemySyncMockRepository" "SQLAlchemyAsyncQueryRepository" = "SQLAlchemySyncQueryRepository" SQLAlchemyAsyncRepository = "SQLAlchemySyncRepository" SQLAlchemyAsyncRepositoryProtocol = "SQLAlchemySyncRepositoryProtocol" "SQLAlchemyAsyncSlugRepository" = "SQLAlchemySyncSlugRepository" SQLAlchemyAsyncSlugRepositoryProtocol = "SQLAlchemySyncSlugRepositoryProtocol" "async_scoped_session" = "scoped_session" "sqlalchemy.ext.asyncio.AsyncSession" = "sqlalchemy.orm.Session" "sqlalchemy.ext.asyncio.scoping.async_scoped_session" = "sqlalchemy.orm.scoping.scoped_session" [tool.unasyncd.per_file_add_replacements."advanced_alchemy/repository/memory/_async.py"] SQLAlchemyAsyncMockRepository = 
"SQLAlchemySyncMockRepository" "SQLAlchemyAsyncMockSlugRepository" = "SQLAlchemySyncMockSlugRepository" SQLAlchemyAsyncRepository = "SQLAlchemySyncRepository" SQLAlchemyAsyncRepositoryProtocol = "SQLAlchemySyncRepositoryProtocol" "SQLAlchemyAsyncSlugRepository" = "SQLAlchemySyncSlugRepository" SQLAlchemyAsyncSlugRepositoryProtocol = "SQLAlchemySyncSlugRepositoryProtocol" "advanced_alchemy.repository._async.SQLAlchemyAsyncRepositoryProtocol" = "advanced_alchemy.repository._sync.SQLAlchemySyncRepositoryProtocol" "advanced_alchemy.repository._async.SQLAlchemyAsyncSlugRepositoryProtocol" = "advanced_alchemy.repository._sync.SQLAlchemySyncSlugRepositoryProtocol" "async_scoped_session" = "scoped_session" "sqlalchemy.ext.asyncio.AsyncEngine" = "sqlalchemy.Engine" "sqlalchemy.ext.asyncio.AsyncSession" = "sqlalchemy.orm.Session" "sqlalchemy.ext.asyncio.scoping.async_scoped_session" = "sqlalchemy.orm.scoping.scoped_session" [tool.unasyncd.per_file_add_replacements."advanced_alchemy/service/_async.py"] "AsyncIterator" = "Iterator" "SQLAlchemyAsyncConfigT" = "SQLAlchemySyncConfigT" SQLAlchemyAsyncMockRepository = "SQLAlchemySyncMockRepository" SQLAlchemyAsyncMockSlugRepository = "SQLAlchemySyncMockSlugRepository" SQLAlchemyAsyncQueryService = "SQLAlchemySyncQueryService" SQLAlchemyAsyncRepository = "SQLAlchemySyncRepository" SQLAlchemyAsyncRepositoryReadService = "SQLAlchemySyncRepositoryReadService" SQLAlchemyAsyncRepositoryService = "SQLAlchemySyncRepositoryService" "SQLAlchemyAsyncRepositoryT" = "SQLAlchemySyncRepositoryT" SQLAlchemyAsyncSlugRepository = "SQLAlchemySyncSlugRepository" "advanced_alchemy.config.asyncio.SQLAlchemyAsyncConfig" = "advanced_alchemy.config.sync.SQLAlchemySyncConfig" "advanced_alchemy.repository.SQLAlchemyAsyncQueryRepository" = "advanced_alchemy.repository.SQLAlchemySyncQueryRepository" "advanced_alchemy.repository.SQLAlchemyAsyncRepository" = "advanced_alchemy.repository.SQLAlchemySyncRepository" 
"advanced_alchemy.repository.SQLAlchemyAsyncRepositoryProtocol" = "advanced_alchemy.repository.SQLAlchemySyncRepositoryProtocol" "advanced_alchemy.repository.SQLAlchemyAsyncSlugRepository" = "advanced_alchemy.repository.SQLAlchemySyncSlugRepository" "advanced_alchemy.repository.SQLAlchemyAsyncSlugRepositoryProtocol" = "advanced_alchemy.repository.SQLAlchemySyncSlugRepositoryProtocol" "advanced_alchemy.repository.memory.SQLAlchemyAsyncMockRepository" = "advanced_alchemy.repository.memory.SQLAlchemySyncMockRepository" "advanced_alchemy.repository.memory.SQLAlchemyAsyncMockSlugRepository" = "advanced_alchemy.repository.memory.SQLAlchemySyncMockSlugRepository" "advanced_alchemy.repository.typing.SQLAlchemyAsyncRepositoryT" = "advanced_alchemy.repository.typing.SQLAlchemySyncRepositoryT" "async_scoped_session" = "scoped_session" "collections.abc.AsyncIterator" = "collections.abc.Iterator" "sqlalchemy.ext.asyncio.AsyncSession" = "sqlalchemy.orm.Session" "sqlalchemy.ext.asyncio.scoping.async_scoped_session" = "sqlalchemy.orm.scoping.scoped_session" [tool.codespell] ignore-words-list = "selectin" skip = 'pdm.lock, uv.lock, examples/us_state_lookup.json, docs/_static/favicon.svg' python-advanced-alchemy-1.0.1/sonar-project.properties000066400000000000000000000031541476663714600231500ustar00rootroot00000000000000sonar.organization=litestar-api sonar.projectKey=litestar-org_advanced-alchemy sonar.python.coverage.reportPaths=coverage.xml sonar.python.version=3.9, 3.10, 3.11, 3.12, 3.13 sonar.sourceEncoding=UTF-8 sonar.sources=advanced_alchemy sonar.tests=tests sonar.coverage.exclusions=\ examples/*.py, \ tests/*.py, \ tests/**/*.py, \ advanced_alchemy/cli.py, \ advanced_alchemy/extensions/litestar/cli.py, \ advanced_alchemy/filters.py, \ advanced_alchemy/service/typing.py, \ advanced_alchemy/service/_typing.py, \ advanced_alchemy/service/_util.py, \ advanced_alchemy/alembic/templates/asyncio/env.py, \ advanced_alchemy/alembic/templates/sync/env.py, \ 
tests/integration/conftest.py, \ advanced_alchemy/service/_sync.py, \ advanced_alchemy/service/_async.py, \ advanced_alchemy/service/pagination.py, \ advanced_alchemy/extensions/litestar/plugins/init/config/*.py, \ advanced_alchemy/extensions/sanic/config.py, \ advanced_alchemy/extensions/sanic/extension.py sonar.cpd.exclusions=\ advanced_alchemy/repository/memory/_sync.py, \ advanced_alchemy/repository/memory/_async.py, \ advanced_alchemy/filters.py, \ advanced_alchemy/repository/_sync.py, \ advanced_alchemy/repository/_async.py, \ advanced_alchemy/service/_sync.py, \ advanced_alchemy/service/_async.py, \ advanced_alchemy/alembic/templates/sync/env.py, \ examples/*.py, \ examples/fastapi.py, \ tests/integration/conftest.py, \ advanced_alchemy/extensions/litestar/plugins/init/config/*.py, \ advanced_alchemy/extensions/sanic/config.py, \ advanced_alchemy/extensions/sanic/extension.py sonar.projectName=Advanced Alchemy python-advanced-alchemy-1.0.1/tests/000077500000000000000000000000001476663714600174035ustar00rootroot00000000000000python-advanced-alchemy-1.0.1/tests/__init__.py000066400000000000000000000000001476663714600215020ustar00rootroot00000000000000python-advanced-alchemy-1.0.1/tests/conftest.py000066400000000000000000000000631476663714600216010ustar00rootroot00000000000000pytest_plugins = ["tests.docker_service_fixtures"] python-advanced-alchemy-1.0.1/tests/docker-compose.yml000066400000000000000000000046771476663714600230560ustar00rootroot00000000000000services: postgres: image: docker.io/postgres:latest ports: - "5423:5432" # use a non-standard port here environment: POSTGRES_PASSWORD: super-secret postgres14: image: docker.io/postgres:14 ports: - "5424:5432" # use a non-standard port here environment: POSTGRES_PASSWORD: super-secret mysql: image: docker.io/mysql:latest ports: - "3360:3306" # use a non-standard port here environment: MYSQL_ROOT_PASSWORD: super-secret MYSQL_PASSWORD: super-secret MYSQL_USER: app MYSQL_DATABASE: db MYSQL_ROOT_HOST: "%" 
LANG: C.UTF-8 oracle18c: image: docker.io/gvenzl/oracle-xe:18-slim-faststart ports: - "1512:1521" # use a non-standard port here environment: ORACLE_PASSWORD: super-secret APP_USER_PASSWORD: super-secret APP_USER: app oracle23c: image: docker.io/gvenzl/oracle-free:23-slim-faststart ports: - "1513:1521" # use a non-standard port here environment: ORACLE_PASSWORD: super-secret APP_USER_PASSWORD: super-secret APP_USER: app spanner: image: gcr.io/cloud-spanner-emulator/emulator:latest ports: - "9010:9010" # Init (Create Instance) spanner_init: image: gcr.io/google.com/cloudsdktool/cloud-sdk:332.0.0-slim command: > bash -c 'gcloud config configurations create emulator && gcloud config set auth/disable_credentials true && gcloud config set project $${PROJECT_ID} && gcloud config set auth/disable_credentials true && gcloud spanner instances create $${INSTANCE_NAME} --config=emulator-config --description=Emulator --nodes=1' environment: PROJECT_ID: emulator-test-project INSTANCE_NAME: test-instance DATABASE_NAME: test-database depends_on: - spanner mssql: image: mcr.microsoft.com/mssql/server:2022-latest ports: - "1344:1433" # use a non-standard port here environment: SA_PASSWORD: Super-secret1 MSSQL_PID: Developer ACCEPT_EULA: Accepted MSSQL_TCP_PORT: 1433 cockroachdb: image: docker.io/cockroachdb/cockroach:latest command: start-single-node --insecure restart: "no" expose: - "8080" - "26257" ports: - "26257:26257" - "8880:8080" healthcheck: test: ["CMD", "curl", "-f", "http://localhost:8080/health?ready=1"] interval: 3s timeout: 3s retries: 5 python-advanced-alchemy-1.0.1/tests/docker_service_fixtures.py000066400000000000000000000223741476663714600247050ustar00rootroot00000000000000from __future__ import annotations import asyncio import contextlib import os import re import subprocess import sys import timeit from collections.abc import Awaitable, Generator from pathlib import Path from typing import Any, Callable import asyncmy import asyncpg import oracledb import 
psycopg import pyodbc import pytest from google.auth.credentials import AnonymousCredentials from google.cloud import spanner from oracledb.exceptions import DatabaseError, OperationalError from tests.helpers import wrap_sync async def wait_until_responsive( check: Callable[..., Awaitable], timeout: float, pause: float, **kwargs: Any, ) -> None: """Wait until a service is responsive. Args: check: Coroutine, return truthy value when waiting should stop. timeout: Maximum seconds to wait. pause: Seconds to wait between calls to `check`. **kwargs: Given as kwargs to `check`. """ ref = timeit.default_timer() now = ref while (now - ref) < timeout: # sourcery skip if await check(**kwargs): return await asyncio.sleep(pause) now = timeit.default_timer() msg = "Timeout reached while waiting on service!" raise RuntimeError(msg) USE_LEGACY_DOCKER_COMPOSE: bool = bool(os.environ.get("USE_LEGACY_DOCKER_COMPOSE", None)) class DockerServiceRegistry: def __init__(self, worker_id: str) -> None: self._running_services: set[str] = set() self.docker_ip = self._get_docker_ip() self._base_command = ["docker-compose"] if USE_LEGACY_DOCKER_COMPOSE else ["docker", "compose"] self._base_command.extend( [ f"--file={Path(__file__).parent / 'docker-compose.yml'}", f"--project-name=advanced_alchemy-{worker_id}", ], ) def _get_docker_ip(self) -> str: docker_host = os.environ.get("DOCKER_HOST", "").strip() if not docker_host or docker_host.startswith("unix://"): return "127.0.0.1" if match := re.match(r"^tcp://(.+?):\d+$", docker_host): return match[1] msg = f'Invalid value for DOCKER_HOST: "{docker_host}".' 
raise ValueError(msg) def run_command(self, *args: str) -> None: command = [*self._base_command, *args] subprocess.run(command, check=True, capture_output=True) async def start( self, name: str, *, check: Callable[..., Any], timeout: float = 30, pause: float = 0.1, **kwargs: Any, ) -> None: if name not in self._running_services: if await wrap_sync(check)(self.docker_ip, **kwargs): self._running_services.add(name) return self.run_command("up", "-d", name) self._running_services.add(name) await wait_until_responsive( check=wrap_sync(check), timeout=timeout, pause=pause, host=self.docker_ip, **kwargs, ) def stop(self, name: str) -> None: pass def down(self) -> None: self.run_command("down", "-t", "5") @pytest.fixture(scope="session") def docker_services(worker_id: str) -> Generator[DockerServiceRegistry, None, None]: if os.getenv("GITHUB_ACTIONS") == "true" and sys.platform != "linux": pytest.skip("Docker not available on this platform") registry = DockerServiceRegistry(worker_id) try: yield registry finally: registry.down() @pytest.fixture(scope="session") def docker_ip(docker_services: DockerServiceRegistry) -> Generator[str, None, None]: yield docker_services.docker_ip async def mysql_responsive(host: str) -> bool: try: conn = await asyncmy.connect( host=host, port=3360, user="app", database="db", password="super-secret", ) async with conn.cursor() as cursor: await cursor.execute("select 1 as is_available") resp = await cursor.fetchone() return resp[0] == 1 # type: ignore except asyncmy.errors.OperationalError: # pyright: ignore[reportAttributeAccessIssue] return False @pytest.fixture() async def mysql_service(docker_services: DockerServiceRegistry) -> None: await docker_services.start("mysql", timeout=45, pause=1, check=mysql_responsive) async def postgres_responsive(host: str) -> bool: try: conn = await asyncpg.connect( host=host, port=5423, user="postgres", database="postgres", password="super-secret", ) except (ConnectionError, asyncpg.CannotConnectNowError): 
return False try: return (await conn.fetchrow("SELECT 1"))[0] == 1 # type: ignore finally: await conn.close() @pytest.fixture() async def postgres_service(docker_services: DockerServiceRegistry) -> None: await docker_services.start("postgres", check=postgres_responsive) async def postgres14_responsive(host: str) -> bool: try: conn = await asyncpg.connect( host=host, port=5424, user="postgres", database="postgres", password="super-secret", ) except (ConnectionError, asyncpg.CannotConnectNowError): return False try: return (await conn.fetchrow("SELECT 1"))[0] == 1 # type: ignore finally: await conn.close() @pytest.fixture() async def postgres14_service(docker_services: DockerServiceRegistry) -> None: await docker_services.start("postgres", check=postgres_responsive) def oracle23c_responsive(host: str) -> bool: try: conn = oracledb.connect( host=host, port=1513, user="app", service_name="FREEPDB1", password="super-secret", ) with conn.cursor() as cursor: cursor.execute("SELECT 1 FROM dual") resp = cursor.fetchone() return resp[0] == 1 # type: ignore except (OperationalError, DatabaseError, Exception): return False @pytest.fixture() async def oracle23c_service(docker_services: DockerServiceRegistry) -> None: await docker_services.start("oracle23c", check=oracle23c_responsive, timeout=120) def oracle18c_responsive(host: str) -> bool: try: conn = oracledb.connect( host=host, port=1512, user="app", service_name="xepdb1", password="super-secret", ) with conn.cursor() as cursor: cursor.execute("SELECT 1 FROM dual") resp = cursor.fetchone() return resp[0] == 1 # type: ignore except (OperationalError, DatabaseError, Exception): return False @pytest.fixture() async def oracle18c_service(docker_services: DockerServiceRegistry) -> None: await docker_services.start("oracle18c", check=oracle18c_responsive, timeout=120) def spanner_responsive(host: str) -> bool: try: os.environ["SPANNER_EMULATOR_HOST"] = "localhost:9010" os.environ["GOOGLE_CLOUD_PROJECT"] = "emulator-test-project" 
spanner_client = spanner.Client(project="emulator-test-project", credentials=AnonymousCredentials()) # type: ignore[no-untyped-call] instance = spanner_client.instance("test-instance") with contextlib.suppress(Exception): instance.create() database = instance.database("test-database") with contextlib.suppress(Exception): database.create() with database.snapshot() as snapshot: resp = next(iter(snapshot.execute_sql("SELECT 1"))) return resp[0] == 1 # type: ignore except Exception: return False @pytest.fixture() async def spanner_service(docker_services: DockerServiceRegistry) -> None: os.environ["SPANNER_EMULATOR_HOST"] = "localhost:9010" await docker_services.start("spanner", timeout=60, check=spanner_responsive) async def mssql_responsive(host: str) -> bool: await asyncio.sleep(1) try: port = 1344 user = "sa" database = "master" conn = pyodbc.connect( connstring=f"encrypt=no; TrustServerCertificate=yes; driver={{ODBC Driver 18 for SQL Server}}; server={host},{port}; database={database}; UID={user}; PWD=Super-secret1", timeout=2, ) with conn.cursor() as cursor: cursor.execute("select 1 as is_available") resp = cursor.fetchone() return resp[0] == 1 # type: ignore except Exception: return False @pytest.fixture() async def mssql_service(docker_services: DockerServiceRegistry) -> None: await docker_services.start("mssql", timeout=60, pause=1, check=mssql_responsive) async def cockroachdb_responsive(host: str) -> bool: try: with psycopg.connect("postgresql://root@127.0.0.1:26257/defaultdb?sslmode=disable") as conn: with conn.cursor() as cursor: cursor.execute("select 1 as is_available") resp = cursor.fetchone() return resp[0] == 1 # type: ignore except Exception: return False @pytest.fixture() async def cockroachdb_service(docker_services: DockerServiceRegistry) -> None: await docker_services.start("cockroachdb", timeout=60, pause=1, check=cockroachdb_responsive) 
python-advanced-alchemy-1.0.1/tests/fixtures/000077500000000000000000000000001476663714600212545ustar00rootroot00000000000000python-advanced-alchemy-1.0.1/tests/fixtures/__init__.py000066400000000000000000000000001476663714600233530ustar00rootroot00000000000000python-advanced-alchemy-1.0.1/tests/fixtures/bigint/000077500000000000000000000000001476663714600225305ustar00rootroot00000000000000python-advanced-alchemy-1.0.1/tests/fixtures/bigint/__init__.py000066400000000000000000000000001476663714600246270ustar00rootroot00000000000000python-advanced-alchemy-1.0.1/tests/fixtures/bigint/models.py000066400000000000000000000076141476663714600243750ustar00rootroot00000000000000"""Example domain objects for testing.""" from __future__ import annotations import datetime from sqlalchemy import Column, FetchedValue, ForeignKey, String, Table, func from sqlalchemy.orm import Mapped, declared_attr, mapped_column, relationship from sqlalchemy.orm.decl_base import _TableArgsType as TableArgsType # pyright: ignore[reportPrivateUsage] from advanced_alchemy.base import BigIntAuditBase, BigIntBase, merge_table_arguments from advanced_alchemy.mixins import SlugKey from advanced_alchemy.types import EncryptedString from advanced_alchemy.types.encrypted_string import EncryptedText class BigIntAuthor(BigIntAuditBase): """The Author domain object.""" name: Mapped[str] = mapped_column(String(length=100)) string_field: Mapped[str] = mapped_column(String(20), default="static value", nullable=True) dob: Mapped[datetime.date] = mapped_column(nullable=True) books: Mapped[list[BigIntBook]] = relationship( lazy="selectin", back_populates="author", cascade="all, delete", ) class BigIntBook(BigIntBase): """The Book domain object.""" title: Mapped[str] = mapped_column(String(length=250)) # pyright: ignore author_id: Mapped[int] = mapped_column(ForeignKey("big_int_author.id")) # pyright: ignore author: Mapped[BigIntAuthor] = relationship( # pyright: ignore lazy="joined", innerjoin=True, 
back_populates="books", ) class BigIntSlugBook(BigIntBase, SlugKey): """The Book domain object with a slug key.""" title: Mapped[str] = mapped_column(String(length=250)) # pyright: ignore author_id: Mapped[str] = mapped_column(String(length=250)) # pyright: ignore @declared_attr.directive @classmethod def __table_args__(cls) -> TableArgsType: return merge_table_arguments( cls, table_args={"comment": "Slugbook"}, ) class BigIntEventLog(BigIntAuditBase): """The event log domain object.""" logged_at: Mapped[datetime.datetime] = mapped_column(default=datetime.datetime.now()) # pyright: ignore payload: Mapped[dict] = mapped_column(default=lambda: {}) # pyright: ignore class BigIntModelWithFetchedValue(BigIntBase): """The ModelWithFetchedValue BigIntBase.""" val: Mapped[int] # pyright: ignore updated: Mapped[datetime.datetime] = mapped_column( # pyright: ignore server_default=func.current_timestamp(), onupdate=func.current_timestamp(), server_onupdate=FetchedValue(), ) bigint_item_tag = Table( "bigint_item_tag", BigIntBase.metadata, Column("item_id", ForeignKey("big_int_item.id"), primary_key=True), # pyright: ignore Column("tag_id", ForeignKey("big_int_tag.id"), primary_key=True), # pyright: ignore ) class BigIntItem(BigIntBase): name: Mapped[str] = mapped_column(String(length=50)) # pyright: ignore description: Mapped[str] = mapped_column(String(length=100), nullable=True) # pyright: ignore tags: Mapped[list[BigIntTag]] = relationship(secondary=lambda: bigint_item_tag, back_populates="items") class BigIntTag(BigIntBase): """The event log domain object.""" name: Mapped[str] = mapped_column(String(length=50)) # pyright: ignore items: Mapped[list[BigIntItem]] = relationship(secondary=lambda: bigint_item_tag, back_populates="tags") class BigIntRule(BigIntAuditBase): """The rule domain object.""" name: Mapped[str] = mapped_column(String(length=250)) # pyright: ignore config: Mapped[dict] = mapped_column(default=lambda: {}) # pyright: ignore class BigIntSecret(BigIntBase): 
"""The secret domain model.""" secret: Mapped[str] = mapped_column( EncryptedString(key="super_secret"), ) long_secret: Mapped[str] = mapped_column( EncryptedText(key="super_secret"), ) length_validated_secret: Mapped[str] = mapped_column( EncryptedString(key="super_secret", length=10), nullable=True, ) python-advanced-alchemy-1.0.1/tests/fixtures/bigint/repositories.py000066400000000000000000000116231476663714600256340ustar00rootroot00000000000000"""Example domain objects for testing.""" from __future__ import annotations from advanced_alchemy.repository import ( SQLAlchemyAsyncRepository, SQLAlchemyAsyncSlugRepository, SQLAlchemySyncRepository, SQLAlchemySyncSlugRepository, ) from advanced_alchemy.repository.memory import ( SQLAlchemyAsyncMockRepository, SQLAlchemyAsyncMockSlugRepository, SQLAlchemySyncMockRepository, SQLAlchemySyncMockSlugRepository, ) from tests.fixtures.bigint.models import ( BigIntAuthor, BigIntBook, BigIntEventLog, BigIntItem, BigIntModelWithFetchedValue, BigIntRule, BigIntSecret, BigIntSlugBook, BigIntTag, ) class RuleAsyncRepository(SQLAlchemyAsyncRepository[BigIntRule]): """Rule repository.""" model_type = BigIntRule class RuleAsyncMockRepository(SQLAlchemyAsyncMockRepository[BigIntRule]): """Rule repository.""" model_type = BigIntRule class RuleSyncMockRepository(SQLAlchemySyncMockRepository[BigIntRule]): """Rule repository.""" model_type = BigIntRule class AuthorAsyncRepository(SQLAlchemyAsyncRepository[BigIntAuthor]): """Author repository.""" model_type = BigIntAuthor class AuthorAsyncMockRepository(SQLAlchemyAsyncMockRepository[BigIntAuthor]): model_type = BigIntAuthor class AuthorSyncMockRepository(SQLAlchemySyncMockRepository[BigIntAuthor]): model_type = BigIntAuthor class BookAsyncRepository(SQLAlchemyAsyncRepository[BigIntBook]): """Book repository.""" model_type = BigIntBook class BookAsyncMockRepository(SQLAlchemyAsyncMockRepository[BigIntBook]): """Book repository.""" model_type = BigIntBook class 
BookSyncMockRepository(SQLAlchemySyncMockRepository[BigIntBook]): """Book repository.""" model_type = BigIntBook class EventLogAsyncRepository(SQLAlchemyAsyncRepository[BigIntEventLog]): """Event log repository.""" model_type = BigIntEventLog class ModelWithFetchedValueAsyncRepository(SQLAlchemyAsyncRepository[BigIntModelWithFetchedValue]): """BigIntModelWithFetchedValue repository.""" model_type = BigIntModelWithFetchedValue class SecretAsyncRepository(SQLAlchemyAsyncRepository[BigIntSecret]): """Secret repository.""" model_type = BigIntSecret class TagAsyncRepository(SQLAlchemyAsyncRepository[BigIntTag]): """Tag repository.""" model_type = BigIntTag class TagAsyncMockRepository(SQLAlchemyAsyncMockRepository[BigIntTag]): """Tag repository.""" model_type = BigIntTag class TagSyncMockRepository(SQLAlchemySyncMockRepository[BigIntTag]): """Tag repository.""" model_type = BigIntTag class SecretSyncRepository(SQLAlchemySyncRepository[BigIntSecret]): """Secret repository.""" model_type = BigIntSecret class ItemAsyncRepository(SQLAlchemyAsyncRepository[BigIntItem]): """Item repository.""" model_type = BigIntItem class ItemAsyncMockRepository(SQLAlchemyAsyncMockRepository[BigIntItem]): """Item repository.""" model_type = BigIntItem class ItemSyncMockRepository(SQLAlchemySyncMockRepository[BigIntItem]): """Item repository.""" model_type = BigIntItem class SecretAsyncMockRepository(SQLAlchemyAsyncMockRepository[BigIntSecret]): """Secret repository.""" model_type = BigIntSecret class SecretSyncMockRepository(SQLAlchemySyncMockRepository[BigIntSecret]): """Secret repository.""" model_type = BigIntSecret class AuthorSyncRepository(SQLAlchemySyncRepository[BigIntAuthor]): """Author repository.""" model_type = BigIntAuthor class BookSyncRepository(SQLAlchemySyncRepository[BigIntBook]): """Book repository.""" model_type = BigIntBook class EventLogSyncRepository(SQLAlchemySyncRepository[BigIntEventLog]): """Event log repository.""" model_type = BigIntEventLog class 
RuleSyncRepository(SQLAlchemySyncRepository[BigIntRule]): """Rule repository.""" model_type = BigIntRule class ModelWithFetchedValueSyncRepository(SQLAlchemySyncRepository[BigIntModelWithFetchedValue]): """ModelWithFetchedValue repository.""" model_type = BigIntModelWithFetchedValue class TagSyncRepository(SQLAlchemySyncRepository[BigIntTag]): """Tag repository.""" model_type = BigIntTag class ItemSyncRepository(SQLAlchemySyncRepository[BigIntItem]): """Item repository.""" model_type = BigIntItem class SlugBookAsyncRepository(SQLAlchemyAsyncSlugRepository[BigIntSlugBook]): """Slug Book repository.""" _uniquify_results = True model_type = BigIntSlugBook class SlugBookSyncRepository(SQLAlchemySyncSlugRepository[BigIntSlugBook]): """Slug Book repository.""" _uniquify_results = True model_type = BigIntSlugBook class SlugBookAsyncMockRepository(SQLAlchemyAsyncMockSlugRepository[BigIntSlugBook]): """Book repository.""" model_type = BigIntSlugBook class SlugBookSyncMockRepository(SQLAlchemySyncMockSlugRepository[BigIntSlugBook]): """Book repository.""" model_type = BigIntSlugBook python-advanced-alchemy-1.0.1/tests/fixtures/bigint/services.py000066400000000000000000000216221476663714600247300ustar00rootroot00000000000000"""Example domain objects for testing.""" from __future__ import annotations from advanced_alchemy.service import ( SQLAlchemyAsyncRepositoryService, SQLAlchemySyncRepositoryService, ) from advanced_alchemy.service.typing import ModelDictT, is_dict_with_field, is_dict_without_field, schema_dump from tests.fixtures.bigint.models import ( BigIntAuthor, BigIntBook, BigIntEventLog, BigIntItem, BigIntModelWithFetchedValue, BigIntRule, BigIntSecret, BigIntSlugBook, BigIntTag, ) from tests.fixtures.bigint.repositories import ( AuthorAsyncMockRepository, AuthorAsyncRepository, AuthorSyncMockRepository, AuthorSyncRepository, BookAsyncMockRepository, BookAsyncRepository, BookSyncMockRepository, BookSyncRepository, EventLogAsyncRepository, EventLogSyncRepository, 
ItemAsyncMockRepository, ItemAsyncRepository, ItemSyncMockRepository, ItemSyncRepository, ModelWithFetchedValueAsyncRepository, ModelWithFetchedValueSyncRepository, RuleAsyncMockRepository, RuleAsyncRepository, RuleSyncMockRepository, RuleSyncRepository, SecretAsyncRepository, SecretSyncRepository, SlugBookAsyncMockRepository, SlugBookAsyncRepository, SlugBookSyncMockRepository, SlugBookSyncRepository, TagAsyncMockRepository, TagAsyncRepository, TagSyncMockRepository, TagSyncRepository, ) # Services class SecretAsyncService(SQLAlchemyAsyncRepositoryService[BigIntSecret, SecretAsyncRepository]): """Rule repository.""" repository_type = SecretAsyncRepository class RuleAsyncService(SQLAlchemyAsyncRepositoryService[BigIntRule, RuleAsyncRepository]): """Rule repository.""" repository_type = RuleAsyncRepository class RuleAsyncMockService(SQLAlchemyAsyncRepositoryService[BigIntRule, RuleAsyncMockRepository]): """Rule repository.""" repository_type = RuleAsyncMockRepository class RuleSyncMockService(SQLAlchemySyncRepositoryService[BigIntRule, RuleSyncMockRepository]): """Rule repository.""" repository_type = RuleSyncMockRepository class AuthorAsyncService(SQLAlchemyAsyncRepositoryService[BigIntAuthor, AuthorAsyncRepository]): """Author repository.""" repository_type = AuthorAsyncRepository class AuthorAsyncMockService(SQLAlchemyAsyncRepositoryService[BigIntAuthor, AuthorAsyncMockRepository]): """Author repository.""" repository_type = AuthorAsyncMockRepository class AuthorSyncMockService(SQLAlchemySyncRepositoryService[BigIntAuthor, AuthorSyncMockRepository]): """Author repository.""" repository_type = AuthorSyncMockRepository class BookAsyncService(SQLAlchemyAsyncRepositoryService[BigIntBook, BookAsyncRepository]): """Book repository.""" repository_type = BookAsyncRepository class BookAsyncMockService(SQLAlchemyAsyncRepositoryService[BigIntBook, BookAsyncMockRepository]): """Book repository.""" repository_type = BookAsyncMockRepository class 
BookSyncMockService(SQLAlchemySyncRepositoryService[BigIntBook, BookSyncMockRepository]): """Book repository.""" repository_type = BookSyncMockRepository class EventLogAsyncService(SQLAlchemyAsyncRepositoryService[BigIntEventLog, EventLogAsyncRepository]): """Event log repository.""" repository_type = EventLogAsyncRepository class ModelWithFetchedValueAsyncService( SQLAlchemyAsyncRepositoryService[BigIntModelWithFetchedValue, ModelWithFetchedValueAsyncRepository] ): """BigIntModelWithFetchedValue repository.""" repository_type = ModelWithFetchedValueAsyncRepository class TagAsyncService(SQLAlchemyAsyncRepositoryService[BigIntTag, TagAsyncRepository]): """Tag repository.""" repository_type = TagAsyncRepository class TagAsyncMockService(SQLAlchemyAsyncRepositoryService[BigIntTag, TagAsyncMockRepository]): """Tag repository.""" repository_type = TagAsyncMockRepository class TagSyncMockService(SQLAlchemySyncRepositoryService[BigIntTag, TagSyncMockRepository]): """Tag repository.""" repository_type = TagSyncMockRepository class ItemAsyncService(SQLAlchemyAsyncRepositoryService[BigIntItem, ItemAsyncRepository]): """Item repository.""" repository_type = ItemAsyncRepository class ItemAsyncMockService(SQLAlchemyAsyncRepositoryService[BigIntItem, ItemAsyncMockRepository]): """Item repository.""" repository_type = ItemAsyncMockRepository class ItemSyncMockService(SQLAlchemySyncRepositoryService[BigIntItem, ItemSyncMockRepository]): """Item repository.""" repository_type = ItemSyncMockRepository class RuleSyncService(SQLAlchemySyncRepositoryService[BigIntRule, RuleSyncRepository]): """Rule repository.""" repository_type = RuleSyncRepository class AuthorSyncService(SQLAlchemySyncRepositoryService[BigIntAuthor, AuthorSyncRepository]): """Author repository.""" repository_type = AuthorSyncRepository class BookSyncService(SQLAlchemySyncRepositoryService[BigIntBook, BookSyncRepository]): """Book repository.""" repository_type = BookSyncRepository class 
EventLogSyncService(SQLAlchemySyncRepositoryService[BigIntEventLog, EventLogSyncRepository]): """Event log repository.""" repository_type = EventLogSyncRepository class ModelWithFetchedValueSyncService( SQLAlchemySyncRepositoryService[BigIntModelWithFetchedValue, ModelWithFetchedValueSyncRepository] ): """BigIntModelWithFetchedValue repository.""" repository_type = ModelWithFetchedValueSyncRepository class SecretSyncService(SQLAlchemySyncRepositoryService[BigIntSecret, SecretSyncRepository]): """Rule repository.""" repository_type = SecretSyncRepository class TagSyncService(SQLAlchemySyncRepositoryService[BigIntTag, TagSyncRepository]): """Tag repository.""" repository_type = TagSyncRepository class ItemSyncService(SQLAlchemySyncRepositoryService[BigIntItem, ItemSyncRepository]): """Item repository.""" repository_type = ItemSyncRepository # Slug book class SlugBookAsyncService(SQLAlchemyAsyncRepositoryService[BigIntSlugBook]): """Book repository.""" repository_type = SlugBookAsyncRepository match_fields = ["title"] async def to_model(self, data: ModelDictT[BigIntSlugBook], operation: str | None = None) -> BigIntSlugBook: data = schema_dump(data) if is_dict_without_field(data, "slug") and operation == "create": data["slug"] = await self.repository.get_available_slug(data["title"]) if is_dict_without_field(data, "slug") and is_dict_with_field(data, "title") and operation == "update": data["slug"] = await self.repository.get_available_slug(data["title"]) return await super().to_model(data, operation) class SlugBookSyncService(SQLAlchemySyncRepositoryService[BigIntSlugBook, SlugBookSyncRepository]): """Book repository.""" repository_type = SlugBookSyncRepository match_fields = ["title"] def to_model( self, data: ModelDictT[BigIntSlugBook], operation: str | None = None, ) -> BigIntSlugBook: data = schema_dump(data) if is_dict_without_field(data, "slug") and operation == "create": data["slug"] = self.repository.get_available_slug(data["title"]) if 
is_dict_without_field(data, "slug") and is_dict_with_field(data, "title") and operation == "update": data["slug"] = self.repository.get_available_slug(data["title"]) return super().to_model(data, operation) class SlugBookAsyncMockService(SQLAlchemyAsyncRepositoryService[BigIntSlugBook, SlugBookAsyncMockRepository]): """Book repository.""" repository_type = SlugBookAsyncMockRepository match_fields = ["title"] async def to_model( self, data: ModelDictT[BigIntSlugBook], operation: str | None = None, ) -> BigIntSlugBook: data = schema_dump(data) if is_dict_without_field(data, "slug") and operation == "create": data["slug"] = await self.repository.get_available_slug(data["title"]) if is_dict_without_field(data, "slug") and is_dict_with_field(data, "title") and operation == "update": data["slug"] = await self.repository.get_available_slug(data["title"]) return await super().to_model(data, operation) class SlugBookSyncMockService(SQLAlchemySyncRepositoryService[BigIntSlugBook, SlugBookSyncMockRepository]): """Book repository.""" repository_type = SlugBookSyncMockRepository match_fields = ["title"] def to_model( self, data: ModelDictT[BigIntSlugBook], operation: str | None = None, ) -> BigIntSlugBook: data = schema_dump(data) if is_dict_without_field(data, "slug") and operation == "create": data["slug"] = self.repository.get_available_slug(data["title"]) if is_dict_without_field(data, "slug") and is_dict_with_field(data, "title") and operation == "update": data["slug"] = self.repository.get_available_slug(data["title"]) return super().to_model(data, operation) python-advanced-alchemy-1.0.1/tests/fixtures/uuid/000077500000000000000000000000001476663714600222225ustar00rootroot00000000000000python-advanced-alchemy-1.0.1/tests/fixtures/uuid/__init__.py000066400000000000000000000000001476663714600243210ustar00rootroot00000000000000python-advanced-alchemy-1.0.1/tests/fixtures/uuid/models.py000066400000000000000000000075321476663714600240660ustar00rootroot00000000000000"""Example 
domain objects for testing.""" from __future__ import annotations import datetime from uuid import UUID from sqlalchemy import Column, FetchedValue, ForeignKey, String, Table, func from sqlalchemy.orm import Mapped, declared_attr, mapped_column, relationship from advanced_alchemy.base import ( TableArgsType, # pyright: ignore[reportPrivateUsage] UUIDAuditBase, UUIDBase, UUIDv6Base, UUIDv7Base, merge_table_arguments, ) from advanced_alchemy.mixins import SlugKey from advanced_alchemy.types.encrypted_string import EncryptedString, EncryptedText class UUIDAuthor(UUIDAuditBase): """The UUIDAuthor domain object.""" name: Mapped[str] = mapped_column(String(length=100)) # pyright: ignore string_field: Mapped[str] = mapped_column(String(20), default="static value", nullable=True) # pyright: ignore dob: Mapped[datetime.date] = mapped_column(nullable=True) # pyright: ignore books: Mapped[list[UUIDBook]] = relationship( lazy="selectin", back_populates="author", cascade="all, delete", ) class UUIDBook(UUIDBase): """The Book domain object.""" title: Mapped[str] = mapped_column(String(length=250)) # pyright: ignore author_id: Mapped[UUID] = mapped_column(ForeignKey("uuid_author.id")) # pyright: ignore author: Mapped[UUIDAuthor] = relationship(lazy="joined", innerjoin=True, back_populates="books") # pyright: ignore class UUIDSlugBook(UUIDBase, SlugKey): """The Book domain object with a slug key.""" title: Mapped[str] = mapped_column(String(length=250)) # pyright: ignore author_id: Mapped[str] = mapped_column(String(length=250)) # pyright: ignore @declared_attr.directive @classmethod def __table_args__(cls) -> TableArgsType: return merge_table_arguments( cls, table_args={"comment": "Slugbook"}, ) class UUIDEventLog(UUIDAuditBase): """The event log domain object.""" logged_at: Mapped[datetime.datetime] = mapped_column(default=datetime.datetime.now()) # pyright: ignore payload: Mapped[dict] = mapped_column(default={}) # pyright: ignore class UUIDSecret(UUIDv7Base): """The secret 
domain model.""" secret: Mapped[str] = mapped_column( EncryptedString(key="super_secret"), ) long_secret: Mapped[str] = mapped_column( EncryptedText(key="super_secret"), ) length_validated_secret: Mapped[str] = mapped_column( EncryptedString(key="super_secret", length=10), nullable=True, ) class UUIDModelWithFetchedValue(UUIDv6Base): """The ModelWithFetchedValue UUIDBase.""" val: Mapped[int] # pyright: ignore updated: Mapped[datetime.datetime] = mapped_column( # pyright: ignore server_default=func.current_timestamp(), onupdate=func.current_timestamp(), server_onupdate=FetchedValue(), ) uuid_item_tag = Table( "uuid_item_tag", UUIDBase.metadata, Column("item_id", ForeignKey("uuid_item.id"), primary_key=True), # pyright: ignore Column("tag_id", ForeignKey("uuid_tag.id"), primary_key=True), # pyright: ignore ) class UUIDItem(UUIDBase): name: Mapped[str] = mapped_column(String(length=50)) # pyright: ignore description: Mapped[str] = mapped_column(String(length=100), nullable=True) # pyright: ignore tags: Mapped[list[UUIDTag]] = relationship(secondary=lambda: uuid_item_tag, back_populates="items") class UUIDTag(UUIDAuditBase): """The event log domain object.""" name: Mapped[str] = mapped_column(String(length=50)) # pyright: ignore items: Mapped[list[UUIDItem]] = relationship(secondary=lambda: uuid_item_tag, back_populates="tags") class UUIDRule(UUIDAuditBase): """The rule domain object.""" name: Mapped[str] = mapped_column(String(length=250)) # pyright: ignore config: Mapped[dict] = mapped_column(default=lambda: {}) # pyright: ignore python-advanced-alchemy-1.0.1/tests/fixtures/uuid/repositories.py000066400000000000000000000113641476663714600253300ustar00rootroot00000000000000"""Example domain objects for testing.""" from __future__ import annotations from advanced_alchemy.repository import ( SQLAlchemyAsyncRepository, SQLAlchemyAsyncSlugRepository, SQLAlchemySyncRepository, SQLAlchemySyncSlugRepository, ) from advanced_alchemy.repository.memory import ( 
SQLAlchemyAsyncMockRepository, SQLAlchemyAsyncMockSlugRepository, SQLAlchemySyncMockRepository, SQLAlchemySyncMockSlugRepository, ) from tests.fixtures.uuid.models import ( UUIDAuthor, UUIDBook, UUIDEventLog, UUIDItem, UUIDModelWithFetchedValue, UUIDRule, UUIDSecret, UUIDSlugBook, UUIDTag, ) class SecretAsyncRepository(SQLAlchemyAsyncRepository[UUIDSecret]): """Secret repository.""" model_type = UUIDSecret class RuleAsyncRepository(SQLAlchemyAsyncRepository[UUIDRule]): """Rule repository.""" model_type = UUIDRule class RuleAsyncMockRepository(SQLAlchemyAsyncMockRepository[UUIDRule]): """Rule repository.""" model_type = UUIDRule class RuleSyncMockRepository(SQLAlchemySyncMockRepository[UUIDRule]): """Rule repository.""" model_type = UUIDRule class AuthorAsyncRepository(SQLAlchemyAsyncRepository[UUIDAuthor]): """Author repository.""" model_type = UUIDAuthor class AuthorAsyncMockRepository(SQLAlchemyAsyncMockRepository[UUIDAuthor]): model_type = UUIDAuthor class AuthorSyncMockRepository(SQLAlchemySyncMockRepository[UUIDAuthor]): model_type = UUIDAuthor class BookAsyncRepository(SQLAlchemyAsyncRepository[UUIDBook]): """Book repository.""" model_type = UUIDBook class BookAsyncMockRepository(SQLAlchemyAsyncMockRepository[UUIDBook]): """Book repository.""" model_type = UUIDBook class BookSyncMockRepository(SQLAlchemySyncMockRepository[UUIDBook]): """Book repository.""" model_type = UUIDBook class SlugBookAsyncRepository(SQLAlchemyAsyncSlugRepository[UUIDSlugBook]): """Book repository.""" _uniquify_results = True model_type = UUIDSlugBook class SlugBookSyncRepository(SQLAlchemySyncSlugRepository[UUIDSlugBook]): """Slug Book repository.""" _uniquify_results = True model_type = UUIDSlugBook class SlugBookAsyncMockRepository(SQLAlchemyAsyncMockSlugRepository[UUIDSlugBook]): """Book repository.""" model_type = UUIDSlugBook class SlugBookSyncMockRepository(SQLAlchemySyncMockSlugRepository[UUIDSlugBook]): """Book repository.""" model_type = UUIDSlugBook class 
EventLogAsyncRepository(SQLAlchemyAsyncRepository[UUIDEventLog]): """Event log repository.""" model_type = UUIDEventLog class ModelWithFetchedValueAsyncRepository(SQLAlchemyAsyncRepository[UUIDModelWithFetchedValue]): """ModelWithFetchedValue repository.""" model_type = UUIDModelWithFetchedValue class TagAsyncRepository(SQLAlchemyAsyncRepository[UUIDTag]): """Tag repository.""" model_type = UUIDTag class TagAsyncMockRepository(SQLAlchemyAsyncMockRepository[UUIDTag]): """Tag repository.""" model_type = UUIDTag class TagSyncMockRepository(SQLAlchemySyncMockRepository[UUIDTag]): """Tag repository.""" model_type = UUIDTag class ItemAsyncRepository(SQLAlchemyAsyncRepository[UUIDItem]): """Item repository.""" model_type = UUIDItem class ItemAsyncMockRepository(SQLAlchemyAsyncMockRepository[UUIDItem]): """Item repository.""" model_type = UUIDItem class ItemSyncMockRepository(SQLAlchemySyncMockRepository[UUIDItem]): """Item repository.""" model_type = UUIDItem class SecretAsyncMockRepository(SQLAlchemyAsyncMockRepository[UUIDSecret]): """Secret repository.""" model_type = UUIDSecret class SecretSyncMockRepository(SQLAlchemySyncMockRepository[UUIDSecret]): """Secret repository.""" model_type = UUIDSecret class AuthorSyncRepository(SQLAlchemySyncRepository[UUIDAuthor]): """Author repository.""" model_type = UUIDAuthor class BookSyncRepository(SQLAlchemySyncRepository[UUIDBook]): """Book repository.""" model_type = UUIDBook class SecretSyncRepository(SQLAlchemySyncRepository[UUIDSecret]): """Secret repository.""" model_type = UUIDSecret class EventLogSyncRepository(SQLAlchemySyncRepository[UUIDEventLog]): """Event log repository.""" model_type = UUIDEventLog class RuleSyncRepository(SQLAlchemySyncRepository[UUIDRule]): """Rule repository.""" model_type = UUIDRule class ModelWithFetchedValueSyncRepository(SQLAlchemySyncRepository[UUIDModelWithFetchedValue]): """ModelWithFetchedValue repository.""" model_type = UUIDModelWithFetchedValue class 
TagSyncRepository(SQLAlchemySyncRepository[UUIDTag]): """Tag repository.""" model_type = UUIDTag class ItemSyncRepository(SQLAlchemySyncRepository[UUIDItem]): """Item repository.""" model_type = UUIDItem python-advanced-alchemy-1.0.1/tests/fixtures/uuid/services.py000066400000000000000000000215031476663714600244200ustar00rootroot00000000000000"""Example domain objects for testing.""" from __future__ import annotations from advanced_alchemy.service import ( SQLAlchemyAsyncRepositoryService, SQLAlchemySyncRepositoryService, ) from advanced_alchemy.service.typing import ( ModelDictT, is_dict_with_field, is_dict_without_field, schema_dump, ) from tests.fixtures.uuid.models import ( UUIDAuthor, UUIDBook, UUIDEventLog, UUIDItem, UUIDModelWithFetchedValue, UUIDRule, UUIDSecret, UUIDSlugBook, UUIDTag, ) from tests.fixtures.uuid.repositories import ( AuthorAsyncMockRepository, AuthorAsyncRepository, AuthorSyncMockRepository, AuthorSyncRepository, BookAsyncMockRepository, BookAsyncRepository, BookSyncMockRepository, BookSyncRepository, EventLogAsyncRepository, EventLogSyncRepository, ItemAsyncMockRepository, ItemAsyncRepository, ItemSyncMockRepository, ItemSyncRepository, ModelWithFetchedValueAsyncRepository, ModelWithFetchedValueSyncRepository, RuleAsyncMockRepository, RuleAsyncRepository, RuleSyncMockRepository, RuleSyncRepository, SecretAsyncRepository, SecretSyncRepository, SlugBookAsyncMockRepository, SlugBookAsyncRepository, SlugBookSyncMockRepository, SlugBookSyncRepository, TagAsyncMockRepository, TagAsyncRepository, TagSyncMockRepository, TagSyncRepository, ) class SecretAsyncService(SQLAlchemyAsyncRepositoryService[UUIDSecret, SecretAsyncRepository]): """Rule repository.""" repository_type = SecretAsyncRepository class RuleAsyncService(SQLAlchemyAsyncRepositoryService[UUIDRule, RuleAsyncRepository]): """Rule repository.""" repository_type = RuleAsyncRepository class RuleAsyncMockService(SQLAlchemyAsyncRepositoryService[UUIDRule, RuleAsyncMockRepository]): """Rule 
repository.""" repository_type = RuleAsyncMockRepository class RuleSyncMockService(SQLAlchemySyncRepositoryService[UUIDRule, RuleSyncMockRepository]): """Rule repository.""" repository_type = RuleSyncMockRepository class AuthorAsyncService(SQLAlchemyAsyncRepositoryService[UUIDAuthor, AuthorAsyncRepository]): """Author repository.""" repository_type = AuthorAsyncRepository class AuthorAsyncMockService(SQLAlchemyAsyncRepositoryService[UUIDAuthor, AuthorAsyncMockRepository]): """Author repository.""" repository_type = AuthorAsyncMockRepository class AuthorSyncMockService(SQLAlchemySyncRepositoryService[UUIDAuthor, AuthorSyncMockRepository]): """Author repository.""" repository_type = AuthorSyncMockRepository class BookAsyncService(SQLAlchemyAsyncRepositoryService[UUIDBook, BookAsyncRepository]): """Book repository.""" repository_type = BookAsyncRepository class BookAsyncMockService(SQLAlchemyAsyncRepositoryService[UUIDBook, BookAsyncMockRepository]): """Book repository.""" repository_type = BookAsyncMockRepository class BookSyncMockService(SQLAlchemySyncRepositoryService[UUIDBook, BookSyncMockRepository]): """Book repository.""" repository_type = BookSyncMockRepository class EventLogAsyncService(SQLAlchemyAsyncRepositoryService[UUIDEventLog, EventLogAsyncRepository]): """Event log repository.""" repository_type = EventLogAsyncRepository class ModelWithFetchedValueAsyncService( SQLAlchemyAsyncRepositoryService[UUIDModelWithFetchedValue, ModelWithFetchedValueAsyncRepository] ): """UUIDModelWithFetchedValue repository.""" repository_type = ModelWithFetchedValueAsyncRepository class TagAsyncService(SQLAlchemyAsyncRepositoryService[UUIDTag, TagAsyncRepository]): """Tag repository.""" repository_type = TagAsyncRepository class TagAsyncMockService(SQLAlchemyAsyncRepositoryService[UUIDTag, TagAsyncMockRepository]): """Tag repository.""" repository_type = TagAsyncMockRepository class TagSyncMockService(SQLAlchemySyncRepositoryService[UUIDTag, TagSyncMockRepository]): """Tag 
repository.""" repository_type = TagSyncMockRepository class ItemAsyncService(SQLAlchemyAsyncRepositoryService[UUIDItem, ItemAsyncRepository]): """Item repository.""" repository_type = ItemAsyncRepository class ItemAsyncMockService(SQLAlchemyAsyncRepositoryService[UUIDItem, ItemAsyncMockRepository]): """Item repository.""" repository_type = ItemAsyncMockRepository class ItemSyncMockService(SQLAlchemySyncRepositoryService[UUIDItem, ItemSyncMockRepository]): """Item repository.""" repository_type = ItemSyncMockRepository class RuleSyncService(SQLAlchemySyncRepositoryService[UUIDRule, RuleSyncRepository]): """Rule repository.""" repository_type = RuleSyncRepository class AuthorSyncService(SQLAlchemySyncRepositoryService[UUIDAuthor, AuthorSyncRepository]): """Author repository.""" repository_type = AuthorSyncRepository class BookSyncService(SQLAlchemySyncRepositoryService[UUIDBook, BookSyncRepository]): """Book repository.""" repository_type = BookSyncRepository class EventLogSyncService(SQLAlchemySyncRepositoryService[UUIDEventLog, EventLogSyncRepository]): """Event log repository.""" repository_type = EventLogSyncRepository class ModelWithFetchedValueSyncService( SQLAlchemySyncRepositoryService[UUIDModelWithFetchedValue, ModelWithFetchedValueSyncRepository] ): """UUIDModelWithFetchedValue repository.""" repository_type = ModelWithFetchedValueSyncRepository class TagSyncService(SQLAlchemySyncRepositoryService[UUIDTag, TagSyncRepository]): """Tag repository.""" repository_type = TagSyncRepository class ItemSyncService(SQLAlchemySyncRepositoryService[UUIDItem, ItemSyncRepository]): """Item repository.""" repository_type = ItemSyncRepository class SecretSyncService(SQLAlchemySyncRepositoryService[UUIDSecret, SecretSyncRepository]): """Rule repository.""" repository_type = SecretSyncRepository class SlugBookAsyncService(SQLAlchemyAsyncRepositoryService[UUIDSlugBook, SlugBookAsyncRepository]): """Book repository.""" repository_type = SlugBookAsyncRepository match_fields = 
["title"] async def to_model( self, data: ModelDictT[UUIDSlugBook], operation: str | None = None, ) -> UUIDSlugBook: data = schema_dump(data) if is_dict_without_field(data, "slug") and operation == "create": data["slug"] = await self.repository.get_available_slug(data["title"]) if is_dict_without_field(data, "slug") and is_dict_with_field(data, "title") and operation == "update": data["slug"] = await self.repository.get_available_slug(data["title"]) return await super().to_model(data, operation) class SlugBookSyncService(SQLAlchemySyncRepositoryService[UUIDSlugBook, SlugBookSyncRepository]): """Book repository.""" repository_type = SlugBookSyncRepository def to_model( self, data: ModelDictT[UUIDSlugBook], operation: str | None = None, ) -> UUIDSlugBook: data = schema_dump(data) if is_dict_without_field(data, "slug") and operation == "create": data["slug"] = self.repository.get_available_slug(data["title"]) if is_dict_without_field(data, "slug") and is_dict_with_field(data, "title") and operation == "update": data["slug"] = self.repository.get_available_slug(data["title"]) return super().to_model(data, operation) class SlugBookAsyncMockService(SQLAlchemyAsyncRepositoryService[UUIDSlugBook, SlugBookAsyncMockRepository]): """Book repository.""" repository_type = SlugBookAsyncMockRepository match_fields = ["title"] async def to_model( self, data: ModelDictT[UUIDSlugBook], operation: str | None = None, ) -> UUIDSlugBook: data = schema_dump(data) if is_dict_without_field(data, "slug") and operation == "create": data["slug"] = await self.repository.get_available_slug(data["title"]) if is_dict_without_field(data, "slug") and is_dict_with_field(data, "title") and operation == "update": data["slug"] = await self.repository.get_available_slug(data["title"]) return await super().to_model(data, operation) class SlugBookSyncMockService(SQLAlchemySyncRepositoryService[UUIDSlugBook, SlugBookSyncMockRepository]): """Book repository.""" repository_type = SlugBookSyncMockRepository 
match_fields = ["title"] def to_model( self, data: ModelDictT[UUIDSlugBook], operation: str | None = None, ) -> UUIDSlugBook: data = schema_dump(data) if is_dict_without_field(data, "slug") and operation == "create": data["slug"] = self.repository.get_available_slug(data["title"]) if is_dict_without_field(data, "slug") and is_dict_with_field(data, "title") and operation == "update": data["slug"] = self.repository.get_available_slug(data["title"]) return super().to_model(data, operation) python-advanced-alchemy-1.0.1/tests/helpers.py000066400000000000000000000060161476663714600214220ustar00rootroot00000000000000from __future__ import annotations import asyncio import importlib import inspect import sys from collections.abc import Awaitable from contextlib import AbstractAsyncContextManager, AbstractContextManager from functools import partial from pathlib import Path from typing import TYPE_CHECKING, Any, Callable, TypeVar, cast, overload from typing_extensions import ParamSpec if TYPE_CHECKING: from types import TracebackType T = TypeVar("T") P = ParamSpec("P") def purge_module(module_names: list[str], path: str | Path) -> None: for name in module_names: if name in sys.modules: del sys.modules[name] Path(importlib.util.cache_from_source(path)).unlink(missing_ok=True) # type: ignore[arg-type] class _ContextManagerWrapper: def __init__(self, cm: AbstractContextManager[T]) -> None: self._cm = cm async def __aenter__(self) -> T: # pyright: ignore return self._cm.__enter__() # type: ignore[return-value] # pyright: ignore async def __aexit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None, ) -> bool | None: return self._cm.__exit__(exc_type, exc_val, exc_tb) @overload async def maybe_async(obj: Awaitable[T]) -> T: ... @overload async def maybe_async(obj: T) -> T: ... 
async def maybe_async(obj: Awaitable[T] | T) -> T: return cast(T, await obj) if inspect.isawaitable(obj) else cast(T, obj) # type: ignore[redundant-cast] def maybe_async_cm(obj: AbstractContextManager[T] | AbstractAsyncContextManager[T]) -> AbstractAsyncContextManager[T]: if isinstance(obj, AbstractContextManager): return cast(AbstractAsyncContextManager[T], _ContextManagerWrapper(obj)) return obj def wrap_sync(fn: Callable[P, T]) -> Callable[P, Awaitable[T]]: if inspect.iscoroutinefunction(fn): return fn async def wrapped(*args: P.args, **kwargs: P.kwargs) -> T: return await asyncio.get_running_loop().run_in_executor(None, partial(fn, *args, **kwargs)) return wrapped class NoValue: """A fake "Empty class""" async def anext_(iterable: Any, default: Any = NoValue, *args: Any) -> Any: # pragma: nocover """Return the next item from an async iterator. Args: iterable: An async iterable. default: An optional default value to return if the iterable is empty. *args: The remaining args Return: The next value of the iterable. Raises: TypeError: The iterable given is not async. This function will return the next value form an async iterable. If the iterable is empty the StopAsyncIteration will be propagated. However, if a default value is given as a second argument the exception is silenced and the default value is returned instead. 
""" has_default = bool(not isinstance(default, NoValue)) try: return await iterable.__anext__() except StopAsyncIteration as exc: if has_default: return default raise StopAsyncIteration from exc python-advanced-alchemy-1.0.1/tests/integration/000077500000000000000000000000001476663714600217265ustar00rootroot00000000000000python-advanced-alchemy-1.0.1/tests/integration/__init__.py000066400000000000000000000000001476663714600240250ustar00rootroot00000000000000python-advanced-alchemy-1.0.1/tests/integration/conftest.py000066400000000000000000000434011476663714600241270ustar00rootroot00000000000000from __future__ import annotations from collections.abc import AsyncGenerator, Generator from typing import TYPE_CHECKING, cast from unittest.mock import NonCallableMagicMock, create_autospec import pytest from pytest import FixtureRequest from sqlalchemy import URL, Dialect, Engine, NullPool, create_engine from sqlalchemy.ext.asyncio import AsyncEngine, AsyncSession, async_sessionmaker, create_async_engine from sqlalchemy.orm import Session, sessionmaker if TYPE_CHECKING: from pathlib import Path from pytest import MonkeyPatch @pytest.fixture(autouse=True) def _patch_bases(monkeypatch: MonkeyPatch) -> None: # pyright: ignore[reportUnusedFunction] """Ensure new registry state for every test. This prevents errors such as "Table '...' is already defined for this MetaData instance... """ from sqlalchemy.orm import DeclarativeBase from advanced_alchemy import base, mixins class NewUUIDBase(mixins.UUIDPrimaryKey, base.CommonTableAttributes, DeclarativeBase): ... class NewUUIDAuditBase( mixins.UUIDPrimaryKey, base.CommonTableAttributes, mixins.AuditColumns, DeclarativeBase, ): ... class NewUUIDv6Base(mixins.UUIDPrimaryKey, base.CommonTableAttributes, DeclarativeBase): ... class NewUUIDv6AuditBase( mixins.UUIDPrimaryKey, base.CommonTableAttributes, mixins.AuditColumns, DeclarativeBase, ): ... 
class NewUUIDv7Base(mixins.UUIDPrimaryKey, base.CommonTableAttributes, DeclarativeBase): ... class NewUUIDv7AuditBase( mixins.UUIDPrimaryKey, base.CommonTableAttributes, mixins.AuditColumns, DeclarativeBase, ): ... class NewNanoIDBase(mixins.NanoIDPrimaryKey, base.CommonTableAttributes, DeclarativeBase): ... class NewNanoIDAuditBase( mixins.NanoIDPrimaryKey, base.CommonTableAttributes, mixins.AuditColumns, DeclarativeBase, ): ... class NewBigIntBase(mixins.BigIntPrimaryKey, base.CommonTableAttributes, DeclarativeBase): ... class NewBigIntAuditBase( mixins.BigIntPrimaryKey, base.CommonTableAttributes, mixins.AuditColumns, DeclarativeBase, ): ... monkeypatch.setattr(base, "UUIDBase", NewUUIDBase) monkeypatch.setattr(base, "UUIDAuditBase", NewUUIDAuditBase) monkeypatch.setattr(base, "UUIDv6Base", NewUUIDv6Base) monkeypatch.setattr(base, "UUIDv6AuditBase", NewUUIDv6AuditBase) monkeypatch.setattr(base, "UUIDv7Base", NewUUIDv7Base) monkeypatch.setattr(base, "UUIDv7AuditBase", NewUUIDv7AuditBase) monkeypatch.setattr(base, "NanoIDBase", NewNanoIDBase) monkeypatch.setattr(base, "NanoIDAuditBase", NewNanoIDAuditBase) monkeypatch.setattr(base, "BigIntBase", NewBigIntBase) monkeypatch.setattr(base, "BigIntAuditBase", NewBigIntAuditBase) @pytest.fixture() def duckdb_engine(tmp_path: Path) -> Generator[Engine, None, None]: """SQLite engine for end-to-end testing. Returns: Async SQLAlchemy engine instance. """ engine = create_engine(f"duckdb:///{tmp_path}/test.duck.db", poolclass=NullPool) try: yield engine finally: engine.dispose() @pytest.fixture() def oracle18c_engine(docker_ip: str, oracle18c_service: None) -> Generator[Engine, None, None]: """Oracle 18c instance for end-to-end testing. Args: docker_ip: IP address for TCP connection to Docker containers. oracle18c_service: ... Returns: Async SQLAlchemy engine instance. 
""" yield create_engine( "oracle+oracledb://:@", thick_mode=False, connect_args={ "user": "app", "password": "super-secret", "host": docker_ip, "port": 1512, "service_name": "xepdb1", }, poolclass=NullPool, ) @pytest.fixture() def oracle23c_engine(docker_ip: str, oracle23c_service: None) -> Generator[Engine, None, None]: """Oracle 23c instance for end-to-end testing. Args: docker_ip: IP address for TCP connection to Docker containers. oracle23c_service: ... Returns: Async SQLAlchemy engine instance. """ yield create_engine( "oracle+oracledb://:@", thick_mode=False, connect_args={ "user": "app", "password": "super-secret", "host": docker_ip, "port": 1513, "service_name": "FREEPDB1", }, poolclass=NullPool, ) @pytest.fixture() def psycopg_engine(docker_ip: str, postgres_service: None) -> Generator[Engine, None, None]: """Postgresql instance for end-to-end testing.""" yield create_engine( URL( drivername="postgresql+psycopg", username="postgres", password="super-secret", host=docker_ip, port=5423, database="postgres", query={}, # type:ignore[arg-type] ), poolclass=NullPool, ) @pytest.fixture() def mssql_engine(docker_ip: str, mssql_service: None) -> Generator[Engine, None, None]: """MS SQL instance for end-to-end testing.""" yield create_engine( URL( drivername="mssql+pyodbc", username="sa", password="Super-secret1", host=docker_ip, port=1344, database="master", query={ "driver": "ODBC Driver 18 for SQL Server", "encrypt": "no", "TrustServerCertificate": "yes", }, # type:ignore[arg-type] ), poolclass=NullPool, ) @pytest.fixture() def sqlite_engine(tmp_path: Path) -> Generator[Engine, None, None]: """SQLite engine for end-to-end testing. Returns: Async SQLAlchemy engine instance. 
""" engine = create_engine(f"sqlite:///{tmp_path}/test.db", poolclass=NullPool) try: yield engine finally: engine.dispose() @pytest.fixture() def spanner_engine(docker_ip: str, spanner_service: None, monkeypatch: MonkeyPatch) -> Generator[Engine, None, None]: """Postgresql instance for end-to-end testing.""" monkeypatch.setenv("SPANNER_EMULATOR_HOST", "localhost:9010") monkeypatch.setenv("GOOGLE_CLOUD_PROJECT", "emulator-test-project") yield create_engine( "spanner+spanner:///projects/emulator-test-project/instances/test-instance/databases/test-database", poolclass=NullPool, ) @pytest.fixture() def cockroachdb_engine(docker_ip: str, cockroachdb_service: None) -> Generator[Engine, None, None]: """CockroachDB instance for end-to-end testing.""" yield create_engine( url="cockroachdb://root@localhost:26257/defaultdb?sslmode=disable", poolclass=NullPool, ) @pytest.fixture() def mock_sync_engine() -> Generator[NonCallableMagicMock, None, None]: """Return a mocked Engine instance.""" mock = cast(NonCallableMagicMock, create_autospec(Engine, instance=True)) mock.dialect = create_autospec(Dialect, instance=True) mock.dialect.name = "mock" yield mock @pytest.fixture( name="engine", params=[ pytest.param( "sqlite_engine", marks=[ pytest.mark.sqlite, pytest.mark.integration, pytest.mark.xdist_group("sqlite"), ], ), pytest.param( "duckdb_engine", marks=[ pytest.mark.duckdb, pytest.mark.integration, pytest.mark.xdist_group("duckdb"), ], ), pytest.param( "oracle18c_engine", marks=[ pytest.mark.oracledb_sync, pytest.mark.integration, pytest.mark.xdist_group("oracle18"), ], ), pytest.param( "oracle23c_engine", marks=[ pytest.mark.oracledb_sync, pytest.mark.integration, pytest.mark.xdist_group("oracle23"), ], ), pytest.param( "psycopg_engine", marks=[ pytest.mark.psycopg_sync, pytest.mark.integration, pytest.mark.xdist_group("postgres"), ], ), pytest.param( "cockroachdb_engine", marks=[ pytest.mark.cockroachdb_sync, pytest.mark.integration, pytest.mark.xdist_group("cockroachdb"), ], 
), pytest.param( "mssql_engine", marks=[ pytest.mark.mssql_sync, pytest.mark.integration, pytest.mark.xdist_group("mssql"), ], ), pytest.param( "mock_sync_engine", marks=[ pytest.mark.mock_sync, pytest.mark.integration, pytest.mark.xdist_group("mock"), ], ), ], ) def engine(request: FixtureRequest) -> Generator[Engine, None, None]: yield cast(Engine, request.getfixturevalue(request.param)) @pytest.fixture() def session(engine: Engine, request: FixtureRequest) -> Generator[Session, None, None]: if "mock_sync_engine" in request.fixturenames: session = create_autospec(Session, instance=True) session.bind = engine yield session else: session = sessionmaker(bind=engine, expire_on_commit=False)() try: yield session finally: session.rollback() session.close() @pytest.fixture() async def aiosqlite_engine(tmp_path: Path) -> AsyncGenerator[AsyncEngine, None]: """SQLite engine for end-to-end testing. Returns: Async SQLAlchemy engine instance. """ engine = create_async_engine(f"sqlite+aiosqlite:///{tmp_path}/test.db", poolclass=NullPool) try: yield engine finally: await engine.dispose() @pytest.fixture() async def asyncmy_engine(docker_ip: str, mysql_service: None) -> AsyncGenerator[AsyncEngine, None]: """Postgresql instance for end-to-end testing.""" yield create_async_engine( URL( drivername="mysql+asyncmy", username="app", password="super-secret", host=docker_ip, port=3360, database="db", query={}, # type:ignore[arg-type] ), poolclass=NullPool, ) @pytest.fixture() async def asyncpg_engine(docker_ip: str, postgres_service: None) -> AsyncGenerator[AsyncEngine, None]: """Postgresql instance for end-to-end testing.""" yield create_async_engine( URL( drivername="postgresql+asyncpg", username="postgres", password="super-secret", host=docker_ip, port=5423, database="postgres", query={}, # type:ignore[arg-type] ), poolclass=NullPool, ) @pytest.fixture() async def psycopg_async_engine(docker_ip: str, postgres_service: None) -> AsyncGenerator[AsyncEngine, None]: """Postgresql 
instance for end-to-end testing.""" yield create_async_engine( URL( drivername="postgresql+psycopg", username="postgres", password="super-secret", host=docker_ip, port=5423, database="postgres", query={}, # type:ignore[arg-type] ), poolclass=NullPool, ) @pytest.fixture() async def cockroachdb_async_engine(docker_ip: str, cockroachdb_service: None) -> AsyncGenerator[AsyncEngine, None]: """Cockroach DB async engine instance for end-to-end testing.""" yield create_async_engine( url="cockroachdb+asyncpg://root@localhost:26257/defaultdb", poolclass=NullPool, ) @pytest.fixture() async def mssql_async_engine(docker_ip: str, mssql_service: None) -> AsyncGenerator[AsyncEngine, None]: """MS SQL instance for end-to-end testing.""" yield create_async_engine( URL( drivername="mssql+aioodbc", username="sa", password="Super-secret1", host=docker_ip, port=1344, database="master", query={ "driver": "ODBC Driver 18 for SQL Server", "encrypt": "no", "TrustServerCertificate": "yes", # NOTE: MARS_Connection is only needed for the concurrent async tests # lack of this causes some tests to fail # https://github.com/litestar-org/advanced-alchemy/actions/runs/6800623970/job/18493034767?pr=94 "MARS_Connection": "yes", }, # type:ignore[arg-type] ), poolclass=NullPool, ) @pytest.fixture() async def oracle18c_async_engine(docker_ip: str, oracle18c_service: None) -> AsyncGenerator[AsyncEngine, None]: """Oracle 18c instance for end-to-end testing. Args: docker_ip: IP address for TCP connection to Docker containers. oracle18c_service: ... Returns: Async SQLAlchemy engine instance. """ yield create_async_engine( "oracle+oracledb://:@", thick_mode=False, connect_args={ "user": "app", "password": "super-secret", "host": docker_ip, "port": 1512, "service_name": "xepdb1", }, poolclass=NullPool, ) @pytest.fixture() async def oracle23c_async_engine(docker_ip: str, oracle23c_service: None) -> AsyncGenerator[AsyncEngine, None]: """Oracle 23c instance for end-to-end testing. 
Args: docker_ip: IP address for TCP connection to Docker containers. oracle23c_service: ... Returns: Async SQLAlchemy engine instance. """ yield create_async_engine( "oracle+oracledb://:@", thick_mode=False, connect_args={ "user": "app", "password": "super-secret", "host": docker_ip, "port": 1513, "service_name": "FREEPDB1", }, poolclass=NullPool, ) @pytest.fixture() async def mock_async_engine() -> AsyncGenerator[NonCallableMagicMock, None]: """Return a mocked AsyncEngine instance.""" mock = cast(NonCallableMagicMock, create_autospec(AsyncEngine, instance=True)) mock.dialect = create_autospec(Dialect, instance=True) mock.dialect.name = "mock" yield mock @pytest.fixture( name="async_engine", params=[ pytest.param( "aiosqlite_engine", marks=[ pytest.mark.aiosqlite, pytest.mark.integration, pytest.mark.xdist_group("sqlite"), ], ), pytest.param( "asyncmy_engine", marks=[ pytest.mark.asyncmy, pytest.mark.integration, pytest.mark.xdist_group("mysql"), ], ), pytest.param( "asyncpg_engine", marks=[ pytest.mark.asyncpg, pytest.mark.integration, pytest.mark.xdist_group("postgres"), ], ), pytest.param( "psycopg_async_engine", marks=[ pytest.mark.psycopg_async, pytest.mark.integration, pytest.mark.xdist_group("postgres"), ], ), pytest.param( "cockroachdb_async_engine", marks=[ pytest.mark.cockroachdb_async, pytest.mark.integration, pytest.mark.xdist_group("cockroachdb"), ], ), pytest.param( "mssql_async_engine", marks=[ pytest.mark.mssql_async, pytest.mark.integration, pytest.mark.xdist_group("mssql"), ], ), pytest.param( "oracle18c_async_engine", marks=[ pytest.mark.oracledb_async, pytest.mark.integration, pytest.mark.xdist_group("oracle18"), ], ), pytest.param( "oracle23c_async_engine", marks=[ pytest.mark.oracledb_async, pytest.mark.integration, pytest.mark.xdist_group("oracle23"), ], ), pytest.param( "mock_async_engine", marks=[ pytest.mark.mock_async, pytest.mark.integration, pytest.mark.xdist_group("mock"), ], ), ], ) async def async_engine(request: FixtureRequest) -> 
AsyncGenerator[AsyncEngine, None]: yield cast(AsyncEngine, request.getfixturevalue(request.param)) @pytest.fixture() async def async_session( async_engine: AsyncEngine, request: FixtureRequest, ) -> AsyncGenerator[AsyncSession, None]: if "mock_async_engine" in request.fixturenames: session = create_autospec(AsyncSession, instance=True) session.bind = async_engine yield session else: session = async_sessionmaker(bind=async_engine, expire_on_commit=False)() try: yield session finally: await session.rollback() await session.close() # @pytest.fixture() # async def sync_sqlalchemy_config(engine: Engine, session_maker: sessionmaker[Session]) -> SQLAlchemySyncConfig: # # # @pytest.fixture() # async def async_sqlalchemy_config( # async_engine: AsyncEngine, # async_session_maker: async_sessionmaker[AsyncSession], # ) -> SQLAlchemyAsyncConfig: # # # @pytest.fixture() # async def sync_alembic_commands(sync_sqlalchemy_config: SQLAlchemySyncConfig) -> commands.AlembicCommands: # # # @pytest.fixture() # async def async_alembic_commands(async_sqlalchemy_config: SQLAlchemyAsyncConfig) -> commands.AlembicCommands: # # # @pytest.fixture(params=["sync_alembic_commands", "async_alembic_commands"], autouse=True) # def alembic_commands(request: FixtureRequest) -> commands.AlembicCommands: python-advanced-alchemy-1.0.1/tests/integration/helpers.py000066400000000000000000000017361476663714600237510ustar00rootroot00000000000000from __future__ import annotations import datetime from typing import Any def update_raw_records(raw_authors: list[dict[str, Any]], raw_rules: list[dict[str, Any]]) -> None: for raw_author in raw_authors: raw_author["dob"] = datetime.datetime.strptime(raw_author["dob"], "%Y-%m-%d").date() raw_author["created_at"] = datetime.datetime.strptime(raw_author["created_at"], "%Y-%m-%dT%H:%M:%S").astimezone( datetime.timezone.utc, ) raw_author["updated_at"] = datetime.datetime.strptime(raw_author["updated_at"], "%Y-%m-%dT%H:%M:%S").astimezone( datetime.timezone.utc, ) for 
raw_rule in raw_rules: raw_rule["created_at"] = datetime.datetime.strptime(raw_rule["created_at"], "%Y-%m-%dT%H:%M:%S").astimezone( datetime.timezone.utc ) raw_rule["updated_at"] = datetime.datetime.strptime(raw_rule["updated_at"], "%Y-%m-%dT%H:%M:%S").astimezone( datetime.timezone.utc ) python-advanced-alchemy-1.0.1/tests/integration/test_alembic_commands.py000066400000000000000000000247101476663714600266200ustar00rootroot00000000000000from __future__ import annotations from collections.abc import Generator from pathlib import Path from typing import cast from uuid import UUID import pytest from _pytest.monkeypatch import MonkeyPatch from pytest import CaptureFixture, FixtureRequest from pytest_lazy_fixtures import lf from sqlalchemy import Engine, ForeignKey, String from sqlalchemy.ext.asyncio import AsyncEngine, async_sessionmaker from sqlalchemy.orm import Mapped, mapped_column, relationship, sessionmaker from advanced_alchemy import base from advanced_alchemy.alembic import commands from advanced_alchemy.alembic.utils import drop_all, dump_tables from advanced_alchemy.extensions.litestar import SQLAlchemyAsyncConfig, SQLAlchemySyncConfig from alembic.util.exc import CommandError from tests.fixtures.uuid import models as models_uuid from tests.helpers import maybe_async AuthorModel = type[models_uuid.UUIDAuthor] RuleModel = type[models_uuid.UUIDRule] ModelWithFetchedValue = type[models_uuid.UUIDModelWithFetchedValue] ItemModel = type[models_uuid.UUIDItem] TagModel = type[models_uuid.UUIDTag] pytestmark = [ pytest.mark.integration, ] @pytest.fixture( params=[ pytest.param( "sqlite_engine", marks=[ pytest.mark.sqlite, pytest.mark.integration, ], ), pytest.param( "duckdb_engine", marks=[ pytest.mark.duckdb, pytest.mark.integration, pytest.mark.xdist_group("duckdb"), ], ), pytest.param( "oracle18c_engine", marks=[ pytest.mark.oracledb_sync, pytest.mark.integration, pytest.mark.xdist_group("oracle18"), ], ), pytest.param( "oracle23c_engine", marks=[ 
pytest.mark.oracledb_sync, pytest.mark.integration, pytest.mark.xdist_group("oracle23"), ], ), pytest.param( "psycopg_engine", marks=[ pytest.mark.psycopg_sync, pytest.mark.integration, pytest.mark.xdist_group("postgres"), ], ), pytest.param( "spanner_engine", marks=[ pytest.mark.spanner, pytest.mark.integration, pytest.mark.xdist_group("spanner"), ], ), pytest.param( "mssql_engine", marks=[ pytest.mark.mssql_sync, pytest.mark.integration, pytest.mark.xdist_group("mssql"), ], ), pytest.param( "cockroachdb_engine", marks=[ pytest.mark.cockroachdb_sync, pytest.mark.integration, pytest.mark.xdist_group("cockroachdb"), ], ), ], ) def sync_sqlalchemy_config(request: FixtureRequest) -> Generator[SQLAlchemySyncConfig, None, None]: engine = cast(Engine, request.getfixturevalue(request.param)) orm_registry = base.create_registry() yield SQLAlchemySyncConfig( engine_instance=engine, session_maker=sessionmaker(bind=engine, expire_on_commit=False), metadata=orm_registry.metadata, ) @pytest.fixture( params=[ pytest.param( "aiosqlite_engine", marks=[ pytest.mark.aiosqlite, pytest.mark.integration, ], ), pytest.param( "asyncmy_engine", marks=[ pytest.mark.asyncmy, pytest.mark.integration, pytest.mark.xdist_group("mysql"), ], ), pytest.param( "asyncpg_engine", marks=[ pytest.mark.asyncpg, pytest.mark.integration, pytest.mark.xdist_group("postgres"), ], ), pytest.param( "psycopg_async_engine", marks=[ pytest.mark.psycopg_async, pytest.mark.integration, pytest.mark.xdist_group("postgres"), ], ), pytest.param( "cockroachdb_async_engine", marks=[ pytest.mark.cockroachdb_async, pytest.mark.integration, pytest.mark.xdist_group("cockroachdb"), ], ), pytest.param( "oracle18c_async_engine", marks=[ pytest.mark.oracledb_async, pytest.mark.integration, pytest.mark.xdist_group("oracle18"), ], ), pytest.param( "oracle23c_async_engine", marks=[ pytest.mark.oracledb_async, pytest.mark.integration, pytest.mark.xdist_group("oracle23"), ], ), pytest.param( "mssql_async_engine", marks=[ 
pytest.mark.mssql_async, pytest.mark.integration, pytest.mark.xdist_group("mssql"), ], ), ], ) def async_sqlalchemy_config( request: FixtureRequest, ) -> Generator[SQLAlchemyAsyncConfig, None, None]: async_engine = cast(AsyncEngine, request.getfixturevalue(request.param)) orm_registry = base.create_registry() yield SQLAlchemyAsyncConfig( engine_instance=async_engine, session_maker=async_sessionmaker(bind=async_engine, expire_on_commit=False), metadata=orm_registry.metadata, ) @pytest.fixture( params=[lf("sync_sqlalchemy_config"), lf("async_sqlalchemy_config")], ids=["sync", "async"], ) def any_config(request: FixtureRequest) -> Generator[SQLAlchemySyncConfig | SQLAlchemyAsyncConfig, None, None]: """Return a session for the current session""" if isinstance(request.param, SQLAlchemyAsyncConfig): request.getfixturevalue("async_sqlalchemy_config") else: request.getfixturevalue("sync_sqlalchemy_config") yield request.param # type: ignore[no-any-return] @pytest.fixture() def alembic_commands( any_config: SQLAlchemySyncConfig | SQLAlchemyAsyncConfig, ) -> Generator[commands.AlembicCommands, None, None]: yield commands.AlembicCommands( sqlalchemy_config=any_config, ) @pytest.fixture def tmp_project_dir(monkeypatch: MonkeyPatch, tmp_path: Path) -> Generator[Path, None, None]: path = tmp_path / "project_dir" path.mkdir(exist_ok=True) monkeypatch.chdir(path) yield path async def test_alembic_init(alembic_commands: commands.AlembicCommands, tmp_project_dir: Path) -> None: alembic_commands.init(directory=f"{tmp_project_dir}/migrations/") expected_dirs = [f"{tmp_project_dir}/migrations/", f"{tmp_project_dir}/migrations/versions"] expected_files = [f"{tmp_project_dir}/migrations/env.py", f"{tmp_project_dir}/migrations/script.py.mako"] for dir in expected_dirs: assert Path(dir).is_dir() for file in expected_files: assert Path(file).is_file() async def test_alembic_init_already(alembic_commands: commands.AlembicCommands, tmp_project_dir: Path) -> None: 
alembic_commands.init(directory=f"{tmp_project_dir}/migrations/") expected_dirs = [f"{tmp_project_dir}/migrations/", f"{tmp_project_dir}/migrations/versions"] expected_files = [f"{tmp_project_dir}/migrations/env.py", f"{tmp_project_dir}/migrations/script.py.mako"] for dir in expected_dirs: assert Path(dir).is_dir() for file in expected_files: assert Path(file).is_file() with pytest.raises(CommandError): alembic_commands.init(directory=f"{tmp_project_dir}/migrations/") async def test_drop_all( alembic_commands: commands.AlembicCommands, any_config: SQLAlchemySyncConfig | SQLAlchemyAsyncConfig, capfd: CaptureFixture[str], ) -> None: from examples.litestar.litestar_repo_only import app await maybe_async(any_config.create_all_metadata(app)) if isinstance(any_config, SQLAlchemySyncConfig): assert any_config.metadata any_config.metadata.create_all(any_config.get_engine()) else: async with any_config.get_engine().begin() as conn: assert any_config.metadata await conn.run_sync(any_config.metadata.create_all) await drop_all( alembic_commands.config.engine, alembic_commands.config.version_table_name, base.metadata_registry.get(alembic_commands.config.bind_key), ) result = capfd.readouterr() assert "Successfully dropped all objects" in result.out async def test_dump_tables( any_config: SQLAlchemySyncConfig | SQLAlchemyAsyncConfig, capfd: CaptureFixture[str], tmp_project_dir: Path, ) -> None: from sqlalchemy.orm import DeclarativeBase from advanced_alchemy import base, mixins class _UUIDAuditBase(base.CommonTableAttributes, mixins.UUIDPrimaryKey, DeclarativeBase): registry = base.create_registry() class TestAuthorModel(_UUIDAuditBase): name: Mapped[str] = mapped_column(String(10)) class TestBookModel(_UUIDAuditBase): title: Mapped[str] = mapped_column(String(10)) author_id: Mapped[UUID] = mapped_column(ForeignKey("test_author_model.id")) TestBookModel.author = relationship(TestAuthorModel, lazy="joined", innerjoin=True, viewonly=True) TestAuthorModel.books = 
relationship(TestBookModel, back_populates="author", lazy="noload", uselist=True) if isinstance(any_config, SQLAlchemySyncConfig): TestBookModel.metadata.create_all(any_config.get_engine()) else: async with any_config.get_engine().begin() as conn: await conn.run_sync(TestBookModel.metadata.create_all) await dump_tables( tmp_project_dir, any_config.get_session(), [TestAuthorModel, TestBookModel], ) result = capfd.readouterr() assert "Dumping table 'test_author_model'" in result.out assert "Dumping table 'test_book_model" in result.out """ async def test_alembic_revision(alembic_commands: commands.AlembicCommands, tmp_project_dir: Path) -> None: alembic_commands.init(directory=f"{tmp_project_dir}/migrations/") alembic_commands.revision(message="test", autogenerate=True) async def test_alembic_upgrade(alembic_commands: commands.AlembicCommands, tmp_project_dir: Path) -> None: alembic_commands.init(directory=f"{tmp_project_dir}/migrations/") alembic_commands.revision(message="test", autogenerate=True) alembic_commands.upgrade(revision="head") """ python-advanced-alchemy-1.0.1/tests/integration/test_association_proxy.py000066400000000000000000000133661476663714600271250ustar00rootroot00000000000000from __future__ import annotations from pathlib import Path from typing import TYPE_CHECKING import pytest from sqlalchemy import Column, ForeignKey, String, Table, create_engine, select from sqlalchemy.ext.associationproxy import AssociationProxy, association_proxy from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine from sqlalchemy.orm import Mapped, Session, mapped_column, relationship, sessionmaker if TYPE_CHECKING: from pytest import MonkeyPatch @pytest.mark.xdist_group("loader") def test_ap_sync(monkeypatch: MonkeyPatch, tmp_path: Path) -> None: from sqlalchemy.orm import DeclarativeBase from advanced_alchemy import base, mixins orm_registry = base.create_registry() class NewUUIDBase(mixins.UUIDPrimaryKey, base.CommonTableAttributes, 
DeclarativeBase): registry = orm_registry monkeypatch.setattr(base, "UUIDBase", NewUUIDBase) product_tag_table = Table( "product_tag", orm_registry.metadata, Column("product_id", ForeignKey("product.id", ondelete="CASCADE"), primary_key=True), # pyright: ignore[reportUnknownArgumentType] Column("tag_id", ForeignKey("tag.id", ondelete="CASCADE"), primary_key=True), # pyright: ignore[reportUnknownArgumentType] ) class Tag(NewUUIDBase): name: Mapped[str] = mapped_column(index=True) products: Mapped[list[Product]] = relationship( secondary=lambda: product_tag_table, back_populates="product_tags", cascade="all, delete", passive_deletes=True, lazy="noload", ) class Product(NewUUIDBase): name: Mapped[str] = mapped_column(String(length=50)) # pyright: ignore product_tags: Mapped[list[Tag]] = relationship( secondary=lambda: product_tag_table, back_populates="products", cascade="all, delete", passive_deletes=True, lazy="joined", ) tags: AssociationProxy[list[str]] = association_proxy( "product_tags", "name", creator=lambda name: Tag(name=name), # pyright: ignore[reportUnknownArgumentType,reportUnknownLambdaType] ) engine = create_engine(f"sqlite:///{tmp_path}/test.sqlite1.db", echo=True) session_factory: sessionmaker[Session] = sessionmaker(engine, expire_on_commit=False) with engine.begin() as conn: Product.metadata.create_all(conn) with session_factory() as db_session: product_1 = Product(name="Product 1", tags=["a new tag", "second tag"]) db_session.add(product_1) tags = db_session.execute(select(Tag)).unique().fetchall() assert len(tags) == 2 product_2 = Product(name="Product 2", tags=["third tag"]) db_session.add(product_2) tags = db_session.execute(select(Tag)).unique().fetchall() assert len(tags) == 3 product_2.tags = [] db_session.add(product_2) _product_2_validate = db_session.execute(select(Product).where(Product.name == "Product 2")).unique().fetchone() assert _product_2_validate tags_2 = db_session.execute(select(Tag)).unique().fetchall() assert 
len(_product_2_validate[0].product_tags) == 0 assert len(tags_2) == 3 # add more assertions @pytest.mark.xdist_group("loader") async def test_ap_async(monkeypatch: MonkeyPatch, tmp_path: Path) -> None: from sqlalchemy.orm import DeclarativeBase from advanced_alchemy import base, mixins orm_registry = base.create_registry() class NewUUIDBase(mixins.UUIDPrimaryKey, base.CommonTableAttributes, DeclarativeBase): registry = orm_registry monkeypatch.setattr(base, "UUIDBase", NewUUIDBase) product_tag_table = Table( "product_tag", orm_registry.metadata, Column("product_id", ForeignKey("product.id", ondelete="CASCADE"), primary_key=True), # pyright: ignore[reportUnknownArgumentType] Column("tag_id", ForeignKey("tag.id", ondelete="CASCADE"), primary_key=True), # pyright: ignore[reportUnknownArgumentType] ) class Tag(NewUUIDBase): name: Mapped[str] = mapped_column(index=True) products: Mapped[list[Product]] = relationship( secondary=lambda: product_tag_table, back_populates="product_tags", cascade="all, delete", passive_deletes=True, lazy="noload", ) class Product(NewUUIDBase): name: Mapped[str] = mapped_column(String(length=50)) # pyright: ignore product_tags: Mapped[list[Tag]] = relationship( secondary=lambda: product_tag_table, back_populates="products", cascade="all, delete", passive_deletes=True, lazy="joined", ) tags: AssociationProxy[list[str]] = association_proxy( "product_tags", "name", creator=lambda name: Tag(name=name), # pyright: ignore[reportUnknownArgumentType,reportUnknownLambdaType] ) engine = create_async_engine(f"sqlite+aiosqlite:///{tmp_path}/test.sqlite2.db", echo=True) session_factory: async_sessionmaker[AsyncSession] = async_sessionmaker(engine, expire_on_commit=False) async with engine.begin() as conn: await conn.run_sync(Tag.metadata.create_all) async with session_factory() as db_session: product_1 = Product(name="Product 1 async", tags=["a new tag", "second tag"]) db_session.add(product_1) tags = await db_session.execute(select(Tag)) assert 
len(tags.unique().fetchall()) == 2 product_2 = Product(name="Product 2 async", tags=["third tag"]) db_session.add(product_2) tags = await db_session.execute(select(Tag)) assert len(tags.unique().fetchall()) == 3 # add more assertions python-advanced-alchemy-1.0.1/tests/integration/test_loader_and_execution_options.py000066400000000000000000000375601476663714600313000ustar00rootroot00000000000000from __future__ import annotations from pathlib import Path from typing import TYPE_CHECKING from uuid import UUID import pytest from sqlalchemy import ForeignKey, String, create_engine from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine from sqlalchemy.orm import Mapped, Session, mapped_column, noload, relationship, selectinload, sessionmaker from advanced_alchemy.base import BigIntBase, UUIDBase from advanced_alchemy.repository import SQLAlchemyAsyncRepository, SQLAlchemySyncRepository if TYPE_CHECKING: from pytest import MonkeyPatch @pytest.mark.xdist_group("loader") def test_loader(monkeypatch: MonkeyPatch, tmp_path: Path) -> None: from advanced_alchemy import base, mixins orm_registry = base.create_registry() class NewUUIDBase(mixins.UUIDPrimaryKey, base.CommonTableAttributes, base.AdvancedDeclarativeBase): __abstract__ = True registry = orm_registry class NewBigIntBase(mixins.BigIntPrimaryKey, base.CommonTableAttributes, base.AdvancedDeclarativeBase): __abstract__ = True registry = orm_registry monkeypatch.setattr(base, "UUIDBase", NewUUIDBase) monkeypatch.setattr(base, "BigIntBase", NewBigIntBase) class UUIDCountry(UUIDBase): name: Mapped[str] = mapped_column(String(length=50)) # pyright: ignore states: Mapped[list[UUIDState]] = relationship(back_populates="country", uselist=True, lazy="noload") class UUIDState(UUIDBase): name: Mapped[str] = mapped_column(String(length=50)) # pyright: ignore country_id: Mapped[UUID] = mapped_column(ForeignKey(UUIDCountry.id)) country: Mapped[UUIDCountry] = relationship(uselist=False, 
back_populates="states", lazy="raise") class USStateRepository(SQLAlchemySyncRepository[UUIDState]): model_type = UUIDState class CountryRepository(SQLAlchemySyncRepository[UUIDCountry]): model_type = UUIDCountry engine = create_engine(f"sqlite:///{tmp_path}/test.sqlite1.db", echo=True) session_factory: sessionmaker[Session] = sessionmaker(engine, expire_on_commit=False) with engine.begin() as conn: UUIDState.metadata.create_all(conn) with session_factory() as db_session: usa = UUIDCountry(name="United States of America") france = UUIDCountry(name="France") db_session.add(usa) db_session.add(france) california = UUIDState(name="California", country=usa) oregon = UUIDState(name="Oregon", country=usa) ile_de_france = UUIDState(name="รŽle-de-France", country=france) repo = USStateRepository(session=db_session) repo.add(california) repo.add(oregon) repo.add(ile_de_france) db_session.commit() db_session.expire_all() si1_country_repo = CountryRepository(session=db_session, load=[noload(UUIDCountry.states)]) usa_country_1 = si1_country_repo.get_one( name="United States of America", ) assert len(usa_country_1.states) == 0 si0_country_repo = CountryRepository(session=db_session) db_session.expire_all() usa_country_0 = si0_country_repo.get_one( name="United States of America", load=UUIDCountry.states, execution_options={"populate_existing": True}, ) assert len(usa_country_0.states) == 2 db_session.expire_all() si2_country_repo = CountryRepository(session=db_session, load=[selectinload(UUIDCountry.states)]) usa_country_2 = si2_country_repo.get_one(name="United States of America") assert len(usa_country_2.states) == 2 db_session.expire_all() ia_repo = USStateRepository(session=db_session, load=UUIDState.country) string_california = ia_repo.get_one(name="California") assert string_california.name == "California" db_session.expire_all() star_repo = USStateRepository(session=db_session, load="*") star_california = star_repo.get_one(name="California") assert 
star_california.country.name == "United States of America" db_session.expire_all() star_country_repo = CountryRepository(session=db_session, load="*") usa_country_3 = star_country_repo.get_one(name="United States of America") assert len(usa_country_3.states) == 2 db_session.expunge_all() db_session.expire_all() si1_country_repo = CountryRepository(session=db_session) usa_country_1 = si1_country_repo.get_one( name="United States of America", load=[noload(UUIDCountry.states)], ) assert len(usa_country_1.states) == 0 si0_country_repo = CountryRepository(session=db_session) db_session.expire_all() @pytest.mark.xdist_group("loader") async def test_async_loader(monkeypatch: MonkeyPatch, tmp_path: Path) -> None: from advanced_alchemy import base, mixins orm_registry = base.create_registry() class NewUUIDBase(mixins.UUIDPrimaryKey, base.CommonTableAttributes, base.AdvancedDeclarativeBase): __abstract__ = True registry = orm_registry class NewBigIntBase(mixins.BigIntPrimaryKey, base.CommonTableAttributes, base.AdvancedDeclarativeBase): __abstract__ = True registry = orm_registry monkeypatch.setattr(base, "UUIDBase", NewUUIDBase) monkeypatch.setattr(base, "BigIntBase", NewBigIntBase) class BigIntCountry(BigIntBase): name: Mapped[str] = mapped_column(String(length=50)) # pyright: ignore states: Mapped[list[BigIntState]] = relationship(back_populates="country", uselist=True) class BigIntState(BigIntBase): name: Mapped[str] = mapped_column(String(length=50)) # pyright: ignore country_id: Mapped[int] = mapped_column(ForeignKey(BigIntCountry.id)) country: Mapped[BigIntCountry] = relationship(uselist=False, back_populates="states", lazy="raise") class USStateRepository(SQLAlchemyAsyncRepository[BigIntState]): model_type = BigIntState class CountryRepository(SQLAlchemyAsyncRepository[BigIntCountry]): model_type = BigIntCountry engine = create_async_engine(f"sqlite+aiosqlite:///{tmp_path}/test.sqlite2.db", echo=True) session_factory: async_sessionmaker[AsyncSession] = 
async_sessionmaker(engine, expire_on_commit=False) async with engine.begin() as conn: await conn.run_sync(BigIntState.metadata.create_all) async with session_factory() as db_session: usa = BigIntCountry(name="United States of America") france = BigIntCountry(name="France") db_session.add(usa) db_session.add(france) california = BigIntState(name="California", country=usa) oregon = BigIntState(name="Oregon", country=usa) ile_de_france = BigIntState(name="รŽle-de-France", country=france) repo = USStateRepository(session=db_session) await repo.add(california) await repo.add(oregon) await repo.add(ile_de_france) await db_session.commit() db_session.expire_all() si1_country_repo = CountryRepository(session=db_session, load=[noload(BigIntCountry.states)]) usa_country_21 = await si1_country_repo.get_one( name="United States of America", ) assert len(usa_country_21.states) == 0 db_session.expire_all() si0_country_repo = CountryRepository(session=db_session) usa_country_0 = await si0_country_repo.get_one( name="United States of America", load=BigIntCountry.states, execution_options={"populate_existing": True}, ) assert len(usa_country_0.states) == 2 db_session.expire_all() country_repo = CountryRepository(session=db_session) usa_country_1 = await country_repo.get_one( name="United States of America", load=[selectinload(BigIntCountry.states)], ) assert len(usa_country_1.states) == 2 db_session.expire_all() si_country_repo = CountryRepository(session=db_session, load=[selectinload(BigIntCountry.states)]) usa_country_02 = await si_country_repo.get_one(name="United States of America") assert len(usa_country_02.states) == 2 db_session.expire_all() ia_repo = USStateRepository(session=db_session, load=BigIntState.country) string_california = await ia_repo.get_one(name="California") assert string_california.name == "California" db_session.expire_all() star_repo = USStateRepository(session=db_session, load="*") star_california = await star_repo.get_one(name="California") assert 
star_california.country.name == "United States of America" db_session.expire_all() star_country_repo = CountryRepository(session=db_session, load="*") usa_country_3 = await star_country_repo.get_one(name="United States of America") assert len(usa_country_3.states) == 2 db_session.expire_all() @pytest.mark.xdist_group("loader") def test_default_overrides_loader(monkeypatch: MonkeyPatch, tmp_path: Path) -> None: from advanced_alchemy import base, mixins orm_registry = base.create_registry() class NewUUIDBase(mixins.UUIDPrimaryKey, base.CommonTableAttributes, base.AdvancedDeclarativeBase): __abstract__ = True registry = orm_registry class NewBigIntBase(mixins.BigIntPrimaryKey, base.CommonTableAttributes, base.AdvancedDeclarativeBase): __abstract__ = True registry = orm_registry monkeypatch.setattr(base, "UUIDBase", NewUUIDBase) monkeypatch.setattr(base, "BigIntBase", NewBigIntBase) class UUIDCountryTest(UUIDBase): name: Mapped[str] = mapped_column(String(length=50)) # pyright: ignore states: Mapped[list[UUIDStateTest]] = relationship(back_populates="country", uselist=True, lazy="selectin") class UUIDStateTest(UUIDBase): name: Mapped[str] = mapped_column(String(length=50)) # pyright: ignore country_id: Mapped[UUID] = mapped_column(ForeignKey(UUIDCountryTest.id)) country: Mapped[UUIDCountryTest] = relationship(uselist=False, back_populates="states", lazy="noload") class USStateRepository(SQLAlchemySyncRepository[UUIDStateTest]): model_type = UUIDStateTest merge_loader_options = False loader_options = [noload(UUIDStateTest.country)] class CountryRepository(SQLAlchemySyncRepository[UUIDCountryTest]): inherit_lazy_relationships = False model_type = UUIDCountryTest engine = create_engine(f"sqlite:///{tmp_path}/test_loader.sqlite.db", echo=True) session_factory: sessionmaker[Session] = sessionmaker(engine, expire_on_commit=False) with engine.begin() as conn: UUIDStateTest.metadata.create_all(conn) with session_factory() as db_session: usa = UUIDCountryTest(name="United 
States of America") france = UUIDCountryTest(name="France") db_session.add(usa) db_session.add(france) california = UUIDStateTest(name="California", country=usa) oregon = UUIDStateTest(name="Oregon", country=usa) ile_de_france = UUIDStateTest(name="รŽle-de-France", country=france) repo = USStateRepository(session=db_session) repo.add(california) repo.add(oregon) repo.add(ile_de_france) db_session.commit() db_session.expire_all() si1_country_repo = CountryRepository(session=db_session) usa_country_1 = si1_country_repo.get_one( name="United States of America", ) assert len(usa_country_1.states) == 2 usa_country_2 = si1_country_repo.get_one( name="United States of America", load="*", execution_options={"populate_existing": True}, ) assert len(usa_country_2.states) == 2 @pytest.mark.xdist_group("loader") async def test_default_overrides_async_loader(monkeypatch: MonkeyPatch, tmp_path: Path) -> None: from advanced_alchemy import base, mixins orm_registry = base.create_registry() class NewUUIDBase(mixins.UUIDPrimaryKey, base.CommonTableAttributes, base.AdvancedDeclarativeBase): registry = orm_registry class NewBigIntBase(mixins.BigIntPrimaryKey, base.CommonTableAttributes, base.AdvancedDeclarativeBase): registry = orm_registry monkeypatch.setattr(base, "UUIDBase", NewUUIDBase) monkeypatch.setattr(base, "BigIntBase", NewBigIntBase) class BigIntCountryTest(BigIntBase): name: Mapped[str] = mapped_column(String(length=50)) # pyright: ignore states: Mapped[list[BigIntStateTest]] = relationship(back_populates="country", uselist=True, lazy="selectin") notes: Mapped[list[BigIntCountryNote]] = relationship(back_populates="country", uselist=True, lazy="selectin") class BigIntCountryNote(BigIntBase): name: Mapped[str] = mapped_column(String(length=50)) # pyright: ignore country_id: Mapped[int] = mapped_column(ForeignKey(BigIntCountryTest.id)) country: Mapped[BigIntCountryTest] = relationship(uselist=False, back_populates="notes", lazy="raise") class BigIntStateTest(BigIntBase): 
name: Mapped[str] = mapped_column(String(length=50)) # pyright: ignore country_id: Mapped[int] = mapped_column(ForeignKey(BigIntCountryTest.id)) country: Mapped[BigIntCountryTest] = relationship(uselist=False, back_populates="states", lazy="raise") class USStateRepository(SQLAlchemyAsyncRepository[BigIntStateTest]): model_type = BigIntStateTest class CountryRepository(SQLAlchemyAsyncRepository[BigIntCountryTest]): model_type = BigIntCountryTest merge_loader_options = False loader_options = [noload(BigIntCountryTest.states), noload(BigIntCountryTest.notes)] engine = create_async_engine(f"sqlite+aiosqlite:///{tmp_path}/test_loader.sqlite2.db", echo=True) session_factory: async_sessionmaker[AsyncSession] = async_sessionmaker(engine, expire_on_commit=False) async with engine.begin() as conn: await conn.run_sync(BigIntStateTest.metadata.create_all) async with session_factory() as db_session: usa = BigIntCountryTest(name="United States of America") usa.notes.append(BigIntCountryNote(name="Note 1")) france = BigIntCountryTest(name="France") db_session.add(usa) db_session.add(france) california = BigIntStateTest(name="California", country=usa) oregon = BigIntStateTest(name="Oregon", country=usa) ile_de_france = BigIntStateTest(name="รŽle-de-France", country=france) repo = USStateRepository(session=db_session) await repo.add(california) await repo.add(oregon) await repo.add(ile_de_france) await db_session.commit() db_session.expire_all() si1_country_repo = CountryRepository(session=db_session, load=[noload(BigIntCountryTest.states)]) usa_country_21 = await si1_country_repo.get_one( name="United States of America", ) assert len(usa_country_21.states) == 0 db_session.expire_all() si0_country_repo = CountryRepository(session=db_session) usa_country_0 = await si0_country_repo.get_one( name="United States of America", load=BigIntCountryTest.states, execution_options={"populate_existing": True}, ) assert len(usa_country_0.states) == 2 db_session.expire_all() country_repo = 
CountryRepository(session=db_session) usa_country_1 = await country_repo.get_one( name="United States of America", load=[selectinload(BigIntCountryTest.states)], ) assert len(usa_country_1.states) == 2 db_session.expire_all() si_country_repo = CountryRepository(session=db_session, load=[noload(BigIntCountryTest.notes)]) usa_country_02 = await si_country_repo.get_one( name="United States of America", load=[selectinload(BigIntCountryTest.states)] ) assert len(usa_country_02.notes) == 1 db_session.expire_all() python-advanced-alchemy-1.0.1/tests/integration/test_oracledb_json.py000066400000000000000000000024151476663714600261450ustar00rootroot00000000000000"""Unit tests for the SQLAlchemy Repository implementation for psycopg.""" from __future__ import annotations import platform from typing import TYPE_CHECKING import pytest from sqlalchemy.dialects import oracle from sqlalchemy.schema import CreateTable from tests.fixtures.uuid.models import UUIDEventLog if TYPE_CHECKING: from sqlalchemy import Engine pytestmark = [ pytest.mark.skipif(platform.uname()[4] != "x86_64", reason="oracle not available on this platform"), pytest.mark.integration, ] @pytest.mark.xdist_group("oracle18") def test_18c_json_constraint_generation(oracle18c_engine: Engine) -> None: ddl = str(CreateTable(UUIDEventLog.__table__).compile(oracle18c_engine, dialect=oracle.dialect())) # type: ignore assert "BLOB" in ddl.upper() assert "JSON" in ddl.upper() with oracle18c_engine.begin() as conn: UUIDEventLog.metadata.create_all(conn) @pytest.mark.xdist_group("oracle23") def test_23c_json_constraint_generation(oracle23c_engine: Engine) -> None: ddl = str(CreateTable(UUIDEventLog.__table__).compile(oracle23c_engine, dialect=oracle.dialect())) # type: ignore assert "BLOB" in ddl.upper() assert "JSON" in ddl.upper() with oracle23c_engine.begin() as conn: UUIDEventLog.metadata.create_all(conn) 
python-advanced-alchemy-1.0.1/tests/integration/test_repository.py000066400000000000000000003131151476663714600255620ustar00rootroot00000000000000# ruff: noqa: UP031 """Unit tests for the SQLAlchemy Repository implementation.""" from __future__ import annotations import asyncio import contextlib import datetime import os from collections.abc import AsyncGenerator, Generator, Iterator from typing import TYPE_CHECKING, Any, Literal, Union, cast from unittest.mock import NonCallableMagicMock from uuid import UUID, uuid4 import pytest from msgspec import Struct from pydantic import BaseModel from pytest_lazy_fixtures import lf from sqlalchemy import Engine, Table, and_, insert, select, text from sqlalchemy.ext.asyncio import AsyncEngine, AsyncSession from sqlalchemy.orm import Session, sessionmaker from time_machine import travel from advanced_alchemy import base from advanced_alchemy.exceptions import IntegrityError, NotFoundError, RepositoryError from advanced_alchemy.filters import ( BeforeAfter, CollectionFilter, NotInCollectionFilter, NotInSearchFilter, OnBeforeAfter, OrderBy, SearchFilter, ) from advanced_alchemy.repository import SQLAlchemyAsyncRepository, SQLAlchemyAsyncSlugRepository from advanced_alchemy.repository._util import get_instrumented_attr, model_from_dict from advanced_alchemy.repository.memory import ( SQLAlchemyAsyncMockRepository, SQLAlchemyAsyncMockSlugRepository, SQLAlchemySyncMockRepository, SQLAlchemySyncMockSlugRepository, ) from advanced_alchemy.service import ( SQLAlchemyAsyncRepositoryService, ) from advanced_alchemy.service.pagination import OffsetPagination from advanced_alchemy.utils.text import slugify from tests.fixtures.bigint import models as models_bigint from tests.fixtures.bigint import repositories as repositories_bigint from tests.fixtures.bigint import services as services_bigint from tests.fixtures.uuid import models as models_uuid from tests.fixtures.uuid import repositories as repositories_uuid from tests.fixtures.uuid 
import services as services_uuid from tests.helpers import maybe_async from tests.integration.helpers import update_raw_records if TYPE_CHECKING: from pytest import FixtureRequest from time_machine import Coordinates pytestmark = [ pytest.mark.integration, ] xfail = pytest.mark.xfail RepositoryPKType = Literal["uuid", "bigint"] SecretModel = type[Union[models_uuid.UUIDSecret, models_bigint.BigIntSecret]] AuthorModel = type[Union[models_uuid.UUIDAuthor, models_bigint.BigIntAuthor]] RuleModel = type[Union[models_uuid.UUIDRule, models_bigint.BigIntRule]] ModelWithFetchedValue = type[Union[models_uuid.UUIDModelWithFetchedValue, models_bigint.BigIntModelWithFetchedValue]] ItemModel = type[Union[models_uuid.UUIDItem, models_bigint.BigIntItem]] TagModel = type[Union[models_uuid.UUIDTag, models_bigint.BigIntTag]] SlugBookModel = type[Union[models_uuid.UUIDSlugBook, models_bigint.BigIntSlugBook]] AnySecret = Union[models_uuid.UUIDSecret, models_bigint.BigIntSecret] SecretRepository = SQLAlchemyAsyncRepository[AnySecret] SecretService = SQLAlchemyAsyncRepositoryService[AnySecret, SecretRepository] SecretMockRepository = SQLAlchemyAsyncMockRepository[AnySecret] AnySecretRepository = Union[SecretRepository, SecretMockRepository] AnyAuthor = Union[models_uuid.UUIDAuthor, models_bigint.BigIntAuthor] AuthorRepository = SQLAlchemyAsyncRepository[AnyAuthor] AuthorMockRepository = SQLAlchemyAsyncMockRepository[AnyAuthor] AnyAuthorRepository = Union[AuthorRepository, AuthorMockRepository] AuthorService = SQLAlchemyAsyncRepositoryService[AnyAuthor, AuthorRepository] AnyRule = Union[models_uuid.UUIDRule, models_bigint.BigIntRule] RuleRepository = SQLAlchemyAsyncRepository[AnyRule] RuleService = SQLAlchemyAsyncRepositoryService[AnyRule, RuleRepository] AnySlugBook = Union[models_uuid.UUIDSlugBook, models_bigint.BigIntSlugBook] SlugBookRepository = SQLAlchemyAsyncSlugRepository[AnySlugBook] SlugBookService = SQLAlchemyAsyncRepositoryService[AnySlugBook, SlugBookRepository] AnyBook = 
Union[models_uuid.UUIDBook, models_bigint.BigIntBook] BookRepository = SQLAlchemyAsyncRepository[AnyBook] BookService = SQLAlchemyAsyncRepositoryService[AnyBook, BookRepository] AnyTag = Union[models_uuid.UUIDTag, models_bigint.BigIntTag] TagRepository = SQLAlchemyAsyncRepository[AnyTag] TagService = SQLAlchemyAsyncRepositoryService[AnyTag, TagRepository] AnyItem = Union[models_uuid.UUIDItem, models_bigint.BigIntItem] ItemRepository = SQLAlchemyAsyncRepository[AnyItem] ItemService = SQLAlchemyAsyncRepositoryService[AnyItem, ItemRepository] AnyModelWithFetchedValue = Union[models_uuid.UUIDModelWithFetchedValue, models_bigint.BigIntModelWithFetchedValue] ModelWithFetchedValueRepository = SQLAlchemyAsyncRepository[AnyModelWithFetchedValue] ModelWithFetchedValueService = SQLAlchemyAsyncRepositoryService[ AnyModelWithFetchedValue, ModelWithFetchedValueRepository ] RawRecordData = list[dict[str, Any]] mock_engines = {"mock_async_engine", "mock_sync_engine"} @pytest.fixture(autouse=True) def _clear_in_memory_db() -> Generator[None, None, None]: # pyright: ignore[reportUnusedFunction] try: yield finally: SQLAlchemyAsyncMockRepository.__database_clear__() SQLAlchemySyncMockRepository.__database_clear__() @pytest.fixture(name="raw_authors_uuid") def fx_raw_authors_uuid() -> RawRecordData: """Unstructured author representations.""" return [ { "id": UUID("97108ac1-ffcb-411d-8b1e-d9183399f63b"), "name": "Agatha Christie", "dob": "1890-09-15", "created_at": "2023-05-01T00:00:00", "updated_at": "2023-05-11T00:00:00", }, { "id": UUID("5ef29f3c-3560-4d15-ba6b-a2e5c721e4d2"), "name": "Leo Tolstoy", "dob": "1828-09-09", "created_at": "2023-03-01T00:00:00", "updated_at": "2023-05-15T00:00:00", }, ] @pytest.fixture(name="raw_books_uuid") def fx_raw_books_uuid(raw_authors_uuid: RawRecordData) -> RawRecordData: """Unstructured book representations.""" return [ { "id": UUID("f34545b9-663c-4fce-915d-dd1ae9cea42a"), "title": "Murder on the Orient Express", "author_id": 
raw_authors_uuid[0]["id"], "author": raw_authors_uuid[0], }, ] @pytest.fixture(name="raw_slug_books_uuid") def fx_raw_slug_books_uuid(raw_authors_uuid: RawRecordData) -> RawRecordData: """Unstructured slug book representations.""" return [ { "id": UUID("f34545b9-663c-4fce-915d-dd1ae9cea42a"), "title": "Murder on the Orient Express", "slug": slugify("Murder on the Orient Express"), "author_id": str(raw_authors_uuid[0]["id"]), }, ] @pytest.fixture(name="raw_log_events_uuid") def fx_raw_log_events_uuid() -> RawRecordData: """Unstructured log events representations.""" return [ { "id": "f34545b9-663c-4fce-915d-dd1ae9cea42a", "logged_at": "0001-01-01T00:00:00", "payload": {"foo": "bar", "baz": datetime.datetime.now()}, "created_at": "0001-01-01T00:00:00", "updated_at": "0001-01-01T00:00:00", }, ] @pytest.fixture(name="raw_rules_uuid") def fx_raw_rules_uuid() -> RawRecordData: """Unstructured rules representations.""" return [ { "id": "f34545b9-663c-4fce-915d-dd1ae9cea42a", "name": "Initial loading rule.", "config": {"url": "https://example.org", "setting_123": 1}, "created_at": "2023-01-01T00:00:00", "updated_at": "2023-02-01T00:00:00", }, { "id": "f34545b9-663c-4fce-915d-dd1ae9cea34b", "name": "Secondary loading rule.", "config": {"url": "https://example.org", "bar": "foo", "setting_123": 4}, "created_at": "2023-02-01T00:00:00", "updated_at": "2023-02-01T00:00:00", }, ] @pytest.fixture(name="raw_secrets_uuid") def fx_raw_secrets_uuid() -> RawRecordData: """secret representations.""" return [ { "id": "f34545b9-663c-4fce-915d-dd1ae9cea42a", "secret": "I'm a secret!", "long_secret": "It's clobbering time.", }, ] @pytest.fixture(name="raw_authors_bigint") def fx_raw_authors_bigint() -> RawRecordData: """Unstructured author representations.""" return [ { "id": 2023, "name": "Agatha Christie", "dob": "1890-09-15", "created_at": "2023-05-01T00:00:00", "updated_at": "2023-05-11T00:00:00", }, { "id": 2024, "name": "Leo Tolstoy", "dob": "1828-09-09", "created_at": 
"2023-03-01T00:00:00", "updated_at": "2023-05-15T00:00:00", }, ] @pytest.fixture(name="raw_books_bigint") def fx_raw_books_bigint(raw_authors_bigint: RawRecordData) -> RawRecordData: """Unstructured book representations.""" return [ { "title": "Murder on the Orient Express", "author_id": raw_authors_bigint[0]["id"], "author": raw_authors_bigint[0], }, ] @pytest.fixture(name="raw_slug_books_bigint") def fx_raw_slug_books_bigint(raw_authors_bigint: RawRecordData) -> RawRecordData: """Unstructured slug book representations.""" return [ { "title": "Murder on the Orient Express", "slug": slugify("Murder on the Orient Express"), "author_id": str(raw_authors_bigint[0]["id"]), }, ] @pytest.fixture(name="raw_log_events_bigint") def fx_raw_log_events_bigint() -> RawRecordData: """Unstructured log events representations.""" return [ { "id": 2025, "logged_at": "0001-01-01T00:00:00", "payload": {"foo": "bar", "baz": datetime.datetime.now()}, "created_at": "0001-01-01T00:00:00", "updated_at": "0001-01-01T00:00:00", }, ] @pytest.fixture(name="raw_rules_bigint") def fx_raw_rules_bigint() -> RawRecordData: """Unstructured rules representations.""" return [ { "id": 2025, "name": "Initial loading rule.", "config": {"url": "https://example.org", "setting_123": 1}, "created_at": "2023-01-01T00:00:00", "updated_at": "2023-02-01T00:00:00", }, { "id": 2024, "name": "Secondary loading rule.", "config": {"url": "https://example.org", "bar": "foo", "setting_123": 4}, "created_at": "2023-02-01T00:00:00", "updated_at": "2023-02-01T00:00:00", }, ] @pytest.fixture(name="raw_secrets_bigint") def fx_raw_secrets_bigint() -> RawRecordData: """secret representations.""" return [ { "id": 2025, "secret": "I'm a secret!", "long_secret": "It's clobbering time.", }, ] @pytest.fixture(params=["uuid", "bigint"]) def repository_pk_type(request: FixtureRequest) -> RepositoryPKType: """Return the primary key type of the repository""" return cast(RepositoryPKType, request.param) @pytest.fixture() def 
author_model(repository_pk_type: RepositoryPKType) -> AuthorModel: """Return the ``Author`` model matching the current repository PK type""" if repository_pk_type == "uuid": return models_uuid.UUIDAuthor return models_bigint.BigIntAuthor @pytest.fixture() def rule_model(repository_pk_type: RepositoryPKType) -> RuleModel: """Return the ``Rule`` model matching the current repository PK type""" if repository_pk_type == "bigint": return models_bigint.BigIntRule return models_uuid.UUIDRule @pytest.fixture() def model_with_fetched_value(repository_pk_type: RepositoryPKType) -> ModelWithFetchedValue: """Return the ``ModelWithFetchedValue`` model matching the current repository PK type""" if repository_pk_type == "bigint": return models_bigint.BigIntModelWithFetchedValue return models_uuid.UUIDModelWithFetchedValue @pytest.fixture() def item_model(repository_pk_type: RepositoryPKType) -> ItemModel: """Return the ``Item`` model matching the current repository PK type""" if repository_pk_type == "bigint": return models_bigint.BigIntItem return models_uuid.UUIDItem @pytest.fixture() def tag_model(repository_pk_type: RepositoryPKType) -> TagModel: """Return the ``Tag`` model matching the current repository PK type""" if repository_pk_type == "uuid": return models_uuid.UUIDTag return models_bigint.BigIntTag @pytest.fixture() def book_model(repository_pk_type: RepositoryPKType) -> type[models_uuid.UUIDBook | models_bigint.BigIntBook]: """Return the ``Book`` model matching the current repository PK type""" if repository_pk_type == "uuid": return models_uuid.UUIDBook return models_bigint.BigIntBook @pytest.fixture() def slug_book_model( repository_pk_type: RepositoryPKType, ) -> SlugBookModel: """Return the ``SlugBook`` model matching the current repository PK type""" if repository_pk_type == "uuid": return models_uuid.UUIDSlugBook return models_bigint.BigIntSlugBook @pytest.fixture() def secret_model(repository_pk_type: RepositoryPKType) -> SecretModel: """Return the ``Secret`` 
model matching the current repository PK type""" return models_uuid.UUIDSecret if repository_pk_type == "uuid" else models_bigint.BigIntSecret @pytest.fixture() def new_pk_id(repository_pk_type: RepositoryPKType) -> Any: """Return an unused primary key, matching the current repository PK type""" if repository_pk_type == "uuid": return UUID("baa0a5c7-5404-4821-bc76-6cf5e73c8219") return 10 @pytest.fixture() def existing_slug_book_ids(raw_slug_books: RawRecordData) -> Iterator[Any]: """Return the existing primary keys based on the raw data provided""" return (book["id"] for book in raw_slug_books) @pytest.fixture() def first_slug_book_id(raw_slug_books: RawRecordData) -> Any: """Return the primary key of the first ``Book`` record of the current repository PK type""" return raw_slug_books[0]["id"] @pytest.fixture() def existing_author_ids(raw_authors: RawRecordData) -> Iterator[Any]: """Return the existing primary keys based on the raw data provided""" return (author["id"] for author in raw_authors) @pytest.fixture() def first_author_id(raw_authors: RawRecordData) -> Any: """Return the primary key of the first ``Author`` record of the current repository PK type""" return raw_authors[0]["id"] @pytest.fixture() def existing_secret_ids(raw_secrets: RawRecordData) -> Iterator[Any]: """Return the existing primary keys based on the raw data provided""" return (secret["id"] for secret in raw_secrets) @pytest.fixture() def first_secret_id(raw_secrets: RawRecordData) -> Any: """Return the primary key of the first ``Secret`` record of the current repository PK type""" return raw_secrets[0]["id"] @pytest.fixture( params=[ pytest.param( "sqlite_engine", marks=[ pytest.mark.sqlite, pytest.mark.integration, ], ), pytest.param( "duckdb_engine", marks=[ pytest.mark.duckdb, pytest.mark.integration, pytest.mark.xdist_group("duckdb"), ], ), pytest.param( "oracle18c_engine", marks=[ pytest.mark.oracledb_sync, pytest.mark.integration, pytest.mark.xdist_group("oracle18"), ], ), 
pytest.param( "oracle23c_engine", marks=[ pytest.mark.oracledb_sync, pytest.mark.integration, pytest.mark.xdist_group("oracle23"), ], ), pytest.param( "psycopg_engine", marks=[ pytest.mark.psycopg_sync, pytest.mark.integration, pytest.mark.xdist_group("postgres"), ], ), pytest.param( "spanner_engine", marks=[ pytest.mark.spanner, pytest.mark.integration, pytest.mark.xdist_group("spanner"), ], ), pytest.param( "mssql_engine", marks=[ pytest.mark.mssql_sync, pytest.mark.integration, pytest.mark.xdist_group("mssql"), ], ), pytest.param( "cockroachdb_engine", marks=[ pytest.mark.cockroachdb_sync, pytest.mark.integration, pytest.mark.xdist_group("cockroachdb"), ], ), pytest.param( "mock_sync_engine", marks=[ pytest.mark.mock_sync, pytest.mark.integration, pytest.mark.xdist_group("mock"), ], ), ], ) def engine(request: FixtureRequest, repository_pk_type: RepositoryPKType) -> Generator[Engine, None, None]: """Return a synchronous engine. Parametrized to return all engines supported by the repository PK type """ engine = cast(Engine, request.getfixturevalue(request.param)) if engine.dialect.name.startswith("spanner") and repository_pk_type == "bigint": pytest.skip(reason="Spanner does not support monotonically increasing primary keys") elif engine.dialect.name.startswith("cockroach") and repository_pk_type == "bigint": pytest.skip(reason="Cockroachdb has special considerations for monotonically increasing primary keys.") yield engine @pytest.fixture() def raw_authors(request: FixtureRequest, repository_pk_type: RepositoryPKType) -> RawRecordData: """Return raw ``Author`` data matching the current PK type""" if repository_pk_type == "bigint": authors = request.getfixturevalue("raw_authors_bigint") else: authors = request.getfixturevalue("raw_authors_uuid") return cast("RawRecordData", authors) @pytest.fixture() def raw_slug_books(request: FixtureRequest, repository_pk_type: RepositoryPKType) -> RawRecordData: """Return raw ``Author`` data matching the current PK type""" if 
repository_pk_type == "bigint": books = request.getfixturevalue("raw_slug_books_bigint") else: books = request.getfixturevalue("raw_slug_books_uuid") return cast("RawRecordData", books) @pytest.fixture() def raw_rules(request: FixtureRequest, repository_pk_type: RepositoryPKType) -> RawRecordData: """Return raw ``Rule`` data matching the current PK type""" if repository_pk_type == "bigint": rules = request.getfixturevalue("raw_rules_bigint") else: rules = request.getfixturevalue("raw_rules_uuid") return cast("RawRecordData", rules) @pytest.fixture() def raw_secrets(request: FixtureRequest, repository_pk_type: RepositoryPKType) -> RawRecordData: """Return raw ``Secret`` data matching the current PK type""" if repository_pk_type == "bigint": secrets = request.getfixturevalue("raw_secrets_bigint") else: secrets = request.getfixturevalue("raw_secrets_uuid") return cast("RawRecordData", secrets) def _seed_db_sync( *, engine: Engine, raw_authors: RawRecordData, raw_slug_books: RawRecordData, raw_rules: RawRecordData, raw_secrets: RawRecordData, author_model: AuthorModel, secret_model: SecretModel, rule_model: RuleModel, slug_book_model: SlugBookModel, ) -> None: update_raw_records(raw_authors=raw_authors, raw_rules=raw_rules) if isinstance(engine, NonCallableMagicMock): for raw_author in raw_authors: SQLAlchemySyncMockRepository.__database_add__( # pyright: ignore[reportUnknownMemberType] author_model, model_from_dict(author_model, **raw_author), # type: ignore[type-var] ) for raw_rule in raw_rules: SQLAlchemySyncMockRepository.__database_add__( # pyright: ignore[reportUnknownMemberType] author_model, model_from_dict(rule_model, **raw_rule), # type: ignore[type-var] ) for raw_secret in raw_secrets: SQLAlchemySyncMockRepository.__database_add__( # pyright: ignore[reportUnknownMemberType] secret_model, model_from_dict(secret_model, **raw_secret), # type: ignore[type-var] ) for raw_book in raw_slug_books: SQLAlchemySyncMockSlugRepository.__database_add__( # pyright: 
ignore[reportUnknownMemberType] slug_book_model, model_from_dict(slug_book_model, **raw_book), # type: ignore[type-var] ) else: with engine.begin() as conn: base.orm_registry.metadata.drop_all(conn) base.orm_registry.metadata.create_all(conn) with engine.begin() as conn: for author in raw_authors: conn.execute(insert(author_model).values(author)) for rule in raw_rules: conn.execute(insert(rule_model).values(rule)) for secret in raw_secrets: conn.execute(insert(secret_model).values(secret)) for book in raw_slug_books: conn.execute(insert(slug_book_model).values(book)) def _seed_spanner( *, engine: Engine, raw_authors_uuid: RawRecordData, raw_rules_uuid: RawRecordData, raw_slug_books_uuid: RawRecordData, ) -> list[Table]: update_raw_records(raw_authors=raw_authors_uuid, raw_rules=raw_rules_uuid) with engine.begin() as txn: objs = [ tbl for tbl in models_uuid.UUIDAuthor.registry.metadata.sorted_tables if tbl.description.startswith("uuid") ] models_uuid.UUIDAuthor.registry.metadata.create_all(txn, tables=objs) return objs @pytest.fixture() def seed_db_sync( engine: Engine, raw_authors: RawRecordData, raw_slug_books: RawRecordData, raw_rules: RawRecordData, raw_secrets: RawRecordData, author_model: AuthorModel, rule_model: RuleModel, secret_model: SecretModel, slug_book_model: SlugBookModel, ) -> None: if engine.dialect.name.startswith("spanner"): _seed_spanner( engine=engine, raw_authors_uuid=raw_authors, raw_rules_uuid=raw_rules, raw_slug_books_uuid=raw_slug_books, ) else: _seed_db_sync( engine=engine, raw_authors=raw_authors, raw_rules=raw_rules, raw_secrets=raw_secrets, raw_slug_books=raw_slug_books, author_model=author_model, rule_model=rule_model, secret_model=secret_model, slug_book_model=slug_book_model, ) @pytest.fixture() def session( engine: Engine, raw_authors: RawRecordData, raw_rules: RawRecordData, raw_secrets: RawRecordData, seed_db_sync: None, ) -> Generator[Session, None, None]: """Return a synchronous session for the current engine""" session = 
sessionmaker(bind=engine)() if engine.dialect.name.startswith("spanner"): try: author_repo = repositories_uuid.AuthorSyncRepository(session=session) for author in raw_authors: _ = author_repo.get_or_upsert(match_fields="name", **author) secret_repo = repositories_uuid.SecretSyncRepository(session=session) for secret in raw_secrets: _ = secret_repo.get_or_upsert(match_fields="id", **secret) if not bool(os.environ.get("SPANNER_EMULATOR_HOST")): rule_repo = repositories_uuid.RuleSyncRepository(session=session) for rule in raw_rules: _ = rule_repo.add(models_uuid.UUIDRule(**rule)) yield session finally: session.rollback() session.close() with engine.begin() as txn: models_uuid.UUIDAuthor.registry.metadata.drop_all(txn, tables=seed_db_sync) else: try: yield session finally: session.rollback() session.close() @pytest.fixture( params=[ pytest.param( "aiosqlite_engine", marks=[ pytest.mark.aiosqlite, pytest.mark.integration, ], ), pytest.param( "asyncmy_engine", marks=[ pytest.mark.asyncmy, pytest.mark.integration, pytest.mark.xdist_group("mysql"), ], ), pytest.param( "asyncpg_engine", marks=[ pytest.mark.asyncpg, pytest.mark.integration, pytest.mark.xdist_group("postgres"), ], ), pytest.param( "psycopg_async_engine", marks=[ pytest.mark.psycopg_async, pytest.mark.integration, pytest.mark.xdist_group("postgres"), ], ), pytest.param( "cockroachdb_async_engine", marks=[ pytest.mark.cockroachdb_async, pytest.mark.integration, pytest.mark.xdist_group("cockroachdb"), ], ), pytest.param( "mssql_async_engine", marks=[ pytest.mark.mssql_async, pytest.mark.integration, pytest.mark.xdist_group("mssql"), ], ), pytest.param( "oracle18c_async_engine", marks=[ pytest.mark.oracledb_async, pytest.mark.integration, pytest.mark.xdist_group("oracle18"), ], ), pytest.param( "oracle23c_async_engine", marks=[ pytest.mark.oracledb_async, pytest.mark.integration, pytest.mark.xdist_group("oracle23"), ], ), pytest.param( "mock_async_engine", marks=[ pytest.mark.mock_async, pytest.mark.integration, 
pytest.mark.xdist_group("mock"), ], ), ], ) def async_engine(request: FixtureRequest, repository_pk_type: RepositoryPKType) -> Generator[AsyncEngine, None, None]: async_engine = cast(AsyncEngine, request.getfixturevalue(request.param)) if async_engine.dialect.name.startswith("cockroach") and repository_pk_type == "bigint": pytest.skip(reason="Cockroachdb has special considerations for monotonically increasing primary keys.") yield async_engine @pytest.fixture() async def seed_db_async( request: FixtureRequest, async_engine: AsyncEngine | NonCallableMagicMock, raw_authors: RawRecordData, raw_rules: RawRecordData, raw_secrets: RawRecordData, author_model: AuthorModel, rule_model: RuleModel, secret_model: SecretModel, ) -> None: """Return an asynchronous session for the current engine""" # convert date/time strings to dt objects. for raw_author in raw_authors: raw_author["dob"] = datetime.datetime.strptime(raw_author["dob"], "%Y-%m-%d").date() raw_author["created_at"] = datetime.datetime.strptime(raw_author["created_at"], "%Y-%m-%dT%H:%M:%S").astimezone( datetime.timezone.utc, ) raw_author["updated_at"] = datetime.datetime.strptime(raw_author["updated_at"], "%Y-%m-%dT%H:%M:%S").astimezone( datetime.timezone.utc, ) for raw_author in raw_rules: raw_author["created_at"] = datetime.datetime.strptime(raw_author["created_at"], "%Y-%m-%dT%H:%M:%S").astimezone( datetime.timezone.utc, ) raw_author["updated_at"] = datetime.datetime.strptime(raw_author["updated_at"], "%Y-%m-%dT%H:%M:%S").astimezone( datetime.timezone.utc, ) if isinstance(async_engine, NonCallableMagicMock): for raw_author in raw_authors: SQLAlchemyAsyncMockRepository.__database_add__( # pyright: ignore[reportUnknownMemberType] author_model, model_from_dict(author_model, **raw_author), # type: ignore[type-var] ) for raw_rule in raw_rules: SQLAlchemyAsyncMockRepository.__database_add__( # pyright: ignore[reportUnknownMemberType] author_model, model_from_dict(rule_model, **raw_rule), # type: ignore[type-var] ) for 
raw_secret in raw_secrets: SQLAlchemyAsyncMockRepository.__database_add__( # pyright: ignore[reportUnknownMemberType] secret_model, model_from_dict(secret_model, **raw_secret), # type: ignore[type-var] ) else: async with async_engine.begin() as conn: if "cockroachdb_async_engine" in request.fixturenames: await conn.execute(text("SET multiple_active_portals_enabled = true")) await conn.execute(text("SET autocommit_before_ddl = true")) await conn.run_sync(base.orm_registry.metadata.drop_all) await conn.run_sync(base.orm_registry.metadata.create_all) await conn.execute(insert(author_model), raw_authors) await conn.execute(insert(rule_model), raw_rules) await conn.execute(insert(secret_model), raw_secrets) @pytest.fixture(autouse=False) async def patch_cockroach_session(async_session: AsyncSession) -> AsyncGenerator[None, None]: """Return a session for the current session""" await async_session.execute(text("SET multiple_active_portals_enabled = true")) yield None @pytest.fixture(params=[lf("session"), lf("async_session")], ids=["sync", "async"]) def any_session(request: FixtureRequest) -> Generator[AsyncSession | Session, None, None]: """Return a session for the current session""" if isinstance(request.param, AsyncSession): request.getfixturevalue("seed_db_async") if "cockroachdb_async_engine" in request.fixturenames: request.getfixturevalue("patch_cockroach_session") else: request.getfixturevalue("seed_db_sync") yield request.param # type: ignore[no-any-return] @pytest.fixture(params=[lf("engine"), lf("async_engine")], ids=["sync", "async"]) async def any_engine( request: FixtureRequest, ) -> AsyncGenerator[Engine | AsyncEngine, None]: """Return a session for the current session""" yield cast("Engine | AsyncEngine", request.getfixturevalue(request.param)) @pytest.fixture() def repository_module(repository_pk_type: RepositoryPKType, request: FixtureRequest) -> Any: if repository_pk_type == "bigint" and mock_engines.intersection(set(request.fixturenames)): 
pytest.skip("Skipping additional bigint mock repository tests") yield repositories_uuid if repository_pk_type == "uuid" else repositories_bigint @pytest.fixture() def service_module(repository_pk_type: RepositoryPKType, request: FixtureRequest) -> Any: if repository_pk_type == "bigint" and mock_engines.intersection(set(request.fixturenames)): pytest.skip("Skipping additional bigint mock repository tests") yield services_uuid if repository_pk_type == "uuid" else services_bigint @pytest.fixture() def author_repo( request: FixtureRequest, any_session: AsyncSession | Session, repository_module: Any, ) -> Generator[AuthorRepository, None, None]: """Return an AuthorAsyncRepository or AuthorSyncRepository based on the current PK and session type""" if "mock_async_engine" in request.fixturenames: repo = repository_module.AuthorAsyncMockRepository(session=any_session) elif "mock_sync_engine" in request.fixturenames: repo = repository_module.AuthorSyncMockRepository(session=any_session) elif isinstance(any_session, AsyncSession): repo = repository_module.AuthorAsyncRepository(session=any_session) else: repo = repository_module.AuthorSyncRepository(session=any_session) yield cast(AuthorRepository, repo) @pytest.fixture() def secret_repo( request: FixtureRequest, any_session: AsyncSession | Session, repository_module: Any, ) -> Generator[SecretRepository, None, None]: """Return an SecretAsyncRepository or SecretSyncRepository based on the current PK and session type""" if "mock_async_engine" in request.fixturenames: repo = repository_module.SecretAsyncMockRepository(session=any_session) elif "mock_sync_engine" in request.fixturenames: repo = repository_module.SecretSyncMockRepository(session=any_session) elif isinstance(any_session, AsyncSession): repo = repository_module.SecretAsyncRepository(session=any_session) else: repo = repository_module.SecretSyncRepository(session=any_session) yield cast(SecretRepository, repo) @pytest.fixture() def author_service( any_session: 
AsyncSession | Session, service_module: Any, request: FixtureRequest, ) -> Generator[AuthorService, None, None]: """Return an AuthorAsyncService or AuthorSyncService based on the current PK and session type""" if "mock_async_engine" in request.fixturenames: repo = service_module.AuthorAsyncMockService(session=any_session) elif "mock_sync_engine" in request.fixturenames: repo = service_module.AuthorSyncMockService(session=any_session) elif isinstance(any_session, AsyncSession): repo = service_module.AuthorAsyncService(session=any_session) else: repo = service_module.AuthorSyncService(session=any_session) yield cast(AuthorService, repo) @pytest.fixture() def rule_repo( any_session: AsyncSession | Session, repository_module: Any, request: FixtureRequest ) -> Generator[RuleRepository, None, None]: """Return an RuleAsyncRepository or RuleSyncRepository based on the current PK and session type""" if "mock_async_engine" in request.fixturenames: repo = repository_module.RuleAsyncMockRepository(session=any_session) elif "mock_sync_engine" in request.fixturenames: repo = repository_module.RuleSyncMockRepository(session=any_session) elif isinstance(any_session, AsyncSession): repo = repository_module.RuleAsyncRepository(session=any_session) else: repo = repository_module.RuleSyncRepository(session=any_session) yield cast(RuleRepository, repo) @pytest.fixture() def rule_service( any_session: AsyncSession | Session, service_module: Any, request: FixtureRequest ) -> Generator[RuleService, None, None]: """Return an RuleAsyncService or RuleSyncService based on the current PK and session type""" if "mock_async_engine" in request.fixturenames: repo = service_module.RuleAsyncMockService(session=any_session) elif "mock_sync_engine" in request.fixturenames: repo = service_module.RuleSyncMockService(session=any_session) elif isinstance(any_session, AsyncSession): repo = service_module.RuleAsyncService(session=any_session) else: repo = service_module.RuleSyncService(session=any_session) 
yield cast(RuleService, repo) @pytest.fixture() def book_repo( any_session: AsyncSession | Session, repository_module: Any, request: FixtureRequest ) -> Generator[BookRepository, None, None]: """Return an BookAsyncRepository or BookSyncRepository based on the current PK and session type""" if "mock_async_engine" in request.fixturenames: repo = repository_module.BookAsyncMockRepository(session=any_session) elif "mock_sync_engine" in request.fixturenames: repo = repository_module.BookSyncMockRepository(session=any_session) elif isinstance(any_session, AsyncSession): repo = repository_module.BookAsyncRepository(session=any_session) else: repo = repository_module.BookSyncRepository(session=any_session) yield cast(BookRepository, repo) @pytest.fixture() def book_service( any_session: AsyncSession | Session, service_module: Any, request: FixtureRequest ) -> Generator[BookService, None, None]: """Return an BookAsyncService or BookSyncService based on the current PK and session type""" if "mock_async_engine" in request.fixturenames: repo = service_module.BookAsyncMockService(session=any_session) elif "mock_sync_engine" in request.fixturenames: repo = service_module.BookSyncMockService(session=any_session) elif isinstance(any_session, AsyncSession): repo = service_module.BookAsyncService(session=any_session) else: repo = service_module.BookSyncService(session=any_session) yield cast(BookService, repo) @pytest.fixture() def slug_book_repo( any_session: AsyncSession | Session, repository_module: Any, request: FixtureRequest, ) -> Generator[SlugBookRepository, None, None]: """Return an SlugBookAsyncRepository or SlugBookSyncRepository based on the current PK and session type""" if "mock_async_engine" in request.fixturenames: repo = repository_module.SlugBookAsyncMockRepository(session=any_session) elif "mock_sync_engine" in request.fixturenames: repo = repository_module.SlugBookSyncMockRepository(session=any_session) elif isinstance(any_session, AsyncSession): repo = 
repository_module.SlugBookAsyncRepository(session=any_session) else: repo = repository_module.SlugBookSyncRepository(session=any_session) yield cast("SlugBookRepository", repo) @pytest.fixture() def slug_book_service( any_session: AsyncSession | Session, service_module: Any, request: FixtureRequest, ) -> Generator[SlugBookService, None, None]: """Return an SlugBookAsyncService or SlugBookSyncService based on the current PK and session type""" if "mock_async_engine" in request.fixturenames: svc = service_module.SlugBookAsyncMockService(session=any_session) elif "mock_sync_engine" in request.fixturenames: svc = service_module.SlugBookSyncMockService(session=any_session) elif isinstance(any_session, AsyncSession): svc = service_module.SlugBookAsyncService(session=any_session) else: svc = service_module.SlugBookSyncService(session=any_session) yield cast("SlugBookService", svc) @pytest.fixture() def tag_repo( any_session: AsyncSession | Session, repository_module: Any, request: FixtureRequest ) -> Generator[ItemRepository, None, None]: """Return an TagAsyncRepository or TagSyncRepository based on the current PK and session type""" if "mock_async_engine" in request.fixturenames: repo = repository_module.TagAsyncMockRepository(session=any_session) elif "mock_sync_engine" in request.fixturenames: repo = repository_module.TagSyncMockRepository(session=any_session) elif isinstance(any_session, AsyncSession): repo = repository_module.TagAsyncRepository(session=any_session) else: repo = repository_module.TagSyncRepository(session=any_session) yield cast(ItemRepository, repo) @pytest.fixture() def tag_service( any_session: AsyncSession | Session, service_module: Any, request: FixtureRequest ) -> Generator[TagService, None, None]: """Return an TagAsyncService or TagSyncService based on the current PK and session type""" if "mock_async_engine" in request.fixturenames: repo = service_module.TagAsyncMockService(session=any_session) elif "mock_sync_engine" in request.fixturenames: 
repo = service_module.TagSyncMockService(session=any_session) elif isinstance(any_session, AsyncSession): repo = service_module.TagAsyncService(session=any_session) else: repo = service_module.TagSyncService(session=any_session) yield cast(TagService, repo) @pytest.fixture() def item_repo( any_session: AsyncSession | Session, repository_module: Any, request: FixtureRequest ) -> Generator[ItemRepository, None, None]: """Return an ItemAsyncRepository or ItemSyncRepository based on the current PK and session type""" if "mock_async_engine" in request.fixturenames: repo = repository_module.ItemAsyncMockRepository(session=any_session) elif "mock_sync_engine" in request.fixturenames: repo = repository_module.ItemSyncMockRepository(session=any_session) elif isinstance(any_session, AsyncSession): repo = repository_module.ItemAsyncRepository(session=any_session) else: repo = repository_module.ItemSyncRepository(session=any_session) yield cast(ItemRepository, repo) @pytest.fixture() def item_service( any_session: AsyncSession | Session, service_module: Any, request: FixtureRequest ) -> Generator[ItemService, None, None]: """Return an ItemAsyncService or ItemSyncService based on the current PK and session type""" if "mock_async_engine" in request.fixturenames: repo = service_module.ItemAsyncMockService(session=any_session) elif "mock_sync_engine" in request.fixturenames: repo = service_module.ItemSyncMockService(session=any_session) elif isinstance(any_session, AsyncSession): repo = service_module.ItemAsyncService(session=any_session) else: repo = service_module.ItemSyncService(session=any_session) yield cast(ItemService, repo) @pytest.fixture() def model_with_fetched_value_repo( any_session: AsyncSession | Session, repository_module: Any, ) -> Generator[ModelWithFetchedValueRepository, None, None]: """Return an ModelWithFetchedValueAsyncRepository or ModelWithFetchedValueSyncRepository based on the current PK and session type """ if isinstance(any_session, AsyncSession): repo 
= repository_module.ModelWithFetchedValueAsyncRepository(session=any_session) else: repo = repository_module.ModelWithFetchedValueSyncRepository(session=any_session) yield cast(ModelWithFetchedValueRepository, repo) async def test_repo_count_method(author_repo: AnyAuthorRepository) -> None: """Test SQLAlchemy count. Args: author_repo: The author mock repository """ assert await maybe_async(author_repo.count()) == 2 async def test_repo_count_method_with_filters(raw_authors: RawRecordData, author_repo: AnyAuthorRepository) -> None: """Test SQLAlchemy count with filters. Args: author_repo: The author mock repository """ if isinstance(author_repo, (SQLAlchemyAsyncMockRepository, SQLAlchemySyncMockRepository)): assert ( await maybe_async( author_repo.count( **{author_repo.model_type.name.key: raw_authors[0]["name"]}, ), ) == 1 ) else: assert ( await maybe_async( author_repo.count( author_repo.model_type.name == raw_authors[0]["name"], ), ) == 1 ) async def test_repo_list_and_count_method(raw_authors: RawRecordData, author_repo: AnyAuthorRepository) -> None: """Test SQLAlchemy list with count in asyncpg. Args: raw_authors: list of authors pre-seeded into the mock repository author_repo: The author mock repository """ exp_count = len(raw_authors) collection, count = await maybe_async(author_repo.list_and_count()) assert exp_count == count assert isinstance(collection, list) assert len(collection) == exp_count async def test_repo_list_and_count_method_with_filters( raw_authors: RawRecordData, author_repo: AnyAuthorRepository, ) -> None: """Test SQLAlchemy list with count and filters in asyncpg. 
Args: raw_authors: list of authors pre-seeded into the mock repository author_repo: The author mock repository """ exp_name = raw_authors[0]["name"] exp_id = raw_authors[0]["id"] if isinstance(author_repo, (SQLAlchemyAsyncMockRepository, SQLAlchemySyncMockRepository)): collection, count = await maybe_async( author_repo.list_and_count(**{author_repo.model_type.name.key: exp_name}), ) else: collection, count = await maybe_async( author_repo.list_and_count(author_repo.model_type.name == exp_name), ) assert count == 1 assert isinstance(collection, list) assert len(collection) == 1 assert str(collection[0].id) == str(exp_id) assert collection[0].name == exp_name async def test_repo_list_and_count_basic_method(raw_authors: RawRecordData, author_repo: AnyAuthorRepository) -> None: """Test SQLAlchemy basic list with count in asyncpg. Args: raw_authors: list of authors pre-seeded into the mock repository author_repo: The author mock repository """ exp_count = len(raw_authors) collection, count = await maybe_async(author_repo.list_and_count(count_with_window_function=True)) assert exp_count == count assert isinstance(collection, list) assert len(collection) == exp_count async def test_repo_list_and_count_method_empty(book_repo: BookRepository) -> None: collection, count = await maybe_async(book_repo.list_and_count()) assert count == 0 assert isinstance(collection, list) assert len(collection) == 0 @pytest.fixture() def frozen_datetime() -> Generator[Coordinates, None, None]: with travel(datetime.datetime.utcnow, tick=False) as frozen: # pyright: ignore[reportDeprecated,reportCallIssue] yield frozen async def test_repo_created_updated( frozen_datetime: Coordinates, author_repo: AnyAuthorRepository, book_model: type[AnyBook], repository_pk_type: RepositoryPKType, ) -> None: from advanced_alchemy.config.asyncio import SQLAlchemyAsyncConfig from advanced_alchemy.config.sync import SQLAlchemySyncConfig if isinstance(author_repo, (SQLAlchemyAsyncMockRepository, 
SQLAlchemySyncMockRepository)): pytest.skip(f"{SQLAlchemyAsyncMockRepository.__name__} does not update created/updated columns") if isinstance(author_repo, SQLAlchemyAsyncRepository): # pyright: ignore[reportUnnecessaryIsInstance] config = SQLAlchemyAsyncConfig( engine_instance=author_repo.session.get_bind(), # type: ignore[arg-type] ) else: config = SQLAlchemySyncConfig( # type: ignore[unreachable] engine_instance=author_repo.session.get_bind(), ) config.__post_init__() author = await maybe_async(author_repo.get_one(name="Agatha Christie")) original_update_dt = author.updated_at assert author.created_at is not None assert author.updated_at is not None frozen_datetime.shift(delta=datetime.timedelta(seconds=5)) # looks odd, but we want to get correct type checking here if repository_pk_type == "uuid": author = cast(models_uuid.UUIDAuthor, author) book_model = cast("type[models_uuid.UUIDBook]", book_model) else: author = cast(models_bigint.BigIntAuthor, author) book_model = cast("type[models_bigint.BigIntBook]", book_model) author.name = "Altered" author = await maybe_async(author_repo.update(author)) assert author.updated_at > original_update_dt # test nested author.books.append(book_model(title="Testing")) # type: ignore[arg-type] author = await maybe_async(author_repo.update(author)) assert author.updated_at > original_update_dt # This test does not work when run in group for some reason. # If you run individually, it'll pass. 
@xfail async def test_repo_created_updated_no_listener( frozen_datetime: Coordinates, author_repo: AuthorRepository, book_model: type[AnyBook], repository_pk_type: RepositoryPKType, ) -> None: from sqlalchemy import event from sqlalchemy.exc import InvalidRequestError from advanced_alchemy._listeners import touch_updated_timestamp from advanced_alchemy.config.asyncio import SQLAlchemyAsyncConfig from advanced_alchemy.config.sync import SQLAlchemySyncConfig with contextlib.suppress(InvalidRequestError): event.remove(Session, "before_flush", touch_updated_timestamp) if isinstance(author_repo, SQLAlchemyAsyncRepository): # pyright: ignore[reportUnnecessaryIsInstance] config = SQLAlchemyAsyncConfig( enable_touch_updated_timestamp_listener=False, engine_instance=author_repo.session.get_bind(), # type: ignore[arg-type] ) else: config = SQLAlchemySyncConfig( # type: ignore[unreachable] enable_touch_updated_timestamp_listener=False, engine_instance=author_repo.session.get_bind(), ) config.__post_init__() author = await maybe_async(author_repo.get_one(name="Agatha Christie")) original_update_dt = author.updated_at assert author.created_at is not None assert author.updated_at is not None frozen_datetime.shift(delta=datetime.timedelta(seconds=5)) # looks odd, but we want to get correct type checking here if repository_pk_type == "uuid": author = cast(models_uuid.UUIDAuthor, author) book_model = cast("type[models_uuid.UUIDBook]", book_model) else: author = cast(models_bigint.BigIntAuthor, author) book_model = cast("type[models_bigint.BigIntBook]", book_model) author.books.append(book_model(title="Testing")) # type: ignore[arg-type] author = await maybe_async(author_repo.update(author)) assert author.updated_at == original_update_dt async def test_repo_list_method( raw_authors_uuid: RawRecordData, author_repo: AnyAuthorRepository, ) -> None: exp_count = len(raw_authors_uuid) collection = await maybe_async(author_repo.list()) assert isinstance(collection, list) assert 
len(collection) == exp_count async def test_repo_list_method_with_filters(raw_authors: RawRecordData, author_repo: AnyAuthorRepository) -> None: exp_name = raw_authors[0]["name"] exp_id = raw_authors[0]["id"] if isinstance(author_repo, (SQLAlchemyAsyncMockRepository, SQLAlchemySyncMockRepository)): collection = await maybe_async( author_repo.list(**{author_repo.model_type.id.key: exp_id, author_repo.model_type.name.key: exp_name}), # type: ignore[union-attr] ) else: collection = await maybe_async( author_repo.list( and_(author_repo.model_type.id == exp_id, author_repo.model_type.name == exp_name), ), ) assert isinstance(collection, list) assert len(collection) == 1 assert str(collection[0].id) == str(exp_id) assert collection[0].name == exp_name async def test_repo_add_method( raw_authors: RawRecordData, author_repo: AnyAuthorRepository, author_model: AuthorModel, ) -> None: exp_count = len(raw_authors) + 1 new_author = author_model(name="Testing", dob=datetime.datetime.now().date()) obj = await maybe_async(author_repo.add(new_author)) count = await maybe_async(author_repo.count()) assert exp_count == count assert isinstance(obj, author_model) assert new_author.name == obj.name assert obj.id is not None async def test_repo_add_many_method( raw_authors: RawRecordData, author_repo: AnyAuthorRepository, author_model: AuthorModel, ) -> None: exp_count = len(raw_authors) + 2 objs = await maybe_async( author_repo.add_many( [ author_model(name="Testing 2", dob=datetime.datetime.now().date()), author_model(name="Cody", dob=datetime.datetime.now().date()), ], ), ) count = await maybe_async(author_repo.count()) assert exp_count == count assert isinstance(objs, list) assert len(objs) == 2 for obj in objs: assert obj.id is not None assert obj.name in {"Testing 2", "Cody"} async def test_repo_update_many_method(author_repo: AnyAuthorRepository) -> None: if author_repo._dialect.name.startswith("spanner") and os.environ.get("SPANNER_EMULATOR_HOST"): # pyright: 
ignore[reportPrivateUsage] pytest.skip("Skipped on emulator") objs = await maybe_async(author_repo.list()) for idx, obj in enumerate(objs): obj.name = f"Update {idx}" objs = await maybe_async(author_repo.update_many(objs)) for obj in objs: assert obj.name.startswith("Update") async def test_repo_exists_method(author_repo: AnyAuthorRepository, first_author_id: Any) -> None: exists = await maybe_async(author_repo.exists(id=first_author_id)) assert exists async def test_repo_exists_method_with_filters( raw_authors: RawRecordData, author_repo: AnyAuthorRepository, first_author_id: Any, ) -> None: if isinstance(author_repo, (SQLAlchemyAsyncMockRepository, SQLAlchemySyncMockRepository)): exists = await maybe_async( author_repo.exists( **{author_repo.model_type.name.key: raw_authors[0]["name"]}, id=first_author_id, ), ) else: exists = await maybe_async( author_repo.exists( author_repo.model_type.name == raw_authors[0]["name"], id=first_author_id, ), ) assert exists async def test_repo_update_method(author_repo: AnyAuthorRepository, first_author_id: Any) -> None: obj = await maybe_async(author_repo.get(first_author_id)) obj.name = "Updated Name" updated_obj = await maybe_async(author_repo.update(obj)) assert updated_obj.name == obj.name async def test_repo_delete_method(author_repo: AnyAuthorRepository, first_author_id: Any) -> None: obj = await maybe_async(author_repo.delete(first_author_id)) assert str(obj.id) == str(first_author_id) async def test_repo_delete_many_method(author_repo: AnyAuthorRepository, author_model: AuthorModel) -> None: data_to_insert = [author_model(name="author name %d" % chunk) for chunk in range(2000)] _ = await maybe_async(author_repo.add_many(data_to_insert)) all_objs = await maybe_async(author_repo.list()) ids_to_delete = [existing_obj.id for existing_obj in all_objs] objs = await maybe_async(author_repo.delete_many(ids_to_delete)) await maybe_async(author_repo.session.commit()) assert len(objs) > 0 data, count = await 
maybe_async(author_repo.list_and_count()) assert data == [] assert count == 0 async def test_repo_get_method(author_repo: AnyAuthorRepository, first_author_id: Any) -> None: obj = await maybe_async(author_repo.get(first_author_id)) assert obj.name == "Agatha Christie" async def test_repo_get_one_or_none_method(author_repo: AnyAuthorRepository, first_author_id: Any) -> None: obj = await maybe_async(author_repo.get_one_or_none(id=first_author_id)) assert obj is not None assert obj.name == "Agatha Christie" none_obj = await maybe_async(author_repo.get_one_or_none(name="I don't exist")) assert none_obj is None async def test_repo_get_one_method(author_repo: AnyAuthorRepository, first_author_id: Any) -> None: obj = await maybe_async(author_repo.get_one(id=first_author_id)) assert obj is not None assert obj.name == "Agatha Christie" with pytest.raises(RepositoryError): _ = await author_repo.get_one(name="I don't exist") async def test_repo_get_or_upsert_method(author_repo: AnyAuthorRepository, first_author_id: Any) -> None: existing_obj, existing_created = await maybe_async(author_repo.get_or_upsert(name="Agatha Christie")) assert str(existing_obj.id) == str(first_author_id) assert existing_created is False new_obj, new_created = await maybe_async(author_repo.get_or_upsert(name="New Author")) assert new_obj.id is not None assert new_obj.name == "New Author" assert new_created async def test_repo_get_or_upsert_match_filter(author_repo: AnyAuthorRepository, first_author_id: Any) -> None: now = datetime.datetime.now() existing_obj, existing_created = await maybe_async( author_repo.get_or_upsert(match_fields="name", name="Agatha Christie", dob=now.date()), ) assert str(existing_obj.id) == str(first_author_id) assert existing_obj.dob == now.date() assert existing_created is False async def test_repo_get_or_upsert_match_filter_no_upsert( author_repo: AnyAuthorRepository, first_author_id: Any, ) -> None: now = datetime.datetime.now() existing_obj, existing_created = await 
maybe_async( author_repo.get_or_upsert(match_fields="name", upsert=False, name="Agatha Christie", dob=now.date()), ) assert str(existing_obj.id) == str(first_author_id) assert existing_obj.dob != now.date() assert existing_created is False async def test_repo_get_and_update(author_repo: AnyAuthorRepository, first_author_id: Any) -> None: existing_obj, existing_updated = await maybe_async( author_repo.get_and_update(name="Agatha Christie"), ) assert str(existing_obj.id) == str(first_author_id) assert existing_updated is False async def test_repo_get_and_upsert_match_filter(author_repo: AnyAuthorRepository, first_author_id: Any) -> None: now = datetime.datetime.now() with pytest.raises(NotFoundError): _ = await maybe_async( author_repo.get_and_update(match_fields="name", name="Agatha Christie123", dob=now.date()), ) with pytest.raises(NotFoundError): _ = await maybe_async( author_repo.get_and_update(name="Agatha Christie123"), ) async def test_repo_upsert_method( author_repo: AnyAuthorRepository, first_author_id: Any, author_model: AuthorModel, new_pk_id: Any, ) -> None: existing_obj = await maybe_async(author_repo.get_one(name="Agatha Christie")) existing_obj.name = "Agatha C." upsert_update_obj = await maybe_async(author_repo.upsert(existing_obj)) assert str(upsert_update_obj.id) == str(first_author_id) assert upsert_update_obj.name == "Agatha C." 
upsert_insert_obj = await maybe_async(author_repo.upsert(author_model(name="An Author"))) assert upsert_insert_obj.id is not None assert upsert_insert_obj.name == "An Author" # ensures that it still works even if the ID is added before insert upsert2_insert_obj = await maybe_async(author_repo.upsert(author_model(id=new_pk_id, name="Another Author"))) assert upsert2_insert_obj.id is not None assert upsert2_insert_obj.name == "Another Author" _ = await maybe_async(author_repo.get_one(name="Leo Tolstoy")) # ensures that it still works even if the ID isn't set on an existing key new_dob = datetime.datetime.strptime("2028-09-09", "%Y-%m-%d").date() upsert3_update_obj = await maybe_async( author_repo.upsert( author_model(name="Leo Tolstoy", dob=new_dob), match_fields=["name"], ), ) if not isinstance(author_repo, (SQLAlchemyAsyncMockRepository, SQLAlchemySyncMockRepository)): assert upsert3_update_obj.id in {UUID("5ef29f3c-3560-4d15-ba6b-a2e5c721e4d2"), 2024} assert upsert3_update_obj.name == "Leo Tolstoy" assert upsert3_update_obj.dob == new_dob async def test_repo_upsert_many_method( author_repo: AnyAuthorRepository, author_model: AuthorModel, ) -> None: if author_repo._dialect.name.startswith("spanner") and os.environ.get("SPANNER_EMULATOR_HOST"): # pyright: ignore[reportPrivateUsage] pytest.skip( "Skipped on emulator. See the following: https://github.com/GoogleCloudPlatform/cloud-spanner-emulator/issues/73", ) existing_obj = await maybe_async(author_repo.get_one(name="Agatha Christie")) existing_obj.name = "Agatha C." 
upsert_update_objs = await maybe_async( author_repo.upsert_many( [ existing_obj, author_model(name="Inserted Author"), author_model(name="Custom Author"), ], ), ) assert len(upsert_update_objs) == 3 assert upsert_update_objs[0].id is not None assert upsert_update_objs[0].name in ("Agatha C.", "Inserted Author", "Custom Author") assert upsert_update_objs[1].id is not None assert upsert_update_objs[1].name in ("Agatha C.", "Inserted Author", "Custom Author") assert upsert_update_objs[2].id is not None assert upsert_update_objs[2].name in ("Agatha C.", "Inserted Author", "Custom Author") async def test_repo_upsert_many_method_match( author_repo: AnyAuthorRepository, author_model: AuthorModel, ) -> None: if author_repo._dialect.name.startswith("spanner") and os.environ.get("SPANNER_EMULATOR_HOST"): # pyright: ignore[reportPrivateUsage] pytest.skip( "Skipped on emulator. See the following: https://github.com/GoogleCloudPlatform/cloud-spanner-emulator/issues/73", ) existing_obj = await maybe_async(author_repo.get_one(name="Agatha Christie")) existing_obj.name = "Agatha C." upsert_update_objs = await maybe_async( author_repo.upsert_many( data=[ existing_obj, author_model(name="Inserted Author"), author_model(name="Custom Author"), ], match_fields=["id"], ), ) assert len(upsert_update_objs) == 3 async def test_repo_upsert_many_method_match_non_id( author_repo: AnyAuthorRepository, author_model: AuthorModel, ) -> None: if author_repo._dialect.name.startswith("spanner") and os.environ.get("SPANNER_EMULATOR_HOST"): # pyright: ignore[reportPrivateUsage] pytest.skip( "Skipped on emulator. See the following: https://github.com/GoogleCloudPlatform/cloud-spanner-emulator/issues/73", ) existing_count = await maybe_async(author_repo.count()) existing_obj = await maybe_async(author_repo.get_one(name="Agatha Christie")) existing_obj.name = "Agatha C." 
    _ = await maybe_async(
        author_repo.upsert_many(
            data=[
                existing_obj,
                author_model(name="Inserted Author"),
                author_model(name="Custom Author"),
            ],
            match_fields=["name"],
        ),
    )
    existing_count_now = await maybe_async(author_repo.count())
    # two of the three rows were inserts, so the total row count must grow
    assert existing_count_now > existing_count


async def test_repo_upsert_many_method_match_not_on_input(
    author_repo: AnyAuthorRepository,
    author_model: AuthorModel,
) -> None:
    """``upsert_many`` when the match field (``id``) is not set on the new inputs."""
    if author_repo._dialect.name.startswith("spanner") and os.environ.get("SPANNER_EMULATOR_HOST"):  # pyright: ignore[reportPrivateUsage]
        pytest.skip(
            "Skipped on emulator. See the following: https://github.com/GoogleCloudPlatform/cloud-spanner-emulator/issues/73",
        )
    existing_count = await maybe_async(author_repo.count())
    existing_obj = await maybe_async(author_repo.get_one(name="Agatha Christie"))
    existing_obj.name = "Agatha C."
    _ = await maybe_async(
        author_repo.upsert_many(
            data=[
                existing_obj,
                author_model(name="Inserted Author"),
                author_model(name="Custom Author"),
            ],
            match_fields=["id"],
        ),
    )
    existing_count_now = await maybe_async(author_repo.count())
    assert existing_count_now > existing_count


async def test_repo_filter_before_after(author_repo: AnyAuthorRepository) -> None:
    """``BeforeAfter`` filter on ``created_at`` in both directions."""
    before_filter = BeforeAfter(
        field_name="created_at",
        before=datetime.datetime.strptime("2023-05-01T00:00:00", "%Y-%m-%dT%H:%M:%S").astimezone(datetime.timezone.utc),
        after=None,
    )
    existing_obj = await maybe_async(author_repo.list(before_filter))
    assert existing_obj[0].name == "Leo Tolstoy"
    after_filter = BeforeAfter(
        field_name="created_at",
        after=datetime.datetime.strptime("2023-03-01T00:00:00", "%Y-%m-%dT%H:%M:%S").astimezone(datetime.timezone.utc),
        before=None,
    )
    existing_obj = await maybe_async(author_repo.list(after_filter))
    assert existing_obj[0].name == "Agatha Christie"


async def test_repo_filter_on_before_after(author_repo: AnyAuthorRepository) -> None:
    """Inclusive ``OnBeforeAfter`` filter on ``created_at``."""
    before_filter = OnBeforeAfter(
        field_name="created_at",
        on_or_before=datetime.datetime.strptime("2023-05-01T00:00:00",
                                             "%Y-%m-%dT%H:%M:%S").astimezone(
            datetime.timezone.utc
        ),
        on_or_after=None,
    )
    existing_obj = await maybe_async(
        author_repo.list(*[before_filter, OrderBy(field_name="created_at", sort_order="desc")]),  # type: ignore
    )
    assert existing_obj[0].name == "Agatha Christie"
    after_filter = OnBeforeAfter(
        field_name="created_at",
        on_or_after=datetime.datetime.strptime("2023-03-01T00:00:00", "%Y-%m-%dT%H:%M:%S").astimezone(
            datetime.timezone.utc
        ),
        on_or_before=None,
    )
    existing_obj = await maybe_async(
        author_repo.list(*[after_filter, OrderBy(field_name="created_at", sort_order="desc")]),  # type: ignore
    )
    assert existing_obj[0].name == "Agatha Christie"


async def test_repo_filter_search(author_repo: AnyAuthorRepository) -> None:
    """``SearchFilter`` (LIKE/ILIKE) on a single field."""
    existing_obj = await maybe_async(author_repo.list(SearchFilter(field_name="name", value="gath", ignore_case=False)))
    assert existing_obj[0].name == "Agatha Christie"
    existing_obj = await maybe_async(author_repo.list(SearchFilter(field_name="name", value="GATH", ignore_case=False)))
    # sqlite & mysql are case insensitive by default with a `LIKE`
    dialect = author_repo.session.bind.dialect.name if author_repo.session.bind else "default"
    expected_objs = 1 if dialect in {"sqlite", "mysql", "mssql"} else 0
    assert len(existing_obj) == expected_objs
    existing_obj = await maybe_async(author_repo.list(SearchFilter(field_name="name", value="GATH", ignore_case=True)))
    assert existing_obj[0].name == "Agatha Christie"


async def test_repo_filter_search_multi_field(author_repo: AnyAuthorRepository) -> None:
    """``SearchFilter`` across multiple fields at once."""
    existing_obj = await maybe_async(
        author_repo.list(SearchFilter(field_name={"name", "string_field"}, value="gath", ignore_case=False)),
    )
    assert existing_obj[0].name == "Agatha Christie"
    existing_obj = await maybe_async(
        author_repo.list(SearchFilter(field_name={"name", "string_field"}, value="GATH", ignore_case=False)),
    )
    # sqlite & mysql are case insensitive by default with a `LIKE`
    dialect = author_repo.session.bind.dialect.name if author_repo.session.bind else "default"
    expected_objs = 1 if dialect in {"sqlite", "mysql", "mssql"} else 0
    assert len(existing_obj) == expected_objs
    existing_obj = await maybe_async(
        author_repo.list(SearchFilter(field_name={"name", "string_field"}, value="GATH", ignore_case=True)),
    )
    assert existing_obj[0].name == "Agatha Christie"


async def test_repo_filter_not_in_search(author_repo: AnyAuthorRepository) -> None:
    """Negated ``NotInSearchFilter`` on the ``name`` field."""
    existing_obj = await maybe_async(
        author_repo.list(NotInSearchFilter(field_name="name", value="gath", ignore_case=False)),
    )
    assert existing_obj[0].name == "Leo Tolstoy"
    existing_obj = await maybe_async(
        author_repo.list(NotInSearchFilter(field_name="name", value="GATH", ignore_case=False)),
    )
    # sqlite & mysql are case insensitive by default with a `LIKE`
    dialect = author_repo.session.bind.dialect.name if author_repo.session.bind else "default"
    expected_objs = 1 if dialect in {"sqlite", "mysql", "mssql"} else 2
    assert len(existing_obj) == expected_objs
    # NOTE(review): this case passes a multi-field set although the test targets a
    # single field -- looks copied from the multi-field test below; confirm intent.
    existing_obj = await maybe_async(
        author_repo.list(NotInSearchFilter(field_name={"name", "string_field"}, value="GATH", ignore_case=True)),
    )
    assert existing_obj[0].name == "Leo Tolstoy"


async def test_repo_filter_not_in_search_multi_field(author_repo: AnyAuthorRepository) -> None:
    """``NotInSearchFilter`` across multiple fields at once."""
    existing_obj = await maybe_async(
        author_repo.list(NotInSearchFilter(field_name={"name", "string_field"}, value="gath", ignore_case=False)),
    )
    assert existing_obj[0].name == "Leo Tolstoy"
    existing_obj = await maybe_async(
        author_repo.list(NotInSearchFilter(field_name={"name", "string_field"}, value="GATH", ignore_case=False)),
    )
    # sqlite & mysql are case insensitive by default with a `LIKE`
    dialect = author_repo.session.bind.dialect.name if author_repo.session.bind else "default"
    expected_objs = 1 if dialect in {"sqlite", "mysql", "mssql"} else 2
    assert len(existing_obj) == expected_objs
    existing_obj = await maybe_async(
        author_repo.list(NotInSearchFilter(field_name={"name", "string_field"}, value="GATH", ignore_case=True)),
    )
    assert existing_obj[0].name == "Leo Tolstoy"


async def test_repo_filter_order_by(author_repo: AnyAuthorRepository) -> None:
    """``OrderBy`` filter in both sort directions."""
    existing_obj = await maybe_async(author_repo.list(OrderBy(field_name="created_at", sort_order="desc")))
    assert existing_obj[0].name == "Agatha Christie"
    existing_obj = await maybe_async(author_repo.list(OrderBy(field_name="created_at", sort_order="asc")))
    assert existing_obj[0].name == "Leo Tolstoy"


async def test_repo_filter_collection(
    author_repo: AnyAuthorRepository,
    existing_author_ids: Generator[Any, None, None],
) -> None:
    """``CollectionFilter`` (IN clause) on ``id``."""
    first_author_id = next(existing_author_ids)
    second_author_id = next(existing_author_ids)
    existing_obj = await maybe_async(author_repo.list(CollectionFilter(field_name="id", values=[first_author_id])))
    assert existing_obj[0].name == "Agatha Christie"
    existing_obj = await maybe_async(author_repo.list(CollectionFilter(field_name="id", values=[second_author_id])))
    assert existing_obj[0].name == "Leo Tolstoy"


async def test_repo_filter_no_obj_collection(
    author_repo: AnyAuthorRepository,
) -> None:
    """An empty ``values`` list should match no rows."""
    no_obj = await maybe_async(author_repo.list(CollectionFilter[str](field_name="id", values=[])))
    assert no_obj == []


async def test_repo_filter_null_collection(
    author_repo: AnyAuthorRepository,
) -> None:
    """``values=None`` disables the filter, so rows are still returned."""
    no_obj = await maybe_async(author_repo.list(CollectionFilter[str](field_name="id", values=None)))
    assert len(no_obj) > 0


async def test_repo_filter_not_in_collection(
    author_repo: AnyAuthorRepository,
    existing_author_ids: Generator[Any, None, None],
) -> None:
    """``NotInCollectionFilter`` (NOT IN clause) on ``id``."""
    first_author_id = next(existing_author_ids)
    second_author_id = next(existing_author_ids)
    existing_obj = await maybe_async(author_repo.list(NotInCollectionFilter(field_name="id", values=[first_author_id])))
    assert existing_obj[0].name == "Leo Tolstoy"
    existing_obj = await maybe_async(
        author_repo.list(NotInCollectionFilter(field_name="id", values=[second_author_id])),
    )
    assert existing_obj[0].name == "Agatha Christie"


async def test_repo_filter_not_in_no_obj_collection(
    author_repo: AnyAuthorRepository,
) -> None:
    """An empty ``values`` list on the negated filter should exclude nothing."""
    existing_obj = await maybe_async(author_repo.list(NotInCollectionFilter[str](field_name="id", values=[])))
    assert len(existing_obj) > 0


async def test_repo_filter_not_in_null_collection(
    author_repo: AnyAuthorRepository,
) -> None:
    """``values=None`` disables the negated filter, so rows are still returned."""
    existing_obj = await maybe_async(author_repo.list(NotInCollectionFilter[str](field_name="id", values=None)))
    assert len(existing_obj) > 0


async def test_repo_json_methods(
    raw_rules_uuid: RawRecordData,
    rule_repo: RuleRepository,
    rule_service: RuleService,
    rule_model: RuleModel,
) -> None:
    """Add/update/get_or_upsert round-trips for a model with a JSON column.

    NOTE(review): ``rule_service`` is requested but never used -- presumably kept
    for fixture side effects; confirm.
    """
    if rule_repo._dialect.name.startswith("spanner") and os.environ.get("SPANNER_EMULATOR_HOST"):  # pyright: ignore[reportPrivateUsage]
        pytest.skip("Skipped on emulator")
    exp_count = len(raw_rules_uuid) + 1
    new_rule = rule_model(name="Testing", config={"an": "object"})
    obj = await maybe_async(rule_repo.add(new_rule))
    count = await maybe_async(rule_repo.count())
    assert exp_count == count
    assert isinstance(obj, rule_model)
    assert new_rule.name == obj.name
    assert new_rule.config == obj.config  # pyright: ignore
    assert obj.id is not None
    obj.config = {"the": "update"}
    updated = await maybe_async(rule_repo.update(obj))
    assert obj.config == updated.config  # pyright: ignore
    get_obj, get_created = await maybe_async(
        rule_repo.get_or_upsert(match_fields=["name"], name="Secondary loading rule.", config={"another": "object"}),
    )
    assert get_created is False
    assert get_obj.id is not None
    assert get_obj.config == {"another": "object"}  # pyright: ignore
    new_obj, new_created = await maybe_async(
        rule_repo.get_or_upsert(match_fields=["name"], name="New rule.", config={"new": "object"}),
    )
    assert new_created is True
    assert new_obj.id is not None
    assert new_obj.config == {"new": "object"}  # pyright: ignore


async def test_repo_fetched_value(
    model_with_fetched_value_repo: ModelWithFetchedValueRepository,
    model_with_fetched_value: ModelWithFetchedValue,
    request: FixtureRequest,
) -> None:
    """A server-side fetched ``updated`` column should change on update."""
    if any(fixture in request.fixturenames for fixture in ["mock_async_engine",
"mock_sync_engine"]): pytest.skip(f"{SQLAlchemyAsyncMockRepository.__name__} does not works with fetched values") obj = await maybe_async(model_with_fetched_value_repo.add(model_with_fetched_value(val=1))) first_time = obj.updated assert first_time is not None assert obj.val == 1 await maybe_async(model_with_fetched_value_repo.session.commit()) await maybe_async(asyncio.sleep(2)) obj.val = 2 obj = await maybe_async(model_with_fetched_value_repo.update(obj)) assert obj.updated is not None assert obj.val == 2 assert obj.updated != first_time async def test_lazy_load( item_repo: ItemRepository, tag_repo: TagRepository, item_model: ItemModel, tag_model: TagModel, ) -> None: if getattr(tag_repo, "__collection__", None) is not None: pytest.skip("Skipping lazy load testing on Mock repositories.") tag_obj = await maybe_async(tag_repo.add(tag_model(name="A new tag"))) assert tag_obj new_items = await maybe_async( item_repo.add_many([item_model(name="The first item"), item_model(name="The second item")]), ) await maybe_async(item_repo.session.commit()) await maybe_async(tag_repo.session.commit()) assert len(new_items) > 0 first_item_id = new_items[0].id new_items[1].id update_data = { "name": "A modified Name", "tag_names": ["A new tag"], "id": first_item_id, } tags_to_add = await maybe_async(tag_repo.list(CollectionFilter("name", update_data.pop("tag_names", [])))) # type: ignore assert len(tags_to_add) > 0 # pyright: ignore assert tags_to_add[0].id is not None # pyright: ignore update_data["tags"] = tags_to_add # type: ignore[assignment] updated_obj = await maybe_async(item_repo.update(item_model(**update_data), auto_refresh=False)) await maybe_async(item_repo.session.commit()) assert len(updated_obj.tags) > 0 assert updated_obj.tags[0].name == "A new tag" async def test_repo_health_check(author_repo: AnyAuthorRepository) -> None: healthy = await maybe_async(author_repo.check_health(author_repo.session)) assert healthy async def test_repo_custom_statement(author_repo: 
                                      AnyAuthorRepository, author_service: AuthorService) -> None:
    """Test Repo with custom statement

    Args:
        author_repo: The author mock repository
        author_service: The author service whose type is re-instantiated with a custom statement
    """
    service_type = type(author_service)
    new_service = service_type(session=author_repo.session, statement=select(author_repo.model_type))
    assert await maybe_async(new_service.count()) == 2


async def test_repo_error_messages(author_repo: AnyAuthorRepository, first_author_id: Any) -> None:
    """Adding a duplicate primary key should raise ``IntegrityError``."""
    if isinstance(author_repo, (SQLAlchemyAsyncMockRepository, SQLAlchemySyncMockRepository)):
        pytest.skip("Skipping mock repo does not works with fetched values")
    obj = await maybe_async(author_repo.get_one(id=first_author_id))
    assert obj is not None
    assert obj.name == "Agatha Christie"
    with pytest.raises(IntegrityError):
        _ = await author_repo.add(author_repo.model_type(id=first_author_id, name="Agatha Christie"))


async def test_repo_encrypted_methods(
    raw_secrets_uuid: RawRecordData,
    secret_repo: SecretRepository,
    raw_secrets: RawRecordData,
    first_secret_id: Any,
    secret_model: SecretModel,
) -> None:
    """CRUD round-trips for a model with encrypted string columns."""
    existing_obj = await maybe_async(secret_repo.get(first_secret_id))
    assert existing_obj.secret == raw_secrets[0]["secret"]
    assert existing_obj.long_secret == raw_secrets[0]["long_secret"]
    exp_count = len(raw_secrets_uuid) + 1
    new_secret = secret_model(secret="hidden data", long_secret="another longer secret")
    obj = await maybe_async(secret_repo.add(new_secret))
    count = await maybe_async(secret_repo.count())
    assert exp_count == count
    assert isinstance(obj, secret_model)
    assert new_secret.secret == obj.secret
    assert new_secret.long_secret == obj.long_secret
    assert obj.id is not None
    obj.secret = "new secret value"
    obj.long_secret = "new long secret value"
    updated = await maybe_async(secret_repo.update(obj))
    assert obj.secret == updated.secret
    assert obj.long_secret == updated.long_secret


async def test_encrypted_string_length_validation(
    request: FixtureRequest, secret_repo: SecretRepository, secret_model: SecretModel
) -> None:
    """Test that
    EncryptedString enforces length validation.

    Args:
        request: The current test request (used to detect mock engines)
        secret_repo: The secret repository
        secret_model: The secret model class
    """
    if any(fixture in request.fixturenames for fixture in ["mock_async_engine", "mock_sync_engine"]):
        pytest.skip(
            f"{SQLAlchemyAsyncMockRepository.__name__} does not works with client side validated encrypted strings lengths"
        )
    # Test valid length
    valid_secret = "AAAAAAAAA"
    secret = secret_model(secret="test", long_secret="test", length_validated_secret=valid_secret)
    saved_secret = await maybe_async(secret_repo.add(secret))
    assert saved_secret.length_validated_secret == valid_secret

    # Test exceeding length
    long_secret = "A" * 51  # Exceeds 50 character limit
    with pytest.raises(IntegrityError) as exc_info:
        secret = secret_model(secret="test", long_secret="test", length_validated_secret=long_secret)
        await maybe_async(secret_repo.add(secret))
    assert exc_info.value.__class__.__name__ == "IntegrityError"
    assert "exceeds maximum unencrypted length" in str(exc_info.value.detail)


# service tests
async def test_service_filter_search(author_service: AuthorService) -> None:
    """``SearchFilter`` through the service layer."""
    existing_obj = await maybe_async(
        author_service.list(SearchFilter(field_name="name", value="gath", ignore_case=False)),
    )
    assert existing_obj[0].name == "Agatha Christie"
    existing_obj = await maybe_async(
        author_service.list(SearchFilter(field_name="name", value="GATH", ignore_case=False)),
    )
    # sqlite & mysql are case insensitive by default with a `LIKE`
    dialect = (
        author_service.repository.session.bind.dialect.name if author_service.repository.session.bind else "default"
    )
    expected_objs = 1 if dialect in {"sqlite", "mysql", "mssql"} else 0
    assert len(existing_obj) == expected_objs
    existing_obj = await maybe_async(
        author_service.list(SearchFilter(field_name="name", value="GATH", ignore_case=True)),
    )
    assert existing_obj[0].name == "Agatha Christie"


async def test_service_count_method(author_service: AuthorService) -> None:
    """Test SQLAlchemy count.
    Args:
        author_service: The author mock repository
    """
    assert await maybe_async(author_service.count()) == 2


async def test_service_count_method_with_filters(raw_authors: RawRecordData, author_service: AuthorService) -> None:
    """Test SQLAlchemy count with filters.

    Args:
        raw_authors: list of authors pre-seeded into the mock repository
        author_service: The author mock repository
    """
    if issubclass(author_service.repository_type, (SQLAlchemyAsyncMockRepository, SQLAlchemySyncMockRepository)):  # type: ignore[unreachable,unused-ignore]
        assert (
            await maybe_async(
                author_service.count(
                    **{author_service.repository.model_type.name.key: raw_authors[0]["name"]},
                ),
            )
            == 1
        )
    else:
        assert (
            await maybe_async(
                author_service.count(
                    author_service.repository.model_type.name == raw_authors[0]["name"],
                ),
            )
            == 1
        )


async def test_service_list_and_count_method(raw_authors: RawRecordData, author_service: AuthorService) -> None:
    """Test SQLAlchemy list with count in asyncpg.

    Args:
        raw_authors: list of authors pre-seeded into the mock repository
        author_service: The author mock repository
    """
    exp_count = len(raw_authors)
    collection, count = await maybe_async(author_service.list_and_count())
    assert exp_count == count
    assert isinstance(collection, list)
    assert len(collection) == exp_count


async def test_service_list_and_count_method_with_filters(
    raw_authors: RawRecordData,
    author_service: AuthorService,
) -> None:
    """Test SQLAlchemy list with count and filters in asyncpg.
    Args:
        raw_authors: list of authors pre-seeded into the mock repository
        author_service: The author mock repository
    """
    exp_name = raw_authors[0]["name"]
    exp_id = raw_authors[0]["id"]
    if isinstance(author_service.repository, (SQLAlchemyAsyncMockRepository, SQLAlchemySyncMockRepository)):
        collection, count = await maybe_async(  # pyright: ignore
            author_service.list_and_count(**{author_service.repository.model_type.name.key: exp_name}),  # pyright: ignore
        )
    else:
        collection, count = await maybe_async(
            author_service.list_and_count(author_service.repository.model_type.name == exp_name),
        )
    assert count == 1
    assert isinstance(collection, list)
    assert len(collection) == 1
    assert str(collection[0].id) == str(exp_id)
    assert collection[0].name == exp_name


async def test_service_list_and_count_basic_method(raw_authors: RawRecordData, author_service: AuthorService) -> None:
    """Test SQLAlchemy basic list with count in asyncpg.

    Args:
        raw_authors: list of authors pre-seeded into the mock repository
        author_service: The author mock repository
    """
    exp_count = len(raw_authors)
    collection, count = await maybe_async(author_service.list_and_count(count_with_window_function=False))
    assert exp_count == count
    assert isinstance(collection, list)
    assert len(collection) == exp_count


async def test_service_list_and_count_method_empty(book_service: BookService) -> None:
    """An empty table should report a zero count and an empty collection."""
    collection, count = await maybe_async(book_service.list_and_count())
    assert count == 0
    assert isinstance(collection, list)
    assert len(collection) == 0


async def test_service_list_method(
    raw_authors_uuid: RawRecordData,
    author_service: AuthorService,
) -> None:
    """``list`` should return every pre-seeded author."""
    exp_count = len(raw_authors_uuid)
    collection = await maybe_async(author_service.list())
    assert isinstance(collection, list)
    assert len(collection) == exp_count


async def test_service_list_method_with_filters(raw_authors: RawRecordData, author_service: AuthorService) -> None:
    """``list`` with keyword or expression filters should return the single match."""
    exp_name = raw_authors[0]["name"]
    exp_id = raw_authors[0]["id"]
    if issubclass(author_service.repository_type, (SQLAlchemyAsyncMockRepository, SQLAlchemySyncMockRepository)):
        collection = await maybe_async(  # pyright: ignore
            author_service.list(
                **{
                    author_service.repository.model_type.id.key: exp_id,  # type: ignore[union-attr]
                    author_service.repository.model_type.name.key: exp_name,
                },
            ),
        )
    else:
        collection = await maybe_async(
            author_service.list(
                and_(
                    author_service.repository.model_type.id == exp_id,
                    author_service.repository.model_type.name == exp_name,
                ),
            ),
        )
    assert isinstance(collection, list)
    assert len(collection) == 1
    assert str(collection[0].id) == str(exp_id)
    assert collection[0].name == exp_name


async def test_service_create_method(
    raw_authors: RawRecordData,
    author_service: AuthorService,
    author_model: AuthorModel,
) -> None:
    """``create`` should persist a new author and grow the count by one."""
    exp_count = len(raw_authors) + 1
    new_author = author_model(name="Testing", dob=datetime.datetime.now().date())
    obj = await maybe_async(author_service.create(new_author))
    count = await maybe_async(author_service.count())
    assert exp_count == count
    assert isinstance(obj, author_model)
    assert new_author.name == obj.name
    assert obj.id is not None


async def test_service_create_many_method(
    raw_authors: RawRecordData,
    author_service: AuthorService,
    author_model: AuthorModel,
) -> None:
    """``create_many`` should persist several authors in one call."""
    exp_count = len(raw_authors) + 2
    objs = await maybe_async(
        author_service.create_many(
            [
                author_model(name="Testing 2", dob=datetime.datetime.now().date()),
                author_model(name="Cody", dob=datetime.datetime.now().date()),
            ],
        ),
    )
    count = await maybe_async(author_service.count())
    assert exp_count == count
    assert isinstance(objs, list)
    assert len(objs) == 2
    for obj in objs:
        assert obj.id is not None
        assert obj.name in {"Testing 2", "Cody"}


async def test_service_update_many_method(author_service: AuthorService) -> None:
    """``update_many`` should apply per-object changes to every row."""
    if author_service.repository._dialect.name.startswith("spanner") and os.environ.get("SPANNER_EMULATOR_HOST"):  # pyright: ignore[reportPrivateUsage,reportUnknownMemberType,reportAttributeAccessIssue]
        pytest.skip("Skipped on emulator")
    objs = await maybe_async(author_service.list())
    for idx, obj in enumerate(objs):
        obj.name = f"Update {idx}"
    objs = await maybe_async(author_service.update_many(list(objs)))
    for obj in objs:
        assert obj.name.startswith("Update")


async def test_service_exists_method(author_service: AuthorService, first_author_id: Any) -> None:
    """``exists`` should be truthy for a seeded primary key."""
    exists = await maybe_async(author_service.exists(id=first_author_id))
    assert exists


async def test_service_update_method_item_id(author_service: AuthorService, first_author_id: Any) -> None:
    """``update`` with an explicit ``item_id`` should persist the change."""
    obj = await maybe_async(author_service.get(first_author_id))
    obj.name = "Updated Name2"
    updated_obj = await maybe_async(author_service.update(item_id=first_author_id, data=obj))
    assert updated_obj.name == obj.name


async def test_service_update_method_no_item_id(author_service: AuthorService, first_author_id: Any) -> None:
    """``update`` without ``item_id`` should infer the id from the model instance."""
    obj = await maybe_async(author_service.get(first_author_id))
    obj.name = "Updated Name2"
    updated_obj = await maybe_async(author_service.update(data=obj))
    assert str(updated_obj.id) == str(first_author_id)
    assert updated_obj.name == obj.name


async def test_service_update_method_data_is_dict(author_service: AuthorService, first_author_id: Any) -> None:
    """``update`` should accept a plain dict and only touch the given fields."""
    new_date = datetime.datetime.date(datetime.datetime.now())
    updated_obj = await maybe_async(
        author_service.update(item_id=first_author_id, data={"dob": new_date}),
    )
    assert updated_obj.dob == new_date
    # ensure the other fields are not affected
    assert updated_obj.name == "Agatha Christie"


async def test_service_update_method_data_is_dict_with_none_value(
    author_service: AuthorService,
    first_author_id: Any,
) -> None:
    """A ``None`` value in the update dict should null out that column."""
    updated_obj = await maybe_async(author_service.update(item_id=first_author_id, data={"dob": None}))
    assert cast(Union[datetime.date, None], updated_obj.dob) is None
    # ensure the other fields are not affected
    assert updated_obj.name == "Agatha Christie"


async def test_service_update_method_instrumented_attribute(
    author_service: AuthorService,
    first_author_id: Any,
) -> None:
    """``update`` should accept an explicit ``id_attribute`` to match on."""
    obj = await maybe_async(author_service.get(first_author_id))
    id_attribute = get_instrumented_attr(author_service.repository.model_type, "id")
    obj.name = "Updated Name2"
    updated_obj = await maybe_async(author_service.update(data=obj, id_attribute=id_attribute))
    assert str(updated_obj.id) == str(first_author_id)
    assert updated_obj.name == obj.name


async def test_service_delete_method(author_service: AuthorService, first_author_id: Any) -> None:
    """``delete`` should remove and return the row for the given id."""
    obj = await maybe_async(author_service.delete(first_author_id))
    assert str(obj.id) == str(first_author_id)


async def test_service_delete_many_method(author_service: AuthorService, author_model: AuthorModel) -> None:
    """``delete_many`` should remove every id passed, leaving an empty table."""
    data_to_insert = [author_model(name="author name %d" % chunk) for chunk in range(2000)]
    _ = await maybe_async(author_service.create_many(data_to_insert))
    all_objs = await maybe_async(author_service.list())
    ids_to_delete = [existing_obj.id for existing_obj in all_objs]
    objs = await maybe_async(author_service.delete_many(ids_to_delete))
    await maybe_async(author_service.repository.session.commit())  # pyright: ignore[reportUnknownArgumentType,reportUnknownMemberType,reportAttributeAccessIssue]
    assert len(objs) > 0
    data, count = await maybe_async(author_service.list_and_count())
    assert data == []
    assert count == 0


async def test_service_delete_where_method_empty(author_service: AuthorService, author_model: AuthorModel) -> None:
    """``delete_where`` with no criteria should delete every row."""
    data_to_insert = [author_model(name="author name %d" % chunk) for chunk in range(2000)]
    _ = await maybe_async(author_service.create_many(data_to_insert))
    total_count = await maybe_async(author_service.count())
    all_objs = await maybe_async(author_service.delete_where())
    assert len(all_objs) == total_count
    data, count = await maybe_async(author_service.list_and_count())
    assert data == []
    assert count == 0


async def test_service_delete_where_method_filter(author_service: AuthorService, author_model: AuthorModel) -> None:
    """``delete_where`` with a keyword filter should only delete matching rows."""
    data_to_insert = [author_model(name="delete me") for _ in range(2000)]
    _ = await maybe_async(author_service.create_many(data_to_insert))
    all_objs = await maybe_async(author_service.delete_where(name="delete me"))
    assert len(all_objs) == len(data_to_insert)
    count = await maybe_async(author_service.count())
    # only the two seeded authors remain
    assert count == 2


async def test_service_delete_where_method_search_filter(
    author_service: AuthorService,
    author_model: AuthorModel,
) -> None:
    """``delete_where`` with a ``NotInSearchFilter`` deletes the non-matching rows."""
    data_to_insert = [author_model(name="delete me") for _ in range(2000)]
    _ = await maybe_async(author_service.create_many(data_to_insert))
    all_objs = await maybe_async(author_service.delete_where(NotInSearchFilter(field_name="name", value="delete me")))
    assert len(all_objs) == 2
    count = await maybe_async(author_service.count())
    assert count == len(data_to_insert)


async def test_service_get_method(author_service: AuthorService, first_author_id: Any) -> None:
    """``get`` should return the seeded author by primary key."""
    obj = await maybe_async(author_service.get(first_author_id))
    assert obj.name == "Agatha Christie"


async def test_service_get_one_or_none_method(author_service: AuthorService, first_author_id: Any) -> None:
    """``get_one_or_none`` returns the match, or ``None`` when nothing matches."""
    obj = await maybe_async(author_service.get_one_or_none(id=first_author_id))
    assert obj is not None
    assert obj.name == "Agatha Christie"
    none_obj = await maybe_async(author_service.get_one_or_none(name="I don't exist"))
    assert none_obj is None


async def test_service_get_one_method(author_service: AuthorService, first_author_id: Any) -> None:
    """``get_one`` returns the match and raises when nothing matches."""
    obj = await maybe_async(author_service.get_one(id=first_author_id))
    assert obj is not None
    assert obj.name == "Agatha Christie"
    with pytest.raises(RepositoryError):
        _ = await author_service.get_one(name="I don't exist")


async def test_service_get_or_upsert_method(author_service: AuthorService, first_author_id: Any) -> None:
    """``get_or_upsert`` returns an existing row unchanged or inserts a new one."""
    existing_obj, existing_created = await maybe_async(author_service.get_or_upsert(name="Agatha Christie"))
    assert str(existing_obj.id) == str(first_author_id)
    assert existing_created is False
    new_obj, new_created = await maybe_async(author_service.get_or_upsert(name="New Author"))
    assert new_obj.id is not None
    assert new_obj.name == "New Author"
    assert new_created


async def test_service_get_and_update_method(author_service: AuthorService, first_author_id: Any) -> None:
    """``get_and_update`` updates an existing match and raises when none exists."""
    existing_obj, existing_created = await maybe_async(
        author_service.get_and_update(name="Agatha Christie", match_fields="name"),
    )
    assert str(existing_obj.id) == str(first_author_id)
    assert existing_created is False
    with pytest.raises(NotFoundError):
        _ = await maybe_async(author_service.get_and_update(name="New Author"))


async def test_service_upsert_method(
    author_service: AuthorService,
    first_author_id: Any,
    author_model: AuthorModel,
    new_pk_id: Any,
) -> None:
    """``upsert`` updates by id when one is given and inserts otherwise."""
    existing_obj = await maybe_async(author_service.get_one(name="Agatha Christie"))
    existing_obj.name = "Agatha C."
    upsert_update_obj = await maybe_async(author_service.upsert(item_id=first_author_id, data=existing_obj))
    assert str(upsert_update_obj.id) == str(first_author_id)
    assert upsert_update_obj.name == "Agatha C."
    upsert_insert_obj = await maybe_async(author_service.upsert(data=author_model(name="An Author")))
    assert upsert_insert_obj.id is not None
    assert upsert_insert_obj.name == "An Author"
    # ensures that it still works even if the ID is added before insert
    upsert2_insert_obj = await maybe_async(
        author_service.upsert(author_model(id=new_pk_id, name="Another Author")),
    )
    assert upsert2_insert_obj.id is not None
    assert upsert2_insert_obj.name == "Another Author"


async def test_service_upsert_method_match(
    author_service: AuthorService,
    first_author_id: Any,
    author_model: AuthorModel,
    new_pk_id: Any,
) -> None:
    """``upsert`` with ``match_fields`` should match on the given columns."""
    if author_service.repository._dialect.name.startswith("spanner") and os.environ.get("SPANNER_EMULATOR_HOST"):  # pyright: ignore[reportPrivateUsage,reportUnknownMemberType,reportAttributeAccessIssue]
        pytest.skip(
            "Skipped on emulator. See the following: https://github.com/GoogleCloudPlatform/cloud-spanner-emulator/issues/73",
        )
    existing_obj = await maybe_async(author_service.get_one(name="Agatha Christie"))
    existing_obj.name = "Agatha C."
    upsert_update_obj = await maybe_async(
        author_service.upsert(data=existing_obj.to_dict(exclude={"id"}), match_fields=["name"]),
    )
    # mock repositories generate their own ids, so the id check is conditional
    if not isinstance(author_service.repository, (SQLAlchemyAsyncMockRepository, SQLAlchemySyncMockRepository)):
        assert str(upsert_update_obj.id) == str(first_author_id)
    assert upsert_update_obj.name == "Agatha C."
    upsert_insert_obj = await maybe_async(
        author_service.upsert(data=author_model(name="An Author"), match_fields=["name"]),
    )
    assert upsert_insert_obj.id is not None
    assert upsert_insert_obj.name == "An Author"
    # ensures that it still works even if the ID is added before insert
    upsert2_insert_obj = await maybe_async(
        author_service.upsert(author_model(id=new_pk_id, name="Another Author"), match_fields=["name"]),
    )
    assert upsert2_insert_obj.id is not None
    assert upsert2_insert_obj.name == "Another Author"


async def test_service_upsert_many_method(
    author_service: AuthorService,
    author_model: AuthorModel,
) -> None:
    """``upsert_many`` updates the existing row and inserts the new ones."""
    if author_service.repository._dialect.name.startswith("spanner") and os.environ.get("SPANNER_EMULATOR_HOST"):  # pyright: ignore[reportPrivateUsage,reportUnknownMemberType,reportAttributeAccessIssue]
        pytest.skip(
            "Skipped on emulator. See the following: https://github.com/GoogleCloudPlatform/cloud-spanner-emulator/issues/73",
        )
    existing_obj = await maybe_async(author_service.get_one(name="Agatha Christie"))
    existing_obj.name = "Agatha C."
    upsert_update_objs = await maybe_async(
        author_service.upsert_many(
            [
                existing_obj,
                author_model(name="Inserted Author"),
                author_model(name="Custom Author"),
            ],
        ),
    )
    assert len(upsert_update_objs) == 3
    assert upsert_update_objs[0].id is not None
    assert upsert_update_objs[0].name in ("Agatha C.", "Inserted Author", "Custom Author")
    assert upsert_update_objs[1].id is not None
    assert upsert_update_objs[1].name in ("Agatha C.", "Inserted Author", "Custom Author")
    assert upsert_update_objs[2].id is not None
    assert upsert_update_objs[2].name in ("Agatha C.", "Inserted Author", "Custom Author")


async def test_service_upsert_many_method_match_fields_id(
    author_service: AuthorService,
    author_model: AuthorModel,
) -> None:
    """``upsert_many`` with ``match_fields=["id"]`` through the service layer."""
    if author_service.repository._dialect.name.startswith("spanner") and os.environ.get("SPANNER_EMULATOR_HOST"):  # pyright: ignore[reportPrivateUsage,reportUnknownMemberType,reportAttributeAccessIssue]
        pytest.skip(
            "Skipped on emulator. See the following: https://github.com/GoogleCloudPlatform/cloud-spanner-emulator/issues/73",
        )
    existing_obj = await maybe_async(author_service.get_one(name="Agatha Christie"))
    existing_obj.name = "Agatha C."
    upsert_update_objs = await maybe_async(
        author_service.upsert_many(
            [
                existing_obj,
                author_model(name="Inserted Author"),
                author_model(name="Custom Author"),
            ],
            match_fields=["id"],
        ),
    )
    assert len(upsert_update_objs) == 3
    assert upsert_update_objs[0].id is not None
    assert upsert_update_objs[0].name in ("Agatha C.", "Inserted Author", "Custom Author")
    assert upsert_update_objs[1].id is not None
    assert upsert_update_objs[1].name in ("Agatha C.", "Inserted Author", "Custom Author")
    assert upsert_update_objs[2].id is not None
    assert upsert_update_objs[2].name in ("Agatha C.", "Inserted Author", "Custom Author")


async def test_service_upsert_many_method_match_fields_non_id(
    author_service: AuthorService,
    author_model: AuthorModel,
) -> None:
    """``upsert_many`` matching on ``name`` should still insert the new rows."""
    if author_service.repository._dialect.name.startswith("spanner") and os.environ.get("SPANNER_EMULATOR_HOST"):  # pyright: ignore[reportPrivateUsage,reportUnknownMemberType,reportAttributeAccessIssue]
        pytest.skip(
            "Skipped on emulator. See the following: https://github.com/GoogleCloudPlatform/cloud-spanner-emulator/issues/73",
        )
    existing_count = await maybe_async(author_service.count())
    existing_obj = await maybe_async(author_service.get_one(name="Agatha Christie"))
    existing_obj.name = "Agatha C."
_ = await maybe_async( author_service.upsert_many( data=[ existing_obj, author_model(name="Inserted Author"), author_model(name="Custom Author"), ], match_fields=["name"], ), ) existing_count_now = await maybe_async(author_service.count()) assert existing_count_now > existing_count async def test_service_update_no_pk(author_service: AuthorService) -> None: with pytest.raises(RepositoryError): _existing_obj = await maybe_async(author_service.update(data={"name": "Agatha Christie"})) async def test_service_create_method_slug( raw_slug_books: RawRecordData, slug_book_service: SlugBookService, slug_book_model: SlugBookModel, ) -> None: new_book = {"title": "a new book!!", "author_id": uuid4().hex} obj = await maybe_async(slug_book_service.create(new_book)) assert isinstance(obj, slug_book_model) assert new_book["title"] == obj.title assert obj.slug == "a-new-book" assert obj.id is not None async def test_service_create_method_slug_existing( raw_slug_books: RawRecordData, slug_book_service: SlugBookService, slug_book_model: SlugBookModel, ) -> None: if isinstance( slug_book_service.repository_type, ( SQLAlchemySyncMockSlugRepository, SQLAlchemyAsyncMockSlugRepository, SQLAlchemyAsyncMockRepository, SQLAlchemySyncMockRepository, ), ): pytest.skip("Skipping additional bigint mock repository tests") current_count = await maybe_async(slug_book_service.count()) if current_count == 0: _ = await maybe_async(slug_book_service.create_many(raw_slug_books)) new_book = {"title": "Murder on the Orient Express", "author_id": uuid4().hex} obj = await maybe_async(slug_book_service.create(new_book)) assert isinstance(obj, slug_book_model) assert new_book["title"] == obj.title assert obj.slug != "murder-on-the-orient-express" assert obj.id is not None async def test_service_create_many_method_slug( raw_slug_books: RawRecordData, slug_book_service: SlugBookService, slug_book_model: SlugBookModel, ) -> None: objs = await maybe_async( slug_book_service.create_many( [ {"title": " extra!! 
", "author_id": uuid4().hex}, {"title": "punctuated Book!!", "author_id": uuid4().hex}, ], ), ) assert isinstance(objs, list) for obj in objs: assert obj.id is not None assert obj.slug in {"extra", "punctuated-book"} assert obj.title in {" extra!! ", "punctuated Book!!"} class AuthorStruct(Struct): name: str class AuthorBaseModel(BaseModel): model_config = {"from_attributes": True} name: str async def test_service_paginated_to_schema(raw_authors: RawRecordData, author_service: AuthorService) -> None: """Test SQLAlchemy list with count in asyncpg. Args: raw_authors: list of authors pre-seeded into the mock repository author_service: The author mock repository """ exp_count = len(raw_authors) collection, count = await maybe_async(author_service.list_and_count()) model_dto = author_service.to_schema(data=collection, total=count) pydantic_dto = author_service.to_schema(data=collection, total=count, schema_type=AuthorBaseModel) msgspec_dto = author_service.to_schema(data=collection, total=count, schema_type=AuthorStruct) assert exp_count == count assert isinstance(model_dto, OffsetPagination) assert isinstance(model_dto.items[0].name, str) assert model_dto.total == exp_count assert isinstance(pydantic_dto, OffsetPagination) assert isinstance(pydantic_dto.items[0].name, str) # pyright: ignore assert pydantic_dto.total == exp_count assert isinstance(msgspec_dto, OffsetPagination) assert isinstance(msgspec_dto.items[0].name, str) # pyright: ignore assert msgspec_dto.total == exp_count async def test_service_to_schema( author_service: AuthorService, first_author_id: Any, ) -> None: """Test SQLAlchemy list with count in asyncpg. 
Args: raw_authors: list of authors pre-seeded into the mock repository author_service: The author mock repository """ obj = await maybe_async(author_service.get(first_author_id)) model_dto = author_service.to_schema(data=obj) pydantic_dto = author_service.to_schema(data=obj, schema_type=AuthorBaseModel) msgspec_dto = author_service.to_schema(data=obj, schema_type=AuthorStruct) assert issubclass(AuthorStruct, Struct) assert issubclass(AuthorBaseModel, BaseModel) assert isinstance(model_dto.name, str) assert isinstance(pydantic_dto, BaseModel) assert isinstance(msgspec_dto, Struct) assert isinstance(pydantic_dto.name, str) # pyright: ignore assert isinstance(msgspec_dto.name, str) # pyright: ignore python-advanced-alchemy-1.0.1/tests/integration/test_sqlquery_service.py000066400000000000000000000235361476663714600267550ustar00rootroot00000000000000from __future__ import annotations from pathlib import Path import pytest from msgspec import Struct from pydantic import BaseModel from sqlalchemy import create_engine, select from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine from sqlalchemy.orm import DeclarativeBase, Mapped, Session from advanced_alchemy import base, mixins from advanced_alchemy.repository import ( SQLAlchemyAsyncRepository, SQLAlchemySyncRepository, ) from advanced_alchemy.service import SQLAlchemyAsyncQueryService, SQLAlchemySyncQueryService from advanced_alchemy.service._async import SQLAlchemyAsyncRepositoryService from advanced_alchemy.service._sync import SQLAlchemySyncRepositoryService from advanced_alchemy.service.typing import ( is_msgspec_struct, is_msgspec_struct_with_field, is_msgspec_struct_without_field, is_pydantic_model, is_pydantic_model_with_field, is_pydantic_model_without_field, ) from advanced_alchemy.utils.fixtures import open_fixture, open_fixture_async pytestmark = [ # type: ignore pytest.mark.integration, ] here = Path(__file__).parent fixture_path = here.parent.parent / "examples" state_registry = 
base.create_registry() class UUIDBase(mixins.UUIDPrimaryKey, base.CommonTableAttributes, DeclarativeBase): """Base for all SQLAlchemy declarative models with UUID primary keys.""" registry = state_registry class USState(UUIDBase): __tablename__ = "us_state_lookup" # type: ignore[assignment] abbreviation: Mapped[str] name: Mapped[str] class USStateStruct(Struct): abbreviation: str name: str class USStateBaseModel(BaseModel): abbreviation: str name: str class USStateSyncRepository(SQLAlchemySyncRepository[USState]): """US State repository.""" model_type = USState class USStateSyncService(SQLAlchemySyncRepositoryService[USState, USStateSyncRepository]): """US State repository.""" repository_type = USStateSyncRepository class USStateAsyncRepository(SQLAlchemyAsyncRepository[USState]): """US State repository.""" model_type = USState class USStateAsyncService(SQLAlchemyAsyncRepositoryService[USState, USStateAsyncRepository]): """US State repository.""" repository_type = USStateAsyncRepository class StateQuery(base.SQLQuery): """Nonsensical query to test custom SQL queries.""" __table__ = select( # type: ignore USState.abbreviation.label("state_abbreviation"), USState.name.label("state_name"), ).alias("state_lookup") __mapper_args__ = { "primary_key": [USState.abbreviation], } state_abbreviation: str state_name: str class StateQueryStruct(Struct): state_abbreviation: str state_name: str class StateQueryBaseModel(BaseModel): state_abbreviation: str state_name: str def test_sync_fixture_and_query() -> None: engine = create_engine("sqlite://") state_registry.metadata.create_all(engine) with Session(engine) as session: state_service = USStateSyncService(session=session) query_service = SQLAlchemySyncQueryService(session=session) fixture = open_fixture(fixture_path, USStateSyncRepository.model_type.__tablename__) # type: ignore[has-type] _add_objs = state_service.create_many( data=[USStateStruct(**raw_obj) for raw_obj in fixture], ) _ordered_objs = 
state_service.list(order_by=(USState.name, True)) assert _ordered_objs[0].name == "Wyoming" _ordered_objs_2 = state_service.list_and_count(order_by=[(USState.name, True)]) assert _ordered_objs_2[0][0].name == "Wyoming" query_count = query_service.repository.count(statement=select(StateQuery)) assert query_count > 0 list_query_objs, list_query_count = query_service.repository.list_and_count( statement=select(StateQuery), ) assert list_query_count >= 50 _paginated_objs = query_service.to_schema( data=list_query_objs, total=list_query_count, ) _pydantic_paginated_objs = query_service.to_schema( data=list_query_objs, total=list_query_count, schema_type=StateQueryBaseModel, ) assert isinstance(_pydantic_paginated_objs.items[0], StateQueryBaseModel) _msgspec_paginated_objs = query_service.to_schema( data=list_query_objs, total=list_query_count, schema_type=StateQueryStruct, ) assert isinstance(_msgspec_paginated_objs.items[0], StateQueryStruct) _list_service_objs = query_service.repository.list(statement=select(StateQuery)) assert len(_list_service_objs) >= 50 _get_ones = query_service.repository.list(statement=select(StateQuery), state_name="Alabama") assert len(_get_ones) == 1 _get_one = query_service.repository.get_one(statement=select(StateQuery), state_name="Alabama") assert _get_one.state_name == "Alabama" _get_one_or_none_1 = query_service.repository.get_one_or_none( statement=select(StateQuery).where(StateQuery.state_name == "Texas"), # type: ignore ) assert _get_one_or_none_1 is not None assert _get_one_or_none_1.state_name == "Texas" _obj = query_service.to_schema( data=_get_one_or_none_1, ) _pydantic_obj = query_service.to_schema( data=_get_one_or_none_1, schema_type=StateQueryBaseModel, ) assert isinstance(_pydantic_obj, StateQueryBaseModel) assert is_pydantic_model(_pydantic_obj) assert is_pydantic_model_with_field(_pydantic_obj, "state_abbreviation") assert not is_pydantic_model_without_field(_pydantic_obj, "state_abbreviation") _msgspec_obj = 
query_service.to_schema( data=_get_one_or_none_1, schema_type=StateQueryStruct, ) assert isinstance(_msgspec_obj, StateQueryStruct) assert is_msgspec_struct(_msgspec_obj) assert is_msgspec_struct_with_field(_msgspec_obj, "state_abbreviation") assert not is_msgspec_struct_without_field(_msgspec_obj, "state_abbreviation") _get_one_or_none = query_service.repository.get_one_or_none( statement=select(StateQuery).filter_by(state_name="Nope"), ) assert _get_one_or_none is None async def test_async_fixture_and_query() -> None: engine = create_async_engine("sqlite+aiosqlite://") async with engine.begin() as conn: await conn.run_sync(state_registry.metadata.create_all) async with AsyncSession(engine) as session: state_service = USStateAsyncService(session=session) query_service = SQLAlchemyAsyncQueryService(session=session) fixture = await open_fixture_async(fixture_path, USStateSyncRepository.model_type.__tablename__) _add_objs = await state_service.create_many( data=[USStateBaseModel(**raw_obj) for raw_obj in fixture], ) _ordered_objs = await state_service.list(order_by=(USState.name, True)) assert _ordered_objs[0].name == "Wyoming" _ordered_objs_2 = await state_service.list_and_count(order_by=(USState.name, True)) assert _ordered_objs_2[0][0].name == "Wyoming" query_count = await query_service.repository.count(statement=select(StateQuery)) assert query_count > 0 list_query_objs, list_query_count = await query_service.repository.list_and_count( statement=select(StateQuery), ) assert list_query_count >= 50 _paginated_objs = query_service.to_schema( list_query_objs, total=list_query_count, ) _pydantic_paginated_objs = query_service.to_schema( data=list_query_objs, total=list_query_count, schema_type=StateQueryBaseModel, ) assert isinstance(_pydantic_paginated_objs.items[0], StateQueryBaseModel) _msgspec_paginated_objs = query_service.to_schema( data=list_query_objs, total=list_query_count, schema_type=StateQueryStruct, ) assert isinstance(_msgspec_paginated_objs.items[0], 
StateQueryStruct) _list_service_objs = await query_service.repository.list(statement=select(StateQuery)) assert len(_list_service_objs) >= 50 _get_ones = await query_service.repository.list(statement=select(StateQuery), state_name="Alabama") assert len(_get_ones) == 1 _get_one = await query_service.repository.get_one(statement=select(StateQuery), state_name="Alabama") assert _get_one.state_name == "Alabama" _get_one_or_none_1 = await query_service.repository.get_one_or_none( statement=select(StateQuery).where(StateQuery.state_name == "Texas"), # type: ignore ) assert _get_one_or_none_1 is not None assert _get_one_or_none_1.state_name == "Texas" _obj = query_service.to_schema( data=_get_one_or_none_1, ) _pydantic_obj = query_service.to_schema( data=_get_one_or_none_1, schema_type=StateQueryBaseModel, ) assert isinstance(_pydantic_obj, StateQueryBaseModel) assert is_pydantic_model(_pydantic_obj) assert is_pydantic_model_with_field(_pydantic_obj, "state_abbreviation") assert not is_pydantic_model_without_field(_pydantic_obj, "state_abbreviation") _msgspec_obj = query_service.to_schema( data=_get_one_or_none_1, schema_type=StateQueryStruct, ) assert isinstance(_msgspec_obj, StateQueryStruct) assert is_msgspec_struct(_msgspec_obj) assert is_msgspec_struct_with_field(_msgspec_obj, "state_abbreviation") _get_one_or_none = await query_service.repository.get_one_or_none( select(StateQuery).filter_by(state_name="Nope") ) assert not is_msgspec_struct_without_field(_msgspec_obj, "state_abbreviation") assert _get_one_or_none is None python-advanced-alchemy-1.0.1/tests/integration/test_unique_mixin.py000066400000000000000000000107451476663714600260600ustar00rootroot00000000000000from __future__ import annotations from collections.abc import Hashable from typing import TYPE_CHECKING import pytest from sqlalchemy import ColumnElement, String, UniqueConstraint, create_engine, func, select from sqlalchemy.exc import IntegrityError from sqlalchemy.ext.asyncio import AsyncSession, 
create_async_engine from sqlalchemy.orm import Mapped, Session, mapped_column from advanced_alchemy.base import BigIntBase, create_registry, orm_registry from advanced_alchemy.exceptions import MultipleResultsFoundError from advanced_alchemy.mixins import UniqueMixin if TYPE_CHECKING: from collections.abc import Iterator from typing import Any @pytest.fixture(name="rows") def generate_mock_data() -> Iterator[list[dict[str, Any]]]: rows = [{"col_1": i, "col_2": f"value_{i}", "col_3": i} for i in range(1, 3)] # Duplicate the last row in the list to violate the unique constraint rows.extend([rows[-1]] * 3) # 3 is arbitrary yield rows custom_registry = create_registry() class BigIntModelWithUniqueValue(UniqueMixin, BigIntBase): registry = custom_registry col_1: Mapped[int] col_2: Mapped[str] = mapped_column(String(50)) col_3: Mapped[int] __table_args__ = (UniqueConstraint("col_1", "col_3"),) @classmethod def unique_hash(cls, col_1: int, col_2: int, col_3: str) -> Hashable: return (col_1, col_3) @classmethod def unique_filter(cls, col_1: int, col_2: int, col_3: str) -> ColumnElement[bool]: return (cls.col_1 == col_1) & (cls.col_3 == col_3) class BigIntModelWithMaybeUniqueValue(UniqueMixin, BigIntBase): registry = custom_registry col_1: Mapped[int] col_2: Mapped[str] = mapped_column(String(50)) col_3: Mapped[int] @classmethod def unique_hash(cls, col_1: int, col_2: int, col_3: str) -> Hashable: return (col_1, col_3) @classmethod def unique_filter(cls, col_1: int, col_2: int, col_3: str) -> ColumnElement[bool]: return (cls.col_1 == col_1) & (cls.col_3 == col_3) def test_as_unique_sync(rows: list[dict[str, Any]]) -> None: engine = create_engine("sqlite://") orm_registry.metadata.create_all(engine) with Session(engine) as session: session.add_all(BigIntModelWithUniqueValue(**row) for row in rows) with pytest.raises(IntegrityError): # An exception should be raised when not using ``as_unique_sync`` session.flush() with Session(engine) as session: 
session.add_all(BigIntModelWithUniqueValue.as_unique_sync(session, **row) for row in rows) statement = select(func.count()).select_from(BigIntModelWithUniqueValue) count = session.scalar(statement) assert count == 2 with Session(engine) as session: # Add non unique rows on purpose to check if the mixin triggers ``MultipleResultsFound`` session.add_all(BigIntModelWithMaybeUniqueValue(**row) for row in rows) # flush here so that when the mixin queries the db, the non unique rows are in the transaction session.flush() with pytest.raises(MultipleResultsFoundError): session.add_all(BigIntModelWithMaybeUniqueValue.as_unique_sync(session, **row) for row in rows) async def test_as_unique_async(rows: list[dict[str, Any]]) -> None: engine = create_async_engine("sqlite+aiosqlite://") async with engine.begin() as conn: await conn.run_sync(orm_registry.metadata.create_all) async with AsyncSession(engine) as session: session.add_all(BigIntModelWithUniqueValue(**row) for row in rows) with pytest.raises(IntegrityError): # An exception should be raised when not using ``as_unique_async`` await session.flush() async with AsyncSession(engine) as session: session.add_all([await BigIntModelWithUniqueValue.as_unique_async(session, **row) for row in rows]) statement = select(func.count()).select_from(BigIntModelWithUniqueValue) count = await session.scalar(statement) assert count == 2 async with AsyncSession(engine) as session: # Add non unique rows on purpose to check if the mixin triggers ``MultipleResultsFound`` session.add_all(BigIntModelWithMaybeUniqueValue(**row) for row in rows) # flush here so that when the mixin queries the db, the non unique rows are in the transaction await session.flush() with pytest.raises(MultipleResultsFoundError): session.add_all([await BigIntModelWithMaybeUniqueValue.as_unique_async(session, **row) for row in rows]) 
python-advanced-alchemy-1.0.1/tests/unit/000077500000000000000000000000001476663714600203625ustar00rootroot00000000000000python-advanced-alchemy-1.0.1/tests/unit/__init__.py000066400000000000000000000000001476663714600224610ustar00rootroot00000000000000python-advanced-alchemy-1.0.1/tests/unit/fixtures.py000066400000000000000000000002471476663714600226100ustar00rootroot00000000000000from __future__ import annotations from advanced_alchemy.config import SQLAlchemySyncConfig configs = [SQLAlchemySyncConfig(connection_string="sqlite:///:memory:")] python-advanced-alchemy-1.0.1/tests/unit/test_base.py000066400000000000000000000025241476663714600227100ustar00rootroot00000000000000# ruff: noqa: TC004, F401 # pyright: reportUnusedImport=false from __future__ import annotations import warnings from tests.helpers import purge_module def test_deprecated_classes_functionality() -> None: """Test that mixins classes maintain have base functionality.""" purge_module(["advanced_alchemy.base", "advanced_alchemy.mixins"], __file__) warnings.filterwarnings("ignore", category=DeprecationWarning) from sqlalchemy import exc as sa_exc warnings.filterwarnings("ignore", category=sa_exc.SAWarning) # Test instantiation and basic attributes from advanced_alchemy.mixins import ( AuditColumns, NanoIDPrimaryKey, UUIDPrimaryKey, UUIDv6PrimaryKey, UUIDv7PrimaryKey, ) uuidv7_pk = UUIDv7PrimaryKey() uuidv6_pk = UUIDv6PrimaryKey() uuid_pk = UUIDPrimaryKey() nanoid_pk = NanoIDPrimaryKey() audit = AuditColumns() # Verify the classes have the expected attributes assert hasattr(uuidv7_pk, "id") assert hasattr(uuidv7_pk, "_sentinel") assert hasattr(uuidv6_pk, "id") assert hasattr(uuidv6_pk, "_sentinel") assert hasattr(uuid_pk, "id") assert hasattr(uuid_pk, "_sentinel") assert hasattr(nanoid_pk, "id") assert hasattr(nanoid_pk, "_sentinel") assert hasattr(audit, "created_at") assert hasattr(audit, "updated_at") 
python-advanced-alchemy-1.0.1/tests/unit/test_cli.py000066400000000000000000000140531476663714600225450ustar00rootroot00000000000000from __future__ import annotations from collections.abc import Generator from pathlib import Path from typing import TYPE_CHECKING from unittest.mock import MagicMock, patch import pytest from click.testing import CliRunner from sqlalchemy.ext.asyncio import AsyncEngine from advanced_alchemy.cli import add_migration_commands, get_alchemy_group if TYPE_CHECKING: from click import Group @pytest.fixture def cli_runner() -> Generator[CliRunner, None, None]: """Create a Click CLI test runner.""" yield CliRunner() @pytest.fixture def mock_config() -> Generator[MagicMock, None, None]: """Create a mock SQLAlchemy config.""" config = MagicMock() config.bind_key = "default" config.alembic_config.script_location = "migrations" config.get_engine.return_value = MagicMock(spec=AsyncEngine) yield config @pytest.fixture def mock_context(mock_config: MagicMock) -> Generator[MagicMock, None, None]: """Create a mock Click context.""" ctx = MagicMock() ctx.obj = {"configs": [mock_config]} yield ctx @pytest.fixture def database_cli(mock_context: MagicMock) -> Generator[Group, None, None]: """Create the database CLI group.""" cli_group = get_alchemy_group() cli_group = add_migration_commands() cli_group.ctx = mock_context # pyright: ignore[reportAttributeAccessIssue] yield cli_group def test_show_current_revision(cli_runner: CliRunner, database_cli: Group, mock_context: MagicMock) -> None: """Test the show-current-revision command.""" with patch("advanced_alchemy.alembic.commands.AlembicCommands") as mock_alembic: result = cli_runner.invoke( database_cli, ["--config", "tests.unit.fixtures.configs", "show-current-revision"], ) assert result.exit_code == 0 mock_alembic.assert_called_once() mock_alembic.return_value.current.assert_called_once_with(verbose=False) @pytest.mark.parametrize("no_prompt", [True, False]) def test_downgrade_database( cli_runner: 
CliRunner, database_cli: Group, mock_context: MagicMock, no_prompt: bool ) -> None: """Test the downgrade command.""" with patch("advanced_alchemy.alembic.commands.AlembicCommands") as mock_alembic: args = ["--config", "tests.unit.fixtures.configs", "downgrade"] if no_prompt: args.append("--no-prompt") result = cli_runner.invoke(database_cli, args) if no_prompt: assert result.exit_code == 0 mock_alembic.assert_called_once() mock_alembic.return_value.downgrade.assert_called_once_with(revision="-1", sql=False, tag=None) else: # it's going to be -1 because we abort the task since we don't fill in the prompt assert result.exit_code == 1 # When prompting is enabled, we need to check if the confirmation was shown assert "Are you sure you want to downgrade" in result.output @pytest.mark.parametrize("no_prompt", [True, False]) def test_upgrade_database(cli_runner: CliRunner, database_cli: Group, mock_context: MagicMock, no_prompt: bool) -> None: """Test the upgrade command.""" with patch("advanced_alchemy.alembic.commands.AlembicCommands") as mock_alembic: args = ["--config", "tests.unit.fixtures.configs", "upgrade", "head"] if no_prompt: args.append("--no-prompt") result = cli_runner.invoke(database_cli, args) if no_prompt: assert result.exit_code == 0 mock_alembic.assert_called_once() mock_alembic.return_value.upgrade.assert_called_once_with(revision="head", sql=False, tag=None) else: # it's going to be -1 because we abort the task since we don't fill in the prompt assert result.exit_code == 1 assert "Are you sure you want migrate the database" in result.output def test_init_alembic(cli_runner: CliRunner, database_cli: Group, mock_context: MagicMock) -> None: """Test the init command.""" with patch("advanced_alchemy.alembic.commands.AlembicCommands") as mock_alembic: result = cli_runner.invoke( database_cli, ["--config", "tests.unit.fixtures.configs", "init", "--no-prompt", "migrations"], ) assert result.exit_code == 0 mock_alembic.assert_called_once() 
mock_alembic.return_value.init.assert_called_once_with(directory="migrations", multidb=False, package=True) def test_make_migrations(cli_runner: CliRunner, database_cli: Group, mock_context: MagicMock) -> None: """Test the make-migrations command.""" with patch("advanced_alchemy.alembic.commands.AlembicCommands") as mock_alembic: result = cli_runner.invoke( database_cli, ["--config", "tests.unit.fixtures.configs", "make-migrations", "--no-prompt", "-m", "test migration"], ) assert result.exit_code == 0 mock_alembic.assert_called_once() mock_alembic.return_value.revision.assert_called_once() def test_drop_all(cli_runner: CliRunner, database_cli: Group, mock_context: MagicMock) -> None: """Test the drop-all command.""" result = cli_runner.invoke(database_cli, ["--config", "tests.unit.fixtures.configs", "drop-all", "--no-prompt"]) assert result.exit_code == 0 def test_dump_data(cli_runner: CliRunner, database_cli: Group, mock_context: MagicMock, tmp_path: Path) -> None: """Test the dump-data command.""" result = cli_runner.invoke( database_cli, ["--config", "tests.unit.fixtures.configs", "dump-data", "--table", "test_table", "--dir", str(tmp_path)], ) assert result.exit_code == 0 def test_cli_group_creation() -> None: """Test that the CLI group is created correctly.""" cli_group = add_migration_commands() assert cli_group.name == "alchemy" assert "show-current-revision" in cli_group.commands assert "upgrade" in cli_group.commands assert "downgrade" in cli_group.commands assert "init" in cli_group.commands assert "make-migrations" in cli_group.commands assert "drop-all" in cli_group.commands assert "dump-data" in cli_group.commands python-advanced-alchemy-1.0.1/tests/unit/test_exceptions.py000066400000000000000000000135761476663714600241700ustar00rootroot00000000000000import pytest from sqlalchemy.exc import ( IntegrityError as SQLAlchemyIntegrityError, ) from sqlalchemy.exc import ( InvalidRequestError as SQLAlchemyInvalidRequestError, ) from sqlalchemy.exc import ( 
MultipleResultsFound, SQLAlchemyError, StatementError, ) from advanced_alchemy.exceptions import ( DuplicateKeyError, IntegrityError, InvalidRequestError, MultipleResultsFoundError, NotFoundError, RepositoryError, wrap_sqlalchemy_exception, ) def test_wrap_sqlalchemy_exception_multiple_results_found() -> None: with pytest.raises(MultipleResultsFoundError), wrap_sqlalchemy_exception(): raise MultipleResultsFound() @pytest.mark.parametrize("dialect_name", ["postgresql", "sqlite", "mysql"]) def test_wrap_sqlalchemy_exception_integrity_error_duplicate_key(dialect_name: str) -> None: error_message = { "postgresql": 'duplicate key value violates unique constraint "uq_%(table_name)s_%(column_0_name)s"', "sqlite": "UNIQUE constraint failed: %(table_name)s.%(column_0_name)s", "mysql": "1062 (23000): Duplicate entry '%(value)s' for key '%(table_name)s.%(column_0_name)s'", } with ( pytest.raises(DuplicateKeyError), wrap_sqlalchemy_exception( dialect_name=dialect_name, error_messages={"duplicate_key": error_message[dialect_name]}, ), ): if dialect_name == "postgresql": exception = SQLAlchemyIntegrityError( "INSERT INTO table (id) VALUES (1)", {"table_name": "table", "column_0_name": "id"}, Exception( 'duplicate key value violates unique constraint "uq_table_id"\nDETAIL: Key (id)=(1) already exists.', ), ) elif dialect_name == "sqlite": exception = SQLAlchemyIntegrityError( "INSERT INTO table (id) VALUES (1)", {"table_name": "table", "column_0_name": "id"}, Exception("UNIQUE constraint failed: table.id"), ) else: exception = SQLAlchemyIntegrityError( "INSERT INTO table (id) VALUES (1)", {"table_name": "table", "column_0_name": "id", "value": "1"}, Exception("1062 (23000): Duplicate entry '1' for key 'table.id'"), ) raise exception def test_wrap_sqlalchemy_exception_integrity_error_other() -> None: with pytest.raises(IntegrityError), wrap_sqlalchemy_exception(): raise SQLAlchemyIntegrityError("original", {}, Exception("original")) def 
test_wrap_sqlalchemy_exception_invalid_request_error() -> None: with pytest.raises(InvalidRequestError), wrap_sqlalchemy_exception(): raise SQLAlchemyInvalidRequestError("original", {}, Exception("original")) def test_wrap_sqlalchemy_exception_statement_error() -> None: with pytest.raises(IntegrityError), wrap_sqlalchemy_exception(): raise StatementError("original", None, {}, Exception("original")) # pyright: ignore[reportArgumentType] def test_wrap_sqlalchemy_exception_sqlalchemy_error() -> None: with pytest.raises(RepositoryError), wrap_sqlalchemy_exception(): raise SQLAlchemyError("original") def test_wrap_sqlalchemy_exception_attribute_error() -> None: with pytest.raises(RepositoryError), wrap_sqlalchemy_exception(): raise AttributeError("original") def test_wrap_sqlalchemy_exception_not_found_error() -> None: with pytest.raises(NotFoundError, match="No rows matched the specified data"), wrap_sqlalchemy_exception(): raise NotFoundError("No item found when one was expected") def test_wrap_sqlalchemy_exception_no_wrap() -> None: with pytest.raises(SQLAlchemyError), wrap_sqlalchemy_exception(wrap_exceptions=False): raise SQLAlchemyError("original") with pytest.raises(SQLAlchemyIntegrityError), wrap_sqlalchemy_exception(wrap_exceptions=False): raise SQLAlchemyIntegrityError(statement="select 1", params=None, orig=BaseException()) with pytest.raises(MultipleResultsFound), wrap_sqlalchemy_exception(wrap_exceptions=False): raise MultipleResultsFound() with pytest.raises(SQLAlchemyInvalidRequestError), wrap_sqlalchemy_exception(wrap_exceptions=False): raise SQLAlchemyInvalidRequestError() with pytest.raises(AttributeError), wrap_sqlalchemy_exception(wrap_exceptions=False): raise AttributeError() with ( pytest.raises(NotFoundError, match="No item found when one was expected"), wrap_sqlalchemy_exception(wrap_exceptions=False), ): raise NotFoundError("No item found when one was expected") def test_custom_not_found_error_message() -> None: with ( 
pytest.raises(NotFoundError, match="Custom Error"), wrap_sqlalchemy_exception(error_messages={"not_found": "Custom Error"}), ): raise NotFoundError("original") def test_wrap_sqlalchemy_exception_custom_error_message() -> None: def custom_message(exc: Exception) -> str: return f"Custom: {exc}" with ( pytest.raises(RepositoryError) as excinfo, wrap_sqlalchemy_exception( error_messages={"other": custom_message}, ), ): raise SQLAlchemyError("original") assert str(excinfo.value) == "Custom: original" def test_wrap_sqlalchemy_exception_no_error_messages() -> None: with pytest.raises(RepositoryError) as excinfo, wrap_sqlalchemy_exception(): raise SQLAlchemyError("original") assert str(excinfo.value) == "An exception occurred: original" def test_wrap_sqlalchemy_exception_no_match() -> None: with ( pytest.raises(IntegrityError) as excinfo, wrap_sqlalchemy_exception( dialect_name="postgresql", error_messages={"integrity": "Integrity error"}, ), ): raise SQLAlchemyIntegrityError("original", {}, Exception("original")) assert str(excinfo.value) == "Integrity error" python-advanced-alchemy-1.0.1/tests/unit/test_extensions/000077500000000000000000000000001476663714600236205ustar00rootroot00000000000000python-advanced-alchemy-1.0.1/tests/unit/test_extensions/__init__.py000066400000000000000000000000001476663714600257170ustar00rootroot00000000000000python-advanced-alchemy-1.0.1/tests/unit/test_extensions/test_fastapi.py000066400000000000000000000410771476663714600266710ustar00rootroot00000000000000import sys from collections.abc import AsyncGenerator, Generator from contextlib import asynccontextmanager from typing import TYPE_CHECKING, Annotated, Callable, Literal, Union, cast from unittest.mock import MagicMock import pytest from fastapi import Depends, FastAPI, HTTPException, Request, Response from fastapi.testclient import TestClient from pytest import FixtureRequest from pytest_mock import MockerFixture from sqlalchemy import Engine from sqlalchemy.ext.asyncio import 
AsyncEngine, AsyncSession from sqlalchemy.orm import Session from typing_extensions import assert_type from advanced_alchemy.exceptions import ImproperConfigurationError from advanced_alchemy.extensions.fastapi import AdvancedAlchemy, SQLAlchemyAsyncConfig, SQLAlchemySyncConfig AnyConfig = Union[SQLAlchemyAsyncConfig, SQLAlchemySyncConfig] pytestmark = pytest.mark.xfail( condition=sys.version_info < (3, 9), reason="Certain versions of Starlette and FastAPI are stated to still support 3.8, but there are documented incompatibilities on various versions that have not been yanked. Marking 3.8 as an acceptable failure for now.", ) @pytest.fixture() def app() -> FastAPI: return FastAPI() @pytest.fixture() def client(app: FastAPI) -> Generator[TestClient, None, None]: with TestClient(app=app, raise_server_exceptions=False) as client: yield client @pytest.fixture() def sync_config() -> SQLAlchemySyncConfig: return SQLAlchemySyncConfig(connection_string="sqlite:///:memory:") @pytest.fixture() def async_config() -> SQLAlchemyAsyncConfig: return SQLAlchemyAsyncConfig(connection_string="sqlite+aiosqlite:///:memory:") @pytest.fixture(params=["sync_config", "async_config"]) def config(request: FixtureRequest) -> AnyConfig: return cast(AnyConfig, request.getfixturevalue(request.param)) @pytest.fixture() def alchemy(config: AnyConfig, app: FastAPI) -> AdvancedAlchemy: return AdvancedAlchemy(config, app=app) async def test_infer_types_from_config(async_config: SQLAlchemyAsyncConfig, sync_config: SQLAlchemySyncConfig) -> None: if TYPE_CHECKING: alchemy = AdvancedAlchemy(config=[async_config, sync_config]) assert alchemy.get_sync_config() is sync_config assert alchemy.get_async_config() is async_config assert_type(alchemy.get_sync_engine(), Engine) assert_type(alchemy.get_async_engine(), AsyncEngine) assert_type(alchemy.get_sync_config().create_session_maker(), Callable[[], Session]) assert_type(alchemy.get_async_config().create_session_maker(), Callable[[], AsyncSession]) with 
alchemy.with_sync_session() as db_session: assert_type(db_session, Session) async with alchemy.with_async_session() as async_session: assert_type(async_session, AsyncSession) def test_init_app_not_called_raises(config: SQLAlchemySyncConfig) -> None: alchemy = AdvancedAlchemy(config) with pytest.raises(ImproperConfigurationError): alchemy.app def test_inject_sync_engine() -> None: app = FastAPI() mock = MagicMock() config = SQLAlchemySyncConfig(connection_string="sqlite:///:memory:") alchemy = AdvancedAlchemy(config=config, app=app) @app.get("/") def handler(engine: Annotated[Engine, Depends(alchemy.provide_engine())]) -> Response: mock(engine) return Response(status_code=200) with TestClient(app=app) as client: resp = client.get("/") assert resp.status_code == 200 call_args = mock.call_args[0] assert call_args[0] is config.get_engine() def test_inject_async_engine() -> None: app = FastAPI() mock = MagicMock() config = SQLAlchemyAsyncConfig(connection_string="sqlite+aiosqlite:///:memory:") alchemy = AdvancedAlchemy(config=config, app=app) @app.get("/") def handler(engine: Annotated[AsyncEngine, Depends(alchemy.provide_engine())]) -> Response: mock(engine) return Response(status_code=200) with TestClient(app=app) as client: resp = client.get("/") assert resp.status_code == 200 call_args = mock.call_args[0] assert call_args[0] is config.get_engine() def test_inject_sync_session() -> None: app = FastAPI() mock = MagicMock() config = SQLAlchemySyncConfig(connection_string="sqlite:///:memory:") alchemy = AdvancedAlchemy(config=config, app=app) SessionDependency = Annotated[Session, Depends(alchemy.get_sync_session)] def some_dependency(session: SessionDependency) -> None: # pyright: ignore[reportInvalidTypeForm,reportMissingTypeArgument,reportUnknownParameterType] mock(session) @app.get("/") def handler(session: SessionDependency, something: Annotated[None, Depends(some_dependency)]) -> None: # pyright: 
ignore[reportInvalidTypeForm,reportMissingTypeArgument,reportUnknownParameterType,reportUnknownArgumentType] mock(session) with TestClient(app=app) as client: client.get("/") assert mock.call_count == 2 call_1_args = mock.call_args_list[0].args call_2_args = mock.call_args_list[1].args assert call_1_args[0] is call_2_args[0] call_1_session = call_1_args[0] call_2_session = call_2_args[0] assert isinstance(call_1_session, Session) assert call_1_session is call_2_session def test_inject_async_session() -> None: app = FastAPI() mock = MagicMock() config = SQLAlchemyAsyncConfig(connection_string="sqlite+aiosqlite:///:memory:") alchemy = AdvancedAlchemy(config=config, app=app) SessionDependency = Annotated[AsyncSession, Depends(alchemy.get_async_session)] def some_dependency(session: SessionDependency) -> None: # pyright: ignore[reportInvalidTypeForm,reportMissingTypeArgument,reportUnknownParameterType] mock(session) @app.get("/") def handler(session: SessionDependency, something: Annotated[None, Depends(some_dependency)]) -> None: # pyright: ignore[reportInvalidTypeForm,reportMissingTypeArgument,reportUnknownParameterType,reportUnknownArgumentType] mock(session) with TestClient(app=app) as client: client.get("/") assert mock.call_count == 2 call_1_args = mock.call_args_list[0].args call_2_args = mock.call_args_list[1].args assert call_1_args[0] is call_2_args[0] call_1_session = call_1_args[0] call_2_session = call_2_args[0] assert isinstance(call_1_session, AsyncSession) assert call_1_session is call_2_session @pytest.mark.parametrize( "status_code", [200, 201, 202, 204, 206, 300, 301, 305, 307, 308, 400, 401, 404, 450, 500, 900] ) @pytest.mark.parametrize("autocommit_strategy", ["manual", "autocommit", "autocommit_include_redirect"]) def test_sync_commit_strategies( mocker: MockerFixture, status_code: int, autocommit_strategy: Literal["manual", "autocommit", "autocommit_include_redirect"], ) -> None: app = FastAPI() config = 
SQLAlchemySyncConfig(connection_string="sqlite:///:memory:", commit_mode=autocommit_strategy) alchemy = AdvancedAlchemy(config=config, app=app) mock_commit = mocker.patch("sqlalchemy.orm.Session.commit") mock_close = mocker.patch("sqlalchemy.orm.Session.close") mock_rollback = mocker.patch("sqlalchemy.orm.Session.rollback") SessionDependency = Annotated[Session, Depends(alchemy.provide_session())] @app.get("/") def handler(session: SessionDependency) -> Response: # pyright: ignore[reportInvalidTypeForm,reportMissingTypeArgument,reportUnknownParameterType] return Response(status_code=status_code) with TestClient(app=app) as client: response = client.get("/") assert response.status_code == status_code if autocommit_strategy == "manual": mock_commit.call_count = 0 mock_close.call_count = 1 mock_rollback.call_count = 0 elif autocommit_strategy == "autocommit" and status_code < 300: mock_commit.call_count = 1 mock_close.call_count = 1 mock_rollback.call_count = 0 elif autocommit_strategy == "autocommit" and status_code >= 300: mock_commit.call_count = 0 mock_close.call_count = 1 mock_rollback.call_count = 1 elif autocommit_strategy == "autocommit_include_redirect" and status_code < 400: mock_commit.call_count = 1 mock_close.call_count = 1 mock_rollback.call_count = 0 elif autocommit_strategy == "autocommit_include_redirect" and status_code >= 400: mock_commit.call_count = 0 mock_close.call_count = 1 mock_rollback.call_count = 1 @pytest.mark.parametrize( "status_code", [200, 201, 202, 204, 206, 300, 301, 305, 307, 308, 400, 401, 404, 450, 500, 900] ) @pytest.mark.parametrize("autocommit_strategy", ["manual", "autocommit", "autocommit_include_redirect"]) def test_async_commit_strategies( mocker: MockerFixture, status_code: int, autocommit_strategy: Literal["manual", "autocommit", "autocommit_include_redirect"], ) -> None: app = FastAPI() config = SQLAlchemyAsyncConfig(connection_string="sqlite+aiosqlite:///:memory:", commit_mode=autocommit_strategy) alchemy = 
AdvancedAlchemy(config=config, app=app) mock_commit = mocker.patch("sqlalchemy.ext.asyncio.AsyncSession.commit") mock_close = mocker.patch("sqlalchemy.ext.asyncio.AsyncSession.close") mock_rollback = mocker.patch("sqlalchemy.ext.asyncio.AsyncSession.rollback") @app.get("/") def handler(session: Annotated[AsyncSession, Depends(alchemy.provide_session())]) -> Response: return Response(status_code=status_code) with TestClient(app=app) as client: response = client.get("/") assert response.status_code == status_code if autocommit_strategy == "manual": mock_commit.call_count = 0 mock_close.call_count = 1 mock_rollback.call_count = 0 elif autocommit_strategy == "autocommit" and status_code < 300: mock_commit.call_count = 1 mock_close.call_count = 1 mock_rollback.call_count = 0 elif autocommit_strategy == "autocommit" and status_code >= 300: mock_commit.call_count = 0 mock_close.call_count = 1 mock_rollback.call_count = 1 elif autocommit_strategy == "autocommit_include_redirect" and status_code < 400: mock_commit.call_count = 1 mock_close.call_count = 1 mock_rollback.call_count = 0 elif autocommit_strategy == "autocommit_include_redirect" and status_code >= 400: mock_commit.call_count = 0 mock_close.call_count = 1 mock_rollback.call_count = 1 @pytest.mark.parametrize("autocommit_strategy", ["manual", "autocommit", "autocommit_include_redirect"]) def test_sync_session_close_on_exception( mocker: MockerFixture, autocommit_strategy: Literal["manual", "autocommit", "autocommit_include_redirect"], ) -> None: app = FastAPI() config = SQLAlchemySyncConfig( connection_string="sqlite+pysqlite://", commit_mode=autocommit_strategy, ) alchemy = AdvancedAlchemy(config=config, app=app) mock_commit = mocker.patch("sqlalchemy.orm.Session.commit") mock_close = mocker.patch("sqlalchemy.orm.Session.close") mock_rollback = mocker.patch("sqlalchemy.orm.Session.rollback") def provide_session(request: Request) -> Session: return alchemy.get_sync_session(request) @app.get("/") def 
handler(sync_db_session: Annotated[Session, Depends(provide_session)]) -> str: raise HTTPException(status_code=500, detail="Intentional error for testing") with TestClient(app=app, raise_server_exceptions=False) as client: _ = client.get("/") assert _.status_code == 500 assert _.json().get("detail") == "Intentional error for testing" mock_commit.call_count = 0 mock_close.call_count = 1 mock_rollback.call_count = 0 @pytest.mark.parametrize("autocommit_strategy", ["manual", "autocommit", "autocommit_include_redirect"]) def test_async_session_close_on_exception( mocker: MockerFixture, autocommit_strategy: Literal["manual", "autocommit", "autocommit_include_redirect"], ) -> None: app = FastAPI() config = SQLAlchemyAsyncConfig( connection_string="sqlite+aiosqlite://", commit_mode=autocommit_strategy, ) alchemy = AdvancedAlchemy(config=config, app=app) mock_commit = mocker.patch("sqlalchemy.ext.asyncio.AsyncSession.commit") mock_close = mocker.patch("sqlalchemy.ext.asyncio.AsyncSession.close") mock_rollback = mocker.patch("sqlalchemy.ext.asyncio.AsyncSession.rollback") def provide_session(request: Request) -> AsyncSession: return alchemy.get_async_session(request) @app.get("/") def handler(async_db_session: Annotated[AsyncSession, Depends(provide_session)]) -> str: raise HTTPException(status_code=500, detail="Intentional error for testing") with TestClient(app=app, raise_server_exceptions=False) as client: _ = client.get("/") assert _.status_code == 500 assert _.json().get("detail") == "Intentional error for testing" mock_commit.call_count = 0 mock_close.call_count = 1 mock_rollback.call_count = 0 def test_multiple_sync_instances(app: FastAPI) -> None: mock = MagicMock() config_1 = SQLAlchemySyncConfig(connection_string="sqlite:///:memory:") config_2 = SQLAlchemySyncConfig(connection_string="sqlite:///temp.db", bind_key="config_2") alchemy_1 = AdvancedAlchemy([config_1, config_2], app=app) def provide_engine_1() -> Engine: return alchemy_1.get_sync_engine() def 
provide_engine_2() -> Engine: return alchemy_1.get_sync_engine("config_2") @app.get("/") def handler( session_1: Annotated[Session, Depends(lambda: alchemy_1.provide_session())], session_2: Annotated[Session, Depends(lambda: alchemy_1.provide_session("config_2"))], engine_1: Annotated[Engine, Depends(lambda: alchemy_1.provide_engine())], engine_2: Annotated[Engine, Depends(lambda: alchemy_1.provide_engine("config_2"))], ) -> None: assert session_1 is not session_2 assert engine_1 is not engine_2 mock(session=session_1, engine=engine_1) mock(session=session_2, engine=engine_2) with TestClient(app=app) as client: client.get("/") assert alchemy_1.get_sync_config().bind_key != alchemy_1.get_sync_config("config_2").bind_key assert alchemy_1.get_sync_config().session_maker != alchemy_1.get_sync_config("config_2").session_maker assert alchemy_1.get_sync_config().get_engine() is not alchemy_1.get_sync_config("config_2").get_engine() assert ( alchemy_1.get_sync_config().create_session_maker() is not alchemy_1.get_sync_config("config_2").create_session_maker() ) assert mock.call_args_list[0].kwargs["session"] is not mock.call_args_list[1].kwargs["session"] assert mock.call_args_list[0].kwargs["engine"] is not mock.call_args_list[1].kwargs["engine"] async def test_lifespan_startup_shutdown_called_fastapi(mocker: MockerFixture, app: FastAPI, config: AnyConfig) -> None: mock_startup = mocker.patch.object(AdvancedAlchemy, "on_startup") mock_shutdown = mocker.patch.object(AdvancedAlchemy, "on_shutdown") _alchemy = AdvancedAlchemy(config, app=app) with TestClient(app=app) as _client: # TestClient context manager triggers lifespan events pass # App starts up and shuts down within this context mock_startup.assert_called_once() mock_shutdown.assert_called_once() async def test_lifespan_with_custom_lifespan_fastapi(mocker: MockerFixture, app: FastAPI, config: AnyConfig) -> None: mock_aa_startup = mocker.patch.object(AdvancedAlchemy, "on_startup") mock_aa_shutdown = 
mocker.patch.object(AdvancedAlchemy, "on_shutdown") mock_custom_startup = mocker.MagicMock() mock_custom_shutdown = mocker.MagicMock() @asynccontextmanager async def custom_lifespan(app_in: FastAPI) -> AsyncGenerator[None, None]: mock_custom_startup() yield mock_custom_shutdown() app.router.lifespan_context = custom_lifespan # type: ignore[assignment] # Set a custom lifespan on the app _alchemy = AdvancedAlchemy(config, app=app) with TestClient(app=app) as _client: # TestClient context manager triggers lifespan events pass # App starts up and shuts down within this context mock_aa_startup.assert_called_once() mock_aa_shutdown.assert_called_once() mock_custom_startup.assert_called_once() mock_custom_shutdown.assert_called_once() # Optionally assert the order of calls if needed, e.g., using mocker.call_order python-advanced-alchemy-1.0.1/tests/unit/test_extensions/test_flask.py000066400000000000000000000577241476663714600263500ustar00rootroot00000000000000# ruff: noqa: RUF029 """Tests for the Flask extension.""" from __future__ import annotations from collections.abc import Generator, Sequence from pathlib import Path import pytest from flask import Flask, Response from msgspec import Struct from pydantic import BaseModel from sqlalchemy import String, select, text from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.orm import DeclarativeBase, Mapped, Session, mapped_column from advanced_alchemy import base, mixins from advanced_alchemy.exceptions import ImproperConfigurationError from advanced_alchemy.extensions.flask import ( AdvancedAlchemy, FlaskServiceMixin, SQLAlchemyAsyncConfig, SQLAlchemySyncConfig, ) from advanced_alchemy.repository import SQLAlchemyAsyncRepository, SQLAlchemySyncRepository from advanced_alchemy.service import SQLAlchemyAsyncRepositoryService, SQLAlchemySyncRepositoryService metadata = base.metadata_registry.get("flask_testing") class NewBigIntBase(mixins.BigIntPrimaryKey, base.CommonTableAttributes, DeclarativeBase): """Base 
model with a big integer primary key.""" __metadata__ = metadata class User(NewBigIntBase): """Test user model.""" __tablename__ = "users_testing" name: Mapped[str] = mapped_column(String(50)) class UserSchema(Struct): """Test user pydantic model.""" name: str class UserPydantic(BaseModel): """Test user pydantic model.""" name: str class UserService(SQLAlchemySyncRepositoryService[User], FlaskServiceMixin): """Test user service.""" class Repo(SQLAlchemySyncRepository[User]): model_type = User repository_type = Repo class AsyncUserService(SQLAlchemyAsyncRepositoryService[User], FlaskServiceMixin): """Test user service.""" class Repo(SQLAlchemyAsyncRepository[User]): model_type = User repository_type = Repo @pytest.fixture(scope="session") def tmp_path_session(tmp_path_factory: pytest.TempPathFactory) -> Path: return tmp_path_factory.mktemp("test_extensions_flask") @pytest.fixture(scope="session") def setup_database(tmp_path_session: Path) -> Generator[Path, None, None]: # Create a new database for each test db_path = tmp_path_session / "test.db" config = SQLAlchemySyncConfig(connection_string=f"sqlite:///{db_path}", metadata=metadata) engine = config.get_engine() User._sa_registry.metadata.create_all(engine) # pyright: ignore[reportPrivateUsage] with config.get_session() as session: assert isinstance(session, Session) table_exists = session.execute(text("SELECT COUNT(*) FROM users_testing")).scalar_one() assert table_exists >= 0 yield db_path def test_sync_extension_init(setup_database: Path) -> None: app = Flask(__name__) with app.app_context(): config = SQLAlchemySyncConfig(connection_string=f"sqlite:///{setup_database}", metadata=metadata) extension = AdvancedAlchemy(config, app) assert "advanced_alchemy" in app.extensions session = extension.get_session() assert isinstance(session, Session) def test_sync_extension_init_with_app(setup_database: Path) -> None: app = Flask(__name__) with app.app_context(): config = 
SQLAlchemySyncConfig(connection_string=f"sqlite:///{setup_database}", metadata=metadata) extension = AdvancedAlchemy(config, app) assert "advanced_alchemy" in app.extensions session = extension.get_session() assert isinstance(session, Session) def test_sync_extension_multiple_init(setup_database: Path) -> None: app = Flask(__name__) with ( app.app_context(), pytest.raises(ImproperConfigurationError, match="Advanced Alchemy extension is already registered"), ): config = SQLAlchemySyncConfig(connection_string=f"sqlite:///{setup_database}", metadata=metadata) extension = AdvancedAlchemy(config, app) extension.init_app(app) def test_async_extension_init(setup_database: Path) -> None: app = Flask(__name__) with app.app_context(): config = SQLAlchemyAsyncConfig( bind_key="async", connection_string=f"sqlite+aiosqlite:///{setup_database}", metadata=metadata ) extension = AdvancedAlchemy(config, app) assert "advanced_alchemy" in app.extensions session = extension.get_session("async") assert isinstance(session, AsyncSession) extension.portal_provider.stop() def test_async_extension_init_single_config_no_bind_key(setup_database: Path) -> None: app = Flask(__name__) with app.app_context(): config = SQLAlchemyAsyncConfig(connection_string=f"sqlite+aiosqlite:///{setup_database}", metadata=metadata) extension = AdvancedAlchemy(config, app) assert "advanced_alchemy" in app.extensions session = extension.get_session() assert isinstance(session, AsyncSession) extension.portal_provider.stop() def test_async_extension_init_with_app(setup_database: Path) -> None: app = Flask(__name__) with app.app_context(): config = SQLAlchemyAsyncConfig( bind_key="async", connection_string=f"sqlite+aiosqlite:///{setup_database}", metadata=metadata ) extension = AdvancedAlchemy(config, app) assert "advanced_alchemy" in app.extensions session = extension.get_session("async") assert isinstance(session, AsyncSession) extension.portal_provider.stop() def test_async_extension_multiple_init(setup_database: 
Path) -> None: app = Flask(__name__) with ( app.app_context(), pytest.raises(ImproperConfigurationError, match="Advanced Alchemy extension is already registered"), ): config = SQLAlchemyAsyncConfig( connection_string=f"sqlite+aiosqlite:///{setup_database}", bind_key="async", metadata=metadata ) extension = AdvancedAlchemy(config, app) extension.init_app(app) def test_sync_and_async_extension_init(setup_database: Path) -> None: app = Flask(__name__) with app.app_context(): extension = AdvancedAlchemy( [ SQLAlchemySyncConfig(connection_string=f"sqlite:///{setup_database}"), SQLAlchemyAsyncConfig( connection_string=f"sqlite+aiosqlite:///{setup_database}", bind_key="async", metadata=metadata ), ], app, ) assert "advanced_alchemy" in app.extensions session = extension.get_session() assert isinstance(session, Session) def test_multiple_binds(setup_database: Path) -> None: app = Flask(__name__) with app.app_context(): extension = AdvancedAlchemy( [ SQLAlchemySyncConfig( connection_string=f"sqlite:///{setup_database}", bind_key="db1", metadata=metadata ), SQLAlchemySyncConfig( connection_string=f"sqlite:///{setup_database}", bind_key="db2", metadata=metadata ), ], app, ) session = extension.get_session("db1") assert isinstance(session, Session) session = extension.get_session("db2") assert isinstance(session, Session) def test_multiple_binds_async(setup_database: Path) -> None: app = Flask(__name__) with app.app_context(): configs: Sequence[SQLAlchemyAsyncConfig] = [ SQLAlchemyAsyncConfig( connection_string=f"sqlite+aiosqlite:///{setup_database}", bind_key="db1", metadata=metadata ), SQLAlchemyAsyncConfig( connection_string=f"sqlite+aiosqlite:///{setup_database}", bind_key="db2", metadata=metadata ), ] extension = AdvancedAlchemy(configs, app) session = extension.get_session("db1") assert isinstance(session, AsyncSession) session = extension.get_session("db2") assert isinstance(session, AsyncSession) extension.portal_provider.stop() def test_mixed_binds(setup_database: 
Path) -> None: app = Flask(__name__) with app.app_context(): configs: Sequence[SQLAlchemyAsyncConfig | SQLAlchemySyncConfig] = [ SQLAlchemySyncConfig(connection_string=f"sqlite:///{setup_database}", bind_key="sync", metadata=metadata), SQLAlchemyAsyncConfig( connection_string=f"sqlite+aiosqlite:///{setup_database}", bind_key="async", metadata=metadata ), ] extension = AdvancedAlchemy(configs, app) session = extension.get_session("sync") assert isinstance(session, Session) session.close() session = extension.get_session("async") assert isinstance(session, AsyncSession) extension.portal_provider.portal.call(session.close) extension.portal_provider.stop() def test_sync_autocommit(setup_database: Path) -> None: app = Flask(__name__) with app.test_client() as client: config = SQLAlchemySyncConfig( connection_string=f"sqlite:///{setup_database}", commit_mode="autocommit", metadata=metadata ) extension = AdvancedAlchemy(config, app) @app.route("/test", methods=["POST"]) def test_route() -> tuple[dict[str, str], int]: session = extension.get_session() assert isinstance(session, Session) user = User(name="test") session.add(user) return {"status": "success"}, 200 # Test successful response (should commit) response = client.post("/test") assert response.status_code == 200 # Verify the data was committed session = extension.get_session() assert isinstance(session, Session) result = session.execute(select(User).where(User.name == "test")) assert result.scalar_one().name == "test" def test_sync_autocommit_include_redirect(setup_database: Path) -> None: app = Flask(__name__) with app.test_client() as client: config = SQLAlchemySyncConfig( connection_string=f"sqlite:///{setup_database}", commit_mode="autocommit_include_redirect", metadata=metadata, ) extension = AdvancedAlchemy(config, app) @app.route("/test", methods=["POST"]) def test_route() -> tuple[str, int, dict[str, str]]: session = extension.get_session() assert isinstance(session, Session) 
session.add(User(name="test_redirect")) return "", 302, {"Location": "/redirected"} # Test redirect response (should commit with autocommit_include_redirect) response = client.post("/test") assert response.status_code == 302 # Verify the data was committed session = extension.get_session() assert isinstance(session, Session) result = session.execute(select(User).where(User.name == "test_redirect")) assert result.scalar_one().name == "test_redirect" def test_sync_no_autocommit_on_error(setup_database: Path) -> None: app = Flask(__name__) with app.test_client() as client: config = SQLAlchemySyncConfig( connection_string=f"sqlite:///{setup_database}", commit_mode="autocommit", metadata=metadata ) extension = AdvancedAlchemy(config, app) @app.route("/test", methods=["POST"]) def test_route() -> tuple[dict[str, str], int]: session = extension.get_session() assert isinstance(session, Session) user = User(name="test_error") session.add(user) return {"error": "test error"}, 500 # Test error response (should not commit) response = client.post("/test") assert response.status_code == 500 # Verify the data was not committed session = extension.get_session() assert isinstance(session, Session) result = session.execute(select(User).where(User.name == "test_error")) assert result.first() is None def test_async_autocommit(setup_database: Path) -> None: app = Flask(__name__) with app.test_client() as client: config = SQLAlchemyAsyncConfig( connection_string=f"sqlite+aiosqlite:///{setup_database}", commit_mode="autocommit", metadata=metadata ) extension = AdvancedAlchemy(config, app) @app.route("/test", methods=["POST"]) def test_route() -> tuple[dict[str, str], int]: session = extension.get_session() assert isinstance(session, AsyncSession) session.add(User(name="test_async")) return {"status": "success"}, 200 # Test successful response (should commit) response = client.post("/test") assert response.status_code == 200 # Verify the data was committed session = 
extension.get_session() assert isinstance(session, AsyncSession) result = extension.portal_provider.portal.call(session.execute, select(User).where(User.name == "test_async")) assert result.scalar_one().name == "test_async" extension.portal_provider.stop() def test_async_autocommit_include_redirect(setup_database: Path) -> None: app = Flask(__name__) with app.test_client() as client: config = SQLAlchemyAsyncConfig( connection_string=f"sqlite+aiosqlite:///{setup_database}", commit_mode="autocommit_include_redirect", metadata=metadata, ) extension = AdvancedAlchemy(config, app) @app.route("/test", methods=["POST"]) def test_route() -> tuple[str, int, dict[str, str]]: session = extension.get_session() assert isinstance(session, AsyncSession) user = User(name="test_async_redirect") # type: ignore session.add(user) return "", 302, {"Location": "/redirected"} # Test redirect response (should commit with autocommit_include_redirect) response = client.post("/test") assert response.status_code == 302 session = extension.get_session() assert isinstance(session, AsyncSession) result = extension.portal_provider.portal.call( session.execute, select(User).where(User.name == "test_async_redirect") ) assert result.scalar_one().name == "test_async_redirect" extension.portal_provider.stop() def test_async_no_autocommit_on_error(setup_database: Path) -> None: app = Flask(__name__) with app.test_client() as client: config = SQLAlchemyAsyncConfig( connection_string=f"sqlite+aiosqlite:///{setup_database}", commit_mode="autocommit", metadata=metadata ) extension = AdvancedAlchemy(config, app) @app.route("/test", methods=["POST"]) def test_route() -> tuple[dict[str, str], int]: session = extension.get_session() assert isinstance(session, AsyncSession) user = User(name="test_async_error") # type: ignore session.add(user) return {"error": "test async error"}, 500 # Test error response (should not commit) response = client.post("/test") assert response.status_code == 500 session = 
extension.get_session() assert isinstance(session, AsyncSession) async def get_user() -> User | None: result = await session.execute(select(User).where(User.name == "test_async_error")) return result.scalar_one_or_none() # Verify the data was not committed user = extension.portal_provider.portal.call(get_user) assert user is None extension.portal_provider.stop() def test_async_portal_cleanup(setup_database: Path) -> None: app = Flask(__name__) with app.test_client() as client: config = SQLAlchemyAsyncConfig( connection_string=f"sqlite+aiosqlite:///{setup_database}", commit_mode="manual", metadata=metadata ) extension = AdvancedAlchemy(config, app) @app.route("/test", methods=["POST"]) def test_route() -> tuple[dict[str, str], int]: session = extension.get_session() assert isinstance(session, AsyncSession) user = User(name="test_async_cleanup") # type: ignore session.add(user) return {"status": "success"}, 200 # Test successful response (should not commit since we're using MANUAL mode) response = client.post("/test") assert response.status_code == 200 session = extension.get_session() assert isinstance(session, AsyncSession) # Verify the data was not committed (MANUAL mode) result = extension.portal_provider.portal.call( session.execute, select(User).where(User.name == "test_async_cleanup") ) assert result.first() is None extension.portal_provider.stop() def test_async_portal_explicit_stop(setup_database: Path) -> None: app = Flask(__name__) with app.test_client() as client: config = SQLAlchemyAsyncConfig( connection_string=f"sqlite+aiosqlite:///{setup_database}", metadata=metadata, commit_mode="manual", ) extension = AdvancedAlchemy(config, app) @app.route("/test", methods=["POST"]) def test_route() -> tuple[dict[str, str], int]: session = extension.get_session() assert isinstance(session, AsyncSession) user = User(name="test_async_explicit_stop") # type: ignore session.add(user) return {"status": "success"}, 200 # Test successful response (should not commit since 
we're using MANUAL mode) response = client.post("/test") assert response.status_code == 200 with app.app_context(): session = extension.get_session() assert isinstance(session, AsyncSession) # Verify the data was not committed (MANUAL mode) result = extension.portal_provider.portal.call( session.scalar, select(User).where(User.name == "test_async_explicit_stop") ) assert result is None extension.portal_provider.stop() def test_async_portal_explicit_stop_with_commit(setup_database: Path) -> None: app = Flask(__name__) @app.route("/test", methods=["POST"]) def test_route() -> tuple[dict[str, str], int]: session = extension.get_session() assert isinstance(session, AsyncSession) async def create_user() -> None: user = User(name="test_async_explicit_stop_with_commit") # type: ignore session.add(user) await session.commit() # type: ignore extension.portal_provider.portal.call(create_user) return {"status": "success"}, 200 with app.test_client() as client: config = SQLAlchemyAsyncConfig( connection_string=f"sqlite+aiosqlite:///{setup_database}", metadata=metadata, commit_mode="manual", ) extension = AdvancedAlchemy(config, app) # Test successful response response = client.post("/test") assert response.status_code == 200 # Verify in a new session session = extension.get_session() assert isinstance(session, AsyncSession) async def get_user() -> User | None: async with session: result = await session.execute(select(User).where(User.name == "test_async_explicit_stop_with_commit")) return result.scalar_one_or_none() user = extension.portal_provider.portal.call(get_user) assert isinstance(user, User) assert user.name == "test_async_explicit_stop_with_commit" extension.portal_provider.stop() def test_sync_service_jsonify_msgspec(setup_database: Path) -> None: app = Flask(__name__) with app.test_client() as client: config = SQLAlchemySyncConfig( connection_string=f"sqlite:///{setup_database}", metadata=metadata, commit_mode="autocommit" ) extension = AdvancedAlchemy(config, app) 
@app.route("/test", methods=["POST"]) def test_route() -> Response: service = UserService(extension.get_sync_session()) user = service.create({"name": "service_test"}) return service.jsonify(service.to_schema(user, schema_type=UserSchema)) # Test successful response (should commit) response = client.post("/test") assert response.status_code == 200 # Verify the data was committed session = extension.get_session() assert isinstance(session, Session) result = session.execute(select(User).where(User.name == "service_test")) assert result.scalar_one().name == "service_test" def test_async_service_jsonify_msgspec(setup_database: Path) -> None: app = Flask(__name__) with app.test_client() as client: config = SQLAlchemyAsyncConfig( connection_string=f"sqlite+aiosqlite:///{setup_database}", metadata=metadata, commit_mode="autocommit" ) extension = AdvancedAlchemy(config, app) @app.route("/test", methods=["POST"]) def test_route() -> Response: service = AsyncUserService(extension.get_async_session()) user = extension.portal_provider.portal.call(service.create, {"name": "async_service_test"}) return service.jsonify(service.to_schema(user, schema_type=UserSchema)) # Test successful response (should commit) response = client.post("/test") assert response.status_code == 200 # Verify the data was committed session = extension.get_session() assert isinstance(session, AsyncSession) result = extension.portal_provider.portal.call( session.scalar, select(User).where(User.name == "async_service_test") ) assert result assert result.name == "async_service_test" extension.portal_provider.stop() def test_sync_service_jsonify_pydantic(setup_database: Path) -> None: app = Flask(__name__) with app.test_client() as client: config = SQLAlchemySyncConfig( connection_string=f"sqlite:///{setup_database}", metadata=metadata, commit_mode="autocommit" ) extension = AdvancedAlchemy(config, app) @app.route("/test", methods=["POST"]) def test_route() -> Response: service = 
UserService(extension.get_sync_session()) user = service.create({"name": "test_sync_service_jsonify_pydantic"}) return service.jsonify(service.to_schema(user, schema_type=UserPydantic)) # Test successful response (should commit) response = client.post("/test") assert response.status_code == 200 # Verify the data was committed session = extension.get_session() assert isinstance(session, Session) result = session.execute(select(User).where(User.name == "test_sync_service_jsonify_pydantic")) assert result.scalar_one().name == "test_sync_service_jsonify_pydantic" def test_async_service_jsonify_pydantic(setup_database: Path) -> None: app = Flask(__name__) with app.test_client() as client: config = SQLAlchemyAsyncConfig( connection_string=f"sqlite+aiosqlite:///{setup_database}", metadata=metadata, commit_mode="autocommit" ) extension = AdvancedAlchemy(config, app) @app.route("/test", methods=["POST"]) def test_route() -> Response: service = AsyncUserService(extension.get_async_session()) user = extension.portal_provider.portal.call( service.create, {"name": "test_async_service_jsonify_pydantic"} ) return service.jsonify(service.to_schema(user, schema_type=UserPydantic)) # Test successful response (should commit) response = client.post("/test") assert response.status_code == 200 # Verify the data was committed session = extension.get_session() assert isinstance(session, AsyncSession) result = extension.portal_provider.portal.call( session.scalar, select(User).where(User.name == "test_async_service_jsonify_pydantic") ) assert result assert result.name == "test_async_service_jsonify_pydantic" extension.portal_provider.stop() 
python-advanced-alchemy-1.0.1/tests/unit/test_extensions/test_litestar/000077500000000000000000000000001476663714600265065ustar00rootroot00000000000000python-advanced-alchemy-1.0.1/tests/unit/test_extensions/test_litestar/__init__.py000066400000000000000000000000001476663714600306050ustar00rootroot00000000000000python-advanced-alchemy-1.0.1/tests/unit/test_extensions/test_litestar/conftest.py000066400000000000000000000253571476663714600307210ustar00rootroot00000000000000from __future__ import annotations import importlib.util import os import random import string import sys from collections.abc import AsyncGenerator, Generator, Sequence from dataclasses import replace from pathlib import Path from types import ModuleType from typing import Any, Callable, cast from unittest.mock import ANY import pytest from litestar.app import Litestar from litestar.dto import AbstractDTO, DTOField, Mark from litestar.dto._backend import DTOBackend from litestar.dto.data_structures import DTOFieldDefinition from litestar.testing import RequestFactory from litestar.types import ( ASGIVersion, RouteHandlerType, Scope, ScopeSession, # type: ignore ) from litestar.types.empty import Empty from litestar.typing import FieldDefinition from pytest import FixtureRequest, MonkeyPatch from sqlalchemy import Engine, NullPool, create_engine from sqlalchemy.ext.asyncio import AsyncEngine, AsyncSession, async_sessionmaker, create_async_engine from sqlalchemy.orm import Session, sessionmaker from typing_extensions import TypeVar from advanced_alchemy.alembic.commands import AlembicCommands from advanced_alchemy.config.common import GenericSQLAlchemyConfig from advanced_alchemy.extensions.litestar import SQLAlchemyAsyncConfig, SQLAlchemyPlugin, SQLAlchemySyncConfig @pytest.fixture(autouse=True) def reload_package() -> Generator[None, None, None]: yield GenericSQLAlchemyConfig._SESSION_SCOPE_KEY_REGISTRY = set() # type: ignore GenericSQLAlchemyConfig._ENGINE_APP_STATE_KEY_REGISTRY = set() # type: 
ignore GenericSQLAlchemyConfig._SESSIONMAKER_APP_STATE_KEY_REGISTRY = set() # type: ignore @pytest.fixture(autouse=True) def reset_cached_dto_backends() -> Generator[None, None, None]: DTOBackend._seen_model_names = set() # pyright: ignore[reportPrivateUsage] AbstractDTO._dto_backends = {} # pyright: ignore[reportPrivateUsage] yield DTOBackend._seen_model_names = set() # pyright: ignore[reportPrivateUsage] AbstractDTO._dto_backends = {} # pyright: ignore[reportPrivateUsage] @pytest.fixture(autouse=True) async def disable_implicit_sync_warning() -> None: os.environ["LITESTAR_WARN_IMPLICIT_SYNC_TO_THREAD"] = "0" @pytest.fixture def int_factory() -> Generator[Callable[[], int], None, None]: yield lambda: 2 @pytest.fixture def expected_field_defs(int_factory: Callable[[], int]) -> Generator[list[DTOFieldDefinition], None, None]: yield [ DTOFieldDefinition.from_field_definition( field_definition=FieldDefinition.from_kwarg( annotation=int, name="a", ), model_name=ANY, default_factory=Empty, # type: ignore[arg-type] dto_field=DTOField(), ), replace( DTOFieldDefinition.from_field_definition( field_definition=FieldDefinition.from_kwarg( annotation=int, name="b", ), model_name=ANY, default_factory=Empty, # type: ignore[arg-type] dto_field=DTOField(mark=Mark.READ_ONLY), ), metadata=ANY, type_wrappers=ANY, raw=ANY, kwarg_definition=ANY, ), replace( DTOFieldDefinition.from_field_definition( field_definition=FieldDefinition.from_kwarg( annotation=int, name="c", ), model_name=ANY, default_factory=Empty, # type: ignore[arg-type] dto_field=DTOField(), ), metadata=ANY, type_wrappers=ANY, raw=ANY, kwarg_definition=ANY, ), replace( DTOFieldDefinition.from_field_definition( field_definition=FieldDefinition.from_kwarg( annotation=int, name="d", default=1, ), model_name=ANY, default_factory=Empty, # type: ignore[arg-type] dto_field=DTOField(), ), metadata=ANY, type_wrappers=ANY, raw=ANY, kwarg_definition=ANY, ), replace( DTOFieldDefinition.from_field_definition( 
field_definition=FieldDefinition.from_kwarg( annotation=int, name="e", ), model_name=ANY, default_factory=int_factory, dto_field=DTOField(), ), metadata=ANY, type_wrappers=ANY, raw=ANY, kwarg_definition=ANY, ), ] @pytest.fixture def create_module(tmp_path: Path, monkeypatch: MonkeyPatch) -> Generator[Callable[[str], ModuleType], None, None]: """Utility fixture for dynamic module creation.""" def wrapped(source: str) -> ModuleType: """ Args: source: Source code as a string. Returns: An imported module. """ T = TypeVar("T") def not_none(val: T | None) -> T: assert val is not None return val def module_name_generator() -> str: letters = string.ascii_lowercase return "".join(random.choice(letters) for _ in range(10)) module_name = module_name_generator() path = tmp_path / f"{module_name}.py" path.write_text(source) # https://docs.python.org/3/library/importlib.html#importing-a-source-file-directly spec = not_none(importlib.util.spec_from_file_location(module_name, path)) module = not_none(importlib.util.module_from_spec(spec)) monkeypatch.setitem(sys.modules, module_name, module) not_none(spec.loader).exec_module(module) return module yield wrapped @pytest.fixture def create_scope() -> Generator[Callable[..., Scope], None, None]: def inner( *, type: str = "http", app: Litestar | None = None, asgi: ASGIVersion | None = None, auth: Any = None, client: tuple[str, int] | None = ("testclient", 50000), extensions: dict[str, dict[object, object]] | None = None, http_version: str = "1.1", path: str = "/", path_params: dict[str, str] | None = None, query_string: str = "", root_path: str = "", route_handler: RouteHandlerType | None = None, scheme: str = "http", server: tuple[str, int | None] | None = ("testserver", 80), session: ScopeSession | None = None, # pyright: ignore[reportUnknownParameterType] state: dict[str, Any] | None = None, user: Any = None, **kwargs: dict[str, Any], ) -> Scope: scope: dict[str, Any] = { "app": app, "asgi": asgi or {"spec_version": "2.0", 
"version": "3.0"}, "auth": auth, "type": type, "path": path, "raw_path": path.encode(), "root_path": root_path, "scheme": scheme, "query_string": query_string.encode(), "client": client, "server": server, "method": "GET", "http_version": http_version, "extensions": extensions or {"http.response.template": {}}, "state": state or {}, "path_params": path_params or {}, "route_handler": route_handler, "user": user, "session": session, **kwargs, } return cast("Scope", scope) yield inner # pyright: ignore[reportUnknownVariableType] @pytest.fixture def scope(create_scope: Callable[..., Scope]) -> Generator[Scope, None, None]: yield create_scope() @pytest.fixture() def engine() -> Generator[Engine, None, None]: """SQLite engine for end-to-end testing. Returns: Async SQLAlchemy engine instance. """ engine = create_engine("sqlite:///:memory:", poolclass=NullPool) try: yield engine finally: engine.dispose() @pytest.fixture() async def sync_sqlalchemy_plugin( engine: Engine, session_maker: sessionmaker[Session] | None = None, ) -> AsyncGenerator[SQLAlchemyPlugin, None]: yield SQLAlchemyPlugin(config=SQLAlchemySyncConfig(engine_instance=engine, session_maker=session_maker)) @pytest.fixture() async def async_engine() -> AsyncGenerator[AsyncEngine, None]: """SQLite engine for end-to-end testing. Returns: Async SQLAlchemy engine instance. 
""" engine = create_async_engine("sqlite+aiosqlite:///:memory:", poolclass=NullPool) try: yield engine finally: await engine.dispose() @pytest.fixture() async def async_sqlalchemy_plugin( async_engine: AsyncEngine, async_session_maker: async_sessionmaker[AsyncSession] | None = None, ) -> AsyncGenerator[SQLAlchemyPlugin, None]: yield SQLAlchemyPlugin( config=SQLAlchemyAsyncConfig(engine_instance=async_engine, session_maker=async_session_maker), ) @pytest.fixture(params=[pytest.param("sync_sqlalchemy_plugin"), pytest.param("async_sqlalchemy_plugin")]) async def plugin(request: FixtureRequest) -> AsyncGenerator[SQLAlchemyPlugin, None]: yield cast(SQLAlchemyPlugin, request.getfixturevalue(request.param)) @pytest.fixture() async def sync_app(sync_sqlalchemy_plugin: SQLAlchemyPlugin) -> AsyncGenerator[Litestar, None]: yield Litestar(plugins=[sync_sqlalchemy_plugin]) @pytest.fixture() async def async_app(async_sqlalchemy_plugin: SQLAlchemyPlugin) -> AsyncGenerator[Litestar, None]: yield Litestar(plugins=[async_sqlalchemy_plugin]) @pytest.fixture() async def sync_alembic_commands(sync_app: Litestar) -> AsyncGenerator[AlembicCommands, None]: plugin = sync_app.plugins.get(SQLAlchemyPlugin) config = plugin.config[0] if isinstance(plugin.config, Sequence) else plugin.config # type: ignore yield AlembicCommands(sqlalchemy_config=config) @pytest.fixture() async def async_alembic_commands(async_app: Litestar) -> AsyncGenerator[AlembicCommands, None]: plugin = async_app.plugins.get(SQLAlchemyPlugin) config = plugin.config[0] if isinstance(plugin.config, Sequence) else plugin.config # type: ignore yield AlembicCommands(sqlalchemy_config=config) @pytest.fixture(params=[pytest.param("sync_alembic_commands"), pytest.param("async_alembic_commands")]) async def alembic_commands(request: FixtureRequest) -> AsyncGenerator[AlembicCommands, None]: yield cast(AlembicCommands, request.getfixturevalue(request.param)) @pytest.fixture(params=[pytest.param("sync_app"), pytest.param("async_app")]) 
async def app(request: FixtureRequest) -> AsyncGenerator[Litestar, None]: yield cast(Litestar, request.getfixturevalue(request.param)) @pytest.fixture() def request_factory() -> Generator[RequestFactory, None, None]: yield RequestFactory() python-advanced-alchemy-1.0.1/tests/unit/test_extensions/test_litestar/test_context.py000066400000000000000000000016321476663714600316050ustar00rootroot00000000000000from __future__ import annotations from typing import cast from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.orm import Session from advanced_alchemy.extensions.litestar.plugins import SQLAlchemyPlugin from advanced_alchemy.extensions.litestar.plugins.init.config.asyncio import SQLAlchemyAsyncConfig from advanced_alchemy.extensions.litestar.plugins.init.config.sync import SQLAlchemySyncConfig async def test_sync_db_session(sync_sqlalchemy_plugin: SQLAlchemyPlugin) -> None: config = cast("SQLAlchemySyncConfig", sync_sqlalchemy_plugin.config[0]) with config.get_session() as session: assert isinstance(session, Session) async def test_async_db_session(async_sqlalchemy_plugin: SQLAlchemyPlugin) -> None: config = cast("SQLAlchemyAsyncConfig", async_sqlalchemy_plugin.config[0]) async with config.get_session() as session: assert isinstance(session, AsyncSession) python-advanced-alchemy-1.0.1/tests/unit/test_extensions/test_litestar/test_dto.py000066400000000000000000000646561476663714600307260ustar00rootroot00000000000000from __future__ import annotations import datetime import sys from typing import TYPE_CHECKING, Annotated, ClassVar from uuid import UUID, uuid4 import pytest import sqlalchemy from litestar import Request, get from litestar.dto import DTOField, Mark from litestar.dto.field import DTO_FIELD_META_KEY from litestar.enums import MediaType from litestar.plugins.pydantic import PydanticInitPlugin from litestar.serialization import encode_json from litestar.testing import RequestFactory from litestar.typing import FieldDefinition from sqlalchemy 
import ForeignKey, func from sqlalchemy.orm import DeclarativeBase, Mapped, MappedAsDataclass, declared_attr, mapped_column, relationship from typing_extensions import TypeVar from advanced_alchemy.exceptions import ImproperConfigurationError from advanced_alchemy.extensions.litestar.dto import ( SQLAlchemyDTO, SQLAlchemyDTOConfig, parse_type_from_element, # type: ignore ) if TYPE_CHECKING: from collections.abc import Callable from types import ModuleType from typing import Any @pytest.fixture(name="base") def fx_base() -> type[DeclarativeBase]: class Base(DeclarativeBase): id: Mapped[UUID] = mapped_column(default=uuid4, primary_key=True) created: Mapped[datetime.datetime] = mapped_column( default=datetime.datetime.now, info={DTO_FIELD_META_KEY: DTOField(mark=Mark.READ_ONLY)}, ) updated: Mapped[datetime.datetime] = mapped_column( default=datetime.datetime.now, info={DTO_FIELD_META_KEY: DTOField(mark=Mark.READ_ONLY)}, ) # noinspection PyMethodParameters @declared_attr.directive def __tablename__(cls) -> str: """Infer table name from class name.""" return cls.__name__.lower() return Base @pytest.fixture(name="author_model") def fx_author_model(base: DeclarativeBase) -> type[DeclarativeBase]: class Author(base): # type: ignore name: Mapped[str] dob: Mapped[datetime.date] return Author @pytest.fixture(name="raw_author") def fx_raw_author() -> bytes: return b'{"id":"97108ac1-ffcb-411d-8b1e-d9183399f63b","name":"Agatha Christie","dob":"1890-09-15","created":"0001-01-01T00:00:00","updated":"0001-01-01T00:00:00"}' @pytest.fixture(name="asgi_connection") def fx_asgi_connection() -> Request[Any, Any, Any]: @get("/", name="handler_id", media_type=MediaType.JSON, type_decoders=PydanticInitPlugin.decoders()) def _handler() -> None: ... 
return RequestFactory().get(path="/", route_handler=_handler) T = TypeVar("T") DataT = TypeVar("DataT", bound=DeclarativeBase) async def get_model_from_dto( dto_type: type[SQLAlchemyDTO[DataT]], annotation: Any, asgi_connection: Request[Any, Any, Any], raw: bytes, ) -> Any: dto_type.create_for_field_definition( handler_id=asgi_connection.route_handler.handler_id, field_definition=FieldDefinition.from_kwarg(annotation, name="data"), ) dto_type.create_for_field_definition( handler_id=asgi_connection.route_handler.handler_id, field_definition=FieldDefinition.from_kwarg(annotation, name="return"), ) return dto_type(asgi_connection).decode_bytes(raw) def assert_model_values(model_instance: DeclarativeBase, expected_values: dict[str, Any]) -> None: assert {k: v for k, v in model_instance.__dict__.items() if not k.startswith("_")} == expected_values async def test_model_write_dto( author_model: type[DeclarativeBase], raw_author: bytes, asgi_connection: Request[Any, Any, Any], ) -> None: model = await get_model_from_dto(SQLAlchemyDTO[author_model], author_model, asgi_connection, raw_author) # type: ignore assert_model_values( model, { "id": UUID("97108ac1-ffcb-411d-8b1e-d9183399f63b"), "name": "Agatha Christie", "dob": datetime.date(1890, 9, 15), }, ) async def test_model_read_dto( author_model: type[DeclarativeBase], raw_author: bytes, asgi_connection: Request[Any, Any, Any], ) -> None: config = SQLAlchemyDTOConfig() dto_type = SQLAlchemyDTO[Annotated[author_model, config]] # type: ignore model = await get_model_from_dto(dto_type, author_model, asgi_connection, raw_author) assert_model_values( model, { "id": UUID("97108ac1-ffcb-411d-8b1e-d9183399f63b"), "name": "Agatha Christie", "dob": datetime.date(1890, 9, 15), }, ) async def test_model_list_dto(author_model: type[DeclarativeBase], asgi_connection: Request[Any, Any, Any]) -> None: dto_type = SQLAlchemyDTO[author_model] # type: ignore raw = b'[{"id": "97108ac1-ffcb-411d-8b1e-d9183399f63b","name":"Agatha 
Christie","dob":"1890-09-15","created":"0001-01-01T00:00:00","updated":"0001-01-01T00:00:00"}]' dto_data = await get_model_from_dto(dto_type, list[author_model], asgi_connection, raw) # type: ignore assert isinstance(dto_data, list) assert_model_values( dto_data[0], # type: ignore { "id": UUID("97108ac1-ffcb-411d-8b1e-d9183399f63b"), "name": "Agatha Christie", "dob": datetime.date(1890, 9, 15), }, ) async def test_dto_exclude( author_model: type[DeclarativeBase], raw_author: bytes, asgi_connection: Request[Any, Any, Any], ) -> None: config = SQLAlchemyDTOConfig(exclude={"id"}) model = await get_model_from_dto( SQLAlchemyDTO[Annotated[author_model, config]], # type: ignore author_model, asgi_connection, raw_author, ) assert "id" not in vars(model) async def test_write_dto_field_default(base: type[DeclarativeBase], asgi_connection: Request[Any, Any, Any]) -> None: class Model(base): # type: ignore field: Mapped[int] = mapped_column(default=3) dto_type = SQLAlchemyDTO[Annotated[Model, SQLAlchemyDTOConfig(exclude={"id", "created", "updated"})]] model = await get_model_from_dto(dto_type, Model, asgi_connection, b'{"a":"b"}') assert_model_values(model, {"field": 3}) async def test_write_dto_for_model_field_factory_default( base: type[DeclarativeBase], asgi_connection: Request[Any, Any, Any], ) -> None: val = uuid4() class Model(base): # type: ignore field: Mapped[UUID] = mapped_column(default=lambda: val) dto_type = SQLAlchemyDTO[Annotated[Model, SQLAlchemyDTOConfig(exclude={"id", "created", "updated"})]] model = await get_model_from_dto(dto_type, Model, asgi_connection, b'{"a":"b"}') assert_model_values(model, {"field": val}) async def test_dto_instrumented_attribute_key( base: type[DeclarativeBase], asgi_connection: Request[Any, Any, Any], ) -> None: val = uuid4() class Model(base): # type: ignore field: Mapped[UUID] = mapped_column(default=lambda: val) dto_type = SQLAlchemyDTO[Annotated[Model, SQLAlchemyDTOConfig(exclude={Model.id, Model.created, Model.updated})]] # 
pyright: ignore[reportAttributeAccessIssue,reportUnknownMemberType,reportUnknownArgumentType] model = await get_model_from_dto(dto_type, Model, asgi_connection, b'{"a":"b"}') assert_model_values(model, {"field": val}) async def test_write_dto_for_model_field_unsupported_default( base: type[DeclarativeBase], asgi_connection: Request[Any, Any, Any], ) -> None: """Test for error condition where we don't know what to do with a default type.""" class Model(base): # type: ignore field: Mapped[datetime.datetime] = mapped_column(default=func.now()) with pytest.raises(ValueError): await get_model_from_dto(SQLAlchemyDTO[Annotated[Model, SQLAlchemyDTOConfig()]], Model, asgi_connection, b"") async def test_dto_for_private_model_field( base: type[DeclarativeBase], asgi_connection: Request[Any, Any, Any], ) -> None: class Model(base): # type: ignore field: Mapped[datetime.datetime] = mapped_column( info={DTO_FIELD_META_KEY: DTOField(mark=Mark.PRIVATE)}, ) dto_type = SQLAlchemyDTO[Annotated[Model, SQLAlchemyDTOConfig()]] raw = b'{"id":"97108ac1-ffcb-411d-8b1e-d9183399f63b","created":"0001-01-01T00:00:00","updated":"0001-01-01T00:00:00","field":"0001-01-01T00:00:00"}' assert "field" not in vars(await get_model_from_dto(dto_type, Model, asgi_connection, raw)) dto_instance = dto_type(asgi_connection) serializable = dto_instance.data_to_encodable_type( # pyright: ignore[reportUnknownMemberType,reportUnknownVariableType] Model( id=UUID("0956ca9e-5671-4d7d-a862-b98e6368ed2c"), created=datetime.datetime.min, updated=datetime.datetime.min, field=datetime.datetime.min, ), ) assert b"field" not in encode_json(serializable) async def test_dto_for_non_mapped_model_field( base: type[DeclarativeBase], asgi_connection: Request[Any, Any, Any], ) -> None: class Model(base): # type: ignore field: ClassVar[datetime.datetime] dto_type = SQLAlchemyDTO[Annotated[Model, SQLAlchemyDTOConfig()]] raw = b'{"id": 
"97108ac1-ffcb-411d-8b1e-d9183399f63b","created":"0001-01-01T00:00:00","updated":"0001-01-01T00:00:00","field":"0001-01-01T00:00:00"}' assert "field" not in vars(await get_model_from_dto(dto_type, Model, asgi_connection, raw)) async def test_dto_mapped_as_dataclass_model_type( base: type[DeclarativeBase], asgi_connection: Request[Any, Any, Any], ) -> None: """Test declare pydantic type on `dto.DTOField`.""" class Model(base, MappedAsDataclass): # type: ignore clz_var: ClassVar[str] field: Mapped[str] dto_type = SQLAlchemyDTO[Annotated[Model, SQLAlchemyDTOConfig(exclude={"id"})]] model = await get_model_from_dto(dto_type, Model, asgi_connection, b'{"clz_var":"nope","field":"yep"}') assert_model_values(model, {"field": "yep"}) async def test_to_mapped_model_with_collection_relationship( base: type[DeclarativeBase], create_module: Callable[[str], ModuleType], asgi_connection: Request[Any, Any, Any], ) -> None: """Test building a DTO with collection relationship, and parsing data.""" module = create_module( """ from __future__ import annotations from typing import Dict, List, Set, Tuple, Type, List from sqlalchemy import ForeignKey, Integer from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column, relationship from typing_extensions import Annotated from advanced_alchemy.extensions.litestar.dto import SQLAlchemyDTO, SQLAlchemyDTOConfig class Base(DeclarativeBase): id: Mapped[int] = mapped_column(primary_key=True) class A(Base): __tablename__ = "a" b_id: Mapped[int] = mapped_column(ForeignKey("b.id")) class B(Base): __tablename__ = "b" a: Mapped[List[A]] = relationship("A") dto_type = SQLAlchemyDTO[Annotated[B, SQLAlchemyDTOConfig()]] """, ) model = await get_model_from_dto( module.dto_type, module.B, asgi_connection, b'{"id": 1, "a": [{"id": 2, "b_id": 1}, {"id": 3, "b_id": 1}]}', ) assert isinstance(model, module.B) assert len(model.a) == 2 assert all(isinstance(val, module.A) for val in model.a) async def test_to_mapped_model_with_relationship_type_hint( 
base: type[DeclarativeBase], create_module: Callable[[str], ModuleType], asgi_connection: Request[Any, Any, Any], ) -> None: """Test building a DTO with collection relationship, and parsing data.""" module = create_module( """ from __future__ import annotations from typing import Dict, List, Set, Tuple, Type, List from sqlalchemy import ForeignKey, Integer from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column, relationship, Relationship from typing_extensions import Annotated from advanced_alchemy.extensions.litestar.dto import SQLAlchemyDTO, SQLAlchemyDTOConfig class Base(DeclarativeBase): id: Mapped[int] = mapped_column(primary_key=True) class A(Base): __tablename__ = "a" b_id: Mapped[int] = mapped_column(ForeignKey("b.id")) class B(Base): __tablename__ = "b" a: Relationship[List[A]] = relationship("A") dto_type = SQLAlchemyDTO[Annotated[B, SQLAlchemyDTOConfig()]] """, ) model = await get_model_from_dto( module.dto_type, module.B, asgi_connection, b'{"id": 1, "a": [{"id": 2, "b_id": 1}, {"id": 3, "b_id": 1}]}', ) assert isinstance(model, module.B) assert len(model.a) == 2 assert all(isinstance(val, module.A) for val in model.a) async def test_to_mapped_model_with_scalar_relationship( create_module: Callable[[str], ModuleType], asgi_connection: Request[Any, Any, Any], ) -> None: """Test building DTO with Scalar relationship, and parsing data.""" module = create_module( """ from __future__ import annotations from sqlalchemy import ForeignKey from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column, relationship from typing_extensions import Annotated from advanced_alchemy.extensions.litestar.dto import SQLAlchemyDTO, SQLAlchemyDTOConfig class Base(DeclarativeBase): id: Mapped[int] = mapped_column(primary_key=True) class A(Base): __tablename__ = "a" class B(Base): __tablename__ = "b" a_id: Mapped[int] = mapped_column(ForeignKey("a.id")) a: Mapped[A] = relationship(A) dto_type = SQLAlchemyDTO[Annotated[B, SQLAlchemyDTOConfig()]] """, ) model = 
await get_model_from_dto( module.dto_type, module.B, asgi_connection, b'{"id": 2, "a_id": 1, "a": {"id": 1}}', ) assert isinstance(model, module.B) assert isinstance(model.a, module.A) async def test_dto_mapped_union( create_module: Callable[[str], ModuleType], asgi_connection: Request[Any, Any, Any], ) -> None: """Test where a column type declared as e.g., `Mapped[str | None]`.""" module = create_module( """ from __future__ import annotations from typing import Dict, List, Set, Tuple, Type, Union from sqlalchemy import ForeignKey from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column from typing_extensions import Annotated from advanced_alchemy.extensions.litestar.dto import SQLAlchemyDTO class Base(DeclarativeBase): id: Mapped[int] = mapped_column(primary_key=True) class A(Base): __tablename__ = "a" a: Mapped[Union[str, None]] dto_type = SQLAlchemyDTO[A] """, ) model = await get_model_from_dto(module.dto_type, module.A, asgi_connection, b'{"id": 1}') assert vars(model)["a"] is None @pytest.mark.skipif(sys.version_info < (3, 10), reason="requires python3.10 or higher") async def test_dto_mapped_union_type( create_module: Callable[[str], ModuleType], asgi_connection: Request[Any, Any, Any], ) -> None: """Test where a column type declared as e.g., `Mapped[str | None]`.""" module = create_module( """ from __future__ import annotations from typing import Dict, List, Set, Tuple, Type, Union, Optional from sqlalchemy import ForeignKey from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column from typing_extensions import Annotated from advanced_alchemy.extensions.litestar.dto import SQLAlchemyDTO class Base(DeclarativeBase): id: Mapped[int] = mapped_column(primary_key=True) class A(Base): __tablename__ = "a" a: Mapped[Optional[str]] dto_type = SQLAlchemyDTO[A] """, ) model = await get_model_from_dto(module.dto_type, module.A, asgi_connection, b'{"id": 1}') assert vars(model)["a"] is None model = await get_model_from_dto(module.dto_type, module.A, 
asgi_connection, b'{"id": 1, "a": "a"}') assert vars(model)["a"] == "a" async def test_dto_self_referencing_relationships( create_module: Callable[[str], ModuleType], asgi_connection: Request[Any, Any, Any], ) -> None: module = create_module( """ from __future__ import annotations from sqlalchemy import ForeignKey from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column, relationship from advanced_alchemy.extensions.litestar.dto import SQLAlchemyDTO class Base(DeclarativeBase): id: Mapped[int] = mapped_column(primary_key=True) class A(Base): __tablename__ = "a" b_id: Mapped[int] = mapped_column(ForeignKey("b.id")) b: Mapped[B] = relationship(back_populates="a") class B(Base): __tablename__ = "b" a: Mapped[A] = relationship(back_populates="b") dto_type = SQLAlchemyDTO[A] """, ) raw = b'{"id": 1, "b_id": 1, "b": {"id": 1, "a": {"id": 1, "b_id": 1}}}' model = await get_model_from_dto(module.dto_type, module.A, asgi_connection, raw) assert isinstance(model, module.A) assert isinstance(model.b, module.B) assert isinstance(model.b.a, module.A) encodable_type = module.dto_type(asgi_connection).data_to_encodable_type(model) assert encodable_type.id == 1 assert encodable_type.b_id == 1 assert encodable_type.b.id == 1 async def test_dto_optional_relationship_with_none_value( create_module: Callable[[str], ModuleType], asgi_connection: Request[Any, Any, Any], ) -> None: module = create_module( """ from __future__ import annotations from typing import Dict, List, Set, Tuple, Type, Optional from sqlalchemy import ForeignKey from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column, relationship from typing_extensions import Annotated from advanced_alchemy.extensions.litestar.dto import SQLAlchemyDTO, SQLAlchemyDTOConfig class Base(DeclarativeBase): id: Mapped[int] = mapped_column(primary_key=True) class A(Base): __tablename__ = "a" class B(Base): __tablename__ = "b" a_id: Mapped[Optional[int]] = mapped_column(ForeignKey("a.id")) a: Mapped[Optional[A]] = 
relationship(A) dto_type = SQLAlchemyDTO[Annotated[B, SQLAlchemyDTOConfig()]] """, ) model = await get_model_from_dto(module.dto_type, module.B, asgi_connection, b'{"id": 2, "a_id": null, "a": null}') assert isinstance(model, module.B) assert model.a is None async def test_forward_ref_relationship_resolution( create_module: Callable[[str], ModuleType], asgi_connection: Request[Any, Any, Any], ) -> None: """Testing that classes related to the mapped class for the dto are considered for forward-ref resolution. The key part of this test is that the `B` type is only imported inside an `if TYPE_CHECKING:` block in `a_module`, so it should not be available for forward-ref resolution when `a_module` is imported. This works due to related mapped classes (via `mapper.registry.mappers`) being added to forward-ref resolution namespace. """ base_module = create_module( """ from __future__ import annotations from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column class Base(DeclarativeBase): id: Mapped[int] = mapped_column(primary_key=True) """, ) b_module = create_module( f""" from __future__ import annotations from {base_module.__name__} import Base class B(Base): __tablename__ = "b" """, ) a_module = create_module( f""" from __future__ import annotations from typing import Dict, List, Set, Tuple, Type, TYPE_CHECKING from sqlalchemy import ForeignKey from sqlalchemy.orm import Mapped, mapped_column, relationship from typing_extensions import Annotated from advanced_alchemy.extensions.litestar.dto import SQLAlchemyDTO, SQLAlchemyDTOConfig from {base_module.__name__} import Base if TYPE_CHECKING: from {b_module.__name__} import B class A(Base): __tablename__ = "a" b_id: Mapped[int] = mapped_column(ForeignKey("b.id")) b: Mapped[B] = relationship() dto_type = SQLAlchemyDTO[Annotated[A, SQLAlchemyDTOConfig()]] """, ) model = await get_model_from_dto( a_module.dto_type, a_module.A, asgi_connection, b'{"id": 1, "b_id": 2, "b": {"id": 2}}', ) assert isinstance(model, 
a_module.A) assert isinstance(model.b, b_module.B) async def test_dto_mapped_builtin_collection( create_module: Callable[[str], ModuleType], asgi_connection: Request[Any, Any, Any], ) -> None: """Test where a column type declared as e.g., `Mapped[dict]`.""" module = create_module( """ from __future__ import annotations from typing import Dict, List, Set, Tuple, Type, Union from sqlalchemy import ForeignKey, Integer from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column from sqlalchemy.types import JSON, ARRAY from typing_extensions import Annotated from advanced_alchemy.extensions.litestar.dto import SQLAlchemyDTO class Base(DeclarativeBase): id: Mapped[int] = mapped_column(primary_key=True) class A(Base): __tablename__ = "a" a: Mapped[dict] = mapped_column(JSON) c: Mapped[list] = mapped_column(ARRAY(Integer)) dto_type = SQLAlchemyDTO[A] """, ) model = await get_model_from_dto( module.dto_type, module.A, asgi_connection, b'{"id": 1, "a": {"b": 1}, "c": [1, 2, 3]}', ) assert vars(model)["a"] == {"b": 1} assert vars(model)["c"] == [1, 2, 3] async def test_no_type_hint_column(base: type[DeclarativeBase], asgi_connection: Request[Any, Any, Any]) -> None: class Model(base): # type: ignore nullable_field = mapped_column(sqlalchemy.String) not_nullable_field = mapped_column(sqlalchemy.String, nullable=False, default="") dto_type = SQLAlchemyDTO[Annotated[Model, SQLAlchemyDTOConfig()]] model = await get_model_from_dto(dto_type, Model, asgi_connection, b"{}") assert model.nullable_field is None assert model.not_nullable_field == "" async def test_no_type_hint_scalar_relationship_with_nullable_fk( base: type[DeclarativeBase], asgi_connection: Request[Any, Any, Any], ) -> None: class Child(base): # type: ignore ... 
class Model(base): # type: ignore child_id = mapped_column(ForeignKey("child.id")) child = relationship(Child) dto_type = SQLAlchemyDTO[Annotated[Model, SQLAlchemyDTOConfig(exclude={"child_id"})]] model = await get_model_from_dto(dto_type, Model, asgi_connection, b"{}") assert model.child is None async def test_no_type_hint_scalar_relationship_with_not_nullable_fk( base: type[DeclarativeBase], asgi_connection: Request[Any, Any, Any], ) -> None: class Child(base): # type: ignore ... class Model(base): # type: ignore child_id = mapped_column(ForeignKey("child.id"), nullable=False) child = relationship(Child) dto_type = SQLAlchemyDTO[Annotated[Model, SQLAlchemyDTOConfig(exclude={"child_id"})]] model = await get_model_from_dto(dto_type, Model, asgi_connection, b'{"child": {}}') assert isinstance(model.child, Child) async def test_no_type_hint_collection_relationship( base: type[DeclarativeBase], asgi_connection: Request[Any, Any, Any], ) -> None: class Child(base): # type: ignore model_id = mapped_column(ForeignKey("model.id")) class Model(base): # type: ignore children = relationship(Child) dto_type = SQLAlchemyDTO[Annotated[Model, SQLAlchemyDTOConfig()]] model = await get_model_from_dto(dto_type, Model, asgi_connection, b'{"children": []}') assert model.children == [] async def test_no_type_hint_collection_relationship_alt_collection_class( base: type[DeclarativeBase], asgi_connection: Request[Any, Any, Any], ) -> None: class Child(base): # type: ignore model_id = mapped_column(ForeignKey("model.id")) class Model(base): # type: ignore children = relationship(Child, collection_class=set) dto_type = SQLAlchemyDTO[Annotated[Model, SQLAlchemyDTOConfig()]] model = await get_model_from_dto(dto_type, Model, asgi_connection, b'{"children": []}') assert model.children == set() def test_parse_type_from_element_failure() -> None: with pytest.raises(ImproperConfigurationError) as exc: parse_type_from_element(1, None) # type: ignore assert str(exc.value) == "Unable to parse type 
from element '1'. Consider adding a type hint." async def test_to_mapped_model_with_dynamic_mapped( create_module: Callable[[str], ModuleType], asgi_connection: Request[Any, Any, Any], ) -> None: """Test building DTO with DynamicMapped relationship, and parsing data.""" module = create_module( """ from __future__ import annotations from sqlalchemy import ForeignKey from sqlalchemy.orm import DeclarativeBase, DynamicMapped, Mapped, mapped_column, relationship, WriteOnlyMapped from typing import List from typing_extensions import Annotated from advanced_alchemy.extensions.litestar.dto import SQLAlchemyDTO, SQLAlchemyDTOConfig class Base(DeclarativeBase): id: Mapped[int] = mapped_column(primary_key=True) class Child(Base): __tablename__ = "child" test_model_id: Mapped[int] = mapped_column(ForeignKey("test_model.id")) class TestModel(Base): __tablename__ = "test_model" children: DynamicMapped[List[Child]] = relationship(Child, lazy="joined") dto_type = SQLAlchemyDTO[Annotated[TestModel, SQLAlchemyDTOConfig()]] """, ) model = await get_model_from_dto( module.dto_type, module.TestModel, asgi_connection, b'{"id": 2, "children": [{"id": 1, "test_model_id": 2}]}', ) assert isinstance(model, module.TestModel) # For DynamicMapped, we should check the query result child = model.children[0] # Access first item from the dynamic query assert isinstance(child, module.Child) async def test_to_mapped_model_with_writeonly_mapped( create_module: Callable[[str], ModuleType], asgi_connection: Request[Any, Any, Any], ) -> None: """Test building DTO with WriteOnlyMapped relationship, and parsing data.""" module = create_module( """ from __future__ import annotations from sqlalchemy import ForeignKey from sqlalchemy.orm import DeclarativeBase, Mapped, WriteOnlyMapped, mapped_column, relationship from typing import List from typing_extensions import Annotated from litestar.dto.field import Mark, dto_field from advanced_alchemy.extensions.litestar.dto import SQLAlchemyDTO, 
SQLAlchemyDTOConfig class Base(DeclarativeBase): id: Mapped[int] = mapped_column(primary_key=True) class Child(Base): __tablename__ = "child" test_model_id: Mapped[int] = mapped_column(ForeignKey("test_model.id")) class TestModel(Base): __tablename__ = "test_model" children: WriteOnlyMapped[List[Child]] = relationship(Child, info=dto_field(mark=Mark.WRITE_ONLY)) dto_type = SQLAlchemyDTO[Annotated[TestModel, SQLAlchemyDTOConfig()]] """, ) model = await get_model_from_dto( module.dto_type, module.TestModel, asgi_connection, b'{"id": 2, "children": [{"id": 1, "test_model_id": 2}]}', ) assert isinstance(model, module.TestModel) # WriteOnlyMapped relationships can only be written to, not read from # So we can only verify the model was created successfully python-advanced-alchemy-1.0.1/tests/unit/test_extensions/test_litestar/test_dto_integration.py000066400000000000000000000675031476663714600333230ustar00rootroot00000000000000from __future__ import annotations from dataclasses import dataclass from types import ModuleType from typing import Annotated, Any, Callable, Optional from uuid import UUID import pytest from litestar import get, post from litestar.di import Provide from litestar.dto import DTOField, Mark from litestar.dto._backend import _camelize # type: ignore from litestar.dto.field import DTO_FIELD_META_KEY from litestar.dto.types import RenameStrategy from litestar.testing import create_test_client # type: ignore from sqlalchemy import Column, ForeignKey, Integer, String, Table, func, select from sqlalchemy.ext.associationproxy import AssociationProxy, association_proxy from sqlalchemy.ext.hybrid import hybrid_property from sqlalchemy.orm import ( DeclarativeBase, Mapped, column_property, composite, declared_attr, mapped_column, relationship, ) from advanced_alchemy.extensions.litestar.dto import SQLAlchemyDTO, SQLAlchemyDTOConfig class Base(DeclarativeBase): id: Mapped[str] = mapped_column(primary_key=True, default=UUID) # noinspection PyMethodParameters 
@declared_attr.directive def __tablename__(cls) -> str: """Infer table name from class name.""" return cls.__name__.lower() class Tag(Base): name: Mapped[str] = mapped_column(default="best seller") class TaggableMixin: @classmethod @declared_attr.directive def tag_association_table(cls) -> Table: return Table( f"{cls.__tablename__}_tag_association", # type: ignore cls.metadata, # type: ignore Column("base_id", ForeignKey(f"{cls.__tablename__}.id", ondelete="CASCADE"), primary_key=True), # type: ignore Column("tag_id", ForeignKey("tag.id", ondelete="CASCADE"), primary_key=True), # type: ignore ) @declared_attr def assigned_tags(cls) -> Mapped[list[Tag]]: return relationship( "Tag", secondary=lambda: cls.tag_association_table, lazy="immediate", cascade="all, delete", passive_deletes=True, ) @declared_attr def tags(cls) -> AssociationProxy[list[str]]: return association_proxy( "assigned_tags", "name", creator=lambda name: Tag(name=name), # pyright: ignore[reportUnknownArgumentType,reportUnknownLambdaType] info={"__dto__": DTOField()}, ) class Author(Base): name: Mapped[str] = mapped_column(default="Arthur") date_of_birth: Mapped[str] = mapped_column(nullable=True) class BookReview(Base): review: Mapped[str] book_id: Mapped[str] = mapped_column(ForeignKey("book.id"), default="000") class Book(Base): title: Mapped[str] = mapped_column(String(length=250), default="Hi") author_id: Mapped[str] = mapped_column(ForeignKey("author.id"), default="123") first_author: Mapped[Author] = relationship(lazy="joined", innerjoin=True) reviews: Mapped[list[BookReview]] = relationship(lazy="joined", innerjoin=True) bar: Mapped[str] = mapped_column(default="Hello") SPAM: Mapped[str] = mapped_column(default="Bye") spam_bar: Mapped[str] = mapped_column(default="Goodbye") number_of_reviews: Mapped[Optional[int]] = column_property( # noqa: UP007 select(func.count(BookReview.id)).where(BookReview.book_id == id).scalar_subquery(), # type: ignore ) def _rename_field(name: str, strategy: 
RenameStrategy) -> str: if callable(strategy): return strategy(name) if strategy == "camel": return _camelize(value=name, capitalize_first_letter=False) if strategy == "pascal": return _camelize(value=name, capitalize_first_letter=True) return name.lower() if strategy == "lower" else name.upper() @dataclass class BookAuthorTestData: book_id: str = "000" book_title: str = "TDD Python" book_author_id: str = "123" book_author_name: str = "Harry Percival" book_author_date_of_birth: str = "01/01/1900" book_bar: str = "Hi" book_SPAM: str = "Bye" book_spam_bar: str = "GoodBye" book_review_id: str = "23432" book_review: str = "Excellent!" number_of_reviews: int | None = None @pytest.fixture def book_json_data() -> Callable[[RenameStrategy, BookAuthorTestData], tuple[dict[str, Any], Book]]: def _generate(rename_strategy: RenameStrategy, test_data: BookAuthorTestData) -> tuple[dict[str, Any], Book]: data: dict[str, Any] = { _rename_field(name="id", strategy=rename_strategy): test_data.book_id, _rename_field(name="title", strategy=rename_strategy): test_data.book_title, _rename_field(name="author_id", strategy=rename_strategy): test_data.book_author_id, _rename_field(name="bar", strategy=rename_strategy): test_data.book_bar, _rename_field(name="SPAM", strategy=rename_strategy): test_data.book_SPAM, _rename_field(name="spam_bar", strategy=rename_strategy): test_data.book_spam_bar, _rename_field(name="first_author", strategy=rename_strategy): { _rename_field(name="id", strategy=rename_strategy): test_data.book_author_id, _rename_field(name="name", strategy=rename_strategy): test_data.book_author_name, _rename_field(name="date_of_birth", strategy=rename_strategy): test_data.book_author_date_of_birth, }, _rename_field(name="reviews", strategy=rename_strategy): [ { _rename_field(name="book_id", strategy=rename_strategy): test_data.book_id, _rename_field(name="id", strategy=rename_strategy): test_data.book_review_id, _rename_field(name="review", strategy=rename_strategy): 
test_data.book_review, }, ], _rename_field(name="number_of_reviews", strategy=rename_strategy): test_data.number_of_reviews, } book = Book( id=test_data.book_id, title=test_data.book_title, author_id=test_data.book_author_id, bar=test_data.book_bar, SPAM=test_data.book_SPAM, spam_bar=test_data.book_spam_bar, first_author=Author( id=test_data.book_author_id, name=test_data.book_author_name, date_of_birth=test_data.book_author_date_of_birth, ), reviews=[ BookReview(id=test_data.book_review_id, review=test_data.book_review, book_id=test_data.book_id), ], ) return data, book return _generate @pytest.mark.parametrize( "rename_strategy", ("camel",), ) def test_fields_alias_generator_sqlalchemy( rename_strategy: RenameStrategy, book_json_data: Callable[[RenameStrategy, BookAuthorTestData], tuple[dict[str, Any], Book]], ) -> None: test_data = BookAuthorTestData() json_data, instance = book_json_data(rename_strategy, test_data) config = SQLAlchemyDTOConfig(rename_strategy=rename_strategy) dto = SQLAlchemyDTO[Annotated[Book, config]] @post(dto=dto, signature_namespace={"Book": Book}) def post_handler(data: Book) -> Book: return data @get(dto=dto, signature_namespace={"Book": Book}) def get_handler() -> Book: return instance with create_test_client( route_handlers=[post_handler, get_handler], ) as client: response_callback = client.get("/") assert response_callback.json() == json_data response_callback = client.post("/", json=json_data) assert response_callback.json() == json_data class ConcreteBase(Base): pass func_result_query = select(func.count()).scalar_subquery() model_with_func_query = select(ConcreteBase, func_result_query.label("func_result")).subquery() class ModelWithFunc(Base): __table__ = model_with_func_query func_result: Mapped[Optional[int]] = column_property(model_with_func_query.c.func_result) # noqa: UP007 def test_model_using_func() -> None: instance = ModelWithFunc(id="hi") config = SQLAlchemyDTOConfig() dto = SQLAlchemyDTO[Annotated[ModelWithFunc, 
config]] @get(dto=dto, signature_namespace={"ModelWithFunc": ModelWithFunc}) async def get_handler() -> ModelWithFunc: return instance with create_test_client( route_handlers=[get_handler], ) as client: response_callback = client.get("/") assert response_callback def test_dto_with_association_proxy(create_module: Callable[[str], ModuleType]) -> None: module = create_module( """ from __future__ import annotations from typing import Dict, List, Set, Tuple, Type, Final, List, Generator from sqlalchemy import Column from sqlalchemy import ForeignKey from sqlalchemy import Integer from sqlalchemy import String from sqlalchemy import Table from sqlalchemy.orm import DeclarativeBase from sqlalchemy.orm import Mapped from sqlalchemy.orm import mapped_column from sqlalchemy.orm import relationship from sqlalchemy.ext.associationproxy import association_proxy from sqlalchemy.ext.associationproxy import AssociationProxy from litestar import get from advanced_alchemy.extensions.litestar.dto import SQLAlchemyDTO from litestar.dto import dto_field class Base(DeclarativeBase): pass class User(Base): __tablename__ = "user" id: Mapped[int] = mapped_column(primary_key=True) kw: Mapped[List[Keyword]] = relationship(secondary=lambda: user_keyword_table, info=dto_field("private")) # proxy the 'keyword' attribute from the 'kw' relationship keywords: AssociationProxy[List[str]] = association_proxy("kw", "keyword") class Keyword(Base): __tablename__ = "keyword" id: Mapped[int] = mapped_column(primary_key=True) keyword: Mapped[str] = mapped_column(String(64)) user_keyword_table: Final[Table] = Table( "user_keyword", Base.metadata, Column("user_id", Integer, ForeignKey("user.id"), primary_key=True), Column("keyword_id", Integer, ForeignKey("keyword.id"), primary_key=True), ) dto = SQLAlchemyDTO[User] @get("/", return_dto=dto) async def get_handler() -> User: return User(id=1, kw=[Keyword(keyword="bar"), Keyword(keyword="baz")]) """, ) with 
create_test_client(route_handlers=[module.get_handler]) as client: response = client.get("/") assert response.json() == {"id": 1, "keywords": ["bar", "baz"]} def test_dto_with_hybrid_property(create_module: Callable[[str], ModuleType]) -> None: module = create_module( """ from __future__ import annotations from sqlalchemy.ext.hybrid import hybrid_property from sqlalchemy.orm import DeclarativeBase from sqlalchemy.orm import Mapped from sqlalchemy.orm import mapped_column from litestar import get from advanced_alchemy.extensions.litestar.dto import SQLAlchemyDTO class Base(DeclarativeBase): pass class Interval(Base): __tablename__ = 'interval' id: Mapped[int] = mapped_column(primary_key=True) start: Mapped[int] end: Mapped[int] @hybrid_property def length(self) -> int: return self.end - self.start dto = SQLAlchemyDTO[Interval] @get("/", return_dto=dto) async def get_handler() -> Interval: return Interval(id=1, start=1, end=3) """, ) with create_test_client(route_handlers=[module.get_handler]) as client: response = client.get("/") assert response.json() == {"id": 1, "start": 1, "end": 3, "length": 2} def test_dto_with_hybrid_property_expression(create_module: Callable[[str], ModuleType]) -> None: module = create_module( """ from __future__ import annotations from sqlalchemy.ext.hybrid import hybrid_property from sqlalchemy.orm import DeclarativeBase from sqlalchemy.orm import Mapped from sqlalchemy.orm import mapped_column from sqlalchemy.sql import SQLColumnExpression from litestar import get from advanced_alchemy.extensions.litestar.dto import SQLAlchemyDTO class Base(DeclarativeBase): pass class Interval(Base): __tablename__ = 'interval' id: Mapped[int] = mapped_column(primary_key=True) start: Mapped[int] end: Mapped[int] @hybrid_property def length(self) -> int: return self.end - self.start @length.inplace.expression def _length_expression(cls) -> SQLColumnExpression[int]: return cls.end - cls.start dto = SQLAlchemyDTO[Interval] @get("/", return_dto=dto) async 
def get_handler() -> Interval: return Interval(id=1, start=1, end=3) """, ) with create_test_client(route_handlers=[module.get_handler]) as client: response = client.get("/") assert response.json() == {"id": 1, "start": 1, "end": 3, "length": 2} def test_dto_with_hybrid_property_setter(create_module: Callable[[str], ModuleType]) -> None: module = create_module( """ from __future__ import annotations from sqlalchemy.ext.hybrid import hybrid_property from sqlalchemy.orm import DeclarativeBase from sqlalchemy.orm import Mapped from sqlalchemy.orm import mapped_column from sqlalchemy.sql import SQLColumnExpression from litestar import post from advanced_alchemy.extensions.litestar.dto import SQLAlchemyDTO from litestar.dto import dto_field class Base(DeclarativeBase): pass class Circle(Base): __tablename__ = 'circle' id: Mapped[int] = mapped_column(primary_key=True, info=dto_field("read-only")) diameter: Mapped[float] = mapped_column(info=dto_field("private")) @hybrid_property def radius(self) -> float: return self.diameter / 2 @radius.inplace.setter def _radius_setter(self, value: float) -> None: self.diameter = value * 2 dto = SQLAlchemyDTO[Circle] DIAMETER: float = 0 @post("/", dto=dto, sync_to_thread=False) def get_handler(data: Circle) -> Circle: global DIAMETER DIAMETER = data.diameter data.id = 1 return data """, ) with create_test_client(route_handlers=[module.get_handler]) as client: response = client.post("/", json={"radius": 5}) assert response.json() == {"id": 1, "radius": 5} assert module.DIAMETER == 10 @pytest.mark.skip(reason="Debug me!") async def test_dto_with_composite_map() -> None: @dataclass class Point: x: int y: int class Vertex1(Base): start: Mapped[Point] = composite(mapped_column("x1"), mapped_column("y1")) end: Mapped[Point] = composite(mapped_column("x2"), mapped_column("y2")) dto = SQLAlchemyDTO[Vertex1] @post(dto=dto, signature_namespace={"Vertex": Vertex1}) async def post_handler(data: Vertex1) -> Vertex1: return data with 
create_test_client(route_handlers=[post_handler]) as client: response = client.post( "/", json={ "id": "1", "start": {"x": 10, "y": 20}, "end": {"x": 1, "y": 2}, }, ) assert response.json() == { "id": "1", "start": {"x": 10, "y": 20}, "end": {"x": 1, "y": 2}, } @pytest.mark.skip(reason="Debug me!") async def test_dto_with_composite_map_using_explicit_columns() -> None: @dataclass class Point: x: int y: int class Vertex2(Base): x1: Mapped[int] y1: Mapped[int] x2: Mapped[int] y2: Mapped[int] start: Mapped[Point] = composite("x1", "y1") end: Mapped[Point] = composite("x2", "y2") dto = SQLAlchemyDTO[Vertex2] @post(dto=dto, signature_namespace={"Vertex": Vertex2}) async def post_handler(data: Vertex2) -> Vertex2: return data with create_test_client(route_handlers=[post_handler]) as client: response = client.post( "/", json={ "id": "1", "start": {"x": 10, "y": 20}, "end": {"x": 1, "y": 2}, }, ) assert response.json() == { "id": "1", "start": {"x": 10, "y": 20}, "end": {"x": 1, "y": 2}, } @pytest.mark.skip(reason="Debug me!") async def test_dto_with_composite_map_using_hybrid_imperative_mapping() -> None: @dataclass class Point: x: int y: int table = Table( "vertices2", Base.metadata, Column("id", String, primary_key=True), Column("x1", Integer), Column("y1", Integer), Column("x2", Integer), Column("y2", Integer), ) class Vertex3(Base): __table__ = table id: Mapped[str] start = composite(Point, table.c.x1, table.c.y1) end = composite(Point, table.c.x2, table.c.y2) dto = SQLAlchemyDTO[Vertex3] @post(dto=dto, signature_namespace={"Vertex": Vertex3}) async def post_handler(data: Vertex3) -> Vertex3: return data with create_test_client(route_handlers=[post_handler]) as client: response = client.post( "/", json={ "id": "1", "start": {"x": 10, "y": 20}, "end": {"x": 1, "y": 2}, }, ) assert response.json() == { "id": "1", "start": {"x": 10, "y": 20}, "end": {"x": 1, "y": 2}, } async def test_field_with_sequence_default(create_module: Callable[[str], ModuleType]) -> None: module 
= create_module( """ from sqlalchemy import create_engine, Column, Integer, Sequence from sqlalchemy.orm import DeclarativeBase, Mapped, sessionmaker from litestar import Litestar, post from advanced_alchemy.extensions.litestar.dto import SQLAlchemyDTO, SQLAlchemyDTOConfig engine = create_engine('sqlite:///:memory:', echo=True) Session = sessionmaker(bind=engine, expire_on_commit=False) class Base(DeclarativeBase): pass class Model(Base): __tablename__ = "model" id: Mapped[int] = Column(Integer, Sequence('model_id_seq', optional=False), primary_key=True) val: Mapped[str] class ModelCreateDTO(SQLAlchemyDTO[Model]): config = SQLAlchemyDTOConfig(exclude={"id"}) ModelReturnDTO = SQLAlchemyDTO[Model] @post("/", dto=ModelCreateDTO, return_dto=ModelReturnDTO) def post_handler(data: Model) -> Model: Base.metadata.create_all(engine) with Session() as session: session.add(data) session.commit() return data """, ) with create_test_client(route_handlers=[module.post_handler]) as client: response = client.post("/", json={"id": 1, "val": "value"}) assert response.json() == {"id": 1, "val": "value"} async def test_disable_implicitly_mapped_columns_using_annotated_notation() -> None: class Base(DeclarativeBase): id: Mapped[int] = mapped_column(default=int, primary_key=True) table = Table( "vertices2", Base.metadata, Column("id", Integer, primary_key=True), Column("field", String, nullable=True), ) class Model(Base): __table__ = table id: Mapped[int] @hybrid_property def id_multiplied(self) -> int: return self.id * 10 dto_type = SQLAlchemyDTO[Annotated[Model, SQLAlchemyDTOConfig(include_implicit_fields=False)]] @get( dto=None, return_dto=dto_type, signature_namespace={"Model": Model}, dependencies={"model": Provide(lambda: Model(id=123, field="hi"), sync_to_thread=False)}, ) async def post_handler(model: Model) -> Model: return model with create_test_client(route_handlers=[post_handler]) as client: response = client.get( "/", ) json = response.json() assert json.get("field") is 
None assert json.get("id_multiplied") is None async def test_disable_implicitly_mapped_columns_special() -> None: class Base(DeclarativeBase): id: Mapped[int] = mapped_column(default=int, primary_key=True) table = Table( "vertices2", Base.metadata, Column("id", Integer, primary_key=True), Column("field", String, nullable=True), ) class Model(Base): __table__ = table id: Mapped[int] class dto_type(SQLAlchemyDTO[Model]): config = SQLAlchemyDTOConfig(include_implicit_fields=False) @get( dto=None, return_dto=dto_type, signature_namespace={"Model": Model}, dependencies={"model": Provide(lambda: Model(id=123, field="hi"), sync_to_thread=False)}, ) async def post_handler(model: Model) -> Model: return model with create_test_client(route_handlers=[post_handler]) as client: response = client.get( "/", ) json = response.json() assert json.get("field") is None async def test_disable_implicitly_mapped_columns_with_hybrid_properties_and_Mark_overrides() -> None: class Base(DeclarativeBase): id: Mapped[int] = mapped_column(default=int, primary_key=True) table = Table( "vertices2", Base.metadata, Column("id", Integer, primary_key=True), Column("field", String, nullable=True), Column("field2", String), Column("field3", String), Column("field4", String), ) class Model(Base): __table__ = table id: Mapped[int] field2 = column_property(table.c.field2, info={DTO_FIELD_META_KEY: DTOField(mark=Mark.READ_ONLY)}) # type: ignore field3 = column_property(table.c.field3, info={DTO_FIELD_META_KEY: DTOField(mark=Mark.WRITE_ONLY)}) # type: ignore field4 = column_property(table.c.field4, info={DTO_FIELD_META_KEY: DTOField(mark=Mark.PRIVATE)}) # type: ignore @hybrid_property def id_multiplied(self) -> int: return self.id * 10 dto_type = SQLAlchemyDTO[ Annotated[ Model, SQLAlchemyDTOConfig(include_implicit_fields="hybrid-only"), ] ] @get( dto=None, return_dto=dto_type, signature_namespace={"Model": Model}, dependencies={ "model": Provide( lambda: Model(id=12, field="hi", field2="bye2", 
field3="bye3", field4="bye4"), sync_to_thread=False, ), }, ) async def post_handler(model: Model) -> Model: return model with create_test_client(route_handlers=[post_handler]) as client: response = client.get( "/", ) json = response.json() assert json.get("id_multiplied") == 120 assert json.get("field") is None assert json.get("field2") is not None assert json.get("field3") is not None assert json.get("field4") is None def test_dto_to_sync_service(create_module: Callable[[str], ModuleType]) -> None: module = create_module( """ from __future__ import annotations from typing import Dict, List, Set, Tuple, Type, Generator from litestar import post from litestar.di import Provide from litestar.dto import DTOData from sqlalchemy import create_engine from sqlalchemy.orm import Mapped, sessionmaker from advanced_alchemy.extensions.litestar import SQLAlchemyDTO, SQLAlchemyDTOConfig, base, repository, service engine = create_engine("sqlite:///:memory:", echo=True, connect_args={"check_same_thread": False}) Session = sessionmaker(bind=engine, expire_on_commit=False) class Model(base.BigIntBase): val: Mapped[str] class ModelCreateDTO(SQLAlchemyDTO[Model]): config = SQLAlchemyDTOConfig(exclude={"id"}) ModelReturnDTO = SQLAlchemyDTO[Model] class ModelRepository(repository.SQLAlchemySyncRepository[Model]): model_type=Model class ModelService(service.SQLAlchemySyncRepositoryService[Model]): repository_type = ModelRepository def provide_service( ) -> Generator[ModelService, None, None]: Model.metadata.create_all(engine) with Session() as db_session, ModelService.new(session=db_session) as service: yield service Model.metadata.drop_all(engine) @post("/", dependencies={"service": Provide(provide_service)}, dto=ModelCreateDTO, return_dto=ModelReturnDTO) def post_handler(data: DTOData[Model], service: ModelService) -> Model: return service.create(data, auto_commit=True) """, ) with create_test_client(route_handlers=[module.post_handler]) as client: response = client.post("/", 
json={"id": 1, "val": "value"}) assert response.json() == {"id": 1, "val": "value"} async def test_dto_to_async_service(create_module: Callable[[str], ModuleType]) -> None: module = create_module( """ from __future__ import annotations from typing import Dict, List, Set, Tuple, Type, AsyncGenerator from litestar import post from litestar.di import Provide from litestar.dto import DTOData # noqa: TCH002 from sqlalchemy.ext.asyncio import async_sessionmaker, create_async_engine from sqlalchemy.orm import Mapped # noqa: TCH002 from advanced_alchemy.extensions.litestar import SQLAlchemyDTO, SQLAlchemyDTOConfig, base, repository, service engine = create_async_engine("sqlite+aiosqlite:///:memory:", echo=True, connect_args={"check_same_thread": False}) Session = async_sessionmaker(bind=engine, expire_on_commit=False) class AModel(base.BigIntBase): val: Mapped[str] class ModelCreateDTO(SQLAlchemyDTO[AModel]): config = SQLAlchemyDTOConfig(exclude={"id"}) ModelReturnDTO = SQLAlchemyDTO[AModel] class ModelRepository(repository.SQLAlchemyAsyncRepository[AModel]): model_type=AModel class ModelService(service.SQLAlchemyAsyncRepositoryService[AModel]): repository_type = ModelRepository async def provide_service( ) -> AsyncGenerator[ModelService, None]: async with engine.begin() as conn: await conn.run_sync(AModel.metadata.create_all) async with Session() as db_session, ModelService.new(session=db_session) as service: yield service async with engine.begin() as conn: await conn.run_sync(AModel.metadata.create_all) @post("/", dependencies={"service": Provide(provide_service)}, dto=ModelCreateDTO, return_dto=ModelReturnDTO) async def post_handler(data: DTOData[AModel], service: ModelService) -> AModel: return await service.create(data, auto_commit=True) """, ) with create_test_client(route_handlers=[module.post_handler]) as client: response = client.post("/", json={"id": 1, "val": "value"}) assert response.json() == {"id": 1, "val": "value"} def 
test_dto_with_declared_attr(create_module: Callable[[str], ModuleType]) -> None: module = create_module( """ from __future__ import annotations from typing import Dict, List, Set, Tuple, Type, Union from litestar import post from litestar.di import Provide from litestar.dto import DTOData, DTOField, Mark from litestar.dto.field import DTO_FIELD_META_KEY from sqlalchemy import create_engine from sqlalchemy.orm import DeclarativeBase, Mapped, column_property, declared_attr, mapped_column, sessionmaker from advanced_alchemy.extensions.litestar import SQLAlchemyDTO, SQLAlchemyDTOConfig, base, repository, service engine = create_engine("sqlite:///:memory:", echo=True, connect_args={"check_same_thread": False}) Session = sessionmaker(bind=engine, expire_on_commit=False) class Model(base.BigIntBase): __tablename__ = "a" a: Mapped[int] = mapped_column() @declared_attr def a_doubled(cls) -> Mapped[int]: return column_property(cls.a * 2, info={DTO_FIELD_META_KEY: DTOField(mark=Mark.READ_ONLY)}) class ModelCreateDTO(SQLAlchemyDTO[Model]): config = SQLAlchemyDTOConfig(exclude={"id"}) ModelReturnDTO = SQLAlchemyDTO[Model] class ModelRepository(repository.SQLAlchemySyncRepository[Model]): model_type=Model class ModelService(service.SQLAlchemySyncRepositoryService[Model]): repository_type = ModelRepository def provide_service( ) -> Generator[ModelService, None, None]: Model.metadata.create_all(engine) with Session() as db_session, ModelService.new(session=db_session) as service: yield service Model.metadata.drop_all(engine) @post("/", dependencies={"service": Provide(provide_service)}, dto=ModelCreateDTO, return_dto=ModelReturnDTO) def post_handler(data: DTOData[Model], service: ModelService) -> Model: return service.create(data, auto_commit=True) """, ) with create_test_client(route_handlers=[module.post_handler]) as client: response = client.post("/", json={"id": 1, "a": 21}) assert response.json() == {"id": 1, "a": 21, "a_doubled": 42} 
python-advanced-alchemy-1.0.1/tests/unit/test_extensions/test_litestar/test_init_plugin/000077500000000000000000000000001476663714600320665ustar00rootroot00000000000000python-advanced-alchemy-1.0.1/tests/unit/test_extensions/test_litestar/test_init_plugin/__init__.py000066400000000000000000000000001476663714600341650ustar00rootroot00000000000000test_asyncio.py000066400000000000000000000424431476663714600350740ustar00rootroot00000000000000python-advanced-alchemy-1.0.1/tests/unit/test_extensions/test_litestar/test_init_pluginfrom __future__ import annotations import random from typing import TYPE_CHECKING from unittest.mock import MagicMock, patch import pytest from asgi_lifespan import LifespanManager from litestar import Litestar, Request, Response, get from litestar.status_codes import ( HTTP_404_NOT_FOUND, HTTP_409_CONFLICT, HTTP_500_INTERNAL_SERVER_ERROR, ) from litestar.testing import RequestFactory, create_test_client from litestar.types.asgi_types import HTTPResponseStartEvent from pytest import MonkeyPatch from sqlalchemy.ext.asyncio import AsyncSession from advanced_alchemy.exceptions import ( DuplicateKeyError, ForeignKeyError, ImproperConfigurationError, IntegrityError, InvalidRequestError, NotFoundError, RepositoryError, ) from advanced_alchemy.extensions.litestar._utils import set_aa_scope_state from advanced_alchemy.extensions.litestar.exception_handler import exception_to_http_response from advanced_alchemy.extensions.litestar.plugins import SQLAlchemyAsyncConfig, SQLAlchemyInitPlugin from advanced_alchemy.extensions.litestar.plugins.init.config.asyncio import ( autocommit_before_send_handler, autocommit_handler_maker, ) if TYPE_CHECKING: from typing import Any, Callable from litestar.types import Scope def test_default_before_send_handler() -> None: """Test default_before_send_handler.""" captured_scope_state: dict[str, Any] | None = None config = SQLAlchemyAsyncConfig(connection_string="sqlite+aiosqlite://") plugin = 
SQLAlchemyInitPlugin(config=config) @get() async def test_handler(db_session: AsyncSession, scope: Scope) -> None: nonlocal captured_scope_state captured_scope_state = scope["state"] assert db_session is captured_scope_state[config.session_dependency_key] with create_test_client(route_handlers=[test_handler], plugins=[plugin]) as client: client.get("/") assert captured_scope_state is not None assert config.session_dependency_key not in captured_scope_state # pyright: ignore def test_default_before_send_handle_multi() -> None: """Test default_before_send_handler.""" captured_scope_state: dict[str, Any] | None = None config1 = SQLAlchemyAsyncConfig(connection_string="sqlite+aiosqlite://") config2 = SQLAlchemyAsyncConfig( connection_string="sqlite+aiosqlite://", session_dependency_key="other_session", session_scope_key="_sqlalchemy_state_2", engine_dependency_key="other_engine", ) plugin = SQLAlchemyInitPlugin(config=[config1, config2]) @get() async def test_handler(db_session: AsyncSession, scope: Scope) -> None: nonlocal captured_scope_state captured_scope_state = scope["state"] assert db_session is captured_scope_state[config1.session_dependency_key] with create_test_client(route_handlers=[test_handler], plugins=[plugin]) as client: client.get("/") assert captured_scope_state is not None assert config1.session_dependency_key not in captured_scope_state async def test_create_all_default(monkeypatch: MonkeyPatch) -> None: """Test default_before_send_handler.""" config = SQLAlchemyAsyncConfig(connection_string="sqlite+aiosqlite://") plugin = SQLAlchemyInitPlugin(config=config) app = Litestar(route_handlers=[], plugins=[plugin]) with patch.object( config, "create_all_metadata", ) as create_all_metadata_mock: async with LifespanManager(app): # type: ignore[arg-type] # pyright: ignore[reportArgumentType] create_all_metadata_mock.assert_not_called() async def test_create_all(monkeypatch: MonkeyPatch) -> None: """Test default_before_send_handler.""" config = 
SQLAlchemyAsyncConfig(connection_string="sqlite+aiosqlite://", create_all=True) plugin = SQLAlchemyInitPlugin(config=config) app = Litestar(route_handlers=[], plugins=[plugin]) with patch.object( config, "create_all_metadata", ) as create_all_metadata_mock: async with LifespanManager(app): # type: ignore[arg-type] # pyright: ignore[reportArgumentType] create_all_metadata_mock.assert_called_once() async def test_before_send_handler_success_response(create_scope: Callable[..., Scope]) -> None: """Test that the session is committed given a success response.""" config = SQLAlchemyAsyncConfig( connection_string="sqlite+aiosqlite://", before_send_handler=autocommit_before_send_handler, ) app = Litestar(route_handlers=[], plugins=[SQLAlchemyInitPlugin(config)]) mock_session = MagicMock(spec=AsyncSession) http_scope = create_scope(app=app) set_aa_scope_state(http_scope, config.session_scope_key, mock_session) http_response_start: HTTPResponseStartEvent = { "type": "http.response.start", "status": random.randint(200, 299), "headers": {}, } await autocommit_before_send_handler(http_response_start, http_scope) mock_session.commit.assert_awaited_once() async def test_before_send_handler_success_response_autocommit(create_scope: Callable[..., Scope]) -> None: """Test that the session is committed given a success response.""" config = SQLAlchemyAsyncConfig( connection_string="sqlite+aiosqlite://", before_send_handler="autocommit", ) app = Litestar(route_handlers=[], plugins=[SQLAlchemyInitPlugin(config)]) mock_session = MagicMock(spec=AsyncSession) http_scope = create_scope(app=app) set_aa_scope_state(http_scope, config.session_scope_key, mock_session) http_response_start: HTTPResponseStartEvent = { "type": "http.response.start", "status": random.randint(200, 299), "headers": {}, } await autocommit_before_send_handler(http_response_start, http_scope) mock_session.commit.assert_awaited_once() async def test_before_send_handler_error_response(create_scope: Callable[..., Scope]) -> 
None: """Test that the session is rolled back given an error response.""" config = SQLAlchemyAsyncConfig( connection_string="sqlite+aiosqlite://", before_send_handler=autocommit_before_send_handler, ) app = Litestar(route_handlers=[], plugins=[SQLAlchemyInitPlugin(config)]) mock_session = MagicMock(spec=AsyncSession) http_scope = create_scope(app=app) set_aa_scope_state(http_scope, config.session_scope_key, mock_session) http_response_start: HTTPResponseStartEvent = { "type": "http.response.start", "status": random.randint(300, 599), "headers": {}, } await autocommit_before_send_handler(http_response_start, http_scope) mock_session.rollback.assert_awaited_once() async def test_autocommit_handler_maker_redirect_response(create_scope: Callable[..., Scope]) -> None: """Test that the handler created by the handler maker commits on redirect""" autocommit_redirect_handler = autocommit_handler_maker(commit_on_redirect=True) config = SQLAlchemyAsyncConfig( connection_string="sqlite+aiosqlite://", before_send_handler=autocommit_redirect_handler, ) app = Litestar(route_handlers=[], plugins=[SQLAlchemyInitPlugin(config)]) mock_session = MagicMock(spec=AsyncSession) http_scope = create_scope(app=app) set_aa_scope_state(http_scope, config.session_scope_key, mock_session) http_response_start: HTTPResponseStartEvent = { "type": "http.response.start", "status": random.randint(300, 399), "headers": {}, } await autocommit_redirect_handler(http_response_start, http_scope) mock_session.commit.assert_awaited_once() async def test_autocommit_handler_maker_commit_statuses(create_scope: Callable[..., Scope]) -> None: """Test that the handler created by the handler maker commits on explicit statuses""" custom_autocommit_handler = autocommit_handler_maker(extra_commit_statuses={302, 303}) config = SQLAlchemyAsyncConfig( connection_string="sqlite+aiosqlite://", before_send_handler=custom_autocommit_handler, ) app = Litestar(route_handlers=[], plugins=[SQLAlchemyInitPlugin(config)]) 
mock_session = MagicMock(spec=AsyncSession) http_scope = create_scope(app=app) set_aa_scope_state(http_scope, config.session_scope_key, mock_session) http_response_start: HTTPResponseStartEvent = { "type": "http.response.start", "status": random.randint(302, 303), "headers": {}, } await custom_autocommit_handler(http_response_start, http_scope) mock_session.commit.assert_awaited_once() async def test_autocommit_handler_maker_rollback_statuses(create_scope: Callable[..., Scope]) -> None: """Test that the handler created by the handler maker rolls back on explicit statuses""" custom_autocommit_handler = autocommit_handler_maker(commit_on_redirect=True, extra_rollback_statuses={307, 308}) config = SQLAlchemyAsyncConfig( connection_string="sqlite+aiosqlite://", before_send_handler=custom_autocommit_handler, ) app = Litestar(route_handlers=[], plugins=[SQLAlchemyInitPlugin(config)]) mock_session = MagicMock(spec=AsyncSession) http_scope = create_scope(app=app) set_aa_scope_state(http_scope, config.session_scope_key, mock_session) http_response_start: HTTPResponseStartEvent = { "type": "http.response.start", "status": random.randint(307, 308), "headers": {}, } await custom_autocommit_handler(http_response_start, http_scope) mock_session.rollback.assert_awaited_once() async def test_autocommit_handler_maker_rollback_statuses_multi(create_scope: Callable[..., Scope]) -> None: """Test that the handler created by the handler maker rolls back on explicit statuses""" custom_autocommit_handler = autocommit_handler_maker( session_scope_key="_sqlalchemy_state_2", commit_on_redirect=True, extra_rollback_statuses={307, 308}, ) config1 = SQLAlchemyAsyncConfig( connection_string="sqlite+aiosqlite://", ) config2 = SQLAlchemyAsyncConfig( connection_string="sqlite+aiosqlite://", before_send_handler=custom_autocommit_handler, session_dependency_key="other_session", engine_dependency_key="other_engine", session_scope_key="_sqlalchemy_state_2", ) app = Litestar(route_handlers=[], 
plugins=[SQLAlchemyInitPlugin(config=[config1, config2])]) mock_session1 = MagicMock(spec=AsyncSession) mock_session2 = MagicMock(spec=AsyncSession) http_scope = create_scope(app=app) set_aa_scope_state(http_scope, config1.session_scope_key, mock_session1) set_aa_scope_state(http_scope, config2.session_scope_key, mock_session2) http_response_start: HTTPResponseStartEvent = { "type": "http.response.start", "status": random.randint(307, 308), "headers": {}, } await custom_autocommit_handler(http_response_start, http_scope) mock_session2.rollback.assert_called_once() mock_session1.rollback.assert_not_called() async def test_autocommit_handler_maker_rollback_statuses_multi_bad_config(create_scope: Callable[..., Scope]) -> None: """Test that the handler created by the handler maker rolls back on explicit statuses""" with pytest.raises(ImproperConfigurationError): custom_autocommit_handler = autocommit_handler_maker( session_scope_key="_sqlalchemy_state_2", commit_on_redirect=True, extra_rollback_statuses={307, 308}, ) config1 = SQLAlchemyAsyncConfig( connection_string="sqlite+aiosqlite://", ) config2 = SQLAlchemyAsyncConfig( connection_string="sqlite+aiosqlite://", before_send_handler=custom_autocommit_handler, session_dependency_key="other_session", session_scope_key="_sqlalchemy_state_2", ) app = Litestar(route_handlers=[], plugins=[SQLAlchemyInitPlugin(config=[config1, config2])]) mock_session1 = MagicMock(spec=AsyncSession) mock_session2 = MagicMock(spec=AsyncSession) http_scope = create_scope(app=app) set_aa_scope_state(http_scope, config1.session_scope_key, mock_session1) set_aa_scope_state(http_scope, config2.session_scope_key, mock_session2) http_response_start: HTTPResponseStartEvent = { "type": "http.response.start", "status": random.randint(307, 308), "headers": {}, } await custom_autocommit_handler(http_response_start, http_scope) mock_session2.rollback.assert_called_once() mock_session1.rollback.assert_not_called() async def 
test_autocommit_handler_maker_multi(create_scope: Callable[..., Scope]) -> None: """Test that the handler created by the handler maker rolls back on explicit statuses""" config1 = SQLAlchemyAsyncConfig( connection_string="sqlite+aiosqlite://", before_send_handler="autocommit", ) config2 = SQLAlchemyAsyncConfig( connection_string="sqlite+aiosqlite://", before_send_handler="autocommit", session_dependency_key="other_session", engine_dependency_key="other_engine", ) app = Litestar(route_handlers=[], plugins=[SQLAlchemyInitPlugin(config=[config1, config2])]) mock_session1 = MagicMock(spec=AsyncSession) mock_session2 = MagicMock(spec=AsyncSession) http_scope = create_scope(app=app) set_aa_scope_state(http_scope, config1.session_scope_key, mock_session1) set_aa_scope_state(http_scope, config2.session_scope_key, mock_session2) http_response_start: HTTPResponseStartEvent = { "type": "http.response.start", "status": random.randint(307, 308), "headers": {}, } await config2.before_send_handler(http_response_start, http_scope) # type: ignore mock_session2.rollback.assert_called_once() mock_session1.rollback.assert_not_called() @pytest.mark.parametrize( ("exc", "status"), [ (IntegrityError, HTTP_409_CONFLICT), (ForeignKeyError, HTTP_409_CONFLICT), (DuplicateKeyError, HTTP_409_CONFLICT), (InvalidRequestError, HTTP_500_INTERNAL_SERVER_ERROR), (NotFoundError, HTTP_404_NOT_FOUND), ], ) def test_repository_exception_to_http_response(exc: type[RepositoryError], status: int) -> None: """Test default exception handler.""" config1 = SQLAlchemyAsyncConfig(connection_string="sqlite+aiosqlite://") config2 = SQLAlchemyAsyncConfig( connection_string="sqlite+aiosqlite://", session_dependency_key="other_session", session_scope_key="_sqlalchemy_state_2", engine_dependency_key="other_engine", ) plugin = SQLAlchemyInitPlugin(config=[config1, config2]) app = Litestar(route_handlers=[], plugins=[plugin]) request = RequestFactory(app=app, server="testserver").get("/wherever") response = 
exception_to_http_response(request, exc()) assert app.exception_handlers.get(exc) is None assert app.exception_handlers.get(RepositoryError) is not None assert response.status_code == status @pytest.mark.parametrize( ("exc", "status"), [ (IntegrityError, HTTP_409_CONFLICT), (ForeignKeyError, HTTP_409_CONFLICT), (DuplicateKeyError, HTTP_409_CONFLICT), (InvalidRequestError, HTTP_500_INTERNAL_SERVER_ERROR), (NotFoundError, HTTP_404_NOT_FOUND), ], ) def test_existing_repository_exception_to_http_response(exc: type[RepositoryError], status: int) -> None: """Test default exception handler.""" def handler(request: Request[Any, Any, Any], exc: RepositoryError) -> Response[Any]: return Response(status_code=200, content="OK") config1 = SQLAlchemyAsyncConfig(connection_string="sqlite+aiosqlite://") config2 = SQLAlchemyAsyncConfig( connection_string="sqlite+aiosqlite://", session_dependency_key="other_session", session_scope_key="_sqlalchemy_state_2", engine_dependency_key="other_engine", ) plugin = SQLAlchemyInitPlugin(config=[config1, config2]) app = Litestar(route_handlers=[], plugins=[plugin], exception_handlers={RepositoryError: handler}) request = RequestFactory(app=app, server="testserver").get("/wherever") response = handler(request, exc()) assert app.exception_handlers.get(exc) is None assert app.exception_handlers.get(RepositoryError) is not None assert app.exception_handlers.get(RepositoryError) == handler assert response.status_code == 200 @pytest.mark.parametrize( ("exc", "status"), [ (IntegrityError, HTTP_409_CONFLICT), (ForeignKeyError, HTTP_409_CONFLICT), (DuplicateKeyError, HTTP_409_CONFLICT), (InvalidRequestError, HTTP_500_INTERNAL_SERVER_ERROR), (NotFoundError, HTTP_404_NOT_FOUND), ], ) def test_repository_disabled_exception_to_http_response(exc: type[RepositoryError], status: int) -> None: """Test default exception handler.""" config1 = SQLAlchemyAsyncConfig(connection_string="sqlite+aiosqlite://", set_default_exception_handler=False) config2 = 
SQLAlchemyAsyncConfig( connection_string="sqlite+aiosqlite://", session_dependency_key="other_session", session_scope_key="_sqlalchemy_state_2", engine_dependency_key="other_engine", set_default_exception_handler=False, ) plugin = SQLAlchemyInitPlugin(config=[config1, config2]) app = Litestar(route_handlers=[], plugins=[plugin]) assert app.exception_handlers.get(exc) is None assert app.exception_handlers.get(RepositoryError) is None test_common.py000066400000000000000000000135131476663714600347130ustar00rootroot00000000000000python-advanced-alchemy-1.0.1/tests/unit/test_extensions/test_litestar/test_init_pluginfrom __future__ import annotations import datetime import uuid from typing import TYPE_CHECKING from unittest.mock import MagicMock, patch import pytest from litestar.datastructures import State from sqlalchemy import create_engine from advanced_alchemy.exceptions import ImproperConfigurationError from advanced_alchemy.extensions.litestar._utils import _SCOPE_NAMESPACE # pyright: ignore[reportPrivateUsage] from advanced_alchemy.extensions.litestar.plugins import SQLAlchemyAsyncConfig, SQLAlchemySyncConfig from advanced_alchemy.extensions.litestar.plugins.init.config.common import SESSION_SCOPE_KEY if TYPE_CHECKING: from typing import Any from litestar.types import Scope from pytest import MonkeyPatch @pytest.fixture(name="config_cls", params=[SQLAlchemySyncConfig, SQLAlchemyAsyncConfig]) def _config_cls(request: Any) -> type[SQLAlchemySyncConfig | SQLAlchemyAsyncConfig]: """Return SQLAlchemy config class.""" return request.param # type:ignore[no-any-return] def test_raise_improperly_configured_exception(config_cls: type[SQLAlchemySyncConfig]) -> None: """Test raise ImproperlyConfiguredException if both engine and connection string are provided.""" with pytest.raises(ImproperConfigurationError): config_cls(connection_string="sqlite://", engine_instance=create_engine("sqlite://")) def test_engine_config_dict_with_no_provided_config( config_cls: 
type[SQLAlchemySyncConfig], ) -> None: """Test engine_config_dict with no provided config.""" config = config_cls() assert config.engine_config_dict.keys() == {"json_deserializer", "json_serializer"} def test_session_config_dict_with_no_provided_config( config_cls: type[SQLAlchemySyncConfig], ) -> None: """Test session_config_dict with no provided config.""" config = config_cls() assert config.session_config_dict == {} def test_config_create_engine_if_engine_instance_provided( config_cls: type[SQLAlchemySyncConfig], ) -> None: """Test create_engine if engine instance provided.""" engine = create_engine("sqlite://") config = config_cls(engine_instance=engine) assert config.get_engine() == engine def test_create_engine_if_no_engine_instance_or_connection_string_provided( config_cls: type[SQLAlchemySyncConfig], ) -> None: """Test create_engine if no engine instance or connection string provided.""" config = config_cls() with pytest.raises(ImproperConfigurationError): config.get_engine() def test_call_create_engine_callable_type_error_handling( config_cls: type[SQLAlchemySyncConfig], monkeypatch: MonkeyPatch, ) -> None: """If the dialect doesn't support JSON types, we get a ValueError. This should be handled by removing the JSON serializer/deserializer kwargs. 
""" call_count = 0 def side_effect(*args: Any, **kwargs: Any) -> None: nonlocal call_count call_count += 1 if call_count == 1: raise TypeError() config = config_cls(connection_string="sqlite://") create_engine_callable_mock = MagicMock(side_effect=side_effect) monkeypatch.setattr(config, "create_engine_callable", create_engine_callable_mock) config.get_engine() assert create_engine_callable_mock.call_count == 2 first_call, second_call = create_engine_callable_mock.mock_calls assert first_call.kwargs.keys() == {"json_deserializer", "json_serializer"} assert second_call.kwargs.keys() == set() def test_create_session_maker_if_session_maker_provided( config_cls: type[SQLAlchemySyncConfig], ) -> None: """Test create_session_maker if session maker provided to config.""" session_maker = MagicMock() config = config_cls(session_maker=session_maker) assert config.create_session_maker() == session_maker def test_create_session_maker_if_no_session_maker_or_bind_provided( config_cls: type[SQLAlchemySyncConfig], monkeypatch: MonkeyPatch, ) -> None: """Test create_session_maker if no session maker or bind provided to config.""" config = config_cls() create_engine_mock = MagicMock(return_value=create_engine("sqlite://")) monkeypatch.setattr(config, "get_engine", create_engine_mock) assert config.session_maker is None assert isinstance(config.create_session_maker(), config.session_maker_class) create_engine_mock.assert_called_once() def test_create_session_instance_if_session_not_in_scope_state( config_cls: type[SQLAlchemySyncConfig], ) -> None: """Test provide_session if session not in scope state.""" with patch( "advanced_alchemy.extensions.litestar._utils.get_aa_scope_state", ) as get_scope_state_mock: get_scope_state_mock.return_value = None config = config_cls() state = State() state[config.session_maker_app_state_key] = MagicMock() scope: Scope = {} # type:ignore[assignment] assert isinstance(config.provide_session(state, scope), MagicMock) assert SESSION_SCOPE_KEY in 
scope[_SCOPE_NAMESPACE] # type: ignore[literal-required] def test_app_state(config_cls: type[SQLAlchemySyncConfig], monkeypatch: MonkeyPatch) -> None: """Test app_state.""" config = config_cls(connection_string="sqlite://") with ( patch.object(config, "create_session_maker") as create_session_maker_mock, patch.object(config, "get_engine") as create_engine_mock, ): assert config.create_app_state_items().keys() == { config.engine_app_state_key, config.session_maker_app_state_key, } create_session_maker_mock.assert_called_once() create_engine_mock.assert_called_once() def test_namespace_resolution() -> None: # https://github.com/litestar-org/advanced-alchemy/issues/256 from litestar import Litestar, get @get("/") async def handler(param: datetime.datetime, other_param: uuid.UUID) -> None: return None Litestar([handler]) test_engine.py000066400000000000000000000004171476663714600346670ustar00rootroot00000000000000python-advanced-alchemy-1.0.1/tests/unit/test_extensions/test_litestar/test_init_pluginfrom __future__ import annotations from advanced_alchemy.extensions.litestar.plugins.init.config.engine import serializer def test_serializer_returns_string() -> None: """Test that serializer returns a string.""" assert isinstance(serializer({"a": "b"}), str) python-advanced-alchemy-1.0.1/tests/unit/test_extensions/test_litestar/test_init_plugin/test_sync.py000066400000000000000000000457421476663714600344670ustar00rootroot00000000000000from __future__ import annotations import random from typing import TYPE_CHECKING from unittest.mock import MagicMock, patch import pytest from asgi_lifespan import LifespanManager from litestar import Litestar, Request, Response, get from litestar.status_codes import ( HTTP_404_NOT_FOUND, HTTP_409_CONFLICT, HTTP_500_INTERNAL_SERVER_ERROR, ) from litestar.testing import ( RequestFactory, create_test_client, # type: ignore ) from litestar.types.asgi_types import HTTPResponseStartEvent from pytest import MonkeyPatch from sqlalchemy.ext.asyncio 
import AsyncSession from sqlalchemy.orm import Session from advanced_alchemy.exceptions import ( DuplicateKeyError, ForeignKeyError, ImproperConfigurationError, IntegrityError, InvalidRequestError, NotFoundError, RepositoryError, ) from advanced_alchemy.extensions.litestar._utils import set_aa_scope_state from advanced_alchemy.extensions.litestar.exception_handler import exception_to_http_response from advanced_alchemy.extensions.litestar.plugins import ( SQLAlchemyAsyncConfig, SQLAlchemyInitPlugin, SQLAlchemySyncConfig, ) from advanced_alchemy.extensions.litestar.plugins.init.config.sync import ( autocommit_before_send_handler, autocommit_handler_maker, ) if TYPE_CHECKING: from typing import Any, Callable from litestar.types import Scope def test_default_before_send_handler() -> None: """Test default_before_send_handler.""" captured_scope_state: dict[str, Any] | None = None config = SQLAlchemySyncConfig(connection_string="sqlite://") plugin = SQLAlchemyInitPlugin(config=config) @get() def test_handler(db_session: Session, scope: Scope) -> None: nonlocal captured_scope_state captured_scope_state = scope["state"] assert db_session is captured_scope_state[config.session_dependency_key] with create_test_client(route_handlers=[test_handler], plugins=[plugin]) as client: client.get("/") assert captured_scope_state is not None assert config.session_dependency_key not in captured_scope_state def test_default_before_send_handle_multi() -> None: """Test default_before_send_handler.""" captured_scope_state: dict[str, Any] | None = None config1 = SQLAlchemySyncConfig(connection_string="sqlite://") config2 = SQLAlchemySyncConfig( connection_string="sqlite://", session_dependency_key="other_session", session_scope_key="_sqlalchemy_state_2", engine_dependency_key="other_engine", ) plugin = SQLAlchemyInitPlugin(config=[config1, config2]) @get() def test_handler(db_session: Session, scope: Scope) -> None: nonlocal captured_scope_state captured_scope_state = scope["state"] assert 
db_session is captured_scope_state[config1.session_dependency_key] with create_test_client(route_handlers=[test_handler], plugins=[plugin]) as client: client.get("/") assert captured_scope_state is not None assert config1.session_dependency_key not in captured_scope_state async def test_create_all_default(monkeypatch: MonkeyPatch) -> None: """Test default_before_send_handler.""" config = SQLAlchemySyncConfig(connection_string="sqlite+aiosqlite://") plugin = SQLAlchemyInitPlugin(config=config) app = Litestar(route_handlers=[], plugins=[plugin]) with patch.object( config, "create_all_metadata", ) as create_all_metadata_mock: async with LifespanManager(app) as _client: # type: ignore[arg-type] # pyright: ignore[reportArgumentType] create_all_metadata_mock.assert_not_called() async def test_create_all(monkeypatch: MonkeyPatch) -> None: """Test default_before_send_handler.""" config = SQLAlchemySyncConfig(connection_string="sqlite+aiosqlite://", create_all=True) plugin = SQLAlchemyInitPlugin(config=config) app = Litestar(route_handlers=[], plugins=[plugin]) with patch.object( config, "create_all_metadata", ) as create_all_metadata_mock: async with LifespanManager(app) as _client: # type: ignore[arg-type] # pyright: ignore[reportArgumentType] create_all_metadata_mock.assert_called_once() def test_before_send_handler_success_response(create_scope: Callable[..., Scope]) -> None: """Test that the session is committed given a success response.""" config = SQLAlchemySyncConfig(connection_string="sqlite://", before_send_handler=autocommit_before_send_handler) app = Litestar(route_handlers=[], plugins=[SQLAlchemyInitPlugin(config)]) mock_session = MagicMock(spec=Session) http_scope = create_scope(app=app) set_aa_scope_state(http_scope, config.session_scope_key, mock_session) http_response_start: HTTPResponseStartEvent = { "type": "http.response.start", "status": random.randint(200, 299), "headers": {}, } autocommit_before_send_handler(http_response_start, http_scope) 
mock_session.commit.assert_called_once() def test_before_send_handler_success_response_autocommit(create_scope: Callable[..., Scope]) -> None: """Test that the session is committed given a success response.""" config = SQLAlchemySyncConfig(connection_string="sqlite://", before_send_handler="autocommit") app = Litestar(route_handlers=[], plugins=[SQLAlchemyInitPlugin(config)]) mock_session = MagicMock(spec=Session) http_scope = create_scope(app=app) set_aa_scope_state(http_scope, config.session_scope_key, mock_session) http_response_start: HTTPResponseStartEvent = { "type": "http.response.start", "status": random.randint(200, 299), "headers": {}, } autocommit_before_send_handler(http_response_start, http_scope) mock_session.commit.assert_called_once() def test_before_send_handler_error_response(create_scope: Callable[..., Scope]) -> None: """Test that the session is committed given a success response.""" config = SQLAlchemySyncConfig(connection_string="sqlite://", before_send_handler=autocommit_before_send_handler) app = Litestar(route_handlers=[], plugins=[SQLAlchemyInitPlugin(config)]) mock_session = MagicMock(spec=Session) http_scope = create_scope(app=app) set_aa_scope_state(http_scope, config.session_scope_key, mock_session) http_response_start: HTTPResponseStartEvent = { "type": "http.response.start", "status": random.randint(300, 599), "headers": {}, } autocommit_before_send_handler(http_response_start, http_scope) mock_session.rollback.assert_called_once() def test_autocommit_handler_maker_redirect_response(create_scope: Callable[..., Scope]) -> None: """Test that the handler created by the handler maker commits on redirect""" autocommit_redirect_handler = autocommit_handler_maker(commit_on_redirect=True) config = SQLAlchemySyncConfig( connection_string="sqlite://", before_send_handler=autocommit_redirect_handler, ) app = Litestar(route_handlers=[], plugins=[SQLAlchemyInitPlugin(config)]) mock_session = MagicMock(spec=Session) http_scope = 
create_scope(app=app) set_aa_scope_state(http_scope, config.session_scope_key, mock_session) http_response_start: HTTPResponseStartEvent = { "type": "http.response.start", "status": random.randint(300, 399), "headers": {}, } autocommit_redirect_handler(http_response_start, http_scope) mock_session.commit.assert_called_once() def test_autocommit_handler_maker_commit_statuses(create_scope: Callable[..., Scope]) -> None: """Test that the handler created by the handler maker commits on explicit statuses""" custom_autocommit_handler = autocommit_handler_maker(extra_commit_statuses={302, 303}) config = SQLAlchemySyncConfig( connection_string="sqlite://", before_send_handler=custom_autocommit_handler, ) app = Litestar(route_handlers=[], plugins=[SQLAlchemyInitPlugin(config)]) mock_session = MagicMock(spec=Session) http_scope = create_scope(app=app) set_aa_scope_state(http_scope, config.session_scope_key, mock_session) http_response_start: HTTPResponseStartEvent = { "type": "http.response.start", "status": random.randint(302, 303), "headers": {}, } custom_autocommit_handler(http_response_start, http_scope) mock_session.commit.assert_called_once() def test_autocommit_handler_maker_rollback_statuses(create_scope: Callable[..., Scope]) -> None: """Test that the handler created by the handler maker rolls back on explicit statuses""" custom_autocommit_handler = autocommit_handler_maker(commit_on_redirect=True, extra_rollback_statuses={307, 308}) config = SQLAlchemySyncConfig( connection_string="sqlite://", before_send_handler=custom_autocommit_handler, ) app = Litestar(route_handlers=[], plugins=[SQLAlchemyInitPlugin(config)]) mock_session = MagicMock(spec=Session) http_scope = create_scope(app=app) set_aa_scope_state(http_scope, config.session_scope_key, mock_session) http_response_start: HTTPResponseStartEvent = { "type": "http.response.start", "status": random.randint(307, 308), "headers": {}, } custom_autocommit_handler(http_response_start, http_scope) 
mock_session.rollback.assert_called_once() def test_autocommit_handler_maker_rollback_statuses_multi(create_scope: Callable[..., Scope]) -> None: """Test that the handler created by the handler maker rolls back on explicit statuses""" custom_autocommit_handler = autocommit_handler_maker( session_scope_key="_sqlalchemy_state_2", commit_on_redirect=True, extra_rollback_statuses={307, 308}, ) config1 = SQLAlchemySyncConfig( connection_string="sqlite://", ) config2 = SQLAlchemySyncConfig( connection_string="sqlite://", before_send_handler=custom_autocommit_handler, session_dependency_key="other_session", engine_dependency_key="other_engine", session_scope_key="_sqlalchemy_state_2", ) app = Litestar(route_handlers=[], plugins=[SQLAlchemyInitPlugin(config=[config1, config2])]) mock_session1 = MagicMock(spec=Session) mock_session2 = MagicMock(spec=Session) http_scope = create_scope(app=app) set_aa_scope_state(http_scope, config1.session_scope_key, mock_session1) set_aa_scope_state(http_scope, config2.session_scope_key, mock_session2) http_response_start: HTTPResponseStartEvent = { "type": "http.response.start", "status": random.randint(307, 308), "headers": {}, } custom_autocommit_handler(http_response_start, http_scope) mock_session2.rollback.assert_called_once() mock_session1.rollback.assert_not_called() def test_autocommit_handler_maker_rollback_statuses_multi_bad_config(create_scope: Callable[..., Scope]) -> None: """Test that the handler created by the handler maker rolls back on explicit statuses""" with pytest.raises(ImproperConfigurationError): custom_autocommit_handler = autocommit_handler_maker( session_scope_key="_sqlalchemy_state_2", commit_on_redirect=True, extra_rollback_statuses={307, 308}, ) config1 = SQLAlchemySyncConfig( connection_string="sqlite://", ) config2 = SQLAlchemySyncConfig( connection_string="sqlite://", before_send_handler=custom_autocommit_handler, session_dependency_key="other_session", session_scope_key="_sqlalchemy_state_2", ) app = 
Litestar(route_handlers=[], plugins=[SQLAlchemyInitPlugin(config=[config1, config2])]) mock_session1 = MagicMock(spec=Session) mock_session2 = MagicMock(spec=Session) http_scope = create_scope(app=app) set_aa_scope_state(http_scope, config1.session_scope_key, mock_session1) set_aa_scope_state(http_scope, config2.session_scope_key, mock_session2) http_response_start: HTTPResponseStartEvent = { "type": "http.response.start", "status": random.randint(307, 308), "headers": {}, } custom_autocommit_handler(http_response_start, http_scope) mock_session2.rollback.assert_called_once() mock_session1.rollback.assert_not_called() def test_autocommit_handler_maker_multi(create_scope: Callable[..., Scope]) -> None: """Test that the handler created by the handler maker rolls back on explicit statuses""" config1 = SQLAlchemySyncConfig( connection_string="sqlite://", before_send_handler="autocommit", ) config2 = SQLAlchemySyncConfig( connection_string="sqlite://", before_send_handler="autocommit", session_dependency_key="other_session", engine_dependency_key="other_engine", ) app = Litestar(route_handlers=[], plugins=[SQLAlchemyInitPlugin(config=[config1, config2])]) mock_session1 = MagicMock(spec=Session) mock_session2 = MagicMock(spec=Session) http_scope = create_scope(app=app) set_aa_scope_state(http_scope, config1.session_scope_key, mock_session1) set_aa_scope_state(http_scope, config2.session_scope_key, mock_session2) http_response_start: HTTPResponseStartEvent = { "type": "http.response.start", "status": random.randint(307, 308), "headers": {}, } config2.before_send_handler(http_response_start, http_scope) # type: ignore mock_session2.rollback.assert_called_once() mock_session1.rollback.assert_not_called() def test_autocommit_handler_maker_multi_async_and_sync(create_scope: Callable[..., Scope]) -> None: """Test that the handler created by the handler maker rolls back on explicit statuses""" config1 = SQLAlchemySyncConfig( connection_string="sqlite://", 
before_send_handler="autocommit", ) config2 = SQLAlchemySyncConfig( connection_string="sqlite://", before_send_handler="autocommit", session_dependency_key="other_session", engine_dependency_key="other_engine", ) config3 = SQLAlchemyAsyncConfig( connection_string="sqlite+aiosqlite://", before_send_handler="autocommit", session_dependency_key="the_session", engine_dependency_key="the_engine", ) config4 = SQLAlchemyAsyncConfig( connection_string="sqlite+aiosqlite://", before_send_handler="autocommit", session_dependency_key="other_other_session", engine_dependency_key="other_other_engine", ) app = Litestar(route_handlers=[], plugins=[SQLAlchemyInitPlugin(config=[config1, config2, config3, config4])]) mock_session1 = MagicMock(spec=Session) mock_session2 = MagicMock(spec=Session) mock_session3 = MagicMock(spec=AsyncSession) mock_session4 = MagicMock(spec=AsyncSession) http_scope = create_scope(app=app) set_aa_scope_state(http_scope, config1.session_scope_key, mock_session1) set_aa_scope_state(http_scope, config2.session_scope_key, mock_session2) set_aa_scope_state(http_scope, config3.session_scope_key, mock_session3) set_aa_scope_state(http_scope, config4.session_scope_key, mock_session4) http_response_start: HTTPResponseStartEvent = { "type": "http.response.start", "status": random.randint(307, 308), "headers": {}, } config2.before_send_handler(http_response_start, http_scope) # type: ignore mock_session2.rollback.assert_called_once() mock_session1.rollback.assert_not_called() mock_session3.rollback.assert_not_called() mock_session4.rollback.assert_not_called() @pytest.mark.parametrize( ("exc", "status"), [ (IntegrityError, HTTP_409_CONFLICT), (ForeignKeyError, HTTP_409_CONFLICT), (DuplicateKeyError, HTTP_409_CONFLICT), (InvalidRequestError, HTTP_500_INTERNAL_SERVER_ERROR), (NotFoundError, HTTP_404_NOT_FOUND), ], ) def test_repository_exception_to_http_response(exc: type[RepositoryError], status: int) -> None: """Test default exception handler.""" config1 = 
SQLAlchemyAsyncConfig(connection_string="sqlite+aiosqlite://") config2 = SQLAlchemyAsyncConfig( connection_string="sqlite+aiosqlite://", session_dependency_key="other_session", session_scope_key="_sqlalchemy_state_2", engine_dependency_key="other_engine", ) plugin = SQLAlchemyInitPlugin(config=[config1, config2]) app = Litestar(route_handlers=[], plugins=[plugin]) request = RequestFactory(app=app, server="testserver").get("/wherever") response = exception_to_http_response(request, exc()) assert app.exception_handlers.get(exc) is None assert app.exception_handlers.get(RepositoryError) is not None assert response.status_code == status @pytest.mark.parametrize( ("exc", "status"), [ (IntegrityError, HTTP_409_CONFLICT), (ForeignKeyError, HTTP_409_CONFLICT), (DuplicateKeyError, HTTP_409_CONFLICT), (InvalidRequestError, HTTP_500_INTERNAL_SERVER_ERROR), (NotFoundError, HTTP_404_NOT_FOUND), ], ) def test_existing_repository_exception_to_http_response(exc: type[RepositoryError], status: int) -> None: """Test default exception handler.""" def handler(request: Request[Any, Any, Any], exc: RepositoryError) -> Response[Any]: return Response(status_code=200, content="OK") config1 = SQLAlchemyAsyncConfig(connection_string="sqlite+aiosqlite://") config2 = SQLAlchemyAsyncConfig( connection_string="sqlite+aiosqlite://", session_dependency_key="other_session", session_scope_key="_sqlalchemy_state_2", engine_dependency_key="other_engine", ) plugin = SQLAlchemyInitPlugin(config=[config1, config2]) app = Litestar(route_handlers=[], plugins=[plugin], exception_handlers={RepositoryError: handler}) request = RequestFactory(app=app, server="testserver").get("/wherever") response = handler(request, exc()) assert app.exception_handlers.get(exc) is None assert app.exception_handlers.get(RepositoryError) is not None assert app.exception_handlers.get(RepositoryError) == handler assert response.status_code == 200 @pytest.mark.parametrize( ("exc", "status"), [ (IntegrityError, HTTP_409_CONFLICT), 
(ForeignKeyError, HTTP_409_CONFLICT), (DuplicateKeyError, HTTP_409_CONFLICT), (InvalidRequestError, HTTP_500_INTERNAL_SERVER_ERROR), (NotFoundError, HTTP_404_NOT_FOUND), ], ) def test_repository_disabled_exception_to_http_response(exc: type[RepositoryError], status: int) -> None: """Test default exception handler.""" config1 = SQLAlchemySyncConfig(connection_string="sqlite://", set_default_exception_handler=False) config2 = SQLAlchemySyncConfig( connection_string="sqlite://", session_dependency_key="other_session", session_scope_key="_sqlalchemy_state_2", engine_dependency_key="other_engine", set_default_exception_handler=False, ) plugin = SQLAlchemyInitPlugin(config=[config1, config2]) app = Litestar(route_handlers=[], plugins=[plugin]) assert app.exception_handlers.get(exc) is None assert app.exception_handlers.get(RepositoryError) is None python-advanced-alchemy-1.0.1/tests/unit/test_extensions/test_litestar/test_litestar_re_exports.py000066400000000000000000000006521476663714600342230ustar00rootroot00000000000000# ruff: noqa: F401 import pytest def test_repository_re_exports() -> None: with pytest.warns(DeprecationWarning): from litestar.contrib.sqlalchemy import types # type: ignore from litestar.contrib.sqlalchemy.repository import ( SQLAlchemyAsyncRepository, # type: ignore SQLAlchemySyncRepository, # type: ignore wrap_sqlalchemy_exception, # type: ignore ) python-advanced-alchemy-1.0.1/tests/unit/test_extensions/test_litestar/test_providers.py000066400000000000000000000463441476663714600321470ustar00rootroot00000000000000"""Tests for the DI module.""" from __future__ import annotations import inspect import uuid from datetime import datetime, timedelta from typing import TYPE_CHECKING, Any, cast from unittest.mock import MagicMock, patch from litestar.di import Provide from sqlalchemy import FromClause, String, select from sqlalchemy.orm import DeclarativeBase, Mapped, Mapper, mapped_column from advanced_alchemy.extensions.litestar.providers import ( 
DEPENDENCY_DEFAULTS, DependencyCache, DependencyDefaults, FilterConfig, SingletonMeta, _create_filter_aggregate_function, # pyright: ignore[reportPrivateUsage] _create_statement_filters, # pyright: ignore[reportPrivateUsage] create_filter_dependencies, create_service_dependencies, create_service_provider, dep_cache, ) from advanced_alchemy.filters import ( BeforeAfter, CollectionFilter, LimitOffset, OrderBy, SearchFilter, ) from advanced_alchemy.repository import SQLAlchemyAsyncRepository, SQLAlchemySyncRepository from advanced_alchemy.service import ( SQLAlchemyAsyncRepositoryService, SQLAlchemySyncRepositoryService, ) from advanced_alchemy.types.identity import BigIntIdentity from tests.helpers import anext_ class Base(DeclarativeBase): """Base class for models.""" if TYPE_CHECKING: __name__: str # type: ignore __table__: FromClause # type: ignore __mapper__: Mapper[Any] # type: ignore id: Mapped[int] = mapped_column(BigIntIdentity, primary_key=True) def to_dict(self, exclude: set[str] | None = None) -> dict[str, Any]: """Convert model to dictionary. 
Returns: Dict[str, Any]: A dict representation of the model """ exclude = {"sa_orm_sentinel", "_sentinel"}.union(self._sa_instance_state.unloaded).union(exclude or []) # type: ignore[attr-defined] return {field: getattr(self, field) for field in self.__mapper__.columns.keys() if field not in exclude} class DITestModel(Base): """Test model for use in tests.""" __tablename__ = "di_test_model" name: Mapped[str] = mapped_column(String) class TestSyncService(SQLAlchemySyncRepositoryService[DITestModel]): """Test sync service class.""" class Repo(SQLAlchemySyncRepository[DITestModel]): """Test repo class.""" model_type = DITestModel repository_type = Repo class TestAsyncService(SQLAlchemyAsyncRepositoryService[DITestModel]): """Test async service class.""" class Repo(SQLAlchemyAsyncRepository[DITestModel]): """Test repo class.""" model_type = DITestModel repository_type = Repo def test_singleton_pattern() -> None: """Test that the SingletonMeta creates singletons.""" class TestClass(metaclass=SingletonMeta): """Test class using SingletonMeta.""" def __init__(self) -> None: self.value = uuid.uuid4().hex # Instances should be the same instance1 = TestClass() instance2 = TestClass() assert instance1 is instance2 assert instance1.value == instance2.value def test_multiple_classes() -> None: """Test that different classes using SingletonMeta have different instances.""" class TestClass1(metaclass=SingletonMeta): """First test class using SingletonMeta.""" def __init__(self) -> None: self.value = 1 class TestClass2(metaclass=SingletonMeta): """Second test class using SingletonMeta.""" def __init__(self) -> None: self.value = 2 instance1 = TestClass1() instance2 = TestClass2() assert instance1 is not instance2 # type: ignore assert instance1.value != instance2.value def test_add_get_dependencies() -> None: """Test adding and retrieving dependencies from cache.""" # Create a new instance to avoid test interference with patch.dict(SingletonMeta._instances, {}, clear=True): # 
pyright: ignore[reportPrivateUsage] cache = DependencyCache() # Test with string key deps1 = {"service": Provide(lambda: "service")} cache.add_dependencies("key1", deps1) assert cache.get_dependencies("key1") == deps1 # Test with integer key deps2 = {"filter": Provide(lambda: "filter")} cache.add_dependencies(123, deps2) assert cache.get_dependencies(123) == deps2 # Test retrieving non-existent key assert cache.get_dependencies("nonexistent") is None def test_global_instance() -> None: """Test that the global dep_cache instance is a singleton.""" # Do not clear SingletonMeta._instances, so that dep_cache remains the global singleton new_cache = DependencyCache() assert new_cache is dep_cache def test_create_sync_service_provider() -> None: """Test creating a sync service provider.""" provider = create_service_provider(TestSyncService) # Ensure the provider is callable assert callable(provider) svc = next(provider(db_session=MagicMock())) assert isinstance(svc, TestSyncService) async def test_create_async_service_provider() -> None: """Test creating an async service provider.""" provider = create_service_provider(TestAsyncService) # Ensure the provider is callable assert callable(provider) svc = await anext_(provider(db_session=MagicMock())) assert isinstance(svc, TestAsyncService) def test_create_async_service_dependencies() -> None: """Test creating async service dependencies.""" with patch("advanced_alchemy.extensions.litestar.providers.create_service_provider") as mock_create_provider: mock_create_provider.return_value = lambda: "async_service" deps = create_service_dependencies( TestAsyncService, key="service", statement=select(DITestModel), config=MagicMock(), ) assert "service" in deps assert isinstance(deps["service"], Provide) # Check provider function assert deps["service"].dependency() == "async_service" # Verify create_service_provider was called correctly mock_create_provider.assert_called_once() def test_create_sync_service_dependencies() -> None: 
"""Test creating sync service dependencies.""" with patch("advanced_alchemy.extensions.litestar.providers.create_service_provider") as mock_create_provider: mock_create_provider.return_value = lambda: "sync_service" deps = create_service_dependencies( TestSyncService, key="service", statement=select(DITestModel), config=MagicMock(), ) assert "service" in deps assert isinstance(deps["service"], Provide) # Check provider function assert deps["service"].dependency() == "sync_service" # Verify create_service_provider was called correctly mock_create_provider.assert_called_once() # Verify sync_to_thread is False for sync services assert deps["service"].sync_to_thread is False def test_create_service_dependencies_with_filters() -> None: """Test creating service dependencies with filters.""" with patch("advanced_alchemy.extensions.litestar.providers.create_service_provider") as mock_create_provider: with patch("advanced_alchemy.extensions.litestar.providers.create_filter_dependencies") as mock_create_filters: mock_create_provider.return_value = lambda: "service" mock_create_filters.return_value = {"filter1": Provide(lambda: "filter1")} deps = create_service_dependencies( TestSyncService, key="service", filters={"id_filter": int}, ) assert "service" in deps assert "filter1" in deps # Verify create_filter_dependencies was called mock_create_filters.assert_called_once_with({"id_filter": int}, DEPENDENCY_DEFAULTS) def test_create_filter_dependencies_cache_hit() -> None: """Test create_filter_dependencies with cache hit.""" # Setup cache with a pre-existing entry mock_deps = {"test": Provide(lambda: "test")} with patch.object(dep_cache, "get_dependencies", return_value=mock_deps) as mock_get: with patch.object(dep_cache, "add_dependencies") as mock_add: config = cast(FilterConfig, {"key1": 1, "key2": 2}) deps = create_filter_dependencies(config) # Verify cache was checked mock_get.assert_called_once() # Verify result is from cache assert deps == mock_deps # Verify cache wasn't 
updated mock_add.assert_not_called() def test_create_filter_dependencies_cache_miss() -> None: """Test create_filter_dependencies with cache miss.""" # Setup cache to return None (cache miss) mock_deps = {"test": Provide(lambda: "test")} with patch.object(dep_cache, "get_dependencies", return_value=None) as mock_get: with patch.object(dep_cache, "add_dependencies") as mock_add: with patch( "advanced_alchemy.extensions.litestar.providers._create_statement_filters", return_value=mock_deps ) as mock_create: config = cast(FilterConfig, {"key1": 1, "key2": 2}) deps = create_filter_dependencies(config) # Verify cache was checked mock_get.assert_called_once() # Verify _create_statement_filters was called mock_create.assert_called_once_with(config, DEPENDENCY_DEFAULTS) # Verify cache was updated mock_add.assert_called_once() # Verify return value assert deps == mock_deps def test_id_filter() -> None: """Test creating ID filter dependency.""" config = cast(FilterConfig, {"id_filter": int}) deps = _create_statement_filters(config) assert "id_filter" in deps assert "filters" in deps # Test the provider function provider_func = deps["id_filter"].dependency f = provider_func(ids=["1", "2", "3"]) assert isinstance(f, CollectionFilter) assert f.field_name == "id" assert f.values is not None # type: ignore assert f.values == ["1", "2", "3"] # type: ignore def test_created_at_filter() -> None: """Test creating created_at filter dependency.""" config = cast(FilterConfig, {"created_at": "created_at"}) deps = _create_statement_filters(config) assert "created_filter" in deps assert "filters" in deps # Test the provider function provider_func = deps["created_filter"].dependency before = datetime.now() later = datetime.now() + timedelta(days=1) f = provider_func(before=before, after=later) assert isinstance(f, BeforeAfter) assert f.field_name == "created_at" assert f.before == before assert f.after == later def test_updated_at_filter() -> None: """Test creating updated_at filter 
dependency.""" config = cast(FilterConfig, {"updated_at": "updated_at"}) deps = _create_statement_filters(config) assert "updated_filter" in deps assert "filters" in deps # Test the provider function provider_func = deps["updated_filter"].dependency f = provider_func(before=datetime.now(), after=datetime.now()) assert isinstance(f, BeforeAfter) assert f.field_name == "updated_at" def test_search_filter() -> None: """Test creating search filter dependency.""" config = cast(FilterConfig, {"search": "name", "search_ignore_case": True}) deps = _create_statement_filters(config) assert "search_filter" in deps assert "filters" in deps # Test the provider function provider_func = deps["search_filter"].dependency f = provider_func(search_string="test", ignore_case=True) assert isinstance(f, SearchFilter) assert f.field_name == "name" or f.field_name == {"name"} assert f.value == "test" assert f.ignore_case is True def test_limit_offset_filter() -> None: """Test creating limit_offset filter dependency.""" config = cast(FilterConfig, {"pagination_type": "limit_offset", "default_limit": 10, "max_limit": 100}) deps = _create_statement_filters(config) assert "limit_offset" in deps assert "filters" in deps # Test the provider function provider_func = deps["limit_offset"].dependency f = provider_func(current_page=2, page_size=5) assert isinstance(f, LimitOffset) assert f.limit == 5 assert f.offset == 5 def test_order_by_filter() -> None: """Test creating order_by filter dependency.""" config = cast(FilterConfig, {"sort_field": "name"}) deps = _create_statement_filters(config) assert "order_by" in deps assert "filters" in deps # Test the provider function provider_func = deps["order_by"].dependency f = provider_func(field_name="name", sort_order="desc") assert isinstance(f, OrderBy) assert f.field_name == "name" assert f.sort_order == "desc" def test_custom_dependency_defaults() -> None: """Test using custom dependency defaults.""" class CustomDefaults(DependencyDefaults): 
"""Custom dependency defaults.""" LIMIT_OFFSET_DEPENDENCY_KEY = "page" ID_FILTER_DEPENDENCY_KEY = "ids" DEFAULT_PAGINATION_SIZE = 100 custom_defaults = CustomDefaults() config = cast(FilterConfig, {"id_filter": int, "id_field": "custom_id", "pagination_type": "limit_offset"}) deps = _create_statement_filters(config, custom_defaults) assert "ids" in deps assert "page" in deps assert "filters" in deps ids_func = deps["ids"].dependency f = ids_func(ids=["1", "2", "3"]) assert isinstance(f, CollectionFilter) # type: ignore assert f.field_name == "custom_id" assert f.values is not None # type: ignore assert f.values == ["1", "2", "3"] # type: ignore page_func = deps["page"].dependency f: LimitOffset = page_func(current_page=2, page_size=5) # type: ignore assert isinstance(f, LimitOffset) assert f.limit == 5 assert f.offset == 5 def test_id_filter_aggregation() -> None: """Test aggregation with ID filter.""" config = cast(FilterConfig, {"id_filter": str}) aggregate_func = _create_filter_aggregate_function(config) # Check signature sig = inspect.signature(aggregate_func) assert "id_filter" in sig.parameters # Simulate calling with filter mock_filter = MagicMock(spec=CollectionFilter) result = aggregate_func(id_filter=mock_filter) assert isinstance(result, list) assert mock_filter in result def test_created_at_filter_aggregation() -> None: """Test aggregation with created_at filter.""" config = cast(FilterConfig, {"created_at": "created_at"}) aggregate_func = _create_filter_aggregate_function(config) # Check signature sig = inspect.signature(aggregate_func) assert "created_filter" in sig.parameters # Simulate calling with filter mock_filter = MagicMock(spec=BeforeAfter) result = aggregate_func(created_filter=mock_filter) assert isinstance(result, list) assert mock_filter in result def test_updated_at_filter_aggregation() -> None: """Test aggregation with updated_at filter.""" config = cast(FilterConfig, {"updated_at": "updated_at"}) aggregate_func = 
_create_filter_aggregate_function(config) # Check signature sig = inspect.signature(aggregate_func) assert "updated_filter" in sig.parameters # Simulate calling with filter mock_filter = MagicMock(spec=BeforeAfter) result = aggregate_func(updated_filter=mock_filter) assert isinstance(result, list) assert mock_filter in result def test_search_filter_aggregation() -> None: """Test aggregation with search filter.""" config = cast(FilterConfig, {"search": ["name"]}) aggregate_func = _create_filter_aggregate_function(config) # Check signature sig = inspect.signature(aggregate_func) assert "search_filter" in sig.parameters # Mock search filter with valid attributes mock_filter = MagicMock(spec=SearchFilter) mock_filter.field_name = "name" mock_filter.value = "test" result = aggregate_func(search_filter=mock_filter) assert isinstance(result, list) assert mock_filter in result # Test with invalid search filter (None value) mock_filter.value = None result = aggregate_func(search_filter=mock_filter) assert mock_filter not in result def test_limit_offset_filter_aggregation() -> None: """Test aggregation with limit_offset filter.""" config = cast(FilterConfig, {"pagination_type": "limit_offset"}) aggregate_func = _create_filter_aggregate_function(config) # Check signature sig = inspect.signature(aggregate_func) assert "limit_offset" in sig.parameters # Simulate calling with filter mock_filter = MagicMock(spec=LimitOffset) result = aggregate_func(limit_offset=mock_filter) assert isinstance(result, list) assert mock_filter in result def test_order_by_filter_aggregation() -> None: """Test aggregation with order_by filter.""" config = cast(FilterConfig, {"sort_field": "name"}) aggregate_func = _create_filter_aggregate_function(config) # Check signature sig = inspect.signature(aggregate_func) assert "order_by" in sig.parameters # Mock order_by filter with valid field_name mock_filter = MagicMock(spec=OrderBy) mock_filter.field_name = "name" result = 
aggregate_func(order_by=mock_filter) assert isinstance(result, list) assert mock_filter in result # Test with invalid order_by filter (None field_name) mock_filter.field_name = None result = aggregate_func(order_by=mock_filter) assert mock_filter not in result def test_multiple_filters_aggregation() -> None: """Test aggregation with multiple filters.""" config = cast( FilterConfig, { "id_filter": int, "created_at": True, "updated_at": True, "search": "name", "pagination_type": "limit_offset", "sort_field": "name", }, ) aggregate_func = _create_filter_aggregate_function(config) # Check signature has all parameters sig = inspect.signature(aggregate_func) assert "id_filter" in sig.parameters assert "created_filter" in sig.parameters assert "updated_filter" in sig.parameters assert "search_filter" in sig.parameters assert "limit_offset" in sig.parameters assert "order_by" in sig.parameters # Simulate calling with multiple filters mock_id_filter = MagicMock(spec=CollectionFilter) mock_created_filter = MagicMock(spec=BeforeAfter) mock_updated_filter = MagicMock(spec=BeforeAfter) mock_search_filter = MagicMock(spec=SearchFilter) mock_search_filter.field_name = "name" mock_search_filter.value = "test" mock_limit_offset = MagicMock(spec=LimitOffset) mock_order_by = MagicMock(spec=OrderBy) mock_order_by.field_name = "name" result = aggregate_func( id_filter=mock_id_filter, created_filter=mock_created_filter, updated_filter=mock_updated_filter, search_filter=mock_search_filter, limit_offset=mock_limit_offset, order_by=mock_order_by, ) # Verify all filters are included assert len(result) == 6 assert mock_id_filter in result assert mock_created_filter in result assert mock_updated_filter in result assert mock_search_filter in result assert mock_limit_offset in result assert mock_order_by in result python-advanced-alchemy-1.0.1/tests/unit/test_extensions/test_litestar/test_serialization_plugin.py000066400000000000000000000045401476663714600343550ustar00rootroot00000000000000from 
types import ModuleType from typing import Callable from litestar import get from litestar.status_codes import HTTP_200_OK from litestar.testing import RequestFactory, create_test_client from sqlalchemy import String from sqlalchemy.orm import Mapped, mapped_column from advanced_alchemy.base import UUIDAuditBase from advanced_alchemy.extensions.litestar import SQLAlchemySerializationPlugin from advanced_alchemy.service.pagination import OffsetPagination async def test_serialization_plugin( create_module: Callable[[str], ModuleType], request_factory: RequestFactory, ) -> None: module = create_module( """ from __future__ import annotations from typing import Dict, List, Set, Tuple, Type, List from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column from litestar import Litestar, get, post from advanced_alchemy.extensions.litestar import SQLAlchemySerializationPlugin class Base(DeclarativeBase): id: Mapped[int] = mapped_column(primary_key=True) class A(Base): __tablename__ = "a" a: Mapped[str] @post("/a") def post_handler(data: A) -> A: return data @get("/a") def get_handler() -> List[A]: return [A(id=1, a="test"), A(id=2, a="test2")] @get("/a/1") def get_a() -> A: return A(id=1, a="test") """, ) with create_test_client( route_handlers=[module.post_handler, module.get_handler, module.get_a], plugins=[SQLAlchemySerializationPlugin()], ) as client: response = client.post("/a", json={"id": 1, "a": "test"}) assert response.status_code == 201 assert response.json() == {"id": 1, "a": "test"} response = client.get("/a") assert response.json() == [{"id": 1, "a": "test"}, {"id": 2, "a": "test2"}] response = client.get("/a/1") assert response.json() == {"id": 1, "a": "test"} class User(UUIDAuditBase): first_name: Mapped[str] = mapped_column(String(200)) def test_pagination_serialization() -> None: users = [User(first_name="ASD"), User(first_name="qwe")] @get("/paginated") async def paginated_handler() -> OffsetPagination[User]: return 
OffsetPagination[User](items=users, limit=2, offset=0, total=2) with create_test_client(paginated_handler, plugins=[SQLAlchemySerializationPlugin()]) as client: response = client.get("/paginated") assert response.status_code == HTTP_200_OK python-advanced-alchemy-1.0.1/tests/unit/test_extensions/test_sanic.py000066400000000000000000000204171476663714600263320ustar00rootroot00000000000000from __future__ import annotations from typing import TYPE_CHECKING, Any, Union, cast from unittest.mock import MagicMock import pytest from pytest import FixtureRequest from pytest_mock import MockerFixture from sanic import HTTPResponse, Request, Sanic from sanic_testing.testing import SanicTestClient # type: ignore[import-untyped] from sqlalchemy import Engine from sqlalchemy.ext.asyncio import AsyncEngine from typing_extensions import assert_type from advanced_alchemy.extensions.sanic import AdvancedAlchemy, SQLAlchemyAsyncConfig, SQLAlchemySyncConfig AnyConfig = Union[SQLAlchemyAsyncConfig, SQLAlchemySyncConfig] @pytest.fixture() def app() -> Sanic[Any, Any]: return Sanic("TestSanic") @pytest.fixture() def client(app: Sanic[Any, Any]) -> SanicTestClient: return SanicTestClient(app=app) @pytest.fixture() def sync_config() -> SQLAlchemySyncConfig: return SQLAlchemySyncConfig(connection_string="sqlite+pysqlite://") @pytest.fixture() def async_config() -> SQLAlchemyAsyncConfig: return SQLAlchemyAsyncConfig(connection_string="sqlite+aiosqlite://") @pytest.fixture(params=["sync_config", "async_config"]) def config(request: FixtureRequest) -> AnyConfig: return cast(AnyConfig, request.getfixturevalue(request.param)) @pytest.fixture() def alchemy(config: AnyConfig, app: Sanic[Any, Any]) -> AdvancedAlchemy: alchemy = AdvancedAlchemy(sqlalchemy_config=config) alchemy.register(app) return alchemy @pytest.fixture() def mock_close(mocker: MockerFixture, config: AnyConfig) -> MagicMock: if isinstance(config, SQLAlchemySyncConfig): return mocker.patch("sqlalchemy.orm.Session.close") return 
mocker.patch("sqlalchemy.ext.asyncio.AsyncSession.close") @pytest.fixture() def mock_commit(mocker: MockerFixture, config: AnyConfig) -> MagicMock: if isinstance(config, SQLAlchemySyncConfig): return mocker.patch("sqlalchemy.orm.Session.commit") return mocker.patch("sqlalchemy.ext.asyncio.AsyncSession.commit") @pytest.fixture() def mock_rollback(mocker: MockerFixture, config: AnyConfig) -> MagicMock: if isinstance(config, SQLAlchemySyncConfig): return mocker.patch("sqlalchemy.orm.Session.rollback") return mocker.patch("sqlalchemy.ext.asyncio.AsyncSession.rollback") def test_infer_types_from_config(async_config: SQLAlchemyAsyncConfig, sync_config: SQLAlchemySyncConfig) -> None: if TYPE_CHECKING: sync_alchemy = AdvancedAlchemy(sqlalchemy_config=sync_config) async_alchemy = AdvancedAlchemy(sqlalchemy_config=async_config) assert_type(sync_alchemy.get_sync_engine(), Engine) assert_type(async_alchemy.get_async_engine(), AsyncEngine) def test_inject_engine(app: Sanic[Any, Any], alchemy: AdvancedAlchemy) -> None: @app.get("/") async def handler(request: Request) -> HTTPResponse: assert isinstance(getattr(request.app.ctx, alchemy.get_config().engine_key), (Engine, AsyncEngine)) return HTTPResponse(status=200) client = SanicTestClient(app=app) assert client.get("/")[1].status == 200 # pyright: ignore[reportOptionalMemberAccess,reportUnknownMemberType] """ def test_inject_session(app: Sanic, alchemy: AdvancedAlchemy, client: SanicTestClient) -> None: if isinstance(alchemy.sqlalchemy_config, SQLAlchemyAsyncConfig): app.ext.add_dependency(AsyncSession, alchemy.get_session_from_request) @app.get("/") async def handler(request: Request) -> HTTPResponse: assert isinstance(getattr(request.ctx, alchemy.session_key), AsyncSession) return HTTPResponse(status=200) assert client.get("/")[1].status == 200 else: app.ext.add_dependency(Session, alchemy.get_session_from_request) @app.get("/") async def handler(request: Request) -> HTTPResponse: assert isinstance(getattr(request.ctx, 
alchemy.session_key), Session) return HTTPResponse(status=200) assert client.get("/")[1].status == 200 """ """ def test_session_no_autocommit( app: Sanic, alchemy: AdvancedAlchemy, client: SanicTestClient, mock_commit: MagicMock, mock_close: MagicMock, ) -> None: alchemy.autocommit_strategy = None app.ext.add_dependency(Session, alchemy.get_session) @app.get("/") def handler(session: Session) -> None: pass assert client.get("/")[1].status == 200 mock_commit.assert_not_called() mock_close.assert_called_once() """ """ def test_session_autocommit_always( app: Sanic, alchemy: AdvancedAlchemy, client: SanicTestClient, mock_commit: MagicMock, mock_close: MagicMock, ) -> None: alchemy.autocommit_strategy = "always" app.ext.add_dependency(Session, alchemy.get_session) @app.get("/") def handler(session: Session) -> None: pass assert client.get("/")[1].status == 200 mock_commit.assert_called_once() mock_close.assert_called_once() """ """ @pytest.mark.parametrize("status", [200, 201, 202, 204, 206]) def test_session_autocommit_match_status( app: Sanic, alchemy: AdvancedAlchemy, client: SanicTestClient, mock_commit: MagicMock, mock_close: MagicMock, mock_rollback: MagicMock, status: int, ) -> None: alchemy.autocommit_strategy = "match_status" app.ext.add_dependency(Session, alchemy.get_session) @app.get("/") def handler(session: Session) -> HTTPResponse: return HTTPResponse(status=status) client.get("/") mock_commit.assert_called_once() mock_close.assert_called_once() mock_rollback.assert_not_called() """ """ @pytest.mark.parametrize("status", [300, 301, 305, 307, 308, 400, 401, 404, 450, 500, 900]) def test_session_autocommit_rollback_for_status( app: Sanic, alchemy: AdvancedAlchemy, client: SanicTestClient, mock_commit: MagicMock, mock_close: MagicMock, mock_rollback: MagicMock, status: int, ) -> None: alchemy.autocommit_strategy = "match_status" app.ext.add_dependency(Session, alchemy.get_session) @app.get("/") def handler(session: Session) -> HTTPResponse: return 
HTTPResponse(status=status) client.get("/") mock_commit.assert_not_called() mock_close.assert_called_once() mock_rollback.assert_called_once() """ """ @pytest.mark.parametrize("autocommit_strategy", ["always", "match_status"]) def test_session_autocommit_close_on_exception( app: Sanic, alchemy: AdvancedAlchemy, client: SanicTestClient, mock_commit: MagicMock, mock_close: MagicMock, autocommit_strategy: CommitStrategy, ) -> None: alchemy.autocommit_strategy = autocommit_strategy mock_commit.side_effect = ValueError app.ext.add_dependency(Session, alchemy.get_session) @app.get("/") def handler(session: Session) -> None: pass client.get("/") mock_commit.assert_called_once() mock_close.assert_called_once() """ """ def test_multiple_instances(app: Sanic) -> None: mock = MagicMock() config_1 = SQLAlchemySyncConfig(connection_string="sqlite+aiosqlite://") config_2 = SQLAlchemySyncConfig(connection_string="sqlite+aiosqlite:///test.db") alchemy_1 = AdvancedAlchemy(sqlalchemy_config=config_1) alchemy_2 = AdvancedAlchemy( sqlalchemy_config=config_2, engine_key="other_engine", session_key="other_session", session_maker_key="other_sessionmaker", ) Extend.register(alchemy_1) Extend.register(alchemy_2) app.ext.add_dependency(Session, alchemy_1.get_session) app.ext.add_dependency(Session, alchemy_2.get_session) app.ext.add_dependency(Engine, alchemy_1.get_engine) app.ext.add_dependency(Engine, alchemy_2.get_engine) @app.get("/") async def handler( session_1: Session, session_2: Session, engine_1: Engine, engine_2: Engine, ) -> None: assert session_1 != session_2 assert engine_1 != engine_2 mock(session=session_1, engine=engine_1) mock(session=session_2, engine=engine_2) client = SanicTestClient(app=app) _response = client.get("/") assert alchemy_1.engine_key != alchemy_2.engine_key assert alchemy_1.session_maker_key != alchemy_2.session_maker_key assert alchemy_1.session_key != alchemy_2.session_key assert alchemy_1.get_engine() is not alchemy_2.get_engine() assert 
alchemy_1.get_sessionmaker() is not alchemy_2.get_sessionmaker() """ python-advanced-alchemy-1.0.1/tests/unit/test_extensions/test_starlette.py000066400000000000000000000473501476663714600272510ustar00rootroot00000000000000from __future__ import annotations import sys from collections.abc import AsyncGenerator, Generator from contextlib import asynccontextmanager from typing import TYPE_CHECKING, Callable, Union, cast from unittest.mock import MagicMock import pytest from pytest import FixtureRequest from sqlalchemy import Engine, create_engine from sqlalchemy.ext.asyncio import AsyncEngine, AsyncSession from sqlalchemy.orm import Session from starlette.applications import Starlette from starlette.exceptions import HTTPException from starlette.requests import Request from starlette.responses import Response from starlette.routing import Route from starlette.testclient import TestClient from typing_extensions import Literal, assert_type from advanced_alchemy.exceptions import ImproperConfigurationError from advanced_alchemy.extensions.starlette import AdvancedAlchemy, SQLAlchemyAsyncConfig, SQLAlchemySyncConfig if TYPE_CHECKING: from pytest import FixtureRequest from pytest_mock import MockerFixture AnyConfig = Union[SQLAlchemyAsyncConfig, SQLAlchemySyncConfig] pytestmark = pytest.mark.xfail( condition=sys.version_info < (3, 9), reason="Certain versions of Starlette and FastAPI are stated to still support 3.8, but there are documented incompatibilities on various versions that have not been yanked. 
Marking 3.8 as an acceptable failure for now.", ) @pytest.fixture() def app() -> Starlette: return Starlette() @pytest.fixture() def client(app: Starlette) -> Generator[TestClient, None, None]: with TestClient(app=app, raise_server_exceptions=False) as client: yield client @pytest.fixture() def sync_config() -> SQLAlchemySyncConfig: return SQLAlchemySyncConfig(connection_string="sqlite+pysqlite:///:memory:") @pytest.fixture() def async_config() -> SQLAlchemyAsyncConfig: return SQLAlchemyAsyncConfig(connection_string="sqlite+aiosqlite:///:memory:") @pytest.fixture(params=["sync_config", "async_config"]) def config(request: FixtureRequest) -> AnyConfig: return cast(AnyConfig, request.getfixturevalue(request.param)) @pytest.fixture() def alchemy(config: AnyConfig, app: Starlette) -> Generator[AdvancedAlchemy, None, None]: alchemy = AdvancedAlchemy(config, app=app) yield alchemy @pytest.fixture() def multi_alchemy(app: Starlette) -> Generator[AdvancedAlchemy, None, None]: alchemy = AdvancedAlchemy( [ SQLAlchemySyncConfig(connection_string="sqlite+pysqlite:///:memory:", bind_key="sync"), SQLAlchemyAsyncConfig(connection_string="sqlite+aiosqlite:///:memory:"), ], app=app, ) yield alchemy async def test_infer_types_from_config(async_config: SQLAlchemyAsyncConfig, sync_config: SQLAlchemySyncConfig) -> None: if TYPE_CHECKING: sync_alchemy = AdvancedAlchemy([sync_config]) async_alchemy = AdvancedAlchemy([async_config]) assert_type(sync_alchemy.get_sync_engine(), Engine) assert_type(async_alchemy.get_async_engine(), AsyncEngine) assert_type(sync_alchemy.get_sync_config().create_session_maker(), Callable[[], Session]) assert_type(async_alchemy.get_async_config().create_session_maker(), Callable[[], AsyncSession]) with sync_alchemy.with_sync_session() as session: assert_type(session, Session) async with async_alchemy.with_async_session() as session: assert_type(session, AsyncSession) def test_init_app_not_called_raises(client: TestClient, config: SQLAlchemySyncConfig) -> None: 
alchemy = AdvancedAlchemy(config) with pytest.raises(ImproperConfigurationError): alchemy.app def test_inject_engine(app: Starlette) -> None: mock = MagicMock() config = SQLAlchemySyncConfig(engine_instance=create_engine("sqlite+aiosqlite://")) alchemy = AdvancedAlchemy(config=config, app=app) async def handler(request: Request) -> Response: engine = alchemy.get_engine() mock(engine) return Response(status_code=200) app.router.routes.append(Route("/", endpoint=handler)) with TestClient(app=app) as client: assert client.get("/").status_code == 200 assert mock.call_args[0][0] is config.engine_instance def test_inject_session(app: Starlette, alchemy: AdvancedAlchemy, client: TestClient) -> None: mock = MagicMock() async def handler(request: Request) -> Response: session = alchemy.get_session(request) mock(session) return Response(status_code=200) app.router.routes.append(Route("/", endpoint=handler)) call = client.get("/") assert call.status_code == 200 assert mock.call_count == 1 call_1_session = mock.call_args_list[0].args[0] assert isinstance( call_1_session, AsyncSession if isinstance(alchemy.config[0], SQLAlchemyAsyncConfig) else Session, ) def test_session_no_autocommit( app: Starlette, alchemy: AdvancedAlchemy, client: TestClient, mocker: MockerFixture, ) -> None: if isinstance(alchemy.config[0], SQLAlchemyAsyncConfig): mock_commit = mocker.patch("sqlalchemy.ext.asyncio.AsyncSession.commit") mock_close = mocker.patch("sqlalchemy.ext.asyncio.AsyncSession.close") else: mock_commit = mocker.patch("sqlalchemy.orm.Session.commit") mock_close = mocker.patch("sqlalchemy.orm.Session.close") app.middleware_stack = app.build_middleware_stack() async def handler(request: Request) -> Response: _session = alchemy.get_session(request) return Response(status_code=200) app.router.routes.append(Route("/", endpoint=handler)) assert client.get("/").status_code == 200 mock_commit.assert_not_called() mock_close.assert_called_once() @pytest.mark.parametrize("status_code", [200, 201, 
202, 204, 206]) def test_sync_session_autocommit_success_status( mocker: MockerFixture, status_code: int, ) -> None: mock_commit = mocker.patch("sqlalchemy.orm.Session.commit") mock_close = mocker.patch("sqlalchemy.orm.Session.close") mock_rollback = mocker.patch("sqlalchemy.orm.Session.rollback") app = Starlette() config = SQLAlchemySyncConfig(connection_string="sqlite+pysqlite:///:memory:", commit_mode="autocommit") alchemy = AdvancedAlchemy(config, app=app) async def handler(request: Request) -> Response: _session = alchemy.get_session(request) return Response(status_code=status_code) app.router.routes.append(Route("/", endpoint=handler)) with TestClient(app=app) as client: _ = client.get("/") mock_commit.assert_called_once() mock_close.assert_called_once() mock_rollback.assert_not_called() @pytest.mark.parametrize("status_code", [200, 201, 202, 204, 206]) def test_sync_session_autocommit_include_redirect_success_status( mocker: MockerFixture, status_code: int, ) -> None: mock_commit = mocker.patch("sqlalchemy.orm.Session.commit") mock_close = mocker.patch("sqlalchemy.orm.Session.close") mock_rollback = mocker.patch("sqlalchemy.orm.Session.rollback") app = Starlette() config = SQLAlchemySyncConfig( connection_string="sqlite+pysqlite:///:memory:", commit_mode="autocommit_include_redirect" ) alchemy = AdvancedAlchemy(config, app=app) async def handler(request: Request) -> Response: _session = alchemy.get_session(request) return Response(status_code=status_code) app.router.routes.append(Route("/", endpoint=handler)) with TestClient(app=app) as client: _ = client.get("/") mock_commit.assert_called_once() mock_close.assert_called_once() mock_rollback.assert_not_called() @pytest.mark.parametrize("status_code", [200, 201, 202, 204, 206]) def test_async_session_autocommit_success_status( mocker: MockerFixture, status_code: int, ) -> None: mock_commit = mocker.patch("sqlalchemy.ext.asyncio.AsyncSession.commit") mock_close = 
mocker.patch("sqlalchemy.ext.asyncio.AsyncSession.close") mock_rollback = mocker.patch("sqlalchemy.ext.asyncio.AsyncSession.rollback") app = Starlette() config = SQLAlchemyAsyncConfig(connection_string="sqlite+aiosqlite:///:memory:", commit_mode="autocommit") alchemy = AdvancedAlchemy(config, app=app) async def handler(request: Request) -> Response: _session = alchemy.get_session(request) return Response(status_code=status_code) app.router.routes.append(Route("/", endpoint=handler)) with TestClient(app=app) as client: _ = client.get("/") mock_commit.assert_called_once() mock_close.assert_called_once() mock_rollback.assert_not_called() @pytest.mark.parametrize("status_code", [200, 201, 202, 204, 206]) def test_async_session_autocommit_include_redirect_success_status( mocker: MockerFixture, status_code: int, ) -> None: mock_commit = mocker.patch("sqlalchemy.ext.asyncio.AsyncSession.commit") mock_close = mocker.patch("sqlalchemy.ext.asyncio.AsyncSession.close") mock_rollback = mocker.patch("sqlalchemy.ext.asyncio.AsyncSession.rollback") app = Starlette() config = SQLAlchemyAsyncConfig( connection_string="sqlite+aiosqlite:///:memory:", commit_mode="autocommit_include_redirect" ) alchemy = AdvancedAlchemy(config, app=app) async def handler(request: Request) -> Response: _session = alchemy.get_session(request) return Response(status_code=status_code) app.router.routes.append(Route("/", endpoint=handler)) with TestClient(app=app) as client: _ = client.get("/") mock_commit.assert_called_once() mock_close.assert_called_once() mock_rollback.assert_not_called() @pytest.mark.parametrize("status_code", [300, 301, 305, 307, 308, 400, 401, 404, 450, 500, 900]) def test_sync_session_autocommit_rollback_for_status( status_code: int, mocker: MockerFixture, ) -> None: mock_commit = mocker.patch("sqlalchemy.orm.Session.commit") mock_close = mocker.patch("sqlalchemy.orm.Session.close") mock_rollback = mocker.patch("sqlalchemy.orm.Session.rollback") app = Starlette() config = 
SQLAlchemySyncConfig(connection_string="sqlite+pysqlite:///:memory:", commit_mode="autocommit") alchemy = AdvancedAlchemy(config, app=app) async def handler(request: Request) -> Response: _session = alchemy.get_session(request) return Response(status_code=status_code) app.router.routes.append(Route("/", endpoint=handler)) with TestClient(app=app) as client: response = client.get("/") assert response.status_code == status_code if status_code >= 300: assert mock_commit.call_count == 0 assert mock_rollback.call_count == 1 assert mock_close.call_count == 1 else: assert mock_commit.call_count == 1 assert mock_close.call_count == 1 assert mock_rollback.call_count == 0 @pytest.mark.parametrize("status_code", [300, 301, 305, 307, 308, 400, 401, 404, 450, 500, 900]) def test_sync_session_autocommit_include_redirect_rollback_for_status( status_code: int, mocker: MockerFixture, ) -> None: mock_commit = mocker.patch("sqlalchemy.orm.Session.commit") mock_close = mocker.patch("sqlalchemy.orm.Session.close") mock_rollback = mocker.patch("sqlalchemy.orm.Session.rollback") app = Starlette() config = SQLAlchemySyncConfig( connection_string="sqlite+pysqlite:///:memory:", commit_mode="autocommit_include_redirect" ) alchemy = AdvancedAlchemy(config, app=app) async def handler(request: Request) -> Response: _session = alchemy.get_session(request) return Response(status_code=status_code) app.router.routes.append(Route("/", endpoint=handler)) with TestClient(app=app) as client: response = client.get("/") assert response.status_code == status_code if status_code < 400: assert mock_commit.call_count == 1 assert mock_rollback.call_count == 0 assert mock_close.call_count == 1 else: assert mock_commit.call_count == 0 assert mock_rollback.call_count == 1 assert mock_close.call_count == 1 @pytest.mark.parametrize("status_code", [300, 301, 305, 307, 308, 400, 401, 404, 450, 500, 900]) def test_async_session_autocommit_rollback_for_status( status_code: int, mocker: MockerFixture, ) -> None: 
mock_commit = mocker.patch("sqlalchemy.ext.asyncio.AsyncSession.commit") mock_close = mocker.patch("sqlalchemy.ext.asyncio.AsyncSession.close") mock_rollback = mocker.patch("sqlalchemy.ext.asyncio.AsyncSession.rollback") app = Starlette() config = SQLAlchemyAsyncConfig(connection_string="sqlite+aiosqlite:///:memory:", commit_mode="autocommit") alchemy = AdvancedAlchemy(config, app=app) async def handler(request: Request) -> Response: _session = alchemy.get_session(request) return Response(status_code=status_code) app.router.routes.append(Route("/", endpoint=handler)) with TestClient(app=app) as client: response = client.get("/") assert response.status_code == status_code if status_code >= 300: assert mock_commit.call_count == 0 assert mock_rollback.call_count == 1 assert mock_close.call_count == 1 else: assert mock_commit.call_count == 1 assert mock_close.call_count == 1 assert mock_rollback.call_count == 0 @pytest.mark.parametrize("status_code", [300, 301, 305, 307, 308, 400, 401, 404, 450, 500, 900]) def test_async_session_autocommit_include_redirect_rollback_for_status( status_code: int, mocker: MockerFixture, ) -> None: mock_commit = mocker.patch("sqlalchemy.ext.asyncio.AsyncSession.commit") mock_close = mocker.patch("sqlalchemy.ext.asyncio.AsyncSession.close") mock_rollback = mocker.patch("sqlalchemy.ext.asyncio.AsyncSession.rollback") app = Starlette() config = SQLAlchemyAsyncConfig( connection_string="sqlite+aiosqlite:///:memory:", commit_mode="autocommit_include_redirect" ) alchemy = AdvancedAlchemy(config, app=app) async def handler(request: Request) -> Response: _session = alchemy.get_session(request) return Response(status_code=status_code) app.router.routes.append(Route("/", endpoint=handler)) with TestClient(app=app) as client: response = client.get("/") assert response.status_code == status_code if status_code >= 400: assert mock_commit.call_count == 0 assert mock_rollback.call_count == 1 assert mock_close.call_count == 1 else: assert 
mock_commit.call_count == 1 assert mock_rollback.call_count == 0 assert mock_close.call_count == 1 @pytest.mark.parametrize("autocommit_strategy", ["autocommit", "autocommit_include_redirect"]) def test_sync_session_autocommit_close_on_exception( mocker: MockerFixture, autocommit_strategy: Literal["autocommit", "autocommit_include_redirect"], ) -> None: mock_commit = mocker.patch("sqlalchemy.orm.Session.commit") mock_rollback = mocker.patch("sqlalchemy.orm.Session.rollback") mock_close = mocker.patch("sqlalchemy.orm.Session.close") async def http_exception(request: Request, exc: HTTPException) -> Response: return Response(status_code=exc.status_code) app = Starlette(exception_handlers={HTTPException: http_exception}) # type: ignore config = SQLAlchemySyncConfig(connection_string="sqlite+pysqlite:///:memory:", commit_mode=autocommit_strategy) alchemy = AdvancedAlchemy(config, app=app) async def handler(request: Request) -> None: _session = alchemy.get_session(request) raise HTTPException(status_code=500, detail="Intentional error for testing") app.router.routes.append(Route("/", endpoint=handler)) with TestClient(app=app) as client: client.get("/") mock_commit.assert_not_called() mock_rollback.assert_called_once() mock_close.assert_called_once() @pytest.mark.parametrize("autocommit_strategy", ["autocommit", "autocommit_include_redirect"]) async def test_async_session_autocommit_close_on_exception( mocker: MockerFixture, autocommit_strategy: Literal["autocommit", "autocommit_include_redirect"], ) -> None: mock_commit = mocker.patch("sqlalchemy.ext.asyncio.AsyncSession.commit") mock_rollback = mocker.patch("sqlalchemy.ext.asyncio.AsyncSession.rollback") mock_close = mocker.patch("sqlalchemy.ext.asyncio.AsyncSession.close") async def http_exception(request: Request, exc: HTTPException) -> Response: return Response(status_code=exc.status_code) app = Starlette(exception_handlers={HTTPException: http_exception}) # type: ignore config = 
SQLAlchemyAsyncConfig(connection_string="sqlite+aiosqlite:///:memory:", commit_mode=autocommit_strategy) alchemy = AdvancedAlchemy(config, app=app) async def handler(request: Request) -> None: _session = alchemy.get_session(request) raise HTTPException(status_code=500, detail="Intentional error for testing") app.router.routes.append(Route("/", endpoint=handler)) with TestClient(app=app) as client: client.get("/") mock_commit.assert_not_called() mock_rollback.assert_called_once() mock_close.assert_called_once() def test_multiple_instances(app: Starlette) -> None: mock = MagicMock() config_1 = SQLAlchemySyncConfig(connection_string="sqlite:///other.db") config_2 = SQLAlchemyAsyncConfig(connection_string="sqlite+aiosqlite:///test.db", bind_key="other") alchemy_1 = AdvancedAlchemy([config_1, config_2], app=app) async def handler(request: Request) -> Response: session_1 = alchemy_1.get_sync_session(request) engine_1 = alchemy_1.get_sync_engine() session_2 = alchemy_1.get_async_session(request, key="other") engine_2 = alchemy_1.get_async_engine(key="other") assert session_1 is not session_2 # type: ignore assert engine_1 is not engine_2 mock(session=session_1, engine=engine_1) mock(session=session_2, engine=engine_2) return Response(status_code=200) app.router.routes.append(Route("/", endpoint=handler)) with TestClient(app=app) as client: client.get("/") assert alchemy_1.get_sync_engine() is not alchemy_1.get_async_engine("other") async def test_lifespan_startup_shutdown_called_starlette( mocker: MockerFixture, app: Starlette, config: AnyConfig ) -> None: mock_startup = mocker.patch.object(AdvancedAlchemy, "on_startup") mock_shutdown = mocker.patch.object(AdvancedAlchemy, "on_shutdown") _alchemy = AdvancedAlchemy(config, app=app) with TestClient(app=app) as _client: # TestClient context manager triggers lifespan events pass # App starts up and shuts down within this context mock_startup.assert_called_once() mock_shutdown.assert_called_once() async def 
test_lifespan_with_custom_lifespan_starlette( mocker: MockerFixture, app: Starlette, config: AnyConfig ) -> None: mock_aa_startup = mocker.patch.object(AdvancedAlchemy, "on_startup") mock_aa_shutdown = mocker.patch.object(AdvancedAlchemy, "on_shutdown") mock_custom_startup = mocker.MagicMock() mock_custom_shutdown = mocker.MagicMock() @asynccontextmanager async def custom_lifespan(app_in: Starlette) -> AsyncGenerator[None, None]: mock_custom_startup() yield mock_custom_shutdown() app.router.lifespan_context = custom_lifespan # type: ignore[assignment] # Set a custom lifespan on the app _alchemy = AdvancedAlchemy(config, app=app) with TestClient(app=app) as _client: # TestClient context manager triggers lifespan events pass # App starts up and shuts down within this context mock_aa_startup.assert_called_once() mock_aa_shutdown.assert_called_once() mock_custom_startup.assert_called_once() mock_custom_shutdown.assert_called_once() python-advanced-alchemy-1.0.1/tests/unit/test_repository.py000066400000000000000000001124261476663714600242200ustar00rootroot00000000000000"""Unit tests for the SQLAlchemy Repository implementation.""" from __future__ import annotations import datetime from collections.abc import AsyncGenerator, Collection, Generator from typing import TYPE_CHECKING, Any, Union, cast from unittest.mock import AsyncMock, MagicMock from uuid import uuid4 import pytest from msgspec import Struct from pydantic import BaseModel from pytest_lazy_fixtures import lf from sqlalchemy import String from sqlalchemy.exc import SQLAlchemyError from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.orm import InstrumentedAttribute, Mapped, Session, mapped_column from advanced_alchemy import base from advanced_alchemy.exceptions import IntegrityError, RepositoryError, wrap_sqlalchemy_exception from advanced_alchemy.filters import ( BeforeAfter, CollectionFilter, LimitOffset, NotInCollectionFilter, OnBeforeAfter, ) from advanced_alchemy.repository import ( 
SQLAlchemyAsyncRepository, SQLAlchemySyncRepository, ) from advanced_alchemy.service.typing import ( is_msgspec_struct, is_pydantic_model, is_schema, is_schema_or_dict, is_schema_or_dict_with_field, is_schema_or_dict_without_field, is_schema_with_field, is_schema_without_field, ) from tests.helpers import maybe_async if TYPE_CHECKING: from _pytest.fixtures import FixtureRequest from pytest import MonkeyPatch from pytest_mock import MockerFixture AnyMock = Union[MagicMock, AsyncMock] class UUIDModel(base.UUIDAuditBase): """Inheriting from UUIDAuditBase gives the model 'created_at' and 'updated_at' columns. """ class BigIntModel(base.BigIntAuditBase): """Inheriting from BigIntAuditBase gives the model 'created_at' and 'updated_at' columns. """ @pytest.fixture() async def async_mock_repo() -> AsyncGenerator[SQLAlchemyAsyncRepository[MagicMock], None]: """SQLAlchemy repository with a mock model type.""" class Repo(SQLAlchemyAsyncRepository[MagicMock]): """Repo with mocked out stuff.""" model_type = MagicMock(__name__="MagicMock") # pyright:ignore[reportGeneralTypeIssues,reportAssignmentType] session = AsyncMock(spec=AsyncSession, bind=MagicMock()) yield Repo(session=session, statement=MagicMock()) @pytest.fixture() def sync_mock_repo() -> Generator[SQLAlchemySyncRepository[MagicMock], None, None]: """SQLAlchemy repository with a mock model type.""" class Repo(SQLAlchemySyncRepository[MagicMock]): """Repo with mocked out stuff.""" model_type = MagicMock(__name__="MagicMock") # pyright:ignore[reportGeneralTypeIssues,reportAssignmentType] yield Repo(session=MagicMock(spec=Session, bind=MagicMock()), statement=MagicMock()) @pytest.fixture(params=[lf("sync_mock_repo"), lf("async_mock_repo")]) def mock_repo(request: FixtureRequest) -> Generator[SQLAlchemyAsyncRepository[MagicMock], None, None]: yield cast(SQLAlchemyAsyncRepository[Any], request.param) @pytest.fixture() def mock_session_scalars( # pyright: ignore[reportUnknownParameterType] mock_repo: 
SQLAlchemyAsyncRepository[MagicMock], mocker: MockerFixture ) -> Generator[AnyMock, None, None]: yield mocker.patch.object(mock_repo.session, "scalars") @pytest.fixture() def mock_session_execute( # pyright: ignore[reportUnknownParameterType] mock_repo: SQLAlchemyAsyncRepository[MagicMock], mocker: MockerFixture ) -> Generator[AnyMock, None, None]: yield mocker.patch.object(mock_repo.session, "scalars") @pytest.fixture() def mock_repo_list( # pyright: ignore[reportUnknownParameterType] mock_repo: SQLAlchemyAsyncRepository[MagicMock], mocker: MockerFixture ) -> Generator[AnyMock, None, None]: yield mocker.patch.object(mock_repo, "list") @pytest.fixture() def mock_repo_execute( # pyright: ignore[reportUnknownParameterType] mock_repo: SQLAlchemyAsyncRepository[MagicMock], mocker: MockerFixture ) -> Generator[AnyMock, None, None]: yield mocker.patch.object(mock_repo, "_execute") @pytest.fixture() def mock_repo_attach_to_session( # pyright: ignore[reportUnknownParameterType] mock_repo: SQLAlchemyAsyncRepository[MagicMock], mocker: MockerFixture ) -> Generator[AnyMock, None, None]: yield mocker.patch.object(mock_repo, "_attach_to_session") @pytest.fixture() def mock_repo_count( # pyright: ignore[reportUnknownParameterType] mock_repo: SQLAlchemyAsyncRepository[MagicMock], mocker: MockerFixture ) -> Generator[AnyMock, None, None]: yield mocker.patch.object(mock_repo, "count") def test_sqlalchemy_tablename() -> None: """Test the snake case conversion for table names.""" class BigModel(base.UUIDAuditBase): """Inheriting from UUIDAuditBase gives the model 'created_at' and 'updated_at' columns. """ class TESTModel(base.UUIDAuditBase): """Inheriting from UUIDAuditBase gives the model 'created_at' and 'updated_at' columns. """ class OtherBigIntModel(base.BigIntAuditBase): """Inheriting from BigIntAuditBase gives the model 'created_at' and 'updated_at' columns. 
""" assert BigModel.__tablename__ == "big_model" assert TESTModel.__tablename__ == "test_model" assert OtherBigIntModel.__tablename__ == "other_big_int_model" def test_sqlalchemy_sentinel(monkeypatch: MonkeyPatch) -> None: """Test the sqlalchemy sentinel column only exists on `UUIDPrimaryKey` models.""" class AnotherModel(base.UUIDAuditBase): """Inheriting from UUIDAuditBase gives the model 'created_at' and 'updated_at' columns. """ the_extra_col: Mapped[str] = mapped_column(String(length=100), nullable=True) # pyright: ignore class TheTestModel(base.UUIDBase): """Inheriting from DeclarativeBase gives the model 'id' columns.""" the_extra_col: Mapped[str] = mapped_column(String(length=100), nullable=True) # pyright: ignore class TheBigIntModel(base.BigIntBase): """Inheriting from DeclarativeBase gives the model 'id' columns.""" the_extra_col: Mapped[str] = mapped_column(String(length=100), nullable=True) # pyright: ignore unloaded_cols = {"the_extra_col"} sa_instance_mock = MagicMock(unloaded=unloaded_cols) assert isinstance(AnotherModel._sentinel, InstrumentedAttribute) # pyright: ignore assert isinstance(TheTestModel._sentinel, InstrumentedAttribute) # pyright: ignore assert not hasattr(TheBigIntModel, "_sentinel") model1, model2, model3 = AnotherModel(), TheTestModel(), TheBigIntModel() monkeypatch.setattr(model1, "_sa_instance_state", sa_instance_mock) monkeypatch.setattr(model2, "_sa_instance_state", sa_instance_mock) monkeypatch.setattr(model3, "_sa_instance_state", sa_instance_mock) assert "created_at" not in model1.to_dict(exclude={"created_at"}) assert "the_extra_col" not in model1.to_dict(exclude={"created_at"}) assert "sa_orm_sentinel" not in model1.to_dict() assert "sa_orm_sentinel" not in model2.to_dict() assert "sa_orm_sentinel" not in model3.to_dict() assert "_sentinel" not in model1.to_dict() assert "_sentinel" not in model2.to_dict() assert "_sentinel" not in model3.to_dict() assert "the_extra_col" not in model1.to_dict() def 
test_wrap_sqlalchemy_integrity_error() -> None: """Test to ensure we wrap IntegrityError.""" with pytest.raises(IntegrityError), wrap_sqlalchemy_exception(): raise IntegrityError(None, None, Exception()) def test_wrap_sqlalchemy_generic_error() -> None: """Test to ensure we wrap generic SQLAlchemy exceptions.""" with pytest.raises(RepositoryError), wrap_sqlalchemy_exception(): raise SQLAlchemyError async def test_sqlalchemy_repo_add(mock_repo: SQLAlchemyAsyncRepository[Any]) -> None: """Test expected method calls for add operation.""" mock_instance = MagicMock() instance = await maybe_async(mock_repo.add(mock_instance)) assert instance is mock_instance mock_repo.session.add.assert_called_once_with(mock_instance) # pyright: ignore[reportFunctionMemberAccess] mock_repo.session.flush.assert_called_once() # pyright: ignore[reportFunctionMemberAccess] mock_repo.session.refresh.assert_called_once_with( # pyright: ignore[reportFunctionMemberAccess] instance=mock_instance, attribute_names=None, with_for_update=None, ) mock_repo.session.expunge.assert_not_called() # pyright: ignore[reportFunctionMemberAccess] mock_repo.session.commit.assert_not_called() # pyright: ignore[reportFunctionMemberAccess] async def test_sqlalchemy_repo_add_many( mock_repo: SQLAlchemyAsyncRepository[Any], monkeypatch: MonkeyPatch, mocker: MockerFixture, request: FixtureRequest, ) -> None: """Test expected method calls for add many operation.""" mock_instances = [MagicMock(), MagicMock(), MagicMock()] monkeypatch.setattr(mock_repo, "model_type", UUIDModel) mocker.patch.object(mock_repo.session, "scalars", return_value=mock_instances) instances = await maybe_async(mock_repo.add_many(mock_instances)) assert len(instances) == 3 for row in instances: assert row.id is not None mock_repo.session.expunge.assert_not_called() # pyright: ignore[reportFunctionMemberAccess] mock_repo.session.commit.assert_not_called() # pyright: ignore[reportFunctionMemberAccess] async def test_sqlalchemy_repo_update_many( 
mock_repo: SQLAlchemyAsyncRepository[Any], monkeypatch: MonkeyPatch, mocker: MockerFixture, ) -> None: """Test expected method calls for update many operation.""" mock_instances = [MagicMock(), MagicMock(), MagicMock()] monkeypatch.setattr(mock_repo, "model_type", UUIDModel) mocker.patch.object(mock_repo.session, "scalars", return_value=mock_instances) instances = await maybe_async(mock_repo.update_many(mock_instances)) assert len(instances) == 3 for row in instances: assert row.id is not None mock_repo.session.flush.assert_called_once() # pyright: ignore[reportFunctionMemberAccess] mock_repo.session.commit.assert_not_called() # pyright: ignore[reportFunctionMemberAccess] async def test_sqlalchemy_repo_upsert_many( mock_repo: SQLAlchemyAsyncRepository[Any], monkeypatch: MonkeyPatch, mocker: MockerFixture, ) -> None: """Test expected method calls for update many operation.""" mock_instances = [MagicMock(), MagicMock(), MagicMock()] monkeypatch.setattr(mock_repo, "model_type", UUIDModel) mocker.patch.object(mock_repo.session, "scalars", return_value=mock_instances) mocker.patch.object(mock_repo, "list", return_value=mock_instances) mocker.patch.object(mock_repo, "add_many", return_value=mock_instances) mocker.patch.object(mock_repo, "update_many", return_value=mock_instances) instances = await maybe_async(mock_repo.upsert_many(mock_instances)) assert len(instances) == 3 for row in instances: assert row.id is not None mock_repo.session.commit.assert_not_called() # pyright: ignore[reportFunctionMemberAccess] async def test_sqlalchemy_repo_delete(mock_repo: SQLAlchemyAsyncRepository[Any], mocker: MockerFixture) -> None: """Test expected method calls for delete operation.""" mock_instance = MagicMock() mocker.patch.object(mock_repo, "get", return_value=mock_instance) instance = await maybe_async(mock_repo.delete("instance-id")) assert instance is mock_instance mock_repo.session.delete.assert_called_once_with(mock_instance) # pyright: ignore[reportFunctionMemberAccess] 
mock_repo.session.flush.assert_called_once() # pyright: ignore[reportFunctionMemberAccess] mock_repo.session.expunge.assert_not_called() # pyright: ignore[reportFunctionMemberAccess] mock_repo.session.commit.assert_not_called() # pyright: ignore[reportFunctionMemberAccess] async def test_sqlalchemy_repo_delete_many_uuid( mock_repo: SQLAlchemyAsyncRepository[Any], monkeypatch: MonkeyPatch, mock_session_scalars: AnyMock, mock_session_execute: AnyMock, mock_repo_list: AnyMock, ) -> None: """Test expected method calls for delete operation.""" mock_instances = [MagicMock(), MagicMock(id=uuid4())] mock_session_scalars.return_value = mock_instances mock_session_execute.return_value = mock_instances mock_repo_list.return_value = mock_instances monkeypatch.setattr(mock_repo, "model_type", UUIDModel) monkeypatch.setattr(mock_repo.session.bind.dialect, "insertmanyvalues_max_parameters", 2) added_instances = await maybe_async(mock_repo.add_many(mock_instances)) instances = await maybe_async(mock_repo.delete_many([obj.id for obj in added_instances])) assert len(instances) == len(mock_instances) mock_repo.session.flush.assert_called() # pyright: ignore[reportFunctionMemberAccess] mock_repo.session.commit.assert_not_called() # pyright: ignore[reportFunctionMemberAccess] async def test_sqlalchemy_repo_delete_many_bigint( mock_repo: SQLAlchemyAsyncRepository[Any], monkeypatch: MonkeyPatch, mock_session_scalars: AnyMock, mock_session_execute: AnyMock, mock_repo_list: AnyMock, testrun_uid: str, ) -> None: """Test expected method calls for delete operation.""" mock_instances = [MagicMock(), MagicMock(id=uuid4())] mock_session_scalars.return_value = mock_instances mock_session_execute.return_value = mock_instances mock_repo_list.return_value = mock_instances monkeypatch.setattr(mock_repo, "model_type", BigIntModel) monkeypatch.setattr(mock_repo.session.bind.dialect, "insertmanyvalues_max_parameters", 2) added_instances = await maybe_async(mock_repo.add_many(mock_instances)) instances = 
await maybe_async(mock_repo.delete_many([obj.id for obj in added_instances])) assert len(instances) == len(mock_instances) mock_repo.session.flush.assert_called() # pyright: ignore[reportFunctionMemberAccess] mock_repo.session.commit.assert_not_called() # pyright: ignore[reportFunctionMemberAccess] async def test_sqlalchemy_repo_get_member( mock_repo: SQLAlchemyAsyncRepository[Any], monkeypatch: MonkeyPatch, mock_repo_execute: AnyMock, ) -> None: """Test expected method calls for member get operation.""" mock_instance = MagicMock() mock_repo_execute.return_value = MagicMock(scalar_one_or_none=MagicMock(return_value=mock_instance)) instance = await maybe_async(mock_repo.get("instance-id")) assert instance is mock_instance mock_repo.session.expunge.assert_not_called() # pyright: ignore[reportFunctionMemberAccess] mock_repo.session.commit.assert_not_called() # pyright: ignore[reportFunctionMemberAccess] async def test_sqlalchemy_repo_get_one_member( mock_repo: SQLAlchemyAsyncRepository[Any], monkeypatch: MonkeyPatch, mock_repo_execute: AnyMock, ) -> None: """Test expected method calls for member get one operation.""" mock_instance = MagicMock() mock_repo_execute.return_value = MagicMock(scalar_one_or_none=MagicMock(return_value=mock_instance)) instance = await maybe_async(mock_repo.get_one(id="instance-id")) assert instance is mock_instance mock_repo.session.expunge.assert_not_called() # pyright: ignore[reportFunctionMemberAccess] mock_repo.session.commit.assert_not_called() # pyright: ignore[reportFunctionMemberAccess] async def test_sqlalchemy_repo_get_or_upsert_member_existing( mock_repo: SQLAlchemyAsyncRepository[Any], monkeypatch: MonkeyPatch, mock_repo_execute: AnyMock, mock_repo_attach_to_session: AnyMock, ) -> None: """Test expected method calls for member get or create operation (existing).""" mock_instance = MagicMock() mock_repo_execute.return_value = MagicMock(scalar_one_or_none=MagicMock(return_value=mock_instance)) 
mock_repo_attach_to_session.return_value = mock_instance instance, created = await maybe_async(mock_repo.get_or_upsert(id="instance-id", upsert=False)) assert instance is mock_instance assert created is False mock_repo.session.expunge.assert_not_called() # pyright: ignore[reportFunctionMemberAccess] mock_repo.session.merge.assert_not_called() # pyright: ignore[reportFunctionMemberAccess] mock_repo.session.refresh.assert_not_called() # pyright: ignore[reportFunctionMemberAccess] async def test_sqlalchemy_repo_get_or_upsert_member_existing_upsert( mock_repo: SQLAlchemyAsyncRepository[Any], monkeypatch: MonkeyPatch, mock_repo_execute: AnyMock, mock_repo_attach_to_session: AnyMock, ) -> None: """Test expected method calls for member get or create operation (existing).""" mock_instance = MagicMock() mock_repo_execute.return_value = MagicMock(scalar_one_or_none=MagicMock(return_value=mock_instance)) mock_repo_attach_to_session.return_value = mock_instance instance, created = await maybe_async( mock_repo.get_or_upsert(id="instance-id", upsert=True, an_extra_attribute="yep"), ) assert instance is mock_instance assert created is False mock_repo.session.expunge.assert_not_called() # pyright: ignore[reportFunctionMemberAccess] mock_repo._attach_to_session.assert_called_once() # pyright: ignore[reportFunctionMemberAccess,reportPrivateUsage] mock_repo.session.flush.assert_called_once() # pyright: ignore[reportFunctionMemberAccess] mock_repo.session.refresh.assert_called_once_with( # pyright: ignore[reportFunctionMemberAccess] instance=mock_instance, attribute_names=None, with_for_update=None, ) async def test_sqlalchemy_repo_get_or_upsert_member_existing_no_upsert( mock_repo: SQLAlchemyAsyncRepository[Any], monkeypatch: MonkeyPatch, mock_repo_execute: AnyMock, ) -> None: """Test expected method calls for member get or create operation (existing).""" mock_instance = MagicMock() mock_repo_execute.return_value = MagicMock(scalar_one_or_none=MagicMock(return_value=mock_instance)) 
instance, created = await maybe_async( mock_repo.get_or_upsert(id="instance-id", upsert=False, an_extra_attribute="yep"), ) assert instance is mock_instance assert created is False mock_repo.session.expunge.assert_not_called() # pyright: ignore[reportFunctionMemberAccess] mock_repo.session.add.assert_not_called() # pyright: ignore[reportFunctionMemberAccess] mock_repo.session.refresh.assert_not_called() # pyright: ignore[reportFunctionMemberAccess] async def test_sqlalchemy_repo_get_or_upsert_member_created( mock_repo: SQLAlchemyAsyncRepository[Any], monkeypatch: MonkeyPatch, mock_repo_execute: AnyMock, ) -> None: """Test expected method calls for member get or create operation (created).""" mock_repo_execute.return_value = MagicMock(scalar_one_or_none=MagicMock(return_value=None)) instance, created = await maybe_async(mock_repo.get_or_upsert(id="new-id")) assert instance is not None assert created is True mock_repo.session.expunge.assert_not_called() # pyright: ignore[reportFunctionMemberAccess] mock_repo.session.add.assert_called_once_with(instance) # pyright: ignore[reportFunctionMemberAccess] mock_repo.session.refresh.assert_called_once_with(instance=instance, attribute_names=None, with_for_update=None) # pyright: ignore[reportFunctionMemberAccess] async def test_sqlalchemy_repo_get_one_or_none_member( mock_repo: SQLAlchemyAsyncRepository[Any], monkeypatch: MonkeyPatch, mock_repo_execute: AnyMock, ) -> None: """Test expected method calls for member get one or none operation (found).""" mock_instance = MagicMock() mock_repo_execute.return_value = MagicMock(scalar_one_or_none=MagicMock(return_value=mock_instance)) instance = await maybe_async(mock_repo.get_one_or_none(id="instance-id")) assert instance is mock_instance mock_repo.session.expunge.assert_not_called() # pyright: ignore[reportFunctionMemberAccess] mock_repo.session.commit.assert_not_called() # pyright: ignore[reportFunctionMemberAccess] async def test_sqlalchemy_repo_get_one_or_none_not_found( 
mock_repo: SQLAlchemyAsyncRepository[Any], monkeypatch: MonkeyPatch, mock_repo_execute: AnyMock, ) -> None: """Test expected method calls for member get one or none operation (Not found).""" mock_repo_execute.return_value = MagicMock(scalar_one_or_none=MagicMock(return_value=None)) instance = await maybe_async(mock_repo.get_one_or_none(id="instance-id")) assert instance is None mock_repo.session.expunge.assert_not_called() # pyright: ignore[reportFunctionMemberAccess] mock_repo.session.commit.assert_not_called() # pyright: ignore[reportFunctionMemberAccess] async def test_sqlalchemy_repo_list( mock_repo: SQLAlchemyAsyncRepository[Any], monkeypatch: MonkeyPatch, mock_repo_execute: AnyMock, ) -> None: """Test expected method calls for list operation.""" mock_instances = [MagicMock(), MagicMock()] mock_repo_execute.return_value = MagicMock(scalars=MagicMock(return_value=mock_instances)) instances = await maybe_async(mock_repo.list()) assert instances == mock_instances mock_repo.session.expunge.assert_not_called() # pyright: ignore[reportFunctionMemberAccess] mock_repo.session.commit.assert_not_called() # pyright: ignore[reportFunctionMemberAccess] async def test_sqlalchemy_repo_list_and_count(mock_repo: SQLAlchemyAsyncRepository[Any], mocker: MockerFixture) -> None: """Test expected method calls for list operation.""" mock_instances = [MagicMock(), MagicMock()] mock_count = len(mock_instances) mocker.patch.object(mock_repo, "_list_and_count_window", return_value=(mock_instances, mock_count)) instances, instance_count = await maybe_async(mock_repo.list_and_count()) assert instances == mock_instances assert instance_count == mock_count mock_repo.session.expunge.assert_not_called() # pyright: ignore[reportFunctionMemberAccess] mock_repo.session.commit.assert_not_called() # pyright: ignore[reportFunctionMemberAccess] async def test_sqlalchemy_repo_list_and_count_basic( mock_repo: SQLAlchemyAsyncRepository[Any], mocker: MockerFixture, ) -> None: """Test expected method 
calls for list operation.""" mock_instances = [MagicMock(), MagicMock()] mock_count = len(mock_instances) mocker.patch.object(mock_repo, "_list_and_count_basic", return_value=(mock_instances, mock_count)) instances, instance_count = await maybe_async(mock_repo.list_and_count(count_with_window_function=False)) assert instances == mock_instances assert instance_count == mock_count mock_repo.session.expunge.assert_not_called() # pyright: ignore[reportFunctionMemberAccess] mock_repo.session.commit.assert_not_called() # pyright: ignore[reportFunctionMemberAccess] async def test_sqlalchemy_repo_exists( mock_repo: SQLAlchemyAsyncRepository[Any], monkeypatch: MonkeyPatch, mock_repo_execute: AnyMock, mock_repo_count: AnyMock, ) -> None: """Test expected method calls for exists operation.""" mock_repo_count.return_value = 1 exists = await maybe_async(mock_repo.exists(id="my-id")) assert exists mock_repo.session.commit.assert_not_called() # pyright: ignore[reportFunctionMemberAccess] async def test_sqlalchemy_repo_exists_with_filter( mock_repo: SQLAlchemyAsyncRepository[Any], monkeypatch: MonkeyPatch, mock_repo_execute: AnyMock, mock_repo_count: AnyMock, ) -> None: """Test expected method calls for exists operation. 
with filter argument""" limit_filter = LimitOffset(limit=1, offset=0) mock_repo_count.return_value = 1 exists = await maybe_async(mock_repo.exists(limit_filter, id="my-id")) assert exists mock_repo.session.commit.assert_not_called() # pyright: ignore[reportFunctionMemberAccess] async def test_sqlalchemy_repo_count( mock_repo: SQLAlchemyAsyncRepository[Any], monkeypatch: MonkeyPatch, mock_repo_execute: AnyMock, mock_repo_count: AnyMock, ) -> None: """Test expected method calls for list operation.""" mock_repo_count.return_value = 1 count = await maybe_async(mock_repo.count()) assert count == 1 mock_repo.session.commit.assert_not_called() # pyright: ignore[reportFunctionMemberAccess] async def test_sqlalchemy_repo_list_with_pagination( mock_repo: SQLAlchemyAsyncRepository[Any], monkeypatch: MonkeyPatch, mock_repo_execute: AnyMock, mocker: MockerFixture, ) -> None: """Test list operation with pagination.""" statement = MagicMock() mock_repo_execute.return_value = MagicMock() mocker.patch.object(LimitOffset, "append_to_statement", return_value=statement) mock_repo_execute.return_value = MagicMock() await maybe_async(mock_repo.list(LimitOffset(2, 3))) mock_repo._execute.assert_called_with(statement, uniquify=False) # pyright: ignore[reportFunctionMemberAccess,reportPrivateUsage] async def test_sqlalchemy_repo_list_with_before_after_filter( mock_repo: SQLAlchemyAsyncRepository[Any], mock_repo_execute: AnyMock, mocker: MockerFixture, ) -> None: """Test list operation with BeforeAfter filter.""" statement = MagicMock() mocker.patch.object(mock_repo.model_type.updated_at, "__lt__", return_value="lt") mocker.patch.object(mock_repo.model_type.updated_at, "__gt__", return_value="gt") mocker.patch.object(BeforeAfter, "append_to_statement", return_value=statement) mock_repo_execute.return_value = MagicMock() await maybe_async(mock_repo.list(BeforeAfter("updated_at", datetime.datetime.max, datetime.datetime.min))) mock_repo._execute.assert_called_with(statement, uniquify=False) # 
pyright: ignore[reportFunctionMemberAccess,reportPrivateUsage] async def test_sqlalchemy_repo_list_with_on_before_after_filter( mock_repo: SQLAlchemyAsyncRepository[Any], monkeypatch: MonkeyPatch, mock_repo_execute: AnyMock, mocker: MockerFixture, ) -> None: """Test list operation with BeforeAfter filter.""" statement = MagicMock() mocker.patch.object(mock_repo.model_type.updated_at, "__le__", return_value="le") mocker.patch.object(mock_repo.model_type.updated_at, "__ge__", return_value="ge") mocker.patch.object(OnBeforeAfter, "append_to_statement", return_value=statement) mock_repo_execute.return_value = MagicMock() await maybe_async(mock_repo.list(OnBeforeAfter("updated_at", datetime.datetime.max, datetime.datetime.min))) mock_repo._execute.assert_called_with(statement, uniquify=False) # pyright: ignore[reportFunctionMemberAccess,reportPrivateUsage] async def test_sqlalchemy_repo_list_with_collection_filter( mock_repo: SQLAlchemyAsyncRepository[Any], monkeypatch: MonkeyPatch, mock_repo_execute: AnyMock, mocker: MockerFixture, ) -> None: """Test behavior of list operation given CollectionFilter.""" field_name = "id" mock_repo_execute.return_value = MagicMock() mock_repo.statement.where.return_value = mock_repo.statement # pyright: ignore[reportFunctionMemberAccess] mocker.patch.object(CollectionFilter, "append_to_statement", return_value=mock_repo.statement) values = [1, 2, 3] await maybe_async(mock_repo.list(CollectionFilter(field_name, values))) mock_repo._execute.assert_called_with(mock_repo.statement, uniquify=False) # pyright: ignore[reportFunctionMemberAccess,reportPrivateUsage] async def test_sqlalchemy_repo_list_with_null_collection_filter( mock_repo: SQLAlchemyAsyncRepository[Any], monkeypatch: MonkeyPatch, mock_repo_execute: AnyMock, mocker: MockerFixture, ) -> None: """Test behavior of list operation given CollectionFilter.""" field_name = "id" mock_repo_execute.return_value = MagicMock() mock_repo.statement.where.return_value = mock_repo.statement # 
pyright: ignore[reportFunctionMemberAccess] monkeypatch.setattr( CollectionFilter, "append_to_statement", MagicMock(return_value=mock_repo.statement), ) await maybe_async(mock_repo.list(CollectionFilter(field_name, None))) # pyright: ignore[reportFunctionMemberAccess,reportUnknownArgumentType] mock_repo._execute.assert_called_with(mock_repo.statement, uniquify=False) # pyright: ignore[reportFunctionMemberAccess,reportPrivateUsage] async def test_sqlalchemy_repo_empty_list_with_collection_filter( mock_repo: SQLAlchemyAsyncRepository[Any], monkeypatch: MonkeyPatch, mock_repo_execute: AnyMock, mocker: MockerFixture, ) -> None: """Test behavior of list operation given CollectionFilter.""" field_name = "id" mock_repo_execute.return_value = MagicMock() mock_repo.statement.where.return_value = mock_repo.statement # pyright: ignore[reportFunctionMemberAccess] values: Collection[Any] = [] await maybe_async(mock_repo.list(CollectionFilter(field_name, values))) monkeypatch.setattr( CollectionFilter, "append_to_statement", MagicMock(return_value=mock_repo.statement), ) await maybe_async(mock_repo.list(CollectionFilter(field_name, values))) mock_repo._execute.assert_called_with(mock_repo.statement, uniquify=False) # pyright: ignore[reportFunctionMemberAccess,reportPrivateUsage] async def test_sqlalchemy_repo_list_with_not_in_collection_filter( mock_repo: SQLAlchemyAsyncRepository[Any], monkeypatch: MonkeyPatch, mock_repo_execute: AnyMock, mocker: MockerFixture, ) -> None: """Test behavior of list operation given CollectionFilter.""" field_name = "id" mock_repo_execute.return_value = MagicMock() mock_repo.statement.where.return_value = mock_repo.statement # pyright: ignore[reportFunctionMemberAccess] monkeypatch.setattr( NotInCollectionFilter, "append_to_statement", MagicMock(return_value=mock_repo.statement), ) values = [1, 2, 3] await maybe_async(mock_repo.list(NotInCollectionFilter(field_name, values))) mock_repo._execute.assert_called_with(mock_repo.statement, 
uniquify=False) # pyright: ignore[reportFunctionMemberAccess,reportPrivateUsage] async def test_sqlalchemy_repo_list_with_null_not_in_collection_filter( mock_repo: SQLAlchemyAsyncRepository[Any], monkeypatch: MonkeyPatch, mock_repo_execute: AnyMock, mocker: MockerFixture, ) -> None: """Test behavior of list operation given CollectionFilter.""" field_name = "id" mock_repo_execute.return_value = MagicMock() mock_repo.statement.where.return_value = mock_repo.statement # pyright: ignore[reportFunctionMemberAccess] monkeypatch.setattr( NotInCollectionFilter, "append_to_statement", MagicMock(return_value=mock_repo.statement), ) await maybe_async(mock_repo.list(NotInCollectionFilter[str](field_name, None))) # pyright: ignore[reportFunctionMemberAccess] mock_repo._execute.assert_called_with(mock_repo.statement, uniquify=False) # pyright: ignore[reportFunctionMemberAccess,reportPrivateUsage] async def test_sqlalchemy_repo_unknown_filter_type_raises(mock_repo: SQLAlchemyAsyncRepository[Any]) -> None: """Test that repo raises exception if list receives unknown filter type.""" with pytest.raises(RepositoryError): await maybe_async(mock_repo.list("not a filter")) # type: ignore async def test_sqlalchemy_repo_update( mock_repo: SQLAlchemyAsyncRepository[Any], monkeypatch: MonkeyPatch, mocker: MockerFixture, ) -> None: """Test the sequence of repo calls for update operation.""" id_ = 3 mock_instance = MagicMock() mocker.patch.object(mock_repo, "get_id_attribute_value", return_value=id_) mocker.patch.object(mock_repo, "get") mock_repo.session.merge.return_value = mock_instance # pyright: ignore[reportFunctionMemberAccess] instance = await maybe_async(mock_repo.update(mock_instance)) assert instance is mock_instance mock_repo.session.merge.assert_called_once_with(mock_instance, load=True) # pyright: ignore[reportFunctionMemberAccess] mock_repo.session.flush.assert_called_once() # pyright: ignore[reportFunctionMemberAccess] mock_repo.session.expunge.assert_not_called() # pyright: 
ignore[reportFunctionMemberAccess] mock_repo.session.commit.assert_not_called() # pyright: ignore[reportFunctionMemberAccess] mock_repo.session.refresh.assert_called_once_with( # pyright: ignore[reportFunctionMemberAccess] instance=mock_instance, attribute_names=None, with_for_update=None, ) async def test_sqlalchemy_repo_upsert(mock_repo: SQLAlchemyAsyncRepository[Any], mocker: MockerFixture) -> None: """Test the sequence of repo calls for upsert operation.""" mock_instance = MagicMock() mock_repo.session.merge.return_value = mock_instance # pyright: ignore[reportFunctionMemberAccess] instance = await maybe_async(mock_repo.upsert(mock_instance)) mocker.patch.object(mock_repo, "exists", return_value=True) mocker.patch.object(mock_repo, "count", return_value=1) assert instance is mock_instance mock_repo.session.flush.assert_called_once() # pyright: ignore[reportFunctionMemberAccess] mock_repo.session.expunge.assert_not_called() # pyright: ignore[reportFunctionMemberAccess] mock_repo.session.commit.assert_not_called() # pyright: ignore[reportFunctionMemberAccess] mock_repo.session.refresh.assert_called_once_with( # pyright: ignore[reportFunctionMemberAccess] instance=mock_instance, attribute_names=None, with_for_update=None, ) async def test_attach_to_session_unexpected_strategy_raises_valueerror( mock_repo: SQLAlchemyAsyncRepository[Any], ) -> None: """Test to hit the error condition in SQLAlchemy._attach_to_session().""" with pytest.raises(ValueError): await maybe_async(mock_repo._attach_to_session(MagicMock(), strategy="t-rex")) # type:ignore[arg-type] async def test_execute(mock_repo: SQLAlchemyAsyncRepository[Any]) -> None: """Simple test of the abstraction over `AsyncSession.execute()`""" _ = await maybe_async(mock_repo._execute(mock_repo.statement)) # pyright: ignore[reportFunctionMemberAccess,reportPrivateUsage] mock_repo.session.execute.assert_called_once_with(mock_repo.statement) # pyright: ignore[reportFunctionMemberAccess] async def 
test_filter_in_collection_noop_if_collection_empty(mock_repo: SQLAlchemyAsyncRepository[Any]) -> None: """Ensures we don't filter on an empty collection.""" statement = MagicMock() filter = CollectionFilter(field_name="id", values=[]) # type:ignore[var-annotated] statement = filter.append_to_statement(statement, MagicMock()) # type:ignore[assignment] mock_repo.statement.where.assert_not_called() # pyright: ignore[reportFunctionMemberAccess] @pytest.mark.parametrize( ("before", "after"), [ (datetime.datetime.max, datetime.datetime.min), (None, datetime.datetime.min), (datetime.datetime.max, None), ], ) async def test_filter_on_datetime_field( before: datetime.datetime, after: datetime.datetime, mock_repo: SQLAlchemyAsyncRepository[Any], mocker: MockerFixture, monkeypatch: MonkeyPatch, ) -> None: """Test through branches of _filter_on_datetime_field()""" field_mock = MagicMock(return_value=before or after) statement = MagicMock() field_mock.__gt__ = field_mock.__lt__ = lambda self, other: True # pyright: ignore[reportFunctionMemberAccess,reportUnknownLambdaType] monkeypatch.setattr( BeforeAfter, "append_to_statement", MagicMock(return_value=mock_repo.statement), ) filter = BeforeAfter(field_name="updated_at", before=before, after=after) statement = filter.append_to_statement(statement, MagicMock(return_value=before or after)) # type:ignore[assignment] mock_repo.model_type.updated_at = field_mock mock_repo.statement.where.assert_not_called() # pyright: ignore[reportFunctionMemberAccess] class MyModel(BaseModel): name: str age: int class MyStruct(Struct): name: str age: int def test_is_pydantic_model() -> None: pydantic_model = MyModel(name="Pydantic John", age=30) msgspec_struct = MyStruct(name="Msgspec Joe", age=30) old_dict = {"name": "Old Greg", "age": 30} int_value = 1 assert is_pydantic_model(pydantic_model) assert not is_pydantic_model(msgspec_struct) assert not is_pydantic_model(old_dict) assert not is_pydantic_model(int_value) def test_is_msgspec_struct() -> 
None: pydantic_model = MyModel(name="Pydantic John", age=30) msgspec_struct = MyStruct(name="Msgspec Joe", age=30) old_dict = {"name": "Old Greg", "age": 30} assert not is_msgspec_struct(pydantic_model) assert is_msgspec_struct(msgspec_struct) assert not is_msgspec_struct(old_dict) def test_is_schema() -> None: pydantic_model = MyModel(name="Pydantic John", age=30) msgspec_struct = MyStruct(name="Msgspec Joe", age=30) old_dict = {"name": "Old Greg", "age": 30} int_value = 1 assert is_schema(pydantic_model) assert is_schema(msgspec_struct) assert not is_schema(old_dict) assert not is_schema(int_value) assert is_schema_with_field(pydantic_model, "name") assert not is_schema_with_field(msgspec_struct, "name2") assert is_schema_without_field(pydantic_model, "name2") assert not is_schema_without_field(msgspec_struct, "name") def test_is_schema_or_dict() -> None: pydantic_model = MyModel(name="Pydantic John", age=30) msgspec_struct = MyStruct(name="Msgspec Joe", age=30) old_dict = {"name": "Old Greg", "age": 30} int_value = 1 assert is_schema_or_dict(pydantic_model) assert is_schema_or_dict(msgspec_struct) assert is_schema_or_dict(old_dict) assert not is_schema_or_dict(int_value) assert is_schema_or_dict_with_field(pydantic_model, "name") assert not is_schema_or_dict_with_field(msgspec_struct, "name2") assert is_schema_or_dict_without_field(pydantic_model, "name2") assert not is_schema_or_dict_without_field(msgspec_struct, "name") python-advanced-alchemy-1.0.1/tests/unit/test_utils/000077500000000000000000000000001476663714600225615ustar00rootroot00000000000000python-advanced-alchemy-1.0.1/tests/unit/test_utils/__init__.py000066400000000000000000000000001476663714600246600ustar00rootroot00000000000000python-advanced-alchemy-1.0.1/tests/unit/test_utils/test_module_loader.py000066400000000000000000000034171476663714600270120ustar00rootroot00000000000000from pathlib import Path import pytest from _pytest.monkeypatch import MonkeyPatch from advanced_alchemy.config import 
GenericAlembicConfig from advanced_alchemy.utils.module_loader import import_string, module_to_os_path def test_import_string() -> None: cls = import_string("advanced_alchemy.config.GenericAlembicConfig") assert type(cls) is type(GenericAlembicConfig) with pytest.raises(ImportError): _ = import_string("GenericAlembicConfigNew") _ = import_string("advanced_alchemy.config.GenericAlembicConfigNew") _ = import_string("imaginary_module_that_doesnt_exist.Config") # a random nonexistent class def test_module_path(tmp_path: Path, monkeypatch: MonkeyPatch) -> None: the_path = module_to_os_path("advanced_alchemy.config") assert the_path.exists() tmp_path.joinpath("simple_module.py").write_text("x = 'foo'") monkeypatch.syspath_prepend(tmp_path) # pyright: ignore[reportUnknownMemberType] os_path = module_to_os_path("simple_module") assert os_path == Path(tmp_path) with pytest.raises( ( ImportError, TypeError, ) ): _ = module_to_os_path("advanced_alchemy.config.GenericAlembicConfig") _ = module_to_os_path("advanced_alchemy.config.GenericAlembicConfig.extra.module") def test_import_non_existing_attribute_raises() -> None: with pytest.raises(ImportError): import_string("advanced_alchemy.config.SuperGenericAlembicConfig") def test_import_string_cached(tmp_path: Path, monkeypatch: MonkeyPatch) -> None: tmp_path.joinpath("testmodule.py").write_text("x = 'foo'") monkeypatch.chdir(tmp_path) monkeypatch.syspath_prepend(tmp_path) # pyright: ignore[reportUnknownMemberType] assert import_string("testmodule.x") == "foo" python-advanced-alchemy-1.0.1/tests/unit/test_utils/test_portals.py000066400000000000000000000035071476663714600256630ustar00rootroot00000000000000import asyncio from collections.abc import Coroutine from typing import Any, Callable import pytest from advanced_alchemy.utils.portals import Portal, PortalProvider @pytest.fixture async def async_function() -> Callable[[int], Coroutine[Any, Any, int]]: async def sample_async_function(x: int) -> int: await asyncio.sleep(0.1) 
return x * 2 return sample_async_function def test_portal_provider_singleton() -> None: provider1 = PortalProvider() provider2 = PortalProvider() assert provider1 is provider2, "PortalProvider is not a singleton" def test_portal_provider_start_stop() -> None: provider = PortalProvider() provider.start() assert provider.is_running, "Provider should be running after start()" assert provider.is_ready, "Provider should be ready after start()" provider.stop() assert not provider.is_running, "Provider should not be running after stop()" def test_portal_provider_call(async_function: Callable[[int], Coroutine[Any, Any, int]]) -> None: provider = PortalProvider() provider.start() result = provider.call(async_function, 5) assert result == 10, "The result of the async function should be 10" provider.stop() def test_portal_provider_call_exception() -> None: async def faulty_async_function() -> None: raise ValueError("Intentional error") provider = PortalProvider() provider.start() with pytest.raises(ValueError, match="Intentional error"): provider.call(faulty_async_function) provider.stop() def test_portal_call(async_function: Callable[[int], Coroutine[Any, Any, int]]) -> None: provider = PortalProvider() portal = Portal(provider) provider.start() result = portal.call(async_function, 3) assert result == 6, "The result of the async function should be 6" provider.stop() python-advanced-alchemy-1.0.1/tests/unit/test_utils/test_text.py000066400000000000000000000007421476663714600251610ustar00rootroot00000000000000from advanced_alchemy.utils.text import check_email, slugify def test_check_email() -> None: valid_email = "test@test.com" valid_email_upper = "TEST@TEST.COM" assert check_email(valid_email) == valid_email assert check_email(valid_email_upper) == valid_email def test_slugify() -> None: string = "This is a Test!" 
expected_slug = "this-is-a-test" assert slugify(string) == expected_slug assert slugify(string, separator="_") == "this_is_a_test" python-advanced-alchemy-1.0.1/tools/000077500000000000000000000000001476663714600174015ustar00rootroot00000000000000python-advanced-alchemy-1.0.1/tools/__init__.py000066400000000000000000000000001476663714600215000ustar00rootroot00000000000000python-advanced-alchemy-1.0.1/tools/build_docs.py000066400000000000000000000056671476663714600221000ustar00rootroot00000000000000from __future__ import annotations import argparse import importlib.metadata import json import os import shutil import subprocess from collections.abc import Generator from contextlib import contextmanager from pathlib import Path from typing import TYPE_CHECKING, TypedDict, cast if TYPE_CHECKING: from collections.abc import Generator REDIRECT_TEMPLATE = """ Page Redirection You are being redirected. If this does not work, click this link """ parser = argparse.ArgumentParser() parser.add_argument("--version", required=False) parser.add_argument("output") class VersionSpec(TypedDict): versions: list[str] latest: str @contextmanager def checkout(branch: str, skip: bool = False) -> Generator[None]: if not skip: subprocess.run(["git", "checkout", branch], check=True) # noqa: S603, S607 yield if not skip: subprocess.run(["git", "checkout", "-"], check=True) # noqa: S603, S607 def load_version_spec() -> VersionSpec: versions_file = Path("docs/_static/versions.json") if versions_file.exists(): return cast("VersionSpec", json.loads(versions_file.read_text())) return {"versions": [], "latest": ""} def build(output_dir: str, version: str | None) -> None: if version is None: version = importlib.metadata.version("advanced_alchemy").rsplit(".")[0] else: os.environ["_ADVANCED_ALCHEMY_DOCS_BUILD_VERSION"] = version subprocess.run(["make", "docs"], check=True) # noqa: S603, S607 Path(output_dir).mkdir(exist_ok=True, parents=True) 
Path(output_dir).joinpath(".nojekyll").touch(exist_ok=True) version_spec = load_version_spec() is_latest = version == version_spec["latest"] docs_src_path = Path("docs/_build/html") Path(output_dir).joinpath("index.html").write_text(REDIRECT_TEMPLATE.format(target="latest")) if is_latest: shutil.copytree(docs_src_path, Path(output_dir) / "latest", dirs_exist_ok=True) shutil.copytree(docs_src_path, Path(output_dir) / version, dirs_exist_ok=True) # copy existing versions into our output dir to preserve them when cleaning the branch with checkout("gh-pages", skip=True): for other_version in [*version_spec["versions"], "latest"]: other_version_path = Path(other_version) other_version_target_path = Path(output_dir) / other_version if other_version_path.exists() and not other_version_target_path.exists(): shutil.copytree(other_version_path, other_version_target_path) def main() -> None: args = parser.parse_args() build(output_dir=args.output, version=args.version) if __name__ == "__main__": main() python-advanced-alchemy-1.0.1/tools/convert_docs.sh000077500000000000000000000002721476663714600224310ustar00rootroot00000000000000#!/bin/bash CHANGELOG=docs/changelog.rst filename="${CHANGELOG%.*}" echo "Converting $CHANGELOG to $filename.md" pandoc --wrap=preserve $CHANGELOG -f rst -t markdown -o "$filename".md python-advanced-alchemy-1.0.1/tools/prepare_release.py000066400000000000000000000360111476663714600231120ustar00rootroot00000000000000import asyncio import contextlib import datetime import os import pathlib import re import shutil import subprocess import sys from collections import defaultdict from collections.abc import Generator from dataclasses import dataclass from typing import Optional import click import httpx import msgspec _polar = "[Polar.sh](https://polar.sh/litestar-org)" _open_collective = "[OpenCollective](https://opencollective.com/litestar)" _github_sponsors = "[GitHub Sponsors](https://github.com/sponsors/litestar-org/)" class 
PullRequest(msgspec.Struct, kw_only=True): title: str number: int body: str created_at: str user: "RepoUser" merge_commit_sha: Optional[str] = None class Comp(msgspec.Struct): sha: str class _Commit(msgspec.Struct): message: str url: str commit: _Commit class RepoUser(msgspec.Struct): login: str id: int type: str @dataclass class PRInfo: url: str title: str clean_title: str cc_type: str number: int closes: list[int] created_at: datetime.datetime description: str user: RepoUser @dataclass class ReleaseInfo: base: str release_tag: str version: str pull_requests: dict[str, list[PRInfo]] first_time_prs: list[PRInfo] @property def compare_url(self) -> str: return f"https://github.com/litestar-org/advanced-alchemy/compare/{self.base}...{self.release_tag}" def _pr_number_from_commit(comp: Comp) -> Optional[int]: # this is an ugly hack, but it appears to actually be the most reliably way to # extract the most "reliable" way to extract the info we want from GH ยฏ\_(ใƒ„)_/ยฏ message_head = comp.commit.message.split("\n\n")[0] match = re.search(r"\(#(\d+)\)$", message_head) if not match: print(f"Could not find PR number in {message_head}") # noqa: T201 return int(match[1]) if match else None class _Thing: def __init__(self, *, gh_token: str, base: str, release_branch: str, tag: str, version: str) -> None: self._gh_token = gh_token self._base = base self._new_release_tag = tag self._release_branch = release_branch self._new_release_version = version self._base_client = httpx.AsyncClient( headers={ "Authorization": f"Bearer {gh_token}", } ) self._api_client = httpx.AsyncClient( headers={ **self._base_client.headers, "X-GitHub-Api-Version": "2022-11-28", "Accept": "application/vnd.github+json", }, base_url="https://api.github.com/repos/litestar-org/advanced-alchemy/", ) async def get_closing_issues_references(self, pr_number: int) -> list[int]: graphql_query = """{ repository(owner: "litestar-org", name: "advanced-alchemy") { pullRequest(number: %d) { id closingIssuesReferences 
(first: 10) { edges { node { number } } } } } }""" query = graphql_query % (pr_number,) res = await self._base_client.post("https://api.github.com/graphql", json={"query": query}) if res.is_client_error: return [] data = res.json() return [ edge["node"]["number"] for edge in data["data"]["repository"]["pullRequest"]["closingIssuesReferences"]["edges"] ] async def _get_pr_info_for_pr(self, number: int) -> Optional[PRInfo]: res = await self._api_client.get(f"/pulls/{number}") if res.is_client_error: click.secho( f"Could not get PR info for {number}. Fetch request returned a status of {res.status_code}", fg="yellow", ) return None res.raise_for_status() data = res.json() if not data["body"]: data["body"] = "" if not data: return None pr = msgspec.convert(data, type=PullRequest) cc_prefix, clean_title = pr.title.split(":", maxsplit=1) cc_type = cc_prefix.split("(", maxsplit=1)[0].lower() closes_issues = await self.get_closing_issues_references(pr_number=pr.number) return PRInfo( number=pr.number, cc_type=cc_type, clean_title=clean_title.strip(), url=f"https://github.com/litestar-org/advanced-alchemy/pull/{pr.number}", closes=closes_issues, title=pr.title, created_at=datetime.datetime.strptime(pr.created_at, "%Y-%m-%dT%H:%M:%S%z"), description=pr.body, user=pr.user, ) async def get_prs(self) -> dict[str, list[PRInfo]]: res = await self._api_client.get(f"/compare/{self._base}...{self._release_branch}") res.raise_for_status() compares = msgspec.convert(res.json()["commits"], list[Comp]) pr_numbers = list(filter(None, (_pr_number_from_commit(c) for c in compares))) pulls = await asyncio.gather(*map(self._get_pr_info_for_pr, pr_numbers)) prs: dict[str, list[PRInfo]] = defaultdict(list) for pr in pulls: if not pr: continue if pr.user.type != "Bot": prs[pr.cc_type].append(pr) return prs async def _get_first_time_contributions(self, prs: dict[str, list[PRInfo]]) -> list[PRInfo]: # there's probably a way to peel this information out of the GraphQL API but # this was easier to 
implement, and it works well enough ยฏ\_(ใƒ„)_/ยฏ # the logic is: if we don't find a commit to the main branch, dated before the # first commit within this release, it's the user's first contribution prs_by_user_login: dict[str, list[PRInfo]] = defaultdict(list) for pr in [p for type_prs in prs.values() for p in type_prs]: prs_by_user_login[pr.user.login].append(pr) first_prs: list[PRInfo] = [] async def is_user_first_commit(user_login: str) -> None: first_pr = sorted(prs_by_user_login[user_login], key=lambda p: p.created_at)[0] res = await self._api_client.get( "/commits", params={ "author": user_login, "sha": "main", "until": first_pr.created_at.isoformat(), "per_page": 1, }, ) res.raise_for_status() if len(res.json()) == 0: first_prs.append(first_pr) await asyncio.gather(*map(is_user_first_commit, prs_by_user_login.keys())) return first_prs async def get_release_info(self) -> ReleaseInfo: prs = await self.get_prs() first_time_contributors = await self._get_first_time_contributions(prs) return ReleaseInfo( pull_requests=prs, first_time_prs=first_time_contributors, base=self._base, release_tag=self._new_release_tag, version=self._new_release_version, ) async def create_draft_release(self, body: str, release_branch: str) -> str: res = await self._api_client.post( "/releases", json={ "tag_name": self._new_release_tag, "target_commitish": release_branch, "name": self._new_release_tag, "draft": True, "body": body, }, ) res.raise_for_status() return res.json()["html_url"] # type: ignore[no-any-return] class GHReleaseWriter: def __init__(self) -> None: self.text = "" def add_line(self, line: str) -> None: self.text += line + "\n" def add_pr_descriptions(self, infos: list[PRInfo]) -> None: for info in infos: self.add_line(f"* {info.title} by @{info.user.login} in {info.url}") class ChangelogEntryWriter: def __init__(self) -> None: self.text = "" self._level = 0 self._indent = " " self._cc_type_map = {"fix": "bugfix", "feat": "feature"} def add_line(self, line: str) -> 
None: self.text += (self._indent * self._level) + line + "\n" def add_change(self, pr: PRInfo) -> None: with self.directive( "change", arg=pr.clean_title, type=self._cc_type_map.get(pr.cc_type, "misc"), pr=str(pr.number), issue=", ".join(map(str, pr.closes)), ): self.add_line("") for line in pr.description.splitlines(): self.add_line(line) @contextlib.contextmanager def directive(self, name: str, arg: Optional[str] = None, **options: str) -> Generator[None, None, None]: self.add_line(f".. {name}:: {arg or ''}") self._level += 1 for key, value in options.items(): if value: self.add_line(f":{key}: {value}") yield self._level -= 1 self.add_line("") def build_gh_release_notes(release_info: ReleaseInfo) -> str: # this is for the most part just recreating GitHub's autogenerated release notes # but with three important differences: # 1. PRs are sorted into categories # 2. The conventional commit type is stripped from the title # 3. It works with our release branch process. GitHub doesn't pick up (all) commits # made there depending on how things were merged doc = GHReleaseWriter() doc.add_line("## Sponsors ๐ŸŒŸ") doc.add_line(f"- A huge 'Thank you!' 
to all sponsors across {_polar}, {_open_collective} and {_github_sponsors}!") doc.add_line("## What's changed") if release_info.first_time_prs: doc.add_line("\n## New contributors ๐ŸŽ‰") for pr in release_info.first_time_prs: doc.add_line(f"* @{pr.user.login} made their first contribution in {pr.url}") if fixes := release_info.pull_requests.get("fix"): doc.add_line("\n### Bugfixes ๐Ÿ›") doc.add_pr_descriptions(fixes) if features := release_info.pull_requests.get("feat"): doc.add_line("\nNew features ๐Ÿš€") doc.add_pr_descriptions(features) ignore_sections = {"fix", "feat", "ci", "chore"} if other := [pr for k, prs in release_info.pull_requests.items() if k not in ignore_sections for pr in prs]: doc.add_line("\n") doc.add_line("### Other changes") doc.add_pr_descriptions(other) doc.add_line("\n**Full Changelog**") doc.add_line(release_info.compare_url) return doc.text def build_changelog_entry(release_info: ReleaseInfo, interactive: bool = False) -> str: doc = ChangelogEntryWriter() with doc.directive("changelog", release_info.version): doc.add_line(f":date: {datetime.datetime.now(tz=datetime.timezone.utc).date().isoformat()}") doc.add_line("") change_types = {"fix", "feat"} for prs in release_info.pull_requests.values(): for pr in prs: cc_type = pr.cc_type if cc_type in change_types or (interactive and click.confirm(f"Include PR #{pr.number} {pr.title!r}?")): doc.add_change(pr) else: click.secho(f"Ignoring change with type {cc_type}", fg="yellow") return doc.text def _get_gh_token() -> str: if gh_token := os.getenv("GH_TOKEN"): click.secho("Using GitHub token from env", fg="blue") return gh_token gh_executable = shutil.which("gh") if not gh_executable: click.secho("GitHub CLI not installed", fg="yellow") else: click.secho("Using GitHub CLI to obtain GitHub token", fg="blue") proc = subprocess.run([gh_executable, "auth", "token"], check=True, capture_output=True, text=True) # noqa: S603 if out := (proc.stdout or "").strip(): return out click.secho("Could not find 
any GitHub token", fg="red") sys.exit(1) def _get_latest_tag() -> str: click.secho("Using latest tag", fg="blue") return subprocess.run( # noqa: S602 "git tag --sort=taggerdate | tail -1", # noqa: S607 check=True, capture_output=True, text=True, shell=True, ).stdout.strip() def _write_changelog_entry(changelog_entry: str) -> None: changelog_path = pathlib.Path("docs/changelog.rst") changelog_lines = changelog_path.read_text().splitlines() line_no = next( (i for i, line in enumerate(changelog_lines) if line.startswith(".. changelog::")), None, ) if not line_no: msg = "Changelog start not found" raise ValueError(msg) changelog_lines[line_no:line_no] = changelog_entry.splitlines() changelog_path.write_text("\n".join(changelog_lines)) def update_pyproject_version(new_version: str) -> None: # can't use tomli-w / tomllib for this as is messes up the formatting pyproject = pathlib.Path("pyproject.toml") content = pyproject.read_text() content = re.sub(r'(\nversion ?= ?")\d+\.\d+\.\d+("\s*\n)', rf"\g<1>{new_version}\g<2>", content) pyproject.write_text(content) @click.command() @click.argument("version") @click.option("--base", help="Previous release tag. Defaults to the latest tag") @click.option("--branch", help="Release branch", default="main") @click.option( "--gh-token", help="GitHub token. If not provided, read from the GH_TOKEN env variable. 
" "Alternatively, if the GitHub CLI is installed, it will be used to fetch a token", ) @click.option( "-i", "--interactive", is_flag=True, help="Interactively decide which commits should be included in the release notes", default=False, ) @click.option("-c", "--create-draft-release", is_flag=True, help="Create draft release on GitHub") def cli( base: Optional[str], branch: str, version: str, gh_token: Optional[str], interactive: bool, create_draft_release: bool, ) -> None: if gh_token is None: gh_token = _get_gh_token() if base is None: base = _get_latest_tag() if not re.match(r"\d+\.\d+\.\d+", version): click.secho(f"Invalid version: {version!r}") sys.exit(1) new_tag = f"v{version}" click.secho(f"Creating release notes for tag {new_tag}, using {base} as a base", fg="cyan") thing = _Thing(gh_token=gh_token, base=base, release_branch=branch, tag=new_tag, version=version) loop = asyncio.new_event_loop() release_info = loop.run_until_complete(thing.get_release_info()) gh_release_notes = build_gh_release_notes(release_info) changelog_entry = build_changelog_entry(release_info, interactive=interactive) click.secho("Writing changelog entry", fg="green") _write_changelog_entry(changelog_entry) if create_draft_release: click.secho("Creating draft release", fg="blue") release_url = loop.run_until_complete(thing.create_draft_release(body=gh_release_notes, release_branch=branch)) click.echo(f"Draft release available at: {release_url}") else: click.echo(gh_release_notes) loop.close() if __name__ == "__main__": cli() python-advanced-alchemy-1.0.1/tools/pypi_readme.py000066400000000000000000000016401476663714600222520ustar00rootroot00000000000000import re from pathlib import Path PYPI_BANNER = 'Litestar Logo - Light' def generate_pypi_readme() -> None: source = Path("README.md").read_text(encoding="utf-8") output = re.sub(r"[\w\W]*", PYPI_BANNER, source) output = re.sub(r"[\w\W]*", "", output) output = re.sub(r"", "", output) # ensure a newline here so the other pre-commit hooks 
don't complain output = output.strip() + "\n" Path("docs/PYPI_README.md").write_text(output, encoding="utf-8") if __name__ == "__main__": generate_pypi_readme() python-advanced-alchemy-1.0.1/tools/sphinx_ext/000077500000000000000000000000001476663714600215725ustar00rootroot00000000000000python-advanced-alchemy-1.0.1/tools/sphinx_ext/__init__.py000066400000000000000000000007111476663714600237020ustar00rootroot00000000000000from __future__ import annotations from typing import TYPE_CHECKING from tools.sphinx_ext import changelog, missing_references if TYPE_CHECKING: from sphinx.application import Sphinx def setup(app: Sphinx) -> dict[str, bool]: ext_config = {} ext_config.update(missing_references.setup(app)) # type: ignore[arg-type] ext_config.update(changelog.setup(app)) # type: ignore[arg-type] return ext_config # type: ignore[return-value] python-advanced-alchemy-1.0.1/tools/sphinx_ext/changelog.py000066400000000000000000000140431476663714600240750ustar00rootroot00000000000000from collections.abc import Callable from functools import partial from typing import TYPE_CHECKING, Any, ClassVar, Literal, Optional, Union, cast from docutils import nodes from docutils.parsers.rst import directives from sphinx.application import Sphinx from sphinx.util.docutils import SphinxDirective from sphinx.util.nodes import clean_astext if TYPE_CHECKING: from sphinx.domains.std import StandardDomain _GH_BASE_URL = "https://github.com/litestar-org/advanced-alchemy" def _parse_gh_reference(raw: str, type_: Literal["issues", "pull"]) -> list[str]: return [f"{_GH_BASE_URL}/{type_}/{r.strip()}" for r in raw.split(" ") if r] class Change(nodes.General, nodes.Element): """A change node for the changelog.""" class ChangeDirective(SphinxDirective): """A directive for the changelog.""" required_arguments = 1 has_content = True final_argument_whitespace = True option_spec: ClassVar[Optional[dict[str, Callable[[str], Any]]]] = { "type": partial(directives.choice, values=("feature", "bugfix", 
"misc")), "breaking": directives.flag, "issue": directives.unchanged, "pr": directives.unchanged, } def run(self) -> list[nodes.Node]: """Run the directive. Returns: A list of nodes. """ self.assert_has_content() change_type = self.options.get("type", "misc").lower() title = self.arguments[0] change_node = nodes.container("\n".join(self.content)) change_node.attributes["classes"].append("changelog-change") self.state.nested_parse(self.content, self.content_offset, change_node) # pyright: ignore[reportUnknownMemberType] reference_links = [ *_parse_gh_reference(self.options.get("issue", ""), "issues"), *_parse_gh_reference(self.options.get("pr", ""), "pull"), ] references_paragraph = nodes.paragraph() references_paragraph.append(nodes.Text("References: ")) for i, link in enumerate(reference_links, 1): link_node = nodes.inline() link_node += nodes.reference("", link, refuri=link, external=True) references_paragraph.append(link_node) if i != len(reference_links): references_paragraph.append(nodes.Text(", ")) change_node.append(references_paragraph) return [ Change( "", change_node, title=self.state.inliner.parse(title, 0, self.state.memo, change_node)[0], change_type=change_type, breaking="breaking" in self.options, ), ] class ChangelogDirective(SphinxDirective): required_arguments = 1 has_content = True option_spec = {"date": directives.unchanged} def run(self) -> list[nodes.Node]: self.assert_has_content() version = self.arguments[0] release_date = self.options.get("date") changelog_node = nodes.section() changelog_node += nodes.title(version, version) section_target = nodes.target("", "", ids=[version]) if release_date: changelog_node += nodes.strong("", "Released: ") changelog_node += nodes.Text(release_date) self.state.nested_parse(self.content, self.content_offset, changelog_node) # pyright: ignore[reportUnknownMemberType] domain = cast("StandardDomain", self.env.get_domain("std")) change_group_lists = { "feature": nodes.definition_list(), "bugfix": 
nodes.definition_list(), "misc": nodes.definition_list(), } change_group_titles = {"bugfix": "Bugfixes", "feature": "Features", "misc": "Other changes"} nodes_to_remove = [] for _i, change_node in enumerate(changelog_node.findall(Change)): change_type = change_node.attributes["change_type"] title = change_node.attributes["title"] list_item = nodes.definition_list_item("") term = nodes.term() term += title target_id = f"{version}-{nodes.fully_normalize_name(title[0].astext())}" term += nodes.reference( "#", "#", refuri=f"#{target_id}", internal=True, classes=["headerlink"], ids=[target_id], ) reference_id = f"change:{target_id}" domain.anonlabels[reference_id] = self.env.docname, target_id domain.labels[reference_id] = ( self.env.docname, target_id, f"Change: {clean_astext(title[0])}", ) if change_node.attributes["breaking"]: breaking_notice = nodes.inline("breaking", "breaking") breaking_notice.attributes["classes"].append("breaking-change") term += breaking_notice list_item += [term] list_item += nodes.definition("", change_node.children[0]) nodes_to_remove.append(change_node) # pyright: ignore[reportUnknownMemberType] change_group_lists[change_type] += list_item for node in nodes_to_remove: # pyright: ignore[reportUnknownVariableType] changelog_node.remove(node) # pyright: ignore[reportUnknownArgumentType] for change_group_type, change_group_list in change_group_lists.items(): if not change_group_list.children: continue section = nodes.section() target_id = f"{version}-{change_group_type}" target_node = nodes.target("", "", ids=[target_id]) title = change_group_titles[change_group_type] section += nodes.title(title, title) section += change_group_list changelog_node += [target_node, section] return [section_target, changelog_node] def setup(app: Sphinx) -> dict[str, Union[str, bool]]: app.add_directive("changelog", ChangelogDirective) app.add_directive("change", ChangeDirective) return {"parallel_read_safe": True, "parallel_write_safe": True} 
python-advanced-alchemy-1.0.1/tools/sphinx_ext/missing_references.py000066400000000000000000000262311476663714600260220ustar00rootroot00000000000000"""Sphinx extension for changelog and change directives.""" # ruff: noqa: FIX002 PLR0911 ARG001 from __future__ import annotations import ast import importlib import inspect from pathlib import Path from typing import TYPE_CHECKING if TYPE_CHECKING: from collections.abc import Generator from docutils.nodes import Element, Node from sphinx.addnodes import pending_xref from sphinx.application import Sphinx from sphinx.environment import BuildEnvironment def _get_module_ast(source_file: str) -> ast.AST | ast.Module: return ast.parse(Path(source_file).read_text(encoding="utf-8")) def _get_import_nodes(nodes: list[ast.stmt]) -> Generator[ast.Import | ast.ImportFrom, None, None]: for node in nodes: if isinstance(node, (ast.Import, ast.ImportFrom)): yield node elif isinstance(node, ast.If) and getattr(node.test, "id", None) == "TYPE_CHECKING": yield from _get_import_nodes(node.body) def get_module_global_imports(module_import_path: str, reference_target_source_obj: str) -> set[str]: """Return a set of names that are imported globally within the containing module of ``reference_target_source_obj``, including imports in ``if TYPE_CHECKING`` blocks. """ module = importlib.import_module(module_import_path) obj = getattr(module, reference_target_source_obj) tree = _get_module_ast(inspect.getsourcefile(obj)) # type: ignore[arg-type] # pyright: ignore[reportArgumentType] import_nodes = _get_import_nodes(tree.body) # type: ignore[attr-defined] return {path.asname or path.name for import_node in import_nodes for path in import_node.names} def _resolve_local_reference(module_path: str, target: str) -> bool: """Attempt to resolve a reference within the local codebase. 
Args: module_path: The module path (e.g., 'advanced_alchemy.base') target: The target class/attribute name Returns: bool: True if reference exists, False otherwise """ try: module = importlib.import_module(module_path) if "." in target: # Handle fully qualified names (e.g., advanced_alchemy.base.BasicAttributes) parts = target.split(".") current = module for part in parts: current = getattr(current, part) return True return hasattr(module, target) except (ImportError, AttributeError): return False def _resolve_sqlalchemy_reference(target: str) -> bool: """Attempt to resolve SQLAlchemy references. Args: target: The target class/attribute name Returns: bool: True if reference exists, False otherwise """ try: import sqlalchemy if "." in target: # Handle nested attributes (e.g., Connection.in_transaction) obj_name, attr_name = target.rsplit(".", 1) obj = getattr(sqlalchemy, obj_name) return hasattr(obj, attr_name) return hasattr(sqlalchemy, target) except (ImportError, AttributeError): return False def _resolve_litestar_reference(target: str) -> bool: """Attempt to resolve Litestar references. Args: target: The target class/attribute name Returns: bool: True if reference exists, False otherwise """ try: import litestar from litestar import datastructures # Handle common Litestar classes if target in {"Litestar", "State", "Scope", "Message", "AppConfig", "BeforeMessageSendHookHandler"}: return True if target.startswith("datastructures."): _, attr = target.split(".") return hasattr(datastructures, attr) if target.startswith("config.app."): return True # These are valid Litestar config references return hasattr(litestar, target) except ImportError: return False def _resolve_sqlalchemy_type_reference(target: str) -> bool: """Attempt to resolve SQLAlchemy type references. 
Args: target: The target class/attribute name Returns: bool: True if reference exists, False otherwise """ try: from sqlalchemy import types as sa_types type_classes = { "TypeEngine", "TypeDecorator", "UserDefinedType", "ExternalType", "Dialect", "_types.TypeDecorator", } if target in type_classes: return True if target.startswith("_types."): _, attr = target.split(".") return hasattr(sa_types, attr) return hasattr(sa_types, target) except ImportError: return False def _resolve_advanced_alchemy_reference(target: str, module: str) -> bool: """Attempt to resolve Advanced Alchemy references. Args: target: The target class/attribute name module: The current module context Returns: bool: True if reference exists, False otherwise """ # Handle base module references base_classes = { "BasicAttributes", "CommonTableAttributes", "AuditColumns", "BigIntPrimaryKey", "UUIDPrimaryKey", "UUIDv6PrimaryKey", "UUIDv7PrimaryKey", "NanoIDPrimaryKey", "Empty", "TableArgsType", "DeclarativeBase", } # Handle config module references config_classes = { "EngineT", "SessionT", "SessionMakerT", "ConnectionT", "GenericSessionConfig", "GenericAlembicConfig", } func_references = {"repository.SQLAlchemyAsyncRepositoryProtocol.add_many"} # Handle type module references type_classes = {"DateTimeUTC", "ORA_JSONB", "GUID", "EncryptedString", "EncryptedText"} if target in base_classes or target in config_classes or target in type_classes: return True # Handle fully qualified references if target.startswith("advanced_alchemy."): parts = target.split(".") if parts[-1] in base_classes | config_classes | type_classes | func_references: return True # Handle module-relative references return bool(module.startswith("advanced_alchemy.")) def _resolve_serialization_reference(target: str) -> bool: """Attempt to resolve serialization-related references. 
Args: target: The target class/attribute name Returns: bool: True if reference exists, False otherwise """ serialization_attrs = {"decode_json", "encode_json", "serialization.decode_json", "serialization.encode_json"} return target in serialization_attrs def _resolve_click_reference(target: str) -> bool: """Attempt to resolve Click references. Args: target: The target class/attribute name Returns: bool: True if reference exists, False otherwise """ try: import click if target == "Group": return True return hasattr(click, target) except ImportError: return False def on_warn_missing_reference(app: Sphinx, domain: str, node: Node) -> bool | None: """Handle warning for missing references by checking if they are valid type imports.""" if node.tagname != "pending_xref": # type: ignore[attr-defined] return None if not hasattr(node, "attributes"): return None attributes = node.attributes # type: ignore[attr-defined,unused-ignore] target = attributes["reftarget"] ref_type = attributes.get("reftype") module = attributes.get("py:module", "") # Handle TypeVar references if hasattr(target, "__class__") and target.__class__.__name__ == "TypeVar": return True # Handle Advanced Alchemy references if _resolve_advanced_alchemy_reference(target, module): return True # Handle SQLAlchemy type system references if ref_type in {"class", "meth", "attr"} and any( x in target for x in ["TypeDecorator", "TypeEngine", "Dialect", "ExternalType", "UserDefinedType"] ): return _resolve_sqlalchemy_type_reference(target) # Handle SQLAlchemy core references if target.startswith("sqlalchemy.") or ( ref_type in ("class", "attr", "obj", "meth") and target in { "Engine", "Session", "Connection", "MetaData", "AsyncSession", "AsyncEngine", "AsyncConnection", "sessionmaker", "async_sessionmaker", } ): clean_target = target.replace("sqlalchemy.", "") if _resolve_sqlalchemy_reference(clean_target): return True # Handle Litestar references if ref_type in {"class", "obj"} and ( 
target.startswith(("datastructures.", "config.app.")) or target in { "Litestar", "State", "Scope", "Message", "AppConfig", "BeforeMessageSendHookHandler", "FieldDefinition", "ImproperConfigurationError", } ): return _resolve_litestar_reference(target) # Handle serialization references if ref_type in {"attr", "meth"} and _resolve_serialization_reference(target): return True # Handle Click references if ref_type == "class" and _resolve_click_reference(target): return True return None def on_missing_reference(app: Sphinx, env: BuildEnvironment, node: pending_xref, contnode: Element) -> Element | None: """Handle missing references by attempting to resolve them through different methods. Args: app: The Sphinx application instance env: The Sphinx build environment node: The pending cross-reference node contnode: The content node Returns: Element | None: The resolved reference node if found, None otherwise """ if not hasattr(node, "attributes"): return None attributes = node.attributes # type: ignore[attr-defined,unused-ignore] target = attributes["reftarget"] # Remove this check since it's causing issues if not isinstance(target, str): return None py_domain = env.domains["py"] # autodoc sometimes incorrectly resolves these types, so we try to resolve them as py:data first and fall back to any new_node = py_domain.resolve_xref(env, node["refdoc"], app.builder, "data", target, node, contnode) if new_node is None: resolved_xrefs = py_domain.resolve_any_xref(env, node["refdoc"], app.builder, target, node, contnode) for ref in resolved_xrefs: if ref: return ref[1] return new_node def on_env_before_read_docs(app: Sphinx, env: BuildEnvironment, docnames: set[str]) -> None: tmp_examples_path = Path.cwd() / "docs/_build/_tmp_examples" tmp_examples_path.mkdir(exist_ok=True, parents=True) env.tmp_examples_path = tmp_examples_path # type: ignore[attr-defined] # pyright: ignore[reportAttributeAccessIssue] def setup(app: Sphinx) -> dict[str, bool]: app.connect("env-before-read-docs", 
on_env_before_read_docs) app.connect("missing-reference", on_missing_reference) app.connect("warn-missing-reference", on_warn_missing_reference) app.add_config_value("ignore_missing_refs", default={}, rebuild="") return {"parallel_read_safe": True, "parallel_write_safe": True} python-advanced-alchemy-1.0.1/uv.lock000066400000000000000000023356471476663714600175710ustar00rootroot00000000000000version = 1 revision = 1 requires-python = ">=3.9" resolution-markers = [ "python_full_version >= '3.13'", "python_full_version >= '3.11' and python_full_version < '3.13'", "python_full_version == '3.10.*'", "python_full_version < '3.10'", ] [[package]] name = "advanced-alchemy" version = "1.0.1" source = { editable = "." } dependencies = [ { name = "alembic" }, { name = "eval-type-backport", marker = "python_full_version < '3.10'" }, { name = "greenlet" }, { name = "sqlalchemy" }, { name = "typing-extensions" }, ] [package.optional-dependencies] cli = [ { name = "rich-click" }, ] nanoid = [ { name = "fastnanoid" }, ] uuid = [ { name = "uuid-utils" }, ] [package.dev-dependencies] build = [ { name = "bump-my-version" }, ] cockroachdb = [ { name = "asyncpg" }, { name = "psycopg", extra = ["binary", "pool"] }, { name = "psycopg2-binary" }, { name = "sqlalchemy-cockroachdb" }, ] dev = [ { name = "aioodbc" }, { name = "aiosqlite" }, { name = "asgi-lifespan" }, { name = "asyncmy" }, { name = "asyncpg" }, { name = "asyncpg-stubs" }, { name = "auto-pytabs", extra = ["sphinx"] }, { name = "bump-my-version" }, { name = "click" }, { name = "coverage" }, { name = "duckdb" }, { name = "duckdb-engine" }, { name = "fastapi", extra = ["all"] }, { name = "flask", extra = ["async"] }, { name = "flask-sqlalchemy" }, { name = "litestar", extra = ["cli"] }, { name = "mypy" }, { name = "myst-parser", version = "3.0.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, { name = "myst-parser", version = "4.0.1", source = { registry = 
"https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, { name = "oracledb" }, { name = "pre-commit" }, { name = "psycopg", extra = ["binary", "pool"] }, { name = "psycopg2-binary" }, { name = "pydantic-extra-types" }, { name = "pyodbc" }, { name = "pyright" }, { name = "pytest" }, { name = "pytest-asyncio" }, { name = "pytest-click" }, { name = "pytest-cov" }, { name = "pytest-databases" }, { name = "pytest-lazy-fixtures" }, { name = "pytest-mock" }, { name = "pytest-rerunfailures" }, { name = "pytest-sugar" }, { name = "pytest-xdist" }, { name = "pytz" }, { name = "rich-click" }, { name = "ruff" }, { name = "sanic", extra = ["ext"] }, { name = "sanic-testing" }, { name = "shibuya" }, { name = "slotscheck" }, { name = "sphinx", version = "7.4.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, { name = "sphinx", version = "8.2.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, { name = "sphinx-autobuild" }, { name = "sphinx-autodoc-typehints", version = "2.3.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, { name = "sphinx-autodoc-typehints", version = "3.0.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, { name = "sphinx-autodoc-typehints", version = "3.1.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, { name = "sphinx-click" }, { name = "sphinx-copybutton" }, { name = "sphinx-design" }, { name = "sphinx-paramlinks" }, { name = "sphinx-togglebutton" }, { name = "sphinx-toolbox" }, { name = "sphinxcontrib-mermaid" }, { name = "sqlalchemy-cockroachdb" }, { name = "sqlalchemy-spanner" }, { name = "starlette" }, { name = "time-machine" }, { name = "types-aiofiles" }, { name = 
"types-colorama" }, { name = "types-docutils" }, { name = "types-pillow" }, { name = "types-psycopg2" }, { name = "types-pygments" }, { name = "types-pymysql" }, { name = "types-python-dateutil" }, { name = "types-pytz" }, { name = "types-pyyaml" }, { name = "types-ujson" }, ] doc = [ { name = "auto-pytabs", extra = ["sphinx"] }, { name = "myst-parser", version = "3.0.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, { name = "myst-parser", version = "4.0.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, { name = "shibuya" }, { name = "sphinx", version = "7.4.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, { name = "sphinx", version = "8.2.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, { name = "sphinx-autobuild" }, { name = "sphinx-autodoc-typehints", version = "2.3.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, { name = "sphinx-autodoc-typehints", version = "3.0.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, { name = "sphinx-autodoc-typehints", version = "3.1.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, { name = "sphinx-click" }, { name = "sphinx-copybutton" }, { name = "sphinx-design" }, { name = "sphinx-paramlinks" }, { name = "sphinx-togglebutton" }, { name = "sphinx-toolbox" }, { name = "sphinxcontrib-mermaid" }, ] duckdb = [ { name = "duckdb" }, { name = "duckdb-engine" }, { name = "pytz" }, ] fastapi = [ { name = "fastapi", extra = ["all"] }, { name = "starlette" }, ] flask = [ { name = "flask", extra = ["async"] }, { name = "flask-sqlalchemy" }, ] lint = [ { name = 
"asyncpg-stubs" }, { name = "mypy" }, { name = "pre-commit" }, { name = "pyright" }, { name = "ruff" }, { name = "slotscheck" }, { name = "types-aiofiles" }, { name = "types-colorama" }, { name = "types-docutils" }, { name = "types-pillow" }, { name = "types-psycopg2" }, { name = "types-pygments" }, { name = "types-pymysql" }, { name = "types-python-dateutil" }, { name = "types-pytz" }, { name = "types-pyyaml" }, { name = "types-ujson" }, ] litestar = [ { name = "litestar", extra = ["cli"] }, ] mssql = [ { name = "aioodbc" }, { name = "pyodbc" }, ] mysql = [ { name = "asyncmy" }, ] oracle = [ { name = "oracledb" }, ] postgres = [ { name = "asyncpg" }, { name = "psycopg", extra = ["binary", "pool"] }, { name = "psycopg2-binary" }, ] sanic = [ { name = "sanic", extra = ["ext"] }, { name = "sanic-testing" }, ] spanner = [ { name = "sqlalchemy-spanner" }, ] sqlite = [ { name = "aiosqlite" }, ] test = [ { name = "asgi-lifespan" }, { name = "click" }, { name = "coverage" }, { name = "pydantic-extra-types" }, { name = "pytest" }, { name = "pytest-asyncio" }, { name = "pytest-click" }, { name = "pytest-cov" }, { name = "pytest-databases" }, { name = "pytest-lazy-fixtures" }, { name = "pytest-mock" }, { name = "pytest-rerunfailures" }, { name = "pytest-sugar" }, { name = "pytest-xdist" }, { name = "rich-click" }, { name = "time-machine" }, ] [package.metadata] requires-dist = [ { name = "alembic", specifier = ">=1.12.0" }, { name = "eval-type-backport", marker = "python_full_version < '3.10'" }, { name = "fastnanoid", marker = "extra == 'nanoid'", specifier = ">=0.4.1" }, { name = "greenlet" }, { name = "rich-click", marker = "extra == 'cli'" }, { name = "sqlalchemy", specifier = ">=2.0.20" }, { name = "typing-extensions", specifier = ">=4.0.0" }, { name = "uuid-utils", marker = "extra == 'uuid'", specifier = ">=0.6.1" }, ] provides-extras = ["cli", "nanoid", "uuid"] [package.metadata.requires-dev] build = [{ name = "bump-my-version" }] cockroachdb = [ { name = "asyncpg", 
specifier = ">=0.29.0" }, { name = "psycopg", extras = ["binary", "pool"], specifier = ">=3.2.3" }, { name = "psycopg2-binary", specifier = ">=2.9.10" }, { name = "sqlalchemy-cockroachdb", specifier = ">=2.0.2" }, ] dev = [ { name = "aioodbc", specifier = ">=0.5.0" }, { name = "aiosqlite", specifier = ">=0.20.0" }, { name = "asgi-lifespan" }, { name = "asyncmy", specifier = ">=0.2.9" }, { name = "asyncpg", specifier = ">=0.29.0" }, { name = "asyncpg-stubs" }, { name = "auto-pytabs", extras = ["sphinx"], specifier = ">=0.5.0" }, { name = "bump-my-version" }, { name = "click" }, { name = "coverage", specifier = ">=7.6.1" }, { name = "duckdb", specifier = ">=1.1.2" }, { name = "duckdb-engine", specifier = ">=0.13.4" }, { name = "fastapi", extras = ["all"], specifier = ">=0.115.3" }, { name = "flask", extras = ["async"] }, { name = "flask-sqlalchemy", specifier = ">=3.1.1" }, { name = "litestar", extras = ["cli"], specifier = ">=2.15.0" }, { name = "mypy", specifier = ">=1.13.0" }, { name = "myst-parser" }, { name = "oracledb", specifier = ">=2.4.1" }, { name = "pre-commit", specifier = ">=3.5.0" }, { name = "psycopg", extras = ["binary", "pool"], specifier = ">=3.2.3" }, { name = "psycopg2-binary", specifier = ">=2.9.10" }, { name = "pydantic-extra-types" }, { name = "pyodbc", specifier = ">=5.2.0" }, { name = "pyright", specifier = ">=1.1.386" }, { name = "pytest", specifier = ">=7.4.4" }, { name = "pytest-asyncio", specifier = ">=0.23.8" }, { name = "pytest-click" }, { name = "pytest-cov", specifier = ">=5.0.0" }, { name = "pytest-databases" }, { name = "pytest-lazy-fixtures", specifier = ">=1.1.1" }, { name = "pytest-mock", specifier = ">=3.14.0" }, { name = "pytest-rerunfailures" }, { name = "pytest-sugar", specifier = ">=1.0.0" }, { name = "pytest-xdist", specifier = ">=3.6.1" }, { name = "pytz", specifier = ">=2024.2" }, { name = "rich-click" }, { name = "ruff", specifier = ">=0.7.1" }, { name = "sanic" }, { name = "sanic", extras = ["ext"], specifier = 
">=24.6.0" }, { name = "sanic-testing", specifier = ">=24.6.0" }, { name = "shibuya" }, { name = "slotscheck", specifier = ">=0.16.5" }, { name = "sphinx", marker = "python_full_version < '3.10'", specifier = ">=7.0.0" }, { name = "sphinx", marker = "python_full_version >= '3.10'", specifier = ">=8.0.0" }, { name = "sphinx-autobuild", specifier = ">=2021.3.14" }, { name = "sphinx-autodoc-typehints" }, { name = "sphinx-click", specifier = ">=6.0.0" }, { name = "sphinx-copybutton", specifier = ">=0.5.2" }, { name = "sphinx-design", specifier = ">=0.5.0" }, { name = "sphinx-paramlinks", specifier = ">=0.6.0" }, { name = "sphinx-togglebutton", specifier = ">=0.3.2" }, { name = "sphinx-toolbox", specifier = ">=3.8.1" }, { name = "sphinxcontrib-mermaid", specifier = ">=0.9.2" }, { name = "sqlalchemy-cockroachdb", specifier = ">=2.0.2" }, { name = "sqlalchemy-spanner", specifier = ">=1.7.0" }, { name = "starlette" }, { name = "time-machine", specifier = ">=2.15.0" }, { name = "types-aiofiles" }, { name = "types-colorama" }, { name = "types-docutils" }, { name = "types-pillow" }, { name = "types-psycopg2" }, { name = "types-pygments" }, { name = "types-pymysql" }, { name = "types-python-dateutil" }, { name = "types-pytz" }, { name = "types-pyyaml" }, { name = "types-ujson" }, ] doc = [ { name = "auto-pytabs", extras = ["sphinx"], specifier = ">=0.5.0" }, { name = "myst-parser" }, { name = "shibuya" }, { name = "sphinx", marker = "python_full_version < '3.10'", specifier = ">=7.0.0" }, { name = "sphinx", marker = "python_full_version >= '3.10'", specifier = ">=8.0.0" }, { name = "sphinx-autobuild", specifier = ">=2021.3.14" }, { name = "sphinx-autodoc-typehints" }, { name = "sphinx-click", specifier = ">=6.0.0" }, { name = "sphinx-copybutton", specifier = ">=0.5.2" }, { name = "sphinx-design", specifier = ">=0.5.0" }, { name = "sphinx-paramlinks", specifier = ">=0.6.0" }, { name = "sphinx-togglebutton", specifier = ">=0.3.2" }, { name = "sphinx-toolbox", specifier = 
">=3.8.1" }, { name = "sphinxcontrib-mermaid", specifier = ">=0.9.2" }, ] duckdb = [ { name = "duckdb", specifier = ">=1.1.2" }, { name = "duckdb-engine", specifier = ">=0.13.4" }, { name = "pytz", specifier = ">=2024.2" }, ] fastapi = [ { name = "fastapi", extras = ["all"], specifier = ">=0.115.3" }, { name = "starlette" }, ] flask = [ { name = "flask", extras = ["async"] }, { name = "flask-sqlalchemy", specifier = ">=3.1.1" }, ] lint = [ { name = "asyncpg-stubs" }, { name = "mypy", specifier = ">=1.13.0" }, { name = "pre-commit", specifier = ">=3.5.0" }, { name = "pyright", specifier = ">=1.1.386" }, { name = "ruff", specifier = ">=0.7.1" }, { name = "slotscheck", specifier = ">=0.16.5" }, { name = "types-aiofiles" }, { name = "types-colorama" }, { name = "types-docutils" }, { name = "types-pillow" }, { name = "types-psycopg2" }, { name = "types-pygments" }, { name = "types-pymysql" }, { name = "types-python-dateutil" }, { name = "types-pytz" }, { name = "types-pyyaml" }, { name = "types-ujson" }, ] litestar = [{ name = "litestar", extras = ["cli"], specifier = ">=2.15.0" }] mssql = [ { name = "aioodbc", specifier = ">=0.5.0" }, { name = "pyodbc", specifier = ">=5.2.0" }, ] mysql = [{ name = "asyncmy", specifier = ">=0.2.9" }] oracle = [{ name = "oracledb", specifier = ">=2.4.1" }] postgres = [ { name = "asyncpg", specifier = ">=0.29.0" }, { name = "psycopg", extras = ["binary", "pool"], specifier = ">=3.2.3" }, { name = "psycopg2-binary", specifier = ">=2.9.10" }, ] sanic = [ { name = "sanic" }, { name = "sanic", extras = ["ext"], specifier = ">=24.6.0" }, { name = "sanic-testing", specifier = ">=24.6.0" }, ] spanner = [{ name = "sqlalchemy-spanner", specifier = ">=1.7.0" }] sqlite = [{ name = "aiosqlite", specifier = ">=0.20.0" }] test = [ { name = "asgi-lifespan" }, { name = "click" }, { name = "coverage", specifier = ">=7.6.1" }, { name = "pydantic-extra-types" }, { name = "pytest", specifier = ">=7.4.4" }, { name = "pytest-asyncio", specifier = ">=0.23.8" }, 
{ name = "pytest-click" }, { name = "pytest-cov", specifier = ">=5.0.0" }, { name = "pytest-databases" }, { name = "pytest-lazy-fixtures", specifier = ">=1.1.1" }, { name = "pytest-mock", specifier = ">=3.14.0" }, { name = "pytest-rerunfailures" }, { name = "pytest-sugar", specifier = ">=1.0.0" }, { name = "pytest-xdist", specifier = ">=3.6.1" }, { name = "rich-click" }, { name = "time-machine", specifier = ">=2.15.0" }, ] [[package]] name = "aiofiles" version = "24.1.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/0b/03/a88171e277e8caa88a4c77808c20ebb04ba74cc4681bf1e9416c862de237/aiofiles-24.1.0.tar.gz", hash = "sha256:22a075c9e5a3810f0c2e48f3008c94d68c65d763b9b03857924c99e57355166c", size = 30247 } wheels = [ { url = "https://files.pythonhosted.org/packages/a5/45/30bb92d442636f570cb5651bc661f52b610e2eec3f891a5dc3a4c3667db0/aiofiles-24.1.0-py3-none-any.whl", hash = "sha256:b4ec55f4195e3eb5d7abd1bf7e061763e864dd4954231fb8539a0ef8bb8260e5", size = 15896 }, ] [[package]] name = "aioodbc" version = "0.5.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pyodbc" }, ] sdist = { url = "https://files.pythonhosted.org/packages/45/87/3a7580938f217212a574ba0d1af78203fc278fc439815f3fc515a7fdc12b/aioodbc-0.5.0.tar.gz", hash = "sha256:cbccd89ce595c033a49c9e6b4b55bbace7613a104b8a46e3d4c58c4bc4f25075", size = 41298 } wheels = [ { url = "https://files.pythonhosted.org/packages/b0/80/4d1565bc16b53cd603c73dc4bc770e2e6418d957417e05031314760dc28c/aioodbc-0.5.0-py3-none-any.whl", hash = "sha256:bcaf16f007855fa4bf0ce6754b1f72c6c5a3d544188849577ddd55c5dc42985e", size = 19449 }, ] [[package]] name = "aiosqlite" version = "0.21.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] sdist = { url = "https://files.pythonhosted.org/packages/13/7d/8bca2bf9a247c2c5dfeec1d7a5f40db6518f88d314b8bca9da29670d2671/aiosqlite-0.21.0.tar.gz", hash = 
"sha256:131bb8056daa3bc875608c631c678cda73922a2d4ba8aec373b19f18c17e7aa3", size = 13454 } wheels = [ { url = "https://files.pythonhosted.org/packages/f5/10/6c25ed6de94c49f88a91fa5018cb4c0f3625f31d5be9f771ebe5cc7cd506/aiosqlite-0.21.0-py3-none-any.whl", hash = "sha256:2549cf4057f95f53dcba16f2b64e8e2791d7e1adedb13197dd8ed77bb226d7d0", size = 15792 }, ] [[package]] name = "alabaster" version = "0.7.16" source = { registry = "https://pypi.org/simple" } resolution-markers = [ "python_full_version < '3.10'", ] sdist = { url = "https://files.pythonhosted.org/packages/c9/3e/13dd8e5ed9094e734ac430b5d0eb4f2bb001708a8b7856cbf8e084e001ba/alabaster-0.7.16.tar.gz", hash = "sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65", size = 23776 } wheels = [ { url = "https://files.pythonhosted.org/packages/32/34/d4e1c02d3bee589efb5dfa17f88ea08bdb3e3eac12bc475462aec52ed223/alabaster-0.7.16-py3-none-any.whl", hash = "sha256:b46733c07dce03ae4e150330b975c75737fa60f0a7c591b6c8bf4928a28e2c92", size = 13511 }, ] [[package]] name = "alabaster" version = "1.0.0" source = { registry = "https://pypi.org/simple" } resolution-markers = [ "python_full_version >= '3.13'", "python_full_version >= '3.11' and python_full_version < '3.13'", "python_full_version == '3.10.*'", ] sdist = { url = "https://files.pythonhosted.org/packages/a6/f8/d9c74d0daf3f742840fd818d69cfae176fa332022fd44e3469487d5a9420/alabaster-1.0.0.tar.gz", hash = "sha256:c00dca57bca26fa62a6d7d0a9fcce65f3e026e9bfe33e9c538fd3fbb2144fd9e", size = 24210 } wheels = [ { url = "https://files.pythonhosted.org/packages/7e/b3/6b4067be973ae96ba0d615946e314c5ae35f9f993eca561b356540bb0c2b/alabaster-1.0.0-py3-none-any.whl", hash = "sha256:fc6786402dc3fcb2de3cabd5fe455a2db534b371124f1f21de8731783dec828b", size = 13929 }, ] [[package]] name = "alembic" version = "1.15.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "mako" }, { name = "sqlalchemy" }, { name = "typing-extensions" }, ] sdist = { url = 
"https://files.pythonhosted.org/packages/4a/ed/901044acb892caa5604bf818d2da9ab0df94ef606c6059fdf367894ebf60/alembic-1.15.1.tar.gz", hash = "sha256:e1a1c738577bca1f27e68728c910cd389b9a92152ff91d902da649c192e30c49", size = 1924789 } wheels = [ { url = "https://files.pythonhosted.org/packages/99/f7/d398fae160568472ddce0b3fde9c4581afc593019a6adc91006a66406991/alembic-1.15.1-py3-none-any.whl", hash = "sha256:197de710da4b3e91cf66a826a5b31b5d59a127ab41bd0fc42863e2902ce2bbbe", size = 231753 }, ] [[package]] name = "annotated-types" version = "0.7.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081 } wheels = [ { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643 }, ] [[package]] name = "anyio" version = "4.9.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, { name = "idna" }, { name = "sniffio" }, { name = "typing-extensions", marker = "python_full_version < '3.13'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/95/7d/4c1bd541d4dffa1b52bd83fb8527089e097a106fc90b467a7313b105f840/anyio-4.9.0.tar.gz", hash = "sha256:673c0c244e15788651a4ff38710fea9675823028a6f08a5eda409e0c9840a028", size = 190949 } wheels = [ { url = "https://files.pythonhosted.org/packages/a1/ee/48ca1a7c89ffec8b6a0c5d02b89c305671d5ffd8d3c94acf8b8c408575bb/anyio-4.9.0-py3-none-any.whl", hash = "sha256:9f76d541cad6e36af7beb62e978876f3b41e3e04f2c1fbf0884604c0a9c4d93c", size = 100916 }, ] [[package]] name = "apeye" version = "1.4.1" source = { registry = 
"https://pypi.org/simple" } dependencies = [ { name = "apeye-core" }, { name = "domdf-python-tools" }, { name = "platformdirs" }, { name = "requests" }, ] sdist = { url = "https://files.pythonhosted.org/packages/4f/6b/cc65e31843d7bfda8313a9dc0c77a21e8580b782adca53c7cb3e511fe023/apeye-1.4.1.tar.gz", hash = "sha256:14ea542fad689e3bfdbda2189a354a4908e90aee4bf84c15ab75d68453d76a36", size = 99219 } wheels = [ { url = "https://files.pythonhosted.org/packages/89/7b/2d63664777b3e831ac1b1d8df5bbf0b7c8bee48e57115896080890527b1b/apeye-1.4.1-py3-none-any.whl", hash = "sha256:44e58a9104ec189bf42e76b3a7fe91e2b2879d96d48e9a77e5e32ff699c9204e", size = 107989 }, ] [[package]] name = "apeye-core" version = "1.1.5" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "domdf-python-tools" }, { name = "idna" }, ] sdist = { url = "https://files.pythonhosted.org/packages/e5/4c/4f108cfd06923bd897bf992a6ecb6fb122646ee7af94d7f9a64abd071d4c/apeye_core-1.1.5.tar.gz", hash = "sha256:5de72ed3d00cc9b20fea55e54b7ab8f5ef8500eb33a5368bc162a5585e238a55", size = 96511 } wheels = [ { url = "https://files.pythonhosted.org/packages/77/9f/fa9971d2a0c6fef64c87ba362a493a4f230eff4ea8dfb9f4c7cbdf71892e/apeye_core-1.1.5-py3-none-any.whl", hash = "sha256:dc27a93f8c9e246b3b238c5ea51edf6115ab2618ef029b9f2d9a190ec8228fbf", size = 99286 }, ] [[package]] name = "asgi-lifespan" version = "2.1.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "sniffio" }, ] sdist = { url = "https://files.pythonhosted.org/packages/6a/da/e7908b54e0f8043725a990bf625f2041ecf6bfe8eb7b19407f1c00b630f7/asgi-lifespan-2.1.0.tar.gz", hash = "sha256:5e2effaf0bfe39829cf2d64e7ecc47c7d86d676a6599f7afba378c31f5e3a308", size = 15627 } wheels = [ { url = "https://files.pythonhosted.org/packages/2f/f5/c36551e93acba41a59939ae6a0fb77ddb3f2e8e8caa716410c65f7341f72/asgi_lifespan-2.1.0-py3-none-any.whl", hash = "sha256:ed840706680e28428c01e14afb3875d7d76d3206f3d5b2f2294e059b5c23804f", size = 10895 }, ] 
[[package]] name = "asgiref" version = "3.8.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions", marker = "python_full_version < '3.11'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/29/38/b3395cc9ad1b56d2ddac9970bc8f4141312dbaec28bc7c218b0dfafd0f42/asgiref-3.8.1.tar.gz", hash = "sha256:c343bd80a0bec947a9860adb4c432ffa7db769836c64238fc34bdc3fec84d590", size = 35186 } wheels = [ { url = "https://files.pythonhosted.org/packages/39/e3/893e8757be2612e6c266d9bb58ad2e3651524b5b40cf56761e985a28b13e/asgiref-3.8.1-py3-none-any.whl", hash = "sha256:3e1e3ecc849832fe52ccf2cb6686b7a55f82bb1d6aee72a58826471390335e47", size = 23828 }, ] [[package]] name = "async-timeout" version = "5.0.1" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/a5/ae/136395dfbfe00dfc94da3f3e136d0b13f394cba8f4841120e34226265780/async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3", size = 9274 } wheels = [ { url = "https://files.pythonhosted.org/packages/fe/ba/e2081de779ca30d473f21f5b30e0e737c438205440784c7dfc81efc2b029/async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c", size = 6233 }, ] [[package]] name = "asyncmy" version = "0.2.10" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/b5/76/55cc0577f9e838c5a5213bf33159b9e484c9d9820a2bafd4d6bfa631bf86/asyncmy-0.2.10.tar.gz", hash = "sha256:f4b67edadf7caa56bdaf1c2e6cf451150c0a86f5353744deabe4426fe27aff4e", size = 63889 } wheels = [ { url = "https://files.pythonhosted.org/packages/78/c9/412b137c52f6c6437faba27412ccb32721571c42e59bc4f799796316866b/asyncmy-0.2.10-cp310-cp310-macosx_13_0_x86_64.whl", hash = "sha256:c2237c8756b8f374099bd320c53b16f7ec0cee8258f00d72eed5a2cd3d251066", size = 1803880 }, { url = 
"https://files.pythonhosted.org/packages/74/f3/c9520f489dc42a594c8ad3cbe2088ec511245a3c55c3333e6fa949838420/asyncmy-0.2.10-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:6e98d4fbf7ea0d99dfecb24968c9c350b019397ba1af9f181d51bb0f6f81919b", size = 1736363 }, { url = "https://files.pythonhosted.org/packages/52/9c/3c531a414290cbde9313cad54bb525caf6b1055ffa56bb271bf70512b533/asyncmy-0.2.10-cp310-cp310-manylinux_2_17_i686.manylinux_2_5_i686.manylinux1_i686.manylinux2014_i686.whl", hash = "sha256:b1b1ee03556c7eda6422afc3aca132982a84706f8abf30f880d642f50670c7ed", size = 4970043 }, { url = "https://files.pythonhosted.org/packages/03/64/176ed8a79d3a24b2e8ba7a11b429553f29fea20276537651526f3a87660b/asyncmy-0.2.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e2b97672ea3f0b335c0ffd3da1a5727b530f82f5032cd87e86c3aa3ac6df7f3", size = 5168645 }, { url = "https://files.pythonhosted.org/packages/81/3f/46f126663649784ab6586bc9b482bca432a35588714170621db8d33d76e4/asyncmy-0.2.10-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c6471ce1f9ae1e6f0d55adfb57c49d0bcf5753a253cccbd33799ddb402fe7da2", size = 4988493 }, { url = "https://files.pythonhosted.org/packages/5f/c6/acce7ea4b74e092582d65744418940b2b8c661102a22a638f58e7b651c6f/asyncmy-0.2.10-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:10e2a10fe44a2b216a1ae58fbdafa3fed661a625ec3c030c560c26f6ab618522", size = 5158496 }, { url = "https://files.pythonhosted.org/packages/d5/01/d8fa0291083e9a0d899addda1f7608da37d28fff9bb4df1bd6f7f37354db/asyncmy-0.2.10-cp310-cp310-win32.whl", hash = "sha256:a791ab117787eb075bc37ed02caa7f3e30cca10f1b09ec7eeb51d733df1d49fc", size = 1624372 }, { url = "https://files.pythonhosted.org/packages/cf/a0/ad6669fd2870492749c189a72c881716a3727b7f0bc972fc8cea7a40879c/asyncmy-0.2.10-cp310-cp310-win_amd64.whl", hash = "sha256:bd16fdc0964a4a1a19aec9797ca631c3ff2530013fdcd27225fc2e48af592804", size = 1694174 }, { url = 
"https://files.pythonhosted.org/packages/72/1a/21b4af0d19862cc991f1095f006981a4f898599060dfa59f136e292b3e9a/asyncmy-0.2.10-cp311-cp311-macosx_13_0_x86_64.whl", hash = "sha256:7af0f1f31f800a8789620c195e92f36cce4def68ee70d625534544d43044ed2a", size = 1806974 }, { url = "https://files.pythonhosted.org/packages/1d/ce/3579a88123ead38e60e0b6e744224907e3d7a668518f9a46ed584df4f788/asyncmy-0.2.10-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:800116ab85dc53b24f484fb644fefffac56db7367a31e7d62f4097d495105a2c", size = 1738218 }, { url = "https://files.pythonhosted.org/packages/e2/39/10646bbafce22025be25aa709e83f0cdd3fb9089304cf9d3169a80540850/asyncmy-0.2.10-cp311-cp311-manylinux_2_17_i686.manylinux_2_5_i686.manylinux1_i686.manylinux2014_i686.whl", hash = "sha256:39525e9d7e557b83db268ed14b149a13530e0d09a536943dba561a8a1c94cc07", size = 5346417 }, { url = "https://files.pythonhosted.org/packages/8f/f8/3fb0d0481def3a0900778f7d04f50028a4a2d987087a2f1e718e6c236e01/asyncmy-0.2.10-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76e199d6b57918999efc702d2dbb182cb7ba8c604cdfc912517955219b16eaea", size = 5553197 }, { url = "https://files.pythonhosted.org/packages/82/a5/8281e8c0999fc6303b5b522ee82d1e338157a74f8bbbaa020e392b69156a/asyncmy-0.2.10-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:9ca8fdd7dbbf2d9b4c2d3a5fac42b058707d6a483b71fded29051b8ae198a250", size = 5337915 }, { url = "https://files.pythonhosted.org/packages/fe/f4/425108f5c6976ceb67b8f95bc73480fe777a95e7a89a29299664f5cb380f/asyncmy-0.2.10-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0df23db54e38602c803dacf1bbc1dcc4237a87223e659681f00d1a319a4f3826", size = 5524662 }, { url = "https://files.pythonhosted.org/packages/ff/32/17291b12dce380abbbec888ea9d4e863fd2116530bf2c87c1ab40b39f9d1/asyncmy-0.2.10-cp311-cp311-win32.whl", hash = "sha256:a16633032be020b931acfd7cd1862c7dad42a96ea0b9b28786f2ec48e0a86757", size = 1622375 }, { url = 
"https://files.pythonhosted.org/packages/e2/a3/76e65877de5e6fc853373908079adb711f80ed09aab4e152a533e0322375/asyncmy-0.2.10-cp311-cp311-win_amd64.whl", hash = "sha256:cca06212575922216b89218abd86a75f8f7375fc9c28159ea469f860785cdbc7", size = 1696693 }, { url = "https://files.pythonhosted.org/packages/b8/82/5a4b1aedae9b35f7885f10568437d80507d7a6704b51da2fc960a20c4948/asyncmy-0.2.10-cp312-cp312-macosx_13_0_x86_64.whl", hash = "sha256:42295530c5f36784031f7fa42235ef8dd93a75d9b66904de087e68ff704b4f03", size = 1783558 }, { url = "https://files.pythonhosted.org/packages/39/24/0fce480680531a29b51e1d2680a540c597e1a113aa1dc58cb7483c123a6b/asyncmy-0.2.10-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:641a853ffcec762905cbeceeb623839c9149b854d5c3716eb9a22c2b505802af", size = 1729268 }, { url = "https://files.pythonhosted.org/packages/c8/96/74dc1aaf1ab0bde88d3c6b3a70bd25f18796adb4e91b77ad580efe232df5/asyncmy-0.2.10-cp312-cp312-manylinux_2_17_i686.manylinux_2_5_i686.manylinux1_i686.manylinux2014_i686.whl", hash = "sha256:c554874223dd36b1cfc15e2cd0090792ea3832798e8fe9e9d167557e9cf31b4d", size = 5343513 }, { url = "https://files.pythonhosted.org/packages/9a/04/14662ff5b9cfab5cc11dcf91f2316e2f80d88fbd2156e458deef3e72512a/asyncmy-0.2.10-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd16e84391dde8edb40c57d7db634706cbbafb75e6a01dc8b68a63f8dd9e44ca", size = 5592344 }, { url = "https://files.pythonhosted.org/packages/7c/ac/3cf0abb3acd4f469bd012a1b4a01968bac07a142fca510da946b6ab1bf4f/asyncmy-0.2.10-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:9f6b44c4bf4bb69a2a1d9d26dee302473099105ba95283b479458c448943ed3c", size = 5300819 }, { url = "https://files.pythonhosted.org/packages/5c/23/6d05254d1c89ad15e7f32eb3df277afc7bbb2220faa83a76bea0b7bc6407/asyncmy-0.2.10-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:16d398b1aad0550c6fe1655b6758455e3554125af8aaf1f5abdc1546078c7257", size = 5548799 }, { url = 
"https://files.pythonhosted.org/packages/fe/32/b7ce9782c741b6a821a0d11772f180f431a5c3ba6eaf2e6dfa1c3cbcf4df/asyncmy-0.2.10-cp312-cp312-win32.whl", hash = "sha256:59d2639dcc23939ae82b93b40a683c15a091460a3f77fa6aef1854c0a0af99cc", size = 1597544 }, { url = "https://files.pythonhosted.org/packages/94/08/7de4f4a17196c355e4706ceba0ab60627541c78011881a7c69f41c6414c5/asyncmy-0.2.10-cp312-cp312-win_amd64.whl", hash = "sha256:4c6674073be97ffb7ac7f909e803008b23e50281131fef4e30b7b2162141a574", size = 1679064 }, { url = "https://files.pythonhosted.org/packages/12/c8/eaa11a1716ce4505fa4d06d04abd8e1bda3aaa71c7d29209330dbd061b7a/asyncmy-0.2.10-cp39-cp39-macosx_13_0_x86_64.whl", hash = "sha256:244289bd1bea84384866bde50b09fe5b24856640e30a04073eacb71987b7b6ad", size = 1807310 }, { url = "https://files.pythonhosted.org/packages/3a/50/4137cb6f0e2e57bee6ff71c5cbabea66efb88b90abc9d409609368d8314a/asyncmy-0.2.10-cp39-cp39-macosx_14_0_arm64.whl", hash = "sha256:6c9d024b160b9f869a21e62c4ef34a7b7a4b5a886ae03019d4182621ea804d2c", size = 1739290 }, { url = "https://files.pythonhosted.org/packages/cf/e9/46a7315d8a927ac012806c9502fd4d0b210554b415ef4a44319f961475b6/asyncmy-0.2.10-cp39-cp39-manylinux_2_17_i686.manylinux_2_5_i686.manylinux1_i686.manylinux2014_i686.whl", hash = "sha256:b57594eea942224626203503f24fa88a47eaab3f13c9f24435091ea910f4b966", size = 4967850 }, { url = "https://files.pythonhosted.org/packages/98/a2/fc991b329594bb372ddba296c89d7ace34271e35d92260cbea397abec40c/asyncmy-0.2.10-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:346192941470ac2d315f97afa14c0131ff846c911da14861baf8a1f8ed541664", size = 5169902 }, { url = "https://files.pythonhosted.org/packages/1c/c6/754aaf8d28ea76cf86cef6d07489f277221dbc8e1fd38490a037a3138e58/asyncmy-0.2.10-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:957c2b48c5228e5f91fdf389daf38261a7b8989ad0eb0d1ba4e5680ef2a4a078", size = 5012169 }, { url = 
"https://files.pythonhosted.org/packages/b2/6e/b66524785b89929da09adb5373ab360cc4ac2d97153dbd5b32e9904ac375/asyncmy-0.2.10-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:472989d7bfa405c108a7f3c408bbed52306504fb3aa28963d833cb7eeaafece0", size = 5186908 }, { url = "https://files.pythonhosted.org/packages/90/2f/36fbf0a7555507ce06bf5fa6f743d94f7bc38c1e6bfb5e9ba5dd51001b33/asyncmy-0.2.10-cp39-cp39-win32.whl", hash = "sha256:714b0fdadd72031e972de2bbbd14e35a19d5a7e001594f0c8a69f92f0d05acc9", size = 1626531 }, { url = "https://files.pythonhosted.org/packages/ff/99/cd737fbc8c1c14a0c39ca6d7e8f482c73a3990ecb150f2e7b2c5f2d665ab/asyncmy-0.2.10-cp39-cp39-win_amd64.whl", hash = "sha256:9fb58645d3da0b91db384f8519b16edc7dc421c966ada8647756318915d63696", size = 1696557 }, { url = "https://files.pythonhosted.org/packages/83/32/3317d5290737a3c4685343fe37e02567518357c46ed87c51f47139d31ded/asyncmy-0.2.10-pp310-pypy310_pp73-macosx_13_0_x86_64.whl", hash = "sha256:f10c977c60a95bd6ec6b8654e20c8f53bad566911562a7ad7117ca94618f05d3", size = 1627680 }, { url = "https://files.pythonhosted.org/packages/e9/e1/afeb50deb0554006c48b9f4f7b6b726e0aa42fa96d7cfbd3fdd0800765e2/asyncmy-0.2.10-pp310-pypy310_pp73-macosx_14_0_arm64.whl", hash = "sha256:aab07fbdb9466beaffef136ffabe388f0d295d8d2adb8f62c272f1d4076515b9", size = 1593957 }, { url = "https://files.pythonhosted.org/packages/be/c1/56d3721e2b2eab84320058c3458da168d143446031eca3799aed481c33d2/asyncmy-0.2.10-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux_2_5_i686.manylinux1_i686.manylinux2014_i686.whl", hash = "sha256:63144322ade68262201baae73ad0c8a06b98a3c6ae39d1f3f21c41cc5287066a", size = 1756531 }, { url = "https://files.pythonhosted.org/packages/ac/1a/295f06eb8e5926749265e08da9e2dc0dc14e0244bf36843997a1c8e18a50/asyncmy-0.2.10-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux_2_5_x86_64.manylinux1_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9659d95c6f2a611aec15bdd928950df937bf68bc4bbb68b809ee8924b6756067", size = 1752746 }, { url = 
"https://files.pythonhosted.org/packages/ab/09/3a5351acc6273c28333cad8193184de0070c617fd8385fd8ba23d789e08d/asyncmy-0.2.10-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:8ced4bd938e95ede0fb9fa54755773df47bdb9f29f142512501e613dd95cf4a4", size = 1614903 }, { url = "https://files.pythonhosted.org/packages/a3/d1/28829c381e52166563706f2bc5e8043ab8599fc1d7e9c8ab26b21f2b33f4/asyncmy-0.2.10-pp39-pypy39_pp73-macosx_13_0_x86_64.whl", hash = "sha256:4651caaee6f4d7a8eb478a0dc460f8e91ab09a2d8d32444bc2b235544c791947", size = 1625889 }, { url = "https://files.pythonhosted.org/packages/00/7f/110e9ef7cb38ff599725bbed08b76f656b2eae7505971ebc2a78b20716b9/asyncmy-0.2.10-pp39-pypy39_pp73-macosx_14_0_arm64.whl", hash = "sha256:ac091b327f01c38d91c697c810ba49e5f836890d48f6879ba0738040bb244290", size = 1592247 }, { url = "https://files.pythonhosted.org/packages/1c/e6/036d5c23193f2c24b8dd4610eeae70380034d9ef37c29785c1624a19c92f/asyncmy-0.2.10-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux_2_5_i686.manylinux1_i686.manylinux2014_i686.whl", hash = "sha256:e1d2d9387cd3971297486c21098e035c620149c9033369491f58fe4fc08825b6", size = 1754251 }, { url = "https://files.pythonhosted.org/packages/94/59/f97378316a48168e380948c814b346038f0f72fd99c986c42cba493edc7e/asyncmy-0.2.10-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux_2_5_x86_64.manylinux1_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a760cb486ddb2c936711325236e6b9213564a9bb5deb2f6949dbd16c8e4d739e", size = 1751010 }, { url = "https://files.pythonhosted.org/packages/24/7b/3f90c33daab8409498a6e57760c6bd23ba3ecef3c684b59c9c6177030073/asyncmy-0.2.10-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:1586f26633c05b16bcfc46d86e9875f4941280e12afa79a741cdf77ae4ccfb4d", size = 1613533 }, ] [[package]] name = "asyncpg" version = "0.30.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "async-timeout", marker = "python_full_version < '3.11'" }, ] sdist = { url = 
"https://files.pythonhosted.org/packages/2f/4c/7c991e080e106d854809030d8584e15b2e996e26f16aee6d757e387bc17d/asyncpg-0.30.0.tar.gz", hash = "sha256:c551e9928ab6707602f44811817f82ba3c446e018bfe1d3abecc8ba5f3eac851", size = 957746 } wheels = [ { url = "https://files.pythonhosted.org/packages/bb/07/1650a8c30e3a5c625478fa8aafd89a8dd7d85999bf7169b16f54973ebf2c/asyncpg-0.30.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bfb4dd5ae0699bad2b233672c8fc5ccbd9ad24b89afded02341786887e37927e", size = 673143 }, { url = "https://files.pythonhosted.org/packages/a0/9a/568ff9b590d0954553c56806766914c149609b828c426c5118d4869111d3/asyncpg-0.30.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:dc1f62c792752a49f88b7e6f774c26077091b44caceb1983509edc18a2222ec0", size = 645035 }, { url = "https://files.pythonhosted.org/packages/de/11/6f2fa6c902f341ca10403743701ea952bca896fc5b07cc1f4705d2bb0593/asyncpg-0.30.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3152fef2e265c9c24eec4ee3d22b4f4d2703d30614b0b6753e9ed4115c8a146f", size = 2912384 }, { url = "https://files.pythonhosted.org/packages/83/83/44bd393919c504ffe4a82d0aed8ea0e55eb1571a1dea6a4922b723f0a03b/asyncpg-0.30.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c7255812ac85099a0e1ffb81b10dc477b9973345793776b128a23e60148dd1af", size = 2947526 }, { url = "https://files.pythonhosted.org/packages/08/85/e23dd3a2b55536eb0ded80c457b0693352262dc70426ef4d4a6fc994fa51/asyncpg-0.30.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:578445f09f45d1ad7abddbff2a3c7f7c291738fdae0abffbeb737d3fc3ab8b75", size = 2895390 }, { url = "https://files.pythonhosted.org/packages/9b/26/fa96c8f4877d47dc6c1864fef5500b446522365da3d3d0ee89a5cce71a3f/asyncpg-0.30.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c42f6bb65a277ce4d93f3fba46b91a265631c8df7250592dd4f11f8b0152150f", size = 3015630 }, { url = 
"https://files.pythonhosted.org/packages/34/00/814514eb9287614188a5179a8b6e588a3611ca47d41937af0f3a844b1b4b/asyncpg-0.30.0-cp310-cp310-win32.whl", hash = "sha256:aa403147d3e07a267ada2ae34dfc9324e67ccc4cdca35261c8c22792ba2b10cf", size = 568760 }, { url = "https://files.pythonhosted.org/packages/f0/28/869a7a279400f8b06dd237266fdd7220bc5f7c975348fea5d1e6909588e9/asyncpg-0.30.0-cp310-cp310-win_amd64.whl", hash = "sha256:fb622c94db4e13137c4c7f98834185049cc50ee01d8f657ef898b6407c7b9c50", size = 625764 }, { url = "https://files.pythonhosted.org/packages/4c/0e/f5d708add0d0b97446c402db7e8dd4c4183c13edaabe8a8500b411e7b495/asyncpg-0.30.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5e0511ad3dec5f6b4f7a9e063591d407eee66b88c14e2ea636f187da1dcfff6a", size = 674506 }, { url = "https://files.pythonhosted.org/packages/6a/a0/67ec9a75cb24a1d99f97b8437c8d56da40e6f6bd23b04e2f4ea5d5ad82ac/asyncpg-0.30.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:915aeb9f79316b43c3207363af12d0e6fd10776641a7de8a01212afd95bdf0ed", size = 645922 }, { url = "https://files.pythonhosted.org/packages/5c/d9/a7584f24174bd86ff1053b14bb841f9e714380c672f61c906eb01d8ec433/asyncpg-0.30.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c198a00cce9506fcd0bf219a799f38ac7a237745e1d27f0e1f66d3707c84a5a", size = 3079565 }, { url = "https://files.pythonhosted.org/packages/a0/d7/a4c0f9660e333114bdb04d1a9ac70db690dd4ae003f34f691139a5cbdae3/asyncpg-0.30.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3326e6d7381799e9735ca2ec9fd7be4d5fef5dcbc3cb555d8a463d8460607956", size = 3109962 }, { url = "https://files.pythonhosted.org/packages/3c/21/199fd16b5a981b1575923cbb5d9cf916fdc936b377e0423099f209e7e73d/asyncpg-0.30.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:51da377487e249e35bd0859661f6ee2b81db11ad1f4fc036194bc9cb2ead5056", size = 3064791 }, { url = 
"https://files.pythonhosted.org/packages/77/52/0004809b3427534a0c9139c08c87b515f1c77a8376a50ae29f001e53962f/asyncpg-0.30.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bc6d84136f9c4d24d358f3b02be4b6ba358abd09f80737d1ac7c444f36108454", size = 3188696 }, { url = "https://files.pythonhosted.org/packages/52/cb/fbad941cd466117be58b774a3f1cc9ecc659af625f028b163b1e646a55fe/asyncpg-0.30.0-cp311-cp311-win32.whl", hash = "sha256:574156480df14f64c2d76450a3f3aaaf26105869cad3865041156b38459e935d", size = 567358 }, { url = "https://files.pythonhosted.org/packages/3c/0a/0a32307cf166d50e1ad120d9b81a33a948a1a5463ebfa5a96cc5606c0863/asyncpg-0.30.0-cp311-cp311-win_amd64.whl", hash = "sha256:3356637f0bd830407b5597317b3cb3571387ae52ddc3bca6233682be88bbbc1f", size = 629375 }, { url = "https://files.pythonhosted.org/packages/4b/64/9d3e887bb7b01535fdbc45fbd5f0a8447539833b97ee69ecdbb7a79d0cb4/asyncpg-0.30.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c902a60b52e506d38d7e80e0dd5399f657220f24635fee368117b8b5fce1142e", size = 673162 }, { url = "https://files.pythonhosted.org/packages/6e/eb/8b236663f06984f212a087b3e849731f917ab80f84450e943900e8ca4052/asyncpg-0.30.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aca1548e43bbb9f0f627a04666fedaca23db0a31a84136ad1f868cb15deb6e3a", size = 637025 }, { url = "https://files.pythonhosted.org/packages/cc/57/2dc240bb263d58786cfaa60920779af6e8d32da63ab9ffc09f8312bd7a14/asyncpg-0.30.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c2a2ef565400234a633da0eafdce27e843836256d40705d83ab7ec42074efb3", size = 3496243 }, { url = "https://files.pythonhosted.org/packages/f4/40/0ae9d061d278b10713ea9021ef6b703ec44698fe32178715a501ac696c6b/asyncpg-0.30.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1292b84ee06ac8a2ad8e51c7475aa309245874b61333d97411aab835c4a2f737", size = 3575059 }, { url = 
"https://files.pythonhosted.org/packages/c3/75/d6b895a35a2c6506952247640178e5f768eeb28b2e20299b6a6f1d743ba0/asyncpg-0.30.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0f5712350388d0cd0615caec629ad53c81e506b1abaaf8d14c93f54b35e3595a", size = 3473596 }, { url = "https://files.pythonhosted.org/packages/c8/e7/3693392d3e168ab0aebb2d361431375bd22ffc7b4a586a0fc060d519fae7/asyncpg-0.30.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:db9891e2d76e6f425746c5d2da01921e9a16b5a71a1c905b13f30e12a257c4af", size = 3641632 }, { url = "https://files.pythonhosted.org/packages/32/ea/15670cea95745bba3f0352341db55f506a820b21c619ee66b7d12ea7867d/asyncpg-0.30.0-cp312-cp312-win32.whl", hash = "sha256:68d71a1be3d83d0570049cd1654a9bdfe506e794ecc98ad0873304a9f35e411e", size = 560186 }, { url = "https://files.pythonhosted.org/packages/7e/6b/fe1fad5cee79ca5f5c27aed7bd95baee529c1bf8a387435c8ba4fe53d5c1/asyncpg-0.30.0-cp312-cp312-win_amd64.whl", hash = "sha256:9a0292c6af5c500523949155ec17b7fe01a00ace33b68a476d6b5059f9630305", size = 621064 }, { url = "https://files.pythonhosted.org/packages/3a/22/e20602e1218dc07692acf70d5b902be820168d6282e69ef0d3cb920dc36f/asyncpg-0.30.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:05b185ebb8083c8568ea8a40e896d5f7af4b8554b64d7719c0eaa1eb5a5c3a70", size = 670373 }, { url = "https://files.pythonhosted.org/packages/3d/b3/0cf269a9d647852a95c06eb00b815d0b95a4eb4b55aa2d6ba680971733b9/asyncpg-0.30.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c47806b1a8cbb0a0db896f4cd34d89942effe353a5035c62734ab13b9f938da3", size = 634745 }, { url = "https://files.pythonhosted.org/packages/8e/6d/a4f31bf358ce8491d2a31bfe0d7bcf25269e80481e49de4d8616c4295a34/asyncpg-0.30.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b6fde867a74e8c76c71e2f64f80c64c0f3163e687f1763cfaf21633ec24ec33", size = 3512103 }, { url = 
"https://files.pythonhosted.org/packages/96/19/139227a6e67f407b9c386cb594d9628c6c78c9024f26df87c912fabd4368/asyncpg-0.30.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46973045b567972128a27d40001124fbc821c87a6cade040cfcd4fa8a30bcdc4", size = 3592471 }, { url = "https://files.pythonhosted.org/packages/67/e4/ab3ca38f628f53f0fd28d3ff20edff1c975dd1cb22482e0061916b4b9a74/asyncpg-0.30.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9110df111cabc2ed81aad2f35394a00cadf4f2e0635603db6ebbd0fc896f46a4", size = 3496253 }, { url = "https://files.pythonhosted.org/packages/ef/5f/0bf65511d4eeac3a1f41c54034a492515a707c6edbc642174ae79034d3ba/asyncpg-0.30.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:04ff0785ae7eed6cc138e73fc67b8e51d54ee7a3ce9b63666ce55a0bf095f7ba", size = 3662720 }, { url = "https://files.pythonhosted.org/packages/e7/31/1513d5a6412b98052c3ed9158d783b1e09d0910f51fbe0e05f56cc370bc4/asyncpg-0.30.0-cp313-cp313-win32.whl", hash = "sha256:ae374585f51c2b444510cdf3595b97ece4f233fde739aa14b50e0d64e8a7a590", size = 560404 }, { url = "https://files.pythonhosted.org/packages/c8/a4/cec76b3389c4c5ff66301cd100fe88c318563ec8a520e0b2e792b5b84972/asyncpg-0.30.0-cp313-cp313-win_amd64.whl", hash = "sha256:f59b430b8e27557c3fb9869222559f7417ced18688375825f8f12302c34e915e", size = 621623 }, { url = "https://files.pythonhosted.org/packages/b4/82/d94f3ed6921136a0ef40a825740eda19437ccdad7d92d924302dca1d5c9e/asyncpg-0.30.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6f4e83f067b35ab5e6371f8a4c93296e0439857b4569850b178a01385e82e9ad", size = 673026 }, { url = "https://files.pythonhosted.org/packages/4e/db/7db8b73c5d86ec9a21807f405e0698f8f637a8a3ca14b7b6fd4259b66bcf/asyncpg-0.30.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5df69d55add4efcd25ea2a3b02025b669a285b767bfbf06e356d68dbce4234ff", size = 644732 }, { url = 
"https://files.pythonhosted.org/packages/eb/a0/1f1910659d08050cb3e8f7d82b32983974798d7fd4ddf7620b8e2023d4ac/asyncpg-0.30.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3479a0d9a852c7c84e822c073622baca862d1217b10a02dd57ee4a7a081f708", size = 2911761 }, { url = "https://files.pythonhosted.org/packages/4d/53/5aa0d92488ded50bab2b6626430ed9743b0b7e2d864a2b435af1ccbf219a/asyncpg-0.30.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26683d3b9a62836fad771a18ecf4659a30f348a561279d6227dab96182f46144", size = 2946595 }, { url = "https://files.pythonhosted.org/packages/c5/cd/d6d548d8ee721f4e0f7fbbe509bbac140d556c2e45814d945540c96cf7d4/asyncpg-0.30.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1b982daf2441a0ed314bd10817f1606f1c28b1136abd9e4f11335358c2c631cb", size = 2890135 }, { url = "https://files.pythonhosted.org/packages/46/f0/28df398b685dabee20235e24880e1f6486d84ae7e6b0d11bdebc17740e7a/asyncpg-0.30.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1c06a3a50d014b303e5f6fc1e5f95eb28d2cee89cf58384b700da621e5d5e547", size = 3011889 }, { url = "https://files.pythonhosted.org/packages/c8/07/8c7ffe6fe8bccff9b12fcb6410b1b2fa74b917fd8b837806a40217d5228b/asyncpg-0.30.0-cp39-cp39-win32.whl", hash = "sha256:1b11a555a198b08f5c4baa8f8231c74a366d190755aa4f99aacec5970afe929a", size = 569406 }, { url = "https://files.pythonhosted.org/packages/05/51/f59e4df6d9b8937530d4b9fdee1598b93db40c631fe94ff3ce64207b7a95/asyncpg-0.30.0-cp39-cp39-win_amd64.whl", hash = "sha256:8b684a3c858a83cd876f05958823b68e8d14ec01bb0c0d14a6704c5bf9711773", size = 626581 }, ] [[package]] name = "asyncpg-stubs" version = "0.30.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "asyncpg" }, { name = "typing-extensions" }, ] sdist = { url = "https://files.pythonhosted.org/packages/8c/54/060d7ccafa322f15676daa5e32d34453e4a0ff24abb1f44182ffddc8a2d2/asyncpg_stubs-0.30.1.tar.gz", hash = 
"sha256:236b88fca49d0d181939ed35c21f79c9c404bc3cca51f8493f6d55210ecfcaf3", size = 20343 } wheels = [ { url = "https://files.pythonhosted.org/packages/5f/51/29715a2551471a9ff4e196f02955e915ccbf7477c90bb2d6e59737d94f1b/asyncpg_stubs-0.30.1-py3-none-any.whl", hash = "sha256:a9d2ed3e53964da6aa6057b46b767b335532b85fa2a0b0ed124922f06d844ae9", size = 26880 }, ] [[package]] name = "auto-pytabs" version = "0.5.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "ruff" }, ] sdist = { url = "https://files.pythonhosted.org/packages/f9/ff/f5752f43f659ee62dd563af5bb0fe0a63111c3ff4708e9596279385f52bb/auto_pytabs-0.5.0.tar.gz", hash = "sha256:30087831c8be5b2314e663efd06c96b84c096572a060a492540f586362cc4326", size = 15362 } wheels = [ { url = "https://files.pythonhosted.org/packages/6e/df/e76dc1261882283f7ae93ebbf75438e85d8bb713a51dbbd5d17fef29e607/auto_pytabs-0.5.0-py3-none-any.whl", hash = "sha256:e59fb6d2f8b41b05d0906a322dd4bb1a86749d429483ec10036587de3657dcc8", size = 13748 }, ] [package.optional-dependencies] sphinx = [ { name = "sphinx", version = "7.4.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, { name = "sphinx", version = "8.2.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, ] [[package]] name = "autodocsumm" version = "0.2.14" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "sphinx", version = "7.4.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, { name = "sphinx", version = "8.2.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, ] sdist = { url = 
"https://files.pythonhosted.org/packages/03/96/92afe8a7912b327c01f0a8b6408c9556ee13b1aba5b98d587ac7327ff32d/autodocsumm-0.2.14.tar.gz", hash = "sha256:2839a9d4facc3c4eccd306c08695540911042b46eeafcdc3203e6d0bab40bc77", size = 46357 } wheels = [ { url = "https://files.pythonhosted.org/packages/87/bc/3f66af9beb683728e06ca08797e4e9d3e44f432f339718cae3ba856a9cad/autodocsumm-0.2.14-py3-none-any.whl", hash = "sha256:3bad8717fc5190802c60392a7ab04b9f3c97aa9efa8b3780b3d81d615bfe5dc0", size = 14640 }, ] [[package]] name = "babel" version = "2.17.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/7d/6b/d52e42361e1aa00709585ecc30b3f9684b3ab62530771402248b1b1d6240/babel-2.17.0.tar.gz", hash = "sha256:0c54cffb19f690cdcc52a3b50bcbf71e07a808d1c80d549f2459b9d2cf0afb9d", size = 9951852 } wheels = [ { url = "https://files.pythonhosted.org/packages/b7/b8/3fe70c75fe32afc4bb507f75563d39bc5642255d1d94f1f23604725780bf/babel-2.17.0-py3-none-any.whl", hash = "sha256:4d0b53093fdfb4b21c92b5213dba5a1b23885afa8383709427046b21c366e5f2", size = 10182537 }, ] [[package]] name = "beautifulsoup4" version = "4.13.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "soupsieve" }, { name = "typing-extensions" }, ] sdist = { url = "https://files.pythonhosted.org/packages/f0/3c/adaf39ce1fb4afdd21b611e3d530b183bb7759c9b673d60db0e347fd4439/beautifulsoup4-4.13.3.tar.gz", hash = "sha256:1bd32405dacc920b42b83ba01644747ed77456a65760e285fbc47633ceddaf8b", size = 619516 } wheels = [ { url = "https://files.pythonhosted.org/packages/f9/49/6abb616eb3cbab6a7cca303dc02fdf3836de2e0b834bf966a7f5271a34d8/beautifulsoup4-4.13.3-py3-none-any.whl", hash = "sha256:99045d7d3f08f91f0d656bc9b7efbae189426cd913d830294a15eefa0ea4df16", size = 186015 }, ] [[package]] name = "blinker" version = "1.9.0" source = { registry = "https://pypi.org/simple" } sdist = { url = 
"https://files.pythonhosted.org/packages/21/28/9b3f50ce0e048515135495f198351908d99540d69bfdc8c1d15b73dc55ce/blinker-1.9.0.tar.gz", hash = "sha256:b4ce2265a7abece45e7cc896e98dbebe6cead56bcf805a3d23136d145f5445bf", size = 22460 } wheels = [ { url = "https://files.pythonhosted.org/packages/10/cb/f2ad4230dc2eb1a74edf38f1a38b9b52277f75bef262d8908e60d957e13c/blinker-1.9.0-py3-none-any.whl", hash = "sha256:ba0efaa9080b619ff2f3459d1d500c57bddea4a6b424b60a91141db6fd2f08bc", size = 8458 }, ] [[package]] name = "bracex" version = "2.5.post1" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/d6/6c/57418c4404cd22fe6275b8301ca2b46a8cdaa8157938017a9ae0b3edf363/bracex-2.5.post1.tar.gz", hash = "sha256:12c50952415bfa773d2d9ccb8e79651b8cdb1f31a42f6091b804f6ba2b4a66b6", size = 26641 } wheels = [ { url = "https://files.pythonhosted.org/packages/4b/02/8db98cdc1a58e0abd6716d5e63244658e6e63513c65f469f34b6f1053fd0/bracex-2.5.post1-py3-none-any.whl", hash = "sha256:13e5732fec27828d6af308628285ad358047cec36801598368cb28bc631dbaf6", size = 11558 }, ] [[package]] name = "bump-my-version" version = "1.0.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "click" }, { name = "httpx" }, { name = "pydantic" }, { name = "pydantic-settings" }, { name = "questionary" }, { name = "rich" }, { name = "rich-click" }, { name = "tomlkit" }, { name = "wcmatch" }, ] sdist = { url = "https://files.pythonhosted.org/packages/61/c9/22f5e6de03ec21357fd37e61fad2970043c406a9af217a0bfc68747148d8/bump_my_version-1.0.2.tar.gz", hash = "sha256:2f156877d2cdcda69afcb257ae4564c26e70f2fd5e5b15f2c7f26ab9e91502da", size = 1102688 } wheels = [ { url = "https://files.pythonhosted.org/packages/bb/ce/dc13887c45dead36075a210487ff66304ef0dc3fbc997d2b12bcde2f0401/bump_my_version-1.0.2-py3-none-any.whl", hash = "sha256:61d350b8c71968dd4520fc6b9df8b982c7df254cd30858b8645eff0f4eaf380b", size = 58573 }, ] [[package]] name = "cachecontrol" version = 
"0.14.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "msgpack" }, { name = "requests" }, ] sdist = { url = "https://files.pythonhosted.org/packages/b7/a4/3390ac4dfa1773f661c8780368018230e8207ec4fd3800d2c0c3adee4456/cachecontrol-0.14.2.tar.gz", hash = "sha256:7d47d19f866409b98ff6025b6a0fca8e4c791fb31abbd95f622093894ce903a2", size = 28832 } wheels = [ { url = "https://files.pythonhosted.org/packages/c8/63/baffb44ca6876e7b5fc8fe17b24a7c07bf479d604a592182db9af26ea366/cachecontrol-0.14.2-py3-none-any.whl", hash = "sha256:ebad2091bf12d0d200dfc2464330db638c5deb41d546f6d7aca079e87290f3b0", size = 21780 }, ] [package.optional-dependencies] filecache = [ { name = "filelock" }, ] [[package]] name = "cachetools" version = "5.5.2" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/6c/81/3747dad6b14fa2cf53fcf10548cf5aea6913e96fab41a3c198676f8948a5/cachetools-5.5.2.tar.gz", hash = "sha256:1a661caa9175d26759571b2e19580f9d6393969e5dfca11fdb1f947a23e640d4", size = 28380 } wheels = [ { url = "https://files.pythonhosted.org/packages/72/76/20fa66124dbe6be5cafeb312ece67de6b61dd91a0247d1ea13db4ebb33c2/cachetools-5.5.2-py3-none-any.whl", hash = "sha256:d26a22bcc62eb95c3beabd9f1ee5e820d3d2704fe2967cbe350e20c8ffcd3f0a", size = 10080 }, ] [[package]] name = "certifi" version = "2025.1.31" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/1c/ab/c9f1e32b7b1bf505bf26f0ef697775960db7932abeb7b516de930ba2705f/certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651", size = 167577 } wheels = [ { url = "https://files.pythonhosted.org/packages/38/fc/bce832fd4fd99766c04d1ee0eead6b0ec6486fb100ae5e74c1d91292b982/certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe", size = 166393 }, ] [[package]] name = "cffi" version = "1.17.1" source = { registry 
= "https://pypi.org/simple" } dependencies = [ { name = "pycparser" }, ] sdist = { url = "https://files.pythonhosted.org/packages/fc/97/c783634659c2920c3fc70419e3af40972dbaf758daa229a7d6ea6135c90d/cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824", size = 516621 } wheels = [ { url = "https://files.pythonhosted.org/packages/90/07/f44ca684db4e4f08a3fdc6eeb9a0d15dc6883efc7b8c90357fdbf74e186c/cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14", size = 182191 }, { url = "https://files.pythonhosted.org/packages/08/fd/cc2fedbd887223f9f5d170c96e57cbf655df9831a6546c1727ae13fa977a/cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67", size = 178592 }, { url = "https://files.pythonhosted.org/packages/de/cc/4635c320081c78d6ffc2cab0a76025b691a91204f4aa317d568ff9280a2d/cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382", size = 426024 }, { url = "https://files.pythonhosted.org/packages/b6/7b/3b2b250f3aab91abe5f8a51ada1b717935fdaec53f790ad4100fe2ec64d1/cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702", size = 448188 }, { url = "https://files.pythonhosted.org/packages/d3/48/1b9283ebbf0ec065148d8de05d647a986c5f22586b18120020452fff8f5d/cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3", size = 455571 }, { url = "https://files.pythonhosted.org/packages/40/87/3b8452525437b40f39ca7ff70276679772ee7e8b394934ff60e63b7b090c/cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6", size = 436687 }, { url = "https://files.pythonhosted.org/packages/8d/fb/4da72871d177d63649ac449aec2e8a29efe0274035880c7af59101ca2232/cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17", size = 446211 }, { url = "https://files.pythonhosted.org/packages/ab/a0/62f00bcb411332106c02b663b26f3545a9ef136f80d5df746c05878f8c4b/cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8", size = 461325 }, { url = "https://files.pythonhosted.org/packages/36/83/76127035ed2e7e27b0787604d99da630ac3123bfb02d8e80c633f218a11d/cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e", size = 438784 }, { url = "https://files.pythonhosted.org/packages/21/81/a6cd025db2f08ac88b901b745c163d884641909641f9b826e8cb87645942/cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be", size = 461564 }, { url = "https://files.pythonhosted.org/packages/f8/fe/4d41c2f200c4a457933dbd98d3cf4e911870877bd94d9656cc0fcb390681/cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c", size = 171804 }, { url = "https://files.pythonhosted.org/packages/d1/b6/0b0f5ab93b0df4acc49cae758c81fe4e5ef26c3ae2e10cc69249dfd8b3ab/cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15", size = 181299 }, { url = "https://files.pythonhosted.org/packages/6b/f4/927e3a8899e52a27fa57a48607ff7dc91a9ebe97399b357b85a0c7892e00/cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401", size = 182264 }, { url = 
"https://files.pythonhosted.org/packages/6c/f5/6c3a8efe5f503175aaddcbea6ad0d2c96dad6f5abb205750d1b3df44ef29/cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf", size = 178651 }, { url = "https://files.pythonhosted.org/packages/94/dd/a3f0118e688d1b1a57553da23b16bdade96d2f9bcda4d32e7d2838047ff7/cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4", size = 445259 }, { url = "https://files.pythonhosted.org/packages/2e/ea/70ce63780f096e16ce8588efe039d3c4f91deb1dc01e9c73a287939c79a6/cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41", size = 469200 }, { url = "https://files.pythonhosted.org/packages/1c/a0/a4fa9f4f781bda074c3ddd57a572b060fa0df7655d2a4247bbe277200146/cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1", size = 477235 }, { url = "https://files.pythonhosted.org/packages/62/12/ce8710b5b8affbcdd5c6e367217c242524ad17a02fe5beec3ee339f69f85/cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6", size = 459721 }, { url = "https://files.pythonhosted.org/packages/ff/6b/d45873c5e0242196f042d555526f92aa9e0c32355a1be1ff8c27f077fd37/cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d", size = 467242 }, { url = "https://files.pythonhosted.org/packages/1a/52/d9a0e523a572fbccf2955f5abe883cfa8bcc570d7faeee06336fbd50c9fc/cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6", size = 477999 }, 
{ url = "https://files.pythonhosted.org/packages/44/74/f2a2460684a1a2d00ca799ad880d54652841a780c4c97b87754f660c7603/cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f", size = 454242 }, { url = "https://files.pythonhosted.org/packages/f8/4a/34599cac7dfcd888ff54e801afe06a19c17787dfd94495ab0c8d35fe99fb/cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b", size = 478604 }, { url = "https://files.pythonhosted.org/packages/34/33/e1b8a1ba29025adbdcda5fb3a36f94c03d771c1b7b12f726ff7fef2ebe36/cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655", size = 171727 }, { url = "https://files.pythonhosted.org/packages/3d/97/50228be003bb2802627d28ec0627837ac0bf35c90cf769812056f235b2d1/cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0", size = 181400 }, { url = "https://files.pythonhosted.org/packages/5a/84/e94227139ee5fb4d600a7a4927f322e1d4aea6fdc50bd3fca8493caba23f/cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4", size = 183178 }, { url = "https://files.pythonhosted.org/packages/da/ee/fb72c2b48656111c4ef27f0f91da355e130a923473bf5ee75c5643d00cca/cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c", size = 178840 }, { url = "https://files.pythonhosted.org/packages/cc/b6/db007700f67d151abadf508cbfd6a1884f57eab90b1bb985c4c8c02b0f28/cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36", size = 454803 }, { url = 
"https://files.pythonhosted.org/packages/1a/df/f8d151540d8c200eb1c6fba8cd0dfd40904f1b0682ea705c36e6c2e97ab3/cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5", size = 478850 }, { url = "https://files.pythonhosted.org/packages/28/c0/b31116332a547fd2677ae5b78a2ef662dfc8023d67f41b2a83f7c2aa78b1/cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff", size = 485729 }, { url = "https://files.pythonhosted.org/packages/91/2b/9a1ddfa5c7f13cab007a2c9cc295b70fbbda7cb10a286aa6810338e60ea1/cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99", size = 471256 }, { url = "https://files.pythonhosted.org/packages/b2/d5/da47df7004cb17e4955df6a43d14b3b4ae77737dff8bf7f8f333196717bf/cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93", size = 479424 }, { url = "https://files.pythonhosted.org/packages/0b/ac/2a28bcf513e93a219c8a4e8e125534f4f6db03e3179ba1c45e949b76212c/cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3", size = 484568 }, { url = "https://files.pythonhosted.org/packages/d4/38/ca8a4f639065f14ae0f1d9751e70447a261f1a30fa7547a828ae08142465/cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8", size = 488736 }, { url = "https://files.pythonhosted.org/packages/86/c5/28b2d6f799ec0bdecf44dced2ec5ed43e0eb63097b0f58c293583b406582/cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65", size = 172448 }, { url = 
"https://files.pythonhosted.org/packages/50/b9/db34c4755a7bd1cb2d1603ac3863f22bcecbd1ba29e5ee841a4bc510b294/cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903", size = 181976 }, { url = "https://files.pythonhosted.org/packages/8d/f8/dd6c246b148639254dad4d6803eb6a54e8c85c6e11ec9df2cffa87571dbe/cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e", size = 182989 }, { url = "https://files.pythonhosted.org/packages/8b/f1/672d303ddf17c24fc83afd712316fda78dc6fce1cd53011b839483e1ecc8/cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2", size = 178802 }, { url = "https://files.pythonhosted.org/packages/0e/2d/eab2e858a91fdff70533cab61dcff4a1f55ec60425832ddfdc9cd36bc8af/cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3", size = 454792 }, { url = "https://files.pythonhosted.org/packages/75/b2/fbaec7c4455c604e29388d55599b99ebcc250a60050610fadde58932b7ee/cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683", size = 478893 }, { url = "https://files.pythonhosted.org/packages/4f/b7/6e4a2162178bf1935c336d4da8a9352cccab4d3a5d7914065490f08c0690/cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5", size = 485810 }, { url = "https://files.pythonhosted.org/packages/c7/8a/1d0e4a9c26e54746dc08c2c6c037889124d4f59dffd853a659fa545f1b40/cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4", size = 471200 }, { url = 
"https://files.pythonhosted.org/packages/26/9f/1aab65a6c0db35f43c4d1b4f580e8df53914310afc10ae0397d29d697af4/cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd", size = 479447 }, { url = "https://files.pythonhosted.org/packages/5f/e4/fb8b3dd8dc0e98edf1135ff067ae070bb32ef9d509d6cb0f538cd6f7483f/cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed", size = 484358 }, { url = "https://files.pythonhosted.org/packages/f1/47/d7145bf2dc04684935d57d67dff9d6d795b2ba2796806bb109864be3a151/cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9", size = 488469 }, { url = "https://files.pythonhosted.org/packages/bf/ee/f94057fa6426481d663b88637a9a10e859e492c73d0384514a17d78ee205/cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d", size = 172475 }, { url = "https://files.pythonhosted.org/packages/7c/fc/6a8cb64e5f0324877d503c854da15d76c1e50eb722e320b15345c4d0c6de/cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a", size = 182009 }, { url = "https://files.pythonhosted.org/packages/b9/ea/8bb50596b8ffbc49ddd7a1ad305035daa770202a6b782fc164647c2673ad/cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16", size = 182220 }, { url = "https://files.pythonhosted.org/packages/ae/11/e77c8cd24f58285a82c23af484cf5b124a376b32644e445960d1a4654c3a/cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36", size = 178605 }, { url = 
"https://files.pythonhosted.org/packages/ed/65/25a8dc32c53bf5b7b6c2686b42ae2ad58743f7ff644844af7cdb29b49361/cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8", size = 424910 }, { url = "https://files.pythonhosted.org/packages/42/7a/9d086fab7c66bd7c4d0f27c57a1b6b068ced810afc498cc8c49e0088661c/cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576", size = 447200 }, { url = "https://files.pythonhosted.org/packages/da/63/1785ced118ce92a993b0ec9e0d0ac8dc3e5dbfbcaa81135be56c69cabbb6/cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87", size = 454565 }, { url = "https://files.pythonhosted.org/packages/74/06/90b8a44abf3556599cdec107f7290277ae8901a58f75e6fe8f970cd72418/cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0", size = 435635 }, { url = "https://files.pythonhosted.org/packages/bd/62/a1f468e5708a70b1d86ead5bab5520861d9c7eacce4a885ded9faa7729c3/cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3", size = 445218 }, { url = "https://files.pythonhosted.org/packages/5b/95/b34462f3ccb09c2594aa782d90a90b045de4ff1f70148ee79c69d37a0a5a/cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595", size = 460486 }, { url = "https://files.pythonhosted.org/packages/fc/fc/a1e4bebd8d680febd29cf6c8a40067182b64f00c7d105f8f26b5bc54317b/cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a", size = 437911 }, { url = 
"https://files.pythonhosted.org/packages/e6/c3/21cab7a6154b6a5ea330ae80de386e7665254835b9e98ecc1340b3a7de9a/cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e", size = 460632 }, { url = "https://files.pythonhosted.org/packages/cb/b5/fd9f8b5a84010ca169ee49f4e4ad6f8c05f4e3545b72ee041dbbcb159882/cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7", size = 171820 }, { url = "https://files.pythonhosted.org/packages/8c/52/b08750ce0bce45c143e1b5d7357ee8c55341b52bdef4b0f081af1eb248c2/cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662", size = 181290 }, ] [[package]] name = "cfgv" version = "3.4.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/11/74/539e56497d9bd1d484fd863dd69cbbfa653cd2aa27abfe35653494d85e94/cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560", size = 7114 } wheels = [ { url = "https://files.pythonhosted.org/packages/c5/55/51844dd50c4fc7a33b653bfaba4c2456f06955289ca770a5dbd5fd267374/cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9", size = 7249 }, ] [[package]] name = "charset-normalizer" version = "3.4.1" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/16/b0/572805e227f01586461c80e0fd25d65a2115599cc9dad142fee4b747c357/charset_normalizer-3.4.1.tar.gz", hash = "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3", size = 123188 } wheels = [ { url = "https://files.pythonhosted.org/packages/0d/58/5580c1716040bc89206c77d8f74418caf82ce519aae06450393ca73475d1/charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de", size = 
198013 }, { url = "https://files.pythonhosted.org/packages/d0/11/00341177ae71c6f5159a08168bcb98c6e6d196d372c94511f9f6c9afe0c6/charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176", size = 141285 }, { url = "https://files.pythonhosted.org/packages/01/09/11d684ea5819e5a8f5100fb0b38cf8d02b514746607934134d31233e02c8/charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037", size = 151449 }, { url = "https://files.pythonhosted.org/packages/08/06/9f5a12939db324d905dc1f70591ae7d7898d030d7662f0d426e2286f68c9/charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f", size = 143892 }, { url = "https://files.pythonhosted.org/packages/93/62/5e89cdfe04584cb7f4d36003ffa2936681b03ecc0754f8e969c2becb7e24/charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a", size = 146123 }, { url = "https://files.pythonhosted.org/packages/a9/ac/ab729a15c516da2ab70a05f8722ecfccc3f04ed7a18e45c75bbbaa347d61/charset_normalizer-3.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a", size = 147943 }, { url = "https://files.pythonhosted.org/packages/03/d2/3f392f23f042615689456e9a274640c1d2e5dd1d52de36ab8f7955f8f050/charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247", size = 142063 }, { url = 
"https://files.pythonhosted.org/packages/f2/e3/e20aae5e1039a2cd9b08d9205f52142329f887f8cf70da3650326670bddf/charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408", size = 150578 }, { url = "https://files.pythonhosted.org/packages/8d/af/779ad72a4da0aed925e1139d458adc486e61076d7ecdcc09e610ea8678db/charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb", size = 153629 }, { url = "https://files.pythonhosted.org/packages/c2/b6/7aa450b278e7aa92cf7732140bfd8be21f5f29d5bf334ae987c945276639/charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d", size = 150778 }, { url = "https://files.pythonhosted.org/packages/39/f4/d9f4f712d0951dcbfd42920d3db81b00dd23b6ab520419626f4023334056/charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807", size = 146453 }, { url = "https://files.pythonhosted.org/packages/49/2b/999d0314e4ee0cff3cb83e6bc9aeddd397eeed693edb4facb901eb8fbb69/charset_normalizer-3.4.1-cp310-cp310-win32.whl", hash = "sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f", size = 95479 }, { url = "https://files.pythonhosted.org/packages/2d/ce/3cbed41cff67e455a386fb5e5dd8906cdda2ed92fbc6297921f2e4419309/charset_normalizer-3.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f", size = 102790 }, { url = "https://files.pythonhosted.org/packages/72/80/41ef5d5a7935d2d3a773e3eaebf0a9350542f2cab4eac59a7a4741fbbbbe/charset_normalizer-3.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125", size = 194995 }, { url = 
"https://files.pythonhosted.org/packages/7a/28/0b9fefa7b8b080ec492110af6d88aa3dea91c464b17d53474b6e9ba5d2c5/charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1", size = 139471 }, { url = "https://files.pythonhosted.org/packages/71/64/d24ab1a997efb06402e3fc07317e94da358e2585165930d9d59ad45fcae2/charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3", size = 149831 }, { url = "https://files.pythonhosted.org/packages/37/ed/be39e5258e198655240db5e19e0b11379163ad7070962d6b0c87ed2c4d39/charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd", size = 142335 }, { url = "https://files.pythonhosted.org/packages/88/83/489e9504711fa05d8dde1574996408026bdbdbd938f23be67deebb5eca92/charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00", size = 143862 }, { url = "https://files.pythonhosted.org/packages/c6/c7/32da20821cf387b759ad24627a9aca289d2822de929b8a41b6241767b461/charset_normalizer-3.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12", size = 145673 }, { url = "https://files.pythonhosted.org/packages/68/85/f4288e96039abdd5aeb5c546fa20a37b50da71b5cf01e75e87f16cd43304/charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77", size = 140211 }, { url = "https://files.pythonhosted.org/packages/28/a3/a42e70d03cbdabc18997baf4f0227c73591a08041c149e710045c281f97b/charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_i686.whl", hash = 
"sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146", size = 148039 }, { url = "https://files.pythonhosted.org/packages/85/e4/65699e8ab3014ecbe6f5c71d1a55d810fb716bbfd74f6283d5c2aa87febf/charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd", size = 151939 }, { url = "https://files.pythonhosted.org/packages/b1/82/8e9fe624cc5374193de6860aba3ea8070f584c8565ee77c168ec13274bd2/charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6", size = 149075 }, { url = "https://files.pythonhosted.org/packages/3d/7b/82865ba54c765560c8433f65e8acb9217cb839a9e32b42af4aa8e945870f/charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8", size = 144340 }, { url = "https://files.pythonhosted.org/packages/b5/b6/9674a4b7d4d99a0d2df9b215da766ee682718f88055751e1e5e753c82db0/charset_normalizer-3.4.1-cp311-cp311-win32.whl", hash = "sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b", size = 95205 }, { url = "https://files.pythonhosted.org/packages/1e/ab/45b180e175de4402dcf7547e4fb617283bae54ce35c27930a6f35b6bef15/charset_normalizer-3.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76", size = 102441 }, { url = "https://files.pythonhosted.org/packages/0a/9a/dd1e1cdceb841925b7798369a09279bd1cf183cef0f9ddf15a3a6502ee45/charset_normalizer-3.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545", size = 196105 }, { url = "https://files.pythonhosted.org/packages/d3/8c/90bfabf8c4809ecb648f39794cf2a84ff2e7d2a6cf159fe68d9a26160467/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7", size = 140404 }, { url = "https://files.pythonhosted.org/packages/ad/8f/e410d57c721945ea3b4f1a04b74f70ce8fa800d393d72899f0a40526401f/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757", size = 150423 }, { url = "https://files.pythonhosted.org/packages/f0/b8/e6825e25deb691ff98cf5c9072ee0605dc2acfca98af70c2d1b1bc75190d/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa", size = 143184 }, { url = "https://files.pythonhosted.org/packages/3e/a2/513f6cbe752421f16d969e32f3583762bfd583848b763913ddab8d9bfd4f/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d", size = 145268 }, { url = "https://files.pythonhosted.org/packages/74/94/8a5277664f27c3c438546f3eb53b33f5b19568eb7424736bdc440a88a31f/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616", size = 147601 }, { url = "https://files.pythonhosted.org/packages/7c/5f/6d352c51ee763623a98e31194823518e09bfa48be2a7e8383cf691bbb3d0/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b", size = 141098 }, { url = "https://files.pythonhosted.org/packages/78/d4/f5704cb629ba5ab16d1d3d741396aec6dc3ca2b67757c45b0599bb010478/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d", size = 149520 }, { url = 
"https://files.pythonhosted.org/packages/c5/96/64120b1d02b81785f222b976c0fb79a35875457fa9bb40827678e54d1bc8/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a", size = 152852 }, { url = "https://files.pythonhosted.org/packages/84/c9/98e3732278a99f47d487fd3468bc60b882920cef29d1fa6ca460a1fdf4e6/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9", size = 150488 }, { url = "https://files.pythonhosted.org/packages/13/0e/9c8d4cb99c98c1007cc11eda969ebfe837bbbd0acdb4736d228ccaabcd22/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1", size = 146192 }, { url = "https://files.pythonhosted.org/packages/b2/21/2b6b5b860781a0b49427309cb8670785aa543fb2178de875b87b9cc97746/charset_normalizer-3.4.1-cp312-cp312-win32.whl", hash = "sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35", size = 95550 }, { url = "https://files.pythonhosted.org/packages/21/5b/1b390b03b1d16c7e382b561c5329f83cc06623916aab983e8ab9239c7d5c/charset_normalizer-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f", size = 102785 }, { url = "https://files.pythonhosted.org/packages/38/94/ce8e6f63d18049672c76d07d119304e1e2d7c6098f0841b51c666e9f44a0/charset_normalizer-3.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda", size = 195698 }, { url = "https://files.pythonhosted.org/packages/24/2e/dfdd9770664aae179a96561cc6952ff08f9a8cd09a908f259a9dfa063568/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313", size = 140162 }, { url = 
"https://files.pythonhosted.org/packages/24/4e/f646b9093cff8fc86f2d60af2de4dc17c759de9d554f130b140ea4738ca6/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9", size = 150263 }, { url = "https://files.pythonhosted.org/packages/5e/67/2937f8d548c3ef6e2f9aab0f6e21001056f692d43282b165e7c56023e6dd/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b", size = 142966 }, { url = "https://files.pythonhosted.org/packages/52/ed/b7f4f07de100bdb95c1756d3a4d17b90c1a3c53715c1a476f8738058e0fa/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11", size = 144992 }, { url = "https://files.pythonhosted.org/packages/96/2c/d49710a6dbcd3776265f4c923bb73ebe83933dfbaa841c5da850fe0fd20b/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f", size = 147162 }, { url = "https://files.pythonhosted.org/packages/b4/41/35ff1f9a6bd380303dea55e44c4933b4cc3c4850988927d4082ada230273/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd", size = 140972 }, { url = "https://files.pythonhosted.org/packages/fb/43/c6a0b685fe6910d08ba971f62cd9c3e862a85770395ba5d9cad4fede33ab/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2", size = 149095 }, { url = "https://files.pythonhosted.org/packages/4c/ff/a9a504662452e2d2878512115638966e75633519ec11f25fca3d2049a94a/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = 
"sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886", size = 152668 }, { url = "https://files.pythonhosted.org/packages/6c/71/189996b6d9a4b932564701628af5cee6716733e9165af1d5e1b285c530ed/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601", size = 150073 }, { url = "https://files.pythonhosted.org/packages/e4/93/946a86ce20790e11312c87c75ba68d5f6ad2208cfb52b2d6a2c32840d922/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd", size = 145732 }, { url = "https://files.pythonhosted.org/packages/cd/e5/131d2fb1b0dddafc37be4f3a2fa79aa4c037368be9423061dccadfd90091/charset_normalizer-3.4.1-cp313-cp313-win32.whl", hash = "sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407", size = 95391 }, { url = "https://files.pythonhosted.org/packages/27/f2/4f9a69cc7712b9b5ad8fdb87039fd89abba997ad5cbe690d1835d40405b0/charset_normalizer-3.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971", size = 102702 }, { url = "https://files.pythonhosted.org/packages/7f/c0/b913f8f02836ed9ab32ea643c6fe4d3325c3d8627cf6e78098671cafff86/charset_normalizer-3.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41", size = 197867 }, { url = "https://files.pythonhosted.org/packages/0f/6c/2bee440303d705b6fb1e2ec789543edec83d32d258299b16eed28aad48e0/charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f", size = 141385 }, { url = "https://files.pythonhosted.org/packages/3d/04/cb42585f07f6f9fd3219ffb6f37d5a39b4fd2db2355b23683060029c35f7/charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2", size = 151367 }, { url = "https://files.pythonhosted.org/packages/54/54/2412a5b093acb17f0222de007cc129ec0e0df198b5ad2ce5699355269dfe/charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770", size = 143928 }, { url = "https://files.pythonhosted.org/packages/5a/6d/e2773862b043dcf8a221342954f375392bb2ce6487bcd9f2c1b34e1d6781/charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4", size = 146203 }, { url = "https://files.pythonhosted.org/packages/b9/f8/ca440ef60d8f8916022859885f231abb07ada3c347c03d63f283bec32ef5/charset_normalizer-3.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537", size = 148082 }, { url = "https://files.pythonhosted.org/packages/04/d2/42fd330901aaa4b805a1097856c2edf5095e260a597f65def493f4b8c833/charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496", size = 142053 }, { url = "https://files.pythonhosted.org/packages/9e/af/3a97a4fa3c53586f1910dadfc916e9c4f35eeada36de4108f5096cb7215f/charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78", size = 150625 }, { url = "https://files.pythonhosted.org/packages/26/ae/23d6041322a3556e4da139663d02fb1b3c59a23ab2e2b56432bd2ad63ded/charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7", size = 153549 }, { url = 
"https://files.pythonhosted.org/packages/94/22/b8f2081c6a77cb20d97e57e0b385b481887aa08019d2459dc2858ed64871/charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6", size = 150945 }, { url = "https://files.pythonhosted.org/packages/c7/0b/c5ec5092747f801b8b093cdf5610e732b809d6cb11f4c51e35fc28d1d389/charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294", size = 146595 }, { url = "https://files.pythonhosted.org/packages/0c/5a/0b59704c38470df6768aa154cc87b1ac7c9bb687990a1559dc8765e8627e/charset_normalizer-3.4.1-cp39-cp39-win32.whl", hash = "sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5", size = 95453 }, { url = "https://files.pythonhosted.org/packages/85/2d/a9790237cb4d01a6d57afadc8573c8b73c609ade20b80f4cda30802009ee/charset_normalizer-3.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765", size = 102811 }, { url = "https://files.pythonhosted.org/packages/0e/f6/65ecc6878a89bb1c23a086ea335ad4bf21a588990c3f535a227b9eea9108/charset_normalizer-3.4.1-py3-none-any.whl", hash = "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85", size = 49767 }, ] [[package]] name = "click" version = "8.1.8" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "colorama", marker = "sys_platform == 'win32'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/b9/2e/0090cbf739cee7d23781ad4b89a9894a41538e4fcf4c31dcdd705b78eb8b/click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a", size = 226593 } wheels = [ { url = "https://files.pythonhosted.org/packages/7e/d4/7ebdbd03970677812aac39c869717059dbb71a4cfc033ca6e5221787892c/click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2", size = 
98188 }, ] [[package]] name = "colorama" version = "0.4.6" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697 } wheels = [ { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335 }, ] [[package]] name = "coverage" version = "7.7.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/02/36/465f5492443265e1278f9a82ffe6aeed3f1db779da0d6e7d4611a5cfb6af/coverage-7.7.0.tar.gz", hash = "sha256:cd879d4646055a573775a1cec863d00c9ff8c55860f8b17f6d8eee9140c06166", size = 809969 } wheels = [ { url = "https://files.pythonhosted.org/packages/10/f5/2b801fe88f199707cf9ec66dcee036e7073b5a208a4a161b64371b3f1e35/coverage-7.7.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a538a23119d1e2e2ce077e902d02ea3d8e0641786ef6e0faf11ce82324743944", size = 210608 }, { url = "https://files.pythonhosted.org/packages/07/44/bcc030cf977d1069a28742c0a67284c6e831ef172f914055b3d45da83f89/coverage-7.7.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1586ad158523f4133499a4f322b230e2cfef9cc724820dbd58595a5a236186f4", size = 211042 }, { url = "https://files.pythonhosted.org/packages/2c/3f/b427f17e1bcf3e1f5ac42fc0b6cb623616f2aedcfc7fde17a058afb62568/coverage-7.7.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b6c96d69928a3a6767fab8dc1ce8a02cf0156836ccb1e820c7f45a423570d98", size = 240168 }, { url = 
"https://files.pythonhosted.org/packages/58/92/6e8d71c5e651f152ffc518ec4cd7add87035533e88af29e233635c0f0dfb/coverage-7.7.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7f18d47641282664276977c604b5a261e51fefc2980f5271d547d706b06a837f", size = 238079 }, { url = "https://files.pythonhosted.org/packages/40/33/1c25ae35c16972dc379c24cd7dde20359d076dee00687825c92a53e43b02/coverage-7.7.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2a1e18a85bd066c7c556d85277a7adf4651f259b2579113844835ba1a74aafd", size = 239216 }, { url = "https://files.pythonhosted.org/packages/4d/3d/adf40bdd07a49e1880632c1bc6b31f42d32cf0bfe6b4d294a8f706d70078/coverage-7.7.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:70f0925c4e2bfc965369f417e7cc72538fd1ba91639cf1e4ef4b1a6b50439b3b", size = 239126 }, { url = "https://files.pythonhosted.org/packages/72/a5/51e39811cd0ec0569a25fe8e6bac0a00efa222a3e49d51d64f5ba0dce24a/coverage-7.7.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b0fac2088ec4aaeb5468b814bd3ff5e5978364bfbce5e567c44c9e2854469f6c", size = 237842 }, { url = "https://files.pythonhosted.org/packages/ab/b7/c5796906cd9eed6d258138f1fddc8d6af01b6d07b3c183bac4a9731ac383/coverage-7.7.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b3e212a894d8ae07fde2ca8b43d666a6d49bbbddb10da0f6a74ca7bd31f20054", size = 238136 }, { url = "https://files.pythonhosted.org/packages/d7/8a/bd34ea3c602b3ef323a001d375f9b1d663e901079bb26b5f9b8f96fae32b/coverage-7.7.0-cp310-cp310-win32.whl", hash = "sha256:f32b165bf6dfea0846a9c9c38b7e1d68f313956d60a15cde5d1709fddcaf3bee", size = 213320 }, { url = "https://files.pythonhosted.org/packages/94/60/6e7efe849e305a233623a80aaeba7ebb02809fa63ab8a1e49c4323b8083b/coverage-7.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:a2454b12a3f12cc4698f3508912e6225ec63682e2ca5a96f80a2b93cef9e63f3", size = 214219 }, { url = 
"https://files.pythonhosted.org/packages/e8/ec/9e0c9358a3bd56b1ddbf266b889ea9d51ee29e58fb72712d5600663fa806/coverage-7.7.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a0a207c87a9f743c8072d059b4711f8d13c456eb42dac778a7d2e5d4f3c253a7", size = 210722 }, { url = "https://files.pythonhosted.org/packages/be/bd/7b47a4302423a13960ee30682900d7ca20cee15c978b1d9ea9594d59d352/coverage-7.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2d673e3add00048215c2cc507f1228a7523fd8bf34f279ac98334c9b07bd2656", size = 211154 }, { url = "https://files.pythonhosted.org/packages/c6/7c/ae54d9022440196bf9f3fad535388678a3db186980ff58a4956ddeb849a2/coverage-7.7.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f81fe93dc1b8e5673f33443c0786c14b77e36f1025973b85e07c70353e46882b", size = 243787 }, { url = "https://files.pythonhosted.org/packages/2d/21/913a2a2d89a2221f4410fbea4ff84e64ddf4367a4b9eb2c328bd01a1a401/coverage-7.7.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d8c7524779003d59948c51b4fcbf1ca4e27c26a7d75984f63488f3625c328b9b", size = 241473 }, { url = "https://files.pythonhosted.org/packages/40/f1/5ae36fffd542fb86ab3b2d5e012af0840265f3dd001ad0ffabe9e4dbdcf6/coverage-7.7.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c124025430249118d018dcedc8b7426f39373527c845093132196f2a483b6dd", size = 243259 }, { url = "https://files.pythonhosted.org/packages/47/1b/abc87bad7f606a4df321bd8300413fe13700099a163e7d63453c7c70c1b2/coverage-7.7.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e7f559c36d5cdc448ee13e7e56ed7b6b5d44a40a511d584d388a0f5d940977ba", size = 242904 }, { url = "https://files.pythonhosted.org/packages/e0/b3/ff0cf15f5709996727dda2fa00af6f4da92ea3e16168400346f2f742341a/coverage-7.7.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:37cbc7b0d93dfd133e33c7ec01123fbb90401dce174c3b6661d8d36fb1e30608", size = 
241079 }, { url = "https://files.pythonhosted.org/packages/05/c9/fcad82aad05b1eb8040e6c25ae7a1303716cc05718d4dd326e0fab31aa14/coverage-7.7.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:7d2a65876274acf544703e943c010b60bd79404e3623a1e5d52b64a6e2728de5", size = 241617 }, { url = "https://files.pythonhosted.org/packages/59/9f/d1efe149afa5c3a459c08bf04f7e6917ef4ee8e3440df5c3e87d6b972870/coverage-7.7.0-cp311-cp311-win32.whl", hash = "sha256:f5a2f71d6a91238e7628f23538c26aa464d390cbdedf12ee2a7a0fb92a24482a", size = 213372 }, { url = "https://files.pythonhosted.org/packages/88/d2/4b58f03e399185b01fb3168d4b870882de9c7a10e273f99c8f25ec690302/coverage-7.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:ae8006772c6b0fa53c33747913473e064985dac4d65f77fd2fdc6474e7cd54e4", size = 214285 }, { url = "https://files.pythonhosted.org/packages/b7/47/f7b870caa26082ff8033be074ac61dc175a6b0c965adf7b910f92a6d7cfe/coverage-7.7.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:056d3017ed67e7ddf266e6f57378ece543755a4c9231e997789ab3bd11392c94", size = 210907 }, { url = "https://files.pythonhosted.org/packages/ea/eb/40b39bdc6c1da403257f0fcb2c1b2fd81ff9f66c13abbe3862f42780e1c1/coverage-7.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:33c1394d8407e2771547583b66a85d07ed441ff8fae5a4adb4237ad39ece60db", size = 211162 }, { url = "https://files.pythonhosted.org/packages/53/08/42a2db41b4646d6261122773e222dd7105e2306526f2d7846de6fee808ec/coverage-7.7.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4fbb7a0c3c21908520149d7751cf5b74eb9b38b54d62997b1e9b3ac19a8ee2fe", size = 245223 }, { url = "https://files.pythonhosted.org/packages/78/2a/0ceb328a7e67e8639d5c7800b8161d4b5f489073ac8d5ac33b11eadee218/coverage-7.7.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bb356e7ae7c2da13f404bf8f75be90f743c6df8d4607022e759f5d7d89fe83f8", size = 242114 }, { url = 
"https://files.pythonhosted.org/packages/ba/68/42b13b849d40af1581830ff06c60f4ec84649764f4a58d5c6e20ae11cbd4/coverage-7.7.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bce730d484038e97f27ea2dbe5d392ec5c2261f28c319a3bb266f6b213650135", size = 244371 }, { url = "https://files.pythonhosted.org/packages/68/66/ab7c3b9fdbeb8bdd322f5b67b1886463834dba2014a534caba60fb0075ea/coverage-7.7.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:aa4dff57fc21a575672176d5ab0ef15a927199e775c5e8a3d75162ab2b0c7705", size = 244134 }, { url = "https://files.pythonhosted.org/packages/01/74/b833d299a479681957d6b238e16a0725586e1d56ec1e43658f3184550bb0/coverage-7.7.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b667b91f4f714b17af2a18e220015c941d1cf8b07c17f2160033dbe1e64149f0", size = 242353 }, { url = "https://files.pythonhosted.org/packages/f9/c5/0ed656d65da39bbab8e8fc367dc3d465a7501fea0f2b1caccfb4f6361c9f/coverage-7.7.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:693d921621a0c8043bfdc61f7d4df5ea6d22165fe8b807cac21eb80dd94e4bbd", size = 243543 }, { url = "https://files.pythonhosted.org/packages/87/b5/142bcff3828e4cce5d4c9ddc9222de1664464263acca09638e4eb0dbda7c/coverage-7.7.0-cp312-cp312-win32.whl", hash = "sha256:52fc89602cde411a4196c8c6894afb384f2125f34c031774f82a4f2608c59d7d", size = 213543 }, { url = "https://files.pythonhosted.org/packages/29/74/99d226985def03284bad6a9aff27a1079a8881ec7523b5980b00a5260527/coverage-7.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:0ce8cf59e09d31a4915ff4c3b94c6514af4c84b22c4cc8ad7c3c546a86150a92", size = 214344 }, { url = "https://files.pythonhosted.org/packages/45/2f/df6235ec963b9eb6b6b2f3c24f70448f1ffa13b9a481c155a6caff176395/coverage-7.7.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4545485fef7a8a2d8f30e6f79ce719eb154aab7e44217eb444c1d38239af2072", size = 210934 }, { url = 
"https://files.pythonhosted.org/packages/f3/85/ff19510bf642e334845318ddb73a550d2b17082831fa9ae053ce72288be7/coverage-7.7.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1393e5aa9441dafb0162c36c8506c648b89aea9565b31f6bfa351e66c11bcd82", size = 211212 }, { url = "https://files.pythonhosted.org/packages/2d/6a/af6582a419550d35eacc3e1bf9f4a936dda0ae559632a0bc4e3aef694ac8/coverage-7.7.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:316f29cc3392fa3912493ee4c83afa4a0e2db04ff69600711f8c03997c39baaa", size = 244727 }, { url = "https://files.pythonhosted.org/packages/55/62/7c49526111c91f3d7d27e111c22c8d08722f5b661c3f031b625b4d7bc4d9/coverage-7.7.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1ffde1d6bc2a92f9c9207d1ad808550873748ac2d4d923c815b866baa343b3f", size = 241768 }, { url = "https://files.pythonhosted.org/packages/62/4b/2dc27700782be9795cbbbe98394dd19ef74815d78d5027ed894972cd1b4a/coverage-7.7.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:416e2a8845eaff288f97eaf76ab40367deafb9073ffc47bf2a583f26b05e5265", size = 243790 }, { url = "https://files.pythonhosted.org/packages/d3/11/9cc1ae56d3015edca69437f3121c2b44de309f6828980b29e4cc9b13246d/coverage-7.7.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5efdeff5f353ed3352c04e6b318ab05c6ce9249c25ed3c2090c6e9cadda1e3b2", size = 243861 }, { url = "https://files.pythonhosted.org/packages/db/e4/2398ed93edcf42ff43002d91c37be11514d825cec382606654fd44f4b8fa/coverage-7.7.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:57f3bd0d29bf2bd9325c0ff9cc532a175110c4bf8f412c05b2405fd35745266d", size = 241942 }, { url = "https://files.pythonhosted.org/packages/ec/fe/b6bd35b17a2b8d26bdb21d5ea4351a837ec01edf552655e833629af05b90/coverage-7.7.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:3ab7090f04b12dc6469882ce81244572779d3a4b67eea1c96fb9ecc8c607ef39", size = 
243228 }, { url = "https://files.pythonhosted.org/packages/6d/06/d8701bae1e5d865edeb00a6c2a71bd7659ca6af349789271c6fd16a57909/coverage-7.7.0-cp313-cp313-win32.whl", hash = "sha256:180e3fc68ee4dc5af8b33b6ca4e3bb8aa1abe25eedcb958ba5cff7123071af68", size = 213572 }, { url = "https://files.pythonhosted.org/packages/d7/c1/7e67780bfcaed6bed20100c9e1b2645e3414577b4bdad169578325249045/coverage-7.7.0-cp313-cp313-win_amd64.whl", hash = "sha256:55143aa13c49491f5606f05b49ed88663446dce3a4d3c5d77baa4e36a16d3573", size = 214372 }, { url = "https://files.pythonhosted.org/packages/ed/25/50b0447442a415ad3da33093c589d9ef945dd6933225f1ce0ac97476397e/coverage-7.7.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:cc41374d2f27d81d6558f8a24e5c114580ffefc197fd43eabd7058182f743322", size = 211774 }, { url = "https://files.pythonhosted.org/packages/13/cc/3daddc707e934d3c0aafaa4a9b217f53fcf4133d4e40cc6ae63aa51243b8/coverage-7.7.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:89078312f06237417adda7c021c33f80f7a6d2db8572a5f6c330d89b080061ce", size = 211995 }, { url = "https://files.pythonhosted.org/packages/98/99/c92f43355d3d67f6bf8c946a350f2174e18f9ea7c8a1e36c9eb84ab7d20b/coverage-7.7.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b2f144444879363ea8834cd7b6869d79ac796cb8f864b0cfdde50296cd95816", size = 256226 }, { url = "https://files.pythonhosted.org/packages/25/62/65f0f33c08e0a1632f1e487b9c2d252e8bad6a77a942836043972b0ba6d2/coverage-7.7.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:60e6347d1ed882b1159ffea172cb8466ee46c665af4ca397edbf10ff53e9ffaf", size = 251937 }, { url = "https://files.pythonhosted.org/packages/b2/10/99a9565aaeb159aade178c6509c8324a9c9e825b01f02242a37c2a8869f8/coverage-7.7.0-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb203c0afffaf1a8f5b9659a013f8f16a1b2cad3a80a8733ceedc968c0cf4c57", size = 
254276 }, { url = "https://files.pythonhosted.org/packages/a7/12/206196edbf0b82250b11bf5c252fe25ebaa2b7c8d66edb0c194e7b3403fe/coverage-7.7.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:ad0edaa97cb983d9f2ff48cadddc3e1fb09f24aa558abeb4dc9a0dbacd12cbb4", size = 255366 }, { url = "https://files.pythonhosted.org/packages/a5/82/a2abb8d4cdd99c6a443ab6682c0eee5797490a2113a45ffaa8b6b31c5dcc/coverage-7.7.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:c5f8a5364fc37b2f172c26a038bc7ec4885f429de4a05fc10fdcb53fb5834c5c", size = 253536 }, { url = "https://files.pythonhosted.org/packages/4d/7d/3747e000e60ad5dd8157bd978f99979967d56cb35c55235980c85305db86/coverage-7.7.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:c4e09534037933bf6eb31d804e72c52ec23219b32c1730f9152feabbd7499463", size = 254344 }, { url = "https://files.pythonhosted.org/packages/45/56/7c33f8a6de1b3b079374d2ae490ccf76fb7c094a23f72d10f071989fc3ef/coverage-7.7.0-cp313-cp313t-win32.whl", hash = "sha256:1b336d06af14f8da5b1f391e8dec03634daf54dfcb4d1c4fb6d04c09d83cef90", size = 214284 }, { url = "https://files.pythonhosted.org/packages/95/ab/657bfa6171800a67bd1c005402f06d6b78610820ef1364ea4f85b04bbb5b/coverage-7.7.0-cp313-cp313t-win_amd64.whl", hash = "sha256:b54a1ee4c6f1905a436cbaa04b26626d27925a41cbc3a337e2d3ff7038187f07", size = 215445 }, { url = "https://files.pythonhosted.org/packages/d1/42/0e77be6f2fafe7f3de88ddf9f8d9a0d8e9a75f9517081d261d31439908c7/coverage-7.7.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1c8fbce80b2b8bf135d105aa8f5b36eae0c57d702a1cc3ebdea2a6f03f6cdde5", size = 210604 }, { url = "https://files.pythonhosted.org/packages/0e/62/a82adc7818545fca3987367c6b20f239645678438f7da5827a4960bcbe7f/coverage-7.7.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d9710521f07f526de30ccdead67e6b236fe996d214e1a7fba8b36e2ba2cd8261", size = 211031 }, { url = 
"https://files.pythonhosted.org/packages/a6/50/a98b418fcaf531b2829b2a06f47f8c5cbc0dcce4a9aa63c5f30bf47d1a92/coverage-7.7.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7789e700f33f2b133adae582c9f437523cd5db8de845774988a58c360fc88253", size = 239791 }, { url = "https://files.pythonhosted.org/packages/58/f7/0a8f891fce6f389b1062a520aff130fa6974433efeb549dd19cbdccc76b3/coverage-7.7.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b8c36093aca722db73633cf2359026ed7782a239eb1c6db2abcff876012dc4cf", size = 237718 }, { url = "https://files.pythonhosted.org/packages/a9/8f/362c91661e6c43ff86b65b15bbb60ad1ad4924e9d1e35a0d5f08eb3337c4/coverage-7.7.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c075d167a6ec99b798c1fdf6e391a1d5a2d054caffe9593ba0f97e3df2c04f0e", size = 238820 }, { url = "https://files.pythonhosted.org/packages/dd/4b/56520dba6f38ad59e96cdeb8c7eafa47781576d2baabdfa10f8c1813b37b/coverage-7.7.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:d013c07061751ae81861cae6ec3a4fe04e84781b11fd4b6b4201590234b25c7b", size = 238595 }, { url = "https://files.pythonhosted.org/packages/4d/e6/acfae468bd1f9b691b29d42f93bfd7080c05021103f03580934c066a3844/coverage-7.7.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:104bf640f408f4e115b85110047c7f27377e1a8b7ba86f7db4fa47aa49dc9a8e", size = 236820 }, { url = "https://files.pythonhosted.org/packages/22/4f/9b65332326b0c5b7de197a52e766e2bd547beec6948e1d5c4063289e3281/coverage-7.7.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:39abcacd1ed54e2c33c54bdc488b310e8ef6705833f7148b6eb9a547199d375d", size = 237800 }, { url = "https://files.pythonhosted.org/packages/bb/99/1c2214678731517d91774b75ed5c0f72feefee3270c232c286b314518d7d/coverage-7.7.0-cp39-cp39-win32.whl", hash = "sha256:8e336b56301774ace6be0017ff85c3566c556d938359b61b840796a0202f805c", size = 213341 }, { url = 
"https://files.pythonhosted.org/packages/21/30/4d9ae5544f839da30e42e03850d1dfe4ab184d6307ed971e70178760a68d/coverage-7.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:8c938c6ae59be67ac19a7204e079efc94b38222cd7d0269f96e45e18cddeaa59", size = 214227 }, { url = "https://files.pythonhosted.org/packages/cb/69/6a5eac32d2e8721274ef75df1b9fd6a8f7e8231e41ff7bc5501f19835f25/coverage-7.7.0-pp39.pp310.pp311-none-any.whl", hash = "sha256:3b0e6e54591ae0d7427def8a4d40fca99df6b899d10354bab73cd5609807261c", size = 202813 }, { url = "https://files.pythonhosted.org/packages/2a/ac/60f409a448e5b0e9b8539716f683568aa5848c1be903cdbbc805a552cdf8/coverage-7.7.0-py3-none-any.whl", hash = "sha256:708f0a1105ef2b11c79ed54ed31f17e6325ac936501fc373f24be3e6a578146a", size = 202803 }, ] [package.optional-dependencies] toml = [ { name = "tomli", marker = "python_full_version <= '3.11'" }, ] [[package]] name = "cryptography" version = "44.0.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/cd/25/4ce80c78963834b8a9fd1cc1266be5ed8d1840785c0f2e1b73b8d128d505/cryptography-44.0.2.tar.gz", hash = "sha256:c63454aa261a0cf0c5b4718349629793e9e634993538db841165b3df74f37ec0", size = 710807 } wheels = [ { url = "https://files.pythonhosted.org/packages/92/ef/83e632cfa801b221570c5f58c0369db6fa6cef7d9ff859feab1aae1a8a0f/cryptography-44.0.2-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:efcfe97d1b3c79e486554efddeb8f6f53a4cdd4cf6086642784fa31fc384e1d7", size = 6676361 }, { url = "https://files.pythonhosted.org/packages/30/ec/7ea7c1e4c8fc8329506b46c6c4a52e2f20318425d48e0fe597977c71dbce/cryptography-44.0.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29ecec49f3ba3f3849362854b7253a9f59799e3763b0c9d0826259a88efa02f1", size = 3952350 }, { url = 
"https://files.pythonhosted.org/packages/27/61/72e3afdb3c5ac510330feba4fc1faa0fe62e070592d6ad00c40bb69165e5/cryptography-44.0.2-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc821e161ae88bfe8088d11bb39caf2916562e0a2dc7b6d56714a48b784ef0bb", size = 4166572 }, { url = "https://files.pythonhosted.org/packages/26/e4/ba680f0b35ed4a07d87f9e98f3ebccb05091f3bf6b5a478b943253b3bbd5/cryptography-44.0.2-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:3c00b6b757b32ce0f62c574b78b939afab9eecaf597c4d624caca4f9e71e7843", size = 3958124 }, { url = "https://files.pythonhosted.org/packages/9c/e8/44ae3e68c8b6d1cbc59040288056df2ad7f7f03bbcaca6b503c737ab8e73/cryptography-44.0.2-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7bdcd82189759aba3816d1f729ce42ffded1ac304c151d0a8e89b9996ab863d5", size = 3678122 }, { url = "https://files.pythonhosted.org/packages/27/7b/664ea5e0d1eab511a10e480baf1c5d3e681c7d91718f60e149cec09edf01/cryptography-44.0.2-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:4973da6ca3db4405c54cd0b26d328be54c7747e89e284fcff166132eb7bccc9c", size = 4191831 }, { url = "https://files.pythonhosted.org/packages/2a/07/79554a9c40eb11345e1861f46f845fa71c9e25bf66d132e123d9feb8e7f9/cryptography-44.0.2-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:4e389622b6927d8133f314949a9812972711a111d577a5d1f4bee5e58736b80a", size = 3960583 }, { url = "https://files.pythonhosted.org/packages/bb/6d/858e356a49a4f0b591bd6789d821427de18432212e137290b6d8a817e9bf/cryptography-44.0.2-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:f514ef4cd14bb6fb484b4a60203e912cfcb64f2ab139e88c2274511514bf7308", size = 4191753 }, { url = "https://files.pythonhosted.org/packages/b2/80/62df41ba4916067fa6b125aa8c14d7e9181773f0d5d0bd4dcef580d8b7c6/cryptography-44.0.2-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:1bc312dfb7a6e5d66082c87c34c8a62176e684b6fe3d90fcfe1568de675e6688", size = 4079550 }, { url = 
"https://files.pythonhosted.org/packages/f3/cd/2558cc08f7b1bb40683f99ff4327f8dcfc7de3affc669e9065e14824511b/cryptography-44.0.2-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3b721b8b4d948b218c88cb8c45a01793483821e709afe5f622861fc6182b20a7", size = 4298367 }, { url = "https://files.pythonhosted.org/packages/71/59/94ccc74788945bc3bd4cf355d19867e8057ff5fdbcac781b1ff95b700fb1/cryptography-44.0.2-cp37-abi3-win32.whl", hash = "sha256:51e4de3af4ec3899d6d178a8c005226491c27c4ba84101bfb59c901e10ca9f79", size = 2772843 }, { url = "https://files.pythonhosted.org/packages/ca/2c/0d0bbaf61ba05acb32f0841853cfa33ebb7a9ab3d9ed8bb004bd39f2da6a/cryptography-44.0.2-cp37-abi3-win_amd64.whl", hash = "sha256:c505d61b6176aaf982c5717ce04e87da5abc9a36a5b39ac03905c4aafe8de7aa", size = 3209057 }, { url = "https://files.pythonhosted.org/packages/9e/be/7a26142e6d0f7683d8a382dd963745e65db895a79a280a30525ec92be890/cryptography-44.0.2-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:8e0ddd63e6bf1161800592c71ac794d3fb8001f2caebe0966e77c5234fa9efc3", size = 6677789 }, { url = "https://files.pythonhosted.org/packages/06/88/638865be7198a84a7713950b1db7343391c6066a20e614f8fa286eb178ed/cryptography-44.0.2-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81276f0ea79a208d961c433a947029e1a15948966658cf6710bbabb60fcc2639", size = 3951919 }, { url = "https://files.pythonhosted.org/packages/d7/fc/99fe639bcdf58561dfad1faa8a7369d1dc13f20acd78371bb97a01613585/cryptography-44.0.2-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a1e657c0f4ea2a23304ee3f964db058c9e9e635cc7019c4aa21c330755ef6fd", size = 4167812 }, { url = "https://files.pythonhosted.org/packages/53/7b/aafe60210ec93d5d7f552592a28192e51d3c6b6be449e7fd0a91399b5d07/cryptography-44.0.2-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:6210c05941994290f3f7f175a4a57dbbb2afd9273657614c506d5976db061181", size = 3958571 }, { url = 
"https://files.pythonhosted.org/packages/16/32/051f7ce79ad5a6ef5e26a92b37f172ee2d6e1cce09931646eef8de1e9827/cryptography-44.0.2-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d1c3572526997b36f245a96a2b1713bf79ce99b271bbcf084beb6b9b075f29ea", size = 3679832 }, { url = "https://files.pythonhosted.org/packages/78/2b/999b2a1e1ba2206f2d3bca267d68f350beb2b048a41ea827e08ce7260098/cryptography-44.0.2-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:b042d2a275c8cee83a4b7ae30c45a15e6a4baa65a179a0ec2d78ebb90e4f6699", size = 4193719 }, { url = "https://files.pythonhosted.org/packages/72/97/430e56e39a1356e8e8f10f723211a0e256e11895ef1a135f30d7d40f2540/cryptography-44.0.2-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:d03806036b4f89e3b13b6218fefea8d5312e450935b1a2d55f0524e2ed7c59d9", size = 3960852 }, { url = "https://files.pythonhosted.org/packages/89/33/c1cf182c152e1d262cac56850939530c05ca6c8d149aa0dcee490b417e99/cryptography-44.0.2-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:c7362add18b416b69d58c910caa217f980c5ef39b23a38a0880dfd87bdf8cd23", size = 4193906 }, { url = "https://files.pythonhosted.org/packages/e1/99/87cf26d4f125380dc674233971069bc28d19b07f7755b29861570e513650/cryptography-44.0.2-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:8cadc6e3b5a1f144a039ea08a0bdb03a2a92e19c46be3285123d32029f40a922", size = 4081572 }, { url = "https://files.pythonhosted.org/packages/b3/9f/6a3e0391957cc0c5f84aef9fbdd763035f2b52e998a53f99345e3ac69312/cryptography-44.0.2-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6f101b1f780f7fc613d040ca4bdf835c6ef3b00e9bd7125a4255ec574c7916e4", size = 4298631 }, { url = "https://files.pythonhosted.org/packages/e2/a5/5bc097adb4b6d22a24dea53c51f37e480aaec3465285c253098642696423/cryptography-44.0.2-cp39-abi3-win32.whl", hash = "sha256:3dc62975e31617badc19a906481deacdeb80b4bb454394b4098e3f2525a488c5", size = 2773792 }, { url = 
"https://files.pythonhosted.org/packages/33/cf/1f7649b8b9a3543e042d3f348e398a061923ac05b507f3f4d95f11938aa9/cryptography-44.0.2-cp39-abi3-win_amd64.whl", hash = "sha256:5f6f90b72d8ccadb9c6e311c775c8305381db88374c65fa1a68250aa8a9cb3a6", size = 3210957 }, { url = "https://files.pythonhosted.org/packages/99/10/173be140714d2ebaea8b641ff801cbcb3ef23101a2981cbf08057876f89e/cryptography-44.0.2-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:af4ff3e388f2fa7bff9f7f2b31b87d5651c45731d3e8cfa0944be43dff5cfbdb", size = 3396886 }, { url = "https://files.pythonhosted.org/packages/2f/b4/424ea2d0fce08c24ede307cead3409ecbfc2f566725d4701b9754c0a1174/cryptography-44.0.2-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:0529b1d5a0105dd3731fa65680b45ce49da4d8115ea76e9da77a875396727b41", size = 3892387 }, { url = "https://files.pythonhosted.org/packages/28/20/8eaa1a4f7c68a1cb15019dbaad59c812d4df4fac6fd5f7b0b9c5177f1edd/cryptography-44.0.2-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:7ca25849404be2f8e4b3c59483d9d3c51298a22c1c61a0e84415104dacaf5562", size = 4109922 }, { url = "https://files.pythonhosted.org/packages/11/25/5ed9a17d532c32b3bc81cc294d21a36c772d053981c22bd678396bc4ae30/cryptography-44.0.2-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:268e4e9b177c76d569e8a145a6939eca9a5fec658c932348598818acf31ae9a5", size = 3895715 }, { url = "https://files.pythonhosted.org/packages/63/31/2aac03b19c6329b62c45ba4e091f9de0b8f687e1b0cd84f101401bece343/cryptography-44.0.2-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:9eb9d22b0a5d8fd9925a7764a054dca914000607dff201a24c791ff5c799e1fa", size = 4109876 }, { url = "https://files.pythonhosted.org/packages/99/ec/6e560908349843718db1a782673f36852952d52a55ab14e46c42c8a7690a/cryptography-44.0.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:2bf7bf75f7df9715f810d1b038870309342bff3069c5bd8c6b96128cb158668d", size = 3131719 }, { url = 
"https://files.pythonhosted.org/packages/d6/d7/f30e75a6aa7d0f65031886fa4a1485c2fbfe25a1896953920f6a9cfe2d3b/cryptography-44.0.2-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:909c97ab43a9c0c0b0ada7a1281430e4e5ec0458e6d9244c0e821bbf152f061d", size = 3887513 }, { url = "https://files.pythonhosted.org/packages/9c/b4/7a494ce1032323ca9db9a3661894c66e0d7142ad2079a4249303402d8c71/cryptography-44.0.2-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:96e7a5e9d6e71f9f4fca8eebfd603f8e86c5225bb18eb621b2c1e50b290a9471", size = 4107432 }, { url = "https://files.pythonhosted.org/packages/45/f8/6b3ec0bc56123b344a8d2b3264a325646d2dcdbdd9848b5e6f3d37db90b3/cryptography-44.0.2-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:d1b3031093a366ac767b3feb8bcddb596671b3aaff82d4050f984da0c248b615", size = 3891421 }, { url = "https://files.pythonhosted.org/packages/57/ff/f3b4b2d007c2a646b0f69440ab06224f9cf37a977a72cdb7b50632174e8a/cryptography-44.0.2-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:04abd71114848aa25edb28e225ab5f268096f44cf0127f3d36975bdf1bdf3390", size = 4107081 }, ] [[package]] name = "cssutils" version = "2.11.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "more-itertools" }, ] sdist = { url = "https://files.pythonhosted.org/packages/33/9f/329d26121fe165be44b1dfff21aa0dc348f04633931f1d20ed6cf448a236/cssutils-2.11.1.tar.gz", hash = "sha256:0563a76513b6af6eebbe788c3bf3d01c920e46b3f90c8416738c5cfc773ff8e2", size = 711657 } wheels = [ { url = "https://files.pythonhosted.org/packages/a7/ec/bb273b7208c606890dc36540fe667d06ce840a6f62f9fae7e658fcdc90fb/cssutils-2.11.1-py3-none-any.whl", hash = "sha256:a67bfdfdff4f3867fab43698ec4897c1a828eca5973f4073321b3bccaf1199b1", size = 385747 }, ] [[package]] name = "dict2css" version = "0.3.0.post1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cssutils" }, { name = "domdf-python-tools" }, ] sdist = { url = 
"https://files.pythonhosted.org/packages/24/eb/776eef1f1aa0188c0fc165c3a60b71027539f71f2eedc43ad21b060e9c39/dict2css-0.3.0.post1.tar.gz", hash = "sha256:89c544c21c4ca7472c3fffb9d37d3d926f606329afdb751dc1de67a411b70719", size = 7845 } wheels = [ { url = "https://files.pythonhosted.org/packages/fe/47/290daabcf91628f4fc0e17c75a1690b354ba067066cd14407712600e609f/dict2css-0.3.0.post1-py3-none-any.whl", hash = "sha256:f006a6b774c3e31869015122ae82c491fd25e7de4a75607a62aa3e798f837e0d", size = 25647 }, ] [[package]] name = "distlib" version = "0.3.9" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/0d/dd/1bec4c5ddb504ca60fc29472f3d27e8d4da1257a854e1d96742f15c1d02d/distlib-0.3.9.tar.gz", hash = "sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403", size = 613923 } wheels = [ { url = "https://files.pythonhosted.org/packages/91/a1/cf2472db20f7ce4a6be1253a81cfdf85ad9c7885ffbed7047fb72c24cf87/distlib-0.3.9-py2.py3-none-any.whl", hash = "sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87", size = 468973 }, ] [[package]] name = "dnspython" version = "2.7.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/b5/4a/263763cb2ba3816dd94b08ad3a33d5fdae34ecb856678773cc40a3605829/dnspython-2.7.0.tar.gz", hash = "sha256:ce9c432eda0dc91cf618a5cedf1a4e142651196bbcd2c80e89ed5a907e5cfaf1", size = 345197 } wheels = [ { url = "https://files.pythonhosted.org/packages/68/1b/e0a87d256e40e8c888847551b20a017a6b98139178505dc7ffb96f04e954/dnspython-2.7.0-py3-none-any.whl", hash = "sha256:b4c34b7d10b51bcc3a5071e7b8dee77939f1e878477eeecc965e9835f63c6c86", size = 313632 }, ] [[package]] name = "docker" version = "7.1.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pywin32", marker = "sys_platform == 'win32'" }, { name = "requests" }, { name = "urllib3" }, ] sdist = { url = 
"https://files.pythonhosted.org/packages/91/9b/4a2ea29aeba62471211598dac5d96825bb49348fa07e906ea930394a83ce/docker-7.1.0.tar.gz", hash = "sha256:ad8c70e6e3f8926cb8a92619b832b4ea5299e2831c14284663184e200546fa6c", size = 117834 } wheels = [ { url = "https://files.pythonhosted.org/packages/e3/26/57c6fb270950d476074c087527a558ccb6f4436657314bfb6cdf484114c4/docker-7.1.0-py3-none-any.whl", hash = "sha256:c96b93b7f0a746f9e77d325bcfb87422a3d8bd4f03136ae8a85b37f1898d5fc0", size = 147774 }, ] [[package]] name = "docutils" version = "0.21.2" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/ae/ed/aefcc8cd0ba62a0560c3c18c33925362d46c6075480bfa4df87b28e169a9/docutils-0.21.2.tar.gz", hash = "sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f", size = 2204444 } wheels = [ { url = "https://files.pythonhosted.org/packages/8f/d7/9322c609343d929e75e7e5e6255e614fcc67572cfd083959cdef3b7aad79/docutils-0.21.2-py3-none-any.whl", hash = "sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2", size = 587408 }, ] [[package]] name = "domdf-python-tools" version = "3.10.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "natsort" }, { name = "typing-extensions" }, ] sdist = { url = "https://files.pythonhosted.org/packages/36/8b/ab2d8a292bba8fe3135cacc8bfd3576710a14b8f2d0a8cde19130d5c9d21/domdf_python_tools-3.10.0.tar.gz", hash = "sha256:2ae308d2f4f1e9145f5f4ba57f840fbfd1c2983ee26e4824347789649d3ae298", size = 100458 } wheels = [ { url = "https://files.pythonhosted.org/packages/5b/11/208f72084084d3f6a2ed5ebfdfc846692c3f7ad6dce65e400194924f7eed/domdf_python_tools-3.10.0-py3-none-any.whl", hash = "sha256:5e71c1be71bbcc1f881d690c8984b60e64298ec256903b3147f068bc33090c36", size = 126860 }, ] [[package]] name = "duckdb" version = "1.2.1" source = { registry = "https://pypi.org/simple" } sdist = { url = 
"https://files.pythonhosted.org/packages/41/b4/34b98425d643e412f52703829b5ed2da7d7cb6dd40c80a3aa210002cafa8/duckdb-1.2.1.tar.gz", hash = "sha256:15d49030d04572540cc1c8ad8a491ce018a590ec995d5d38c8f5f75b6422413e", size = 11591514 } wheels = [ { url = "https://files.pythonhosted.org/packages/88/38/3b4fc59d585d6f0dfd86ebd7eaabecddf237717dfd2bc45e0b8d29d97a4b/duckdb-1.2.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:b1b26271c22d1265379949b71b1d13a413f8048ea49ed04b3a33f257c384fa7c", size = 15250747 }, { url = "https://files.pythonhosted.org/packages/2a/48/00712205ab64a5c0af120fe0481822b89c99ad29559e46993339de3a20aa/duckdb-1.2.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:47946714d3aa423782678d37bfface082a9c43d232c44c4b79d70a1137e4c356", size = 31914009 }, { url = "https://files.pythonhosted.org/packages/83/62/5b03ed3ad42b05eb47657e59b7d3c9b8912bd621c06f5303e2e98f1323d5/duckdb-1.2.1-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:2c3d3f069a114cfb4ebf5e35798953c93491cfb5866cfc57a4921f8b5d38cc05", size = 16771835 }, { url = "https://files.pythonhosted.org/packages/02/08/99e91459e1007e140a27a0d7cd09806db99b4a2cc59b8ab1f8ee8560a10d/duckdb-1.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:433406949970f4a8ab5416f62af224d418d3bbafe81585ede77057752c04017e", size = 18724706 }, { url = "https://files.pythonhosted.org/packages/6b/95/73681dfa03f05ed49ce0476e4b826ce079ea72d0779ebd51d79d51a0d86e/duckdb-1.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42d156dacb1fd39b7293ee200d16af2cc9d08e57f7f7b5e800aa35bd265fc41f", size = 20191133 }, { url = "https://files.pythonhosted.org/packages/1e/a3/efa40117d0261c8c8d431c06016c80e8cb735d198d94e5a8c0ae4f9e95bd/duckdb-1.2.1-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4e11ccbfd088dbac68dc35f4119fb385a878ca1cce720111c394f513d89a8b5f", size = 18733708 }, { url = 
"https://files.pythonhosted.org/packages/79/53/e3bbf938c5b99a8c95bf66505457bf3d6947951b3f98ebffa5bf5f1ba02a/duckdb-1.2.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:66322686a31a566b4c98f079513b1eba21a7de1d716b5b7d3a55aef8f97ee369", size = 22248683 }, { url = "https://files.pythonhosted.org/packages/63/79/ecd3cd85ed0859fc965bc0a2e3574627a8834c654db7f7155287de7f8f1d/duckdb-1.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:c1cbb84c65f8ef2fe32f4cbc8c7ed339c3ae6cf3e5814a314fa4b79a8ce9686a", size = 11362762 }, { url = "https://files.pythonhosted.org/packages/58/82/b119808dde71e42cc1fc77ac4a912e38c84eb47fa6ca4bc90652f99b7252/duckdb-1.2.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:99c47ea82df549c284e4e4d8c89a940af4f19c03427f6f42cafeb3c152536bc5", size = 15252717 }, { url = "https://files.pythonhosted.org/packages/8a/ff/015fd0cdec48791c36d6251916b456e96ed9fb71a791a7385b26cec14810/duckdb-1.2.1-cp311-cp311-macosx_12_0_universal2.whl", hash = "sha256:203ebdf401d049135492cc3d49146cfd704d866ee9cc52b18e80a586aceabb69", size = 31915709 }, { url = "https://files.pythonhosted.org/packages/d7/d2/72ef2cf81562fdb6068b1e2cd19a878943067ce812060a4bc91e61d0e92d/duckdb-1.2.1-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:ac5f7c15176b6fb90f1f3bed08a99b9d32f55b58cd3d9d2ed6a1037a8fda2024", size = 16772294 }, { url = "https://files.pythonhosted.org/packages/b5/06/b454b94ceec3a813c5122a99b0259ced53874b15fb2dfdb669164dbcb153/duckdb-1.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:97b2c13f4f9290db60c783b93b79ce521a3890ff8d817a6670afb760e030043b", size = 18728528 }, { url = "https://files.pythonhosted.org/packages/50/52/6e6f5b5b07841cec334ca6b98f2e02b7bb54ab3b99c49aa3a161cc0b4b37/duckdb-1.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d493e051f594175a2a5bdcae5c008d3cc424805e3282292c1204f597880de8ea", size = 20197440 }, { url = 
"https://files.pythonhosted.org/packages/f5/dc/01c3f5a47d7433d1e261042f61e6b3d77634f28706975b3027697fa19de8/duckdb-1.2.1-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7c252be2ed07817916342823b271253459932c60d7f7ee4e28f33650552cda24", size = 18736032 }, { url = "https://files.pythonhosted.org/packages/1e/e4/7ef6b8e08c410fc13ba9f62ecf2802e8e2adcae38a5ea7a4f6829b99f32d/duckdb-1.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:832627f11b370d708543a86d18d5eda4eacb7ca51fdc83c74629adfff2ec1bf2", size = 22251245 }, { url = "https://files.pythonhosted.org/packages/a5/b7/e3f5d60117fe31623122a44b6d3e8f1cee9d87a23810c9c35bb1d743d4d2/duckdb-1.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:d05e5914857b4d93b136de385d81a65165a6c24a6ecf6eee3dcd0017233bff6c", size = 11363523 }, { url = "https://files.pythonhosted.org/packages/5d/70/2c1240415afc176ac7019f0fd5add3310ba93c80885a55d7fecc194108e6/duckdb-1.2.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:7e587410e05343ffaf9a21bacb6811aad253bd443ab4ff869fdaa645908f47a4", size = 15263653 }, { url = "https://files.pythonhosted.org/packages/2c/6e/83caef4d3b6e68da768ec564d5c9b982a84d9167ead0ad674b69810d7bb8/duckdb-1.2.1-cp312-cp312-macosx_12_0_universal2.whl", hash = "sha256:8cb84295cafbf2510326f4ae18d401fc2d45b6d4811c43f1b7451a69a0a74f5f", size = 31955476 }, { url = "https://files.pythonhosted.org/packages/35/fb/ee33f3417d4778ab183d47fe8569dc7906a1b95f69cfb10f15d5f88e8dcf/duckdb-1.2.1-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:1b6dfefadc455347a2c649d41ebd561b32574b4191508043c9ee81fa0da95485", size = 16798219 }, { url = "https://files.pythonhosted.org/packages/21/11/9cf670a88f39dd18854883c38b9374c745e47d69896bb8dbc9cc239a43d6/duckdb-1.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d75d9fdf5865399f634d824c8d427c7666d1f2c640115178115459fa69b20b0", size = 18730807 }, { url = 
"https://files.pythonhosted.org/packages/d4/5f/7b511dcaa772f9ae20c7f3fe05dd88174729fbcb67e15b349b72a3855712/duckdb-1.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4a05d182d1dec1ff4acb53a266b3b8024afcc1ed0d399f5784ff1607a4271e9", size = 20199069 }, { url = "https://files.pythonhosted.org/packages/9c/58/7942a1d7c84a045e1513acc7e753ac67f2f272601a2c21d71b4cb85967e7/duckdb-1.2.1-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:317af7385b4f1d0c90ca029a71ce3d4f9571549c162798d58a0b20ba0a11762e", size = 18753393 }, { url = "https://files.pythonhosted.org/packages/6b/00/57417ae7d9bd47c71284bff7f69736bdde0f213ce312292e4f553449a667/duckdb-1.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41fca1666d0905e929ede0899a4275d67835a285b98e28fce446e8c3e53cfe8c", size = 22290931 }, { url = "https://files.pythonhosted.org/packages/71/bc/acb4d48f41dada36e723e9786d1ebe89f8e1db6685b86a2a1f0551bd5e16/duckdb-1.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:f8f19f145442dbdfae029b68208fc237816f70b3d25bb77ed31ace79b6059fa5", size = 11365235 }, { url = "https://files.pythonhosted.org/packages/e3/3b/d154fcde6205aafd2002ddec7eef37e5c7907c3aa63b51f6d9f7d2ec1442/duckdb-1.2.1-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:bc9ed3adea35e7e688750e80330b5b93cd430483d68a5f880dac76bedca14c0e", size = 15264713 }, { url = "https://files.pythonhosted.org/packages/20/3f/e54f898c62a3d6873c090f06bab62544ac33826ec65e7598af7c09264a14/duckdb-1.2.1-cp313-cp313-macosx_12_0_universal2.whl", hash = "sha256:b26ff415d89860b7013d711fce916f919ad058dbf0a3fc4bcdff5323ec4bbfa0", size = 31955551 }, { url = "https://files.pythonhosted.org/packages/11/b9/19ecfcc13b402686cf6f121cb08451f7655bd653990fdabfda1f2db87081/duckdb-1.2.1-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:0e26037b138a22f72fe44697b605ccac06e223c108b3f4a3e91e7ffad45ee673", size = 16797823 }, { url = 
"https://files.pythonhosted.org/packages/35/69/20fe0c748371866bdd150d60b065498b7414537c4ad0f7235b5ae604ac99/duckdb-1.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6e2f530e8290e4b2d2c341bc709a6a0c9ec7a0e1c7a4679afa7bd4db972fcf12", size = 18731358 }, { url = "https://files.pythonhosted.org/packages/cc/f7/ba9b39791a0415c48d4696f10217e44ac526e450b811bc68f9acf0ef3b5c/duckdb-1.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7985129c4bc810cb08938043822bb1fc4b67c11f4c1b025527f9c888e0638b6a", size = 20198769 }, { url = "https://files.pythonhosted.org/packages/9c/6c/07717799b64e34dd383c4fe9a3a53f5506c97ada096b103154c8856dc68b/duckdb-1.2.1-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:be76e55e9a36febcb0c7c7c28b8fae0b33bbcf6a84b3b23eb23e7ee3e65e3394", size = 18754621 }, { url = "https://files.pythonhosted.org/packages/53/8b/f971b0cd6cfc3ac094d31998b789a8fb372bd0813fbb47c932342fc926f0/duckdb-1.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:d8f5066ae9acc6cee22c7a455696511d993bdbfc55bb9466360b073b5c8cba67", size = 22291214 }, { url = "https://files.pythonhosted.org/packages/1e/1c/4e29e52a35b5af451b24232b6f89714180da71c904017e62f7cc5477f135/duckdb-1.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:6112711457b6014ac041492bedf8b6a97403666aefa20a4a4f3479db10136501", size = 11365219 }, { url = "https://files.pythonhosted.org/packages/9e/9d/c6af575a6ab29b760954e59eb0882a5b6bafb0ead1b1085aca3317750be0/duckdb-1.2.1-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:18a3ebb6895e53ddcc9f677625576d85a54236a0fc060927bc356de365c8d382", size = 15250858 }, { url = "https://files.pythonhosted.org/packages/10/d4/544d675f388dd0bf4c286429160c9ba4e7b49ae80a1fa1c70b79e0416873/duckdb-1.2.1-cp39-cp39-macosx_12_0_universal2.whl", hash = "sha256:7928a1f7a0568e3f384dbb2896d33fe96061444033692c8a954ac75a06efbda3", size = 31913696 }, { url = 
"https://files.pythonhosted.org/packages/3a/69/98f319f15cd2b76552fb5a0d0c07d042ee0f3940475d8d86558bc6de766d/duckdb-1.2.1-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:1adecebea8369b289232ec57e0fab87b572bca960acbeff89e8b7c2d202636a3", size = 16771268 }, { url = "https://files.pythonhosted.org/packages/e2/0c/81d26f905980aba8de77d00b27999202f733dddfe23911424f3a4feb6800/duckdb-1.2.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b0e728ab0415d3e9ff575806304482bf89f39e55df660ab8ed194335b045e5a0", size = 18722533 }, { url = "https://files.pythonhosted.org/packages/1c/de/ed0159a400394d0b6e97554c6e417367df163ebc8a07285f210a4d62b564/duckdb-1.2.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:594dcf9f7637e5db3d8d9e676a95721be5cf9657ffa22b27e19dddd519bca6fb", size = 20191388 }, { url = "https://files.pythonhosted.org/packages/63/ac/74c3fe0bfb0efa144e4e30909d1fefde86fbe6250148a5c596725e8be26b/duckdb-1.2.1-cp39-cp39-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a874d242f489bf649e6f03f3132d8d278371a8baf0ce55b48200af0de70d8f1f", size = 18719722 }, { url = "https://files.pythonhosted.org/packages/8e/e5/4a63024c3bff1e8ee9d0e91cbdb779f593bb2a0cd12d3bf9e6b3327ae8b4/duckdb-1.2.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:55c9b4214dd80e6adf73c7224529e0df290426d9fe5b6568dcd004916e690b84", size = 22237298 }, { url = "https://files.pythonhosted.org/packages/41/f5/fc2aa7c1dfd28a009e58f52c0e3923f88a9314b3552d15aad7948468e917/duckdb-1.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:6043d37e289df828fada6245381c3d1b67b71e0245f1b599b6c4c2634318aed2", size = 11398738 }, ] [[package]] name = "duckdb-engine" version = "0.15.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "duckdb" }, { name = "packaging" }, { name = "sqlalchemy" }, ] sdist = { url = 
"https://files.pythonhosted.org/packages/ac/2d/ea892e63f8b372a3aa48caa2de9eed8e4c731275b9715a1334be6b784329/duckdb_engine-0.15.0.tar.gz", hash = "sha256:59f67ec95ebf9eb4dea22994664dfd34edce3c7416b862daa46da43f572ad6ef", size = 47695 } wheels = [ { url = "https://files.pythonhosted.org/packages/0c/92/a3b7edba792772f364ad6c57ceb8685fb5ae5f893704650f2b46978f9b34/duckdb_engine-0.15.0-py3-none-any.whl", hash = "sha256:d18acd73f03202145e1baa86605dca3612080fd0a849dbc42b38111ffee6857c", size = 49634 }, ] [[package]] name = "editorconfig" version = "0.17.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/b4/29/785595a0d8b30ab8d2486559cfba1d46487b8dcbd99f74960b6b4cca92a4/editorconfig-0.17.0.tar.gz", hash = "sha256:8739052279699840065d3a9f5c125d7d5a98daeefe53b0e5274261d77cb49aa2", size = 13369 } wheels = [ { url = "https://files.pythonhosted.org/packages/af/e5/8dba39ea24ca3de0e954e668107692f4dfc13a85300a531fa9a39e83fde4/EditorConfig-0.17.0-py3-none-any.whl", hash = "sha256:fe491719c5f65959ec00b167d07740e7ffec9a3f362038c72b289330b9991dfc", size = 16276 }, ] [[package]] name = "email-validator" version = "2.2.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "dnspython" }, { name = "idna" }, ] sdist = { url = "https://files.pythonhosted.org/packages/48/ce/13508a1ec3f8bb981ae4ca79ea40384becc868bfae97fd1c942bb3a001b1/email_validator-2.2.0.tar.gz", hash = "sha256:cb690f344c617a714f22e66ae771445a1ceb46821152df8e165c5f9a364582b7", size = 48967 } wheels = [ { url = "https://files.pythonhosted.org/packages/d7/ee/bf0adb559ad3c786f12bcbc9296b3f5675f529199bef03e2df281fa1fadb/email_validator-2.2.0-py3-none-any.whl", hash = "sha256:561977c2d73ce3611850a06fa56b414621e0c8faa9d66f2611407d87465da631", size = 33521 }, ] [[package]] name = "eval-type-backport" version = "0.2.2" source = { registry = "https://pypi.org/simple" } sdist = { url = 
"https://files.pythonhosted.org/packages/30/ea/8b0ac4469d4c347c6a385ff09dc3c048c2d021696664e26c7ee6791631b5/eval_type_backport-0.2.2.tar.gz", hash = "sha256:f0576b4cf01ebb5bd358d02314d31846af5e07678387486e2c798af0e7d849c1", size = 9079 } wheels = [ { url = "https://files.pythonhosted.org/packages/ce/31/55cd413eaccd39125368be33c46de24a1f639f2e12349b0361b4678f3915/eval_type_backport-0.2.2-py3-none-any.whl", hash = "sha256:cb6ad7c393517f476f96d456d0412ea80f0a8cf96f6892834cd9340149111b0a", size = 5830 }, ] [[package]] name = "exceptiongroup" version = "1.2.2" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/09/35/2495c4ac46b980e4ca1f6ad6db102322ef3ad2410b79fdde159a4b0f3b92/exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc", size = 28883 } wheels = [ { url = "https://files.pythonhosted.org/packages/02/cc/b7e31358aac6ed1ef2bb790a9746ac2c69bcb3c8588b41616914eb106eaf/exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b", size = 16453 }, ] [[package]] name = "execnet" version = "2.1.1" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/bb/ff/b4c0dc78fbe20c3e59c0c7334de0c27eb4001a2b2017999af398bf730817/execnet-2.1.1.tar.gz", hash = "sha256:5189b52c6121c24feae288166ab41b32549c7e2348652736540b9e6e7d4e72e3", size = 166524 } wheels = [ { url = "https://files.pythonhosted.org/packages/43/09/2aea36ff60d16dd8879bdb2f5b3ee0ba8d08cbbdcdfe870e695ce3784385/execnet-2.1.1-py3-none-any.whl", hash = "sha256:26dee51f1b80cebd6d0ca8e74dd8745419761d3bef34163928cbebbdc4749fdc", size = 40612 }, ] [[package]] name = "faker" version = "37.0.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "tzdata" }, ] sdist = { url = 
"https://files.pythonhosted.org/packages/37/62/80f15fe1b5abf3e5b09815178d7eb63a150fc7fcfebd5271ca4aab1d885a/faker-37.0.2.tar.gz", hash = "sha256:948bd27706478d3aa0b6f9f58b9f25207098f6ca79852c7b49c44a8ced2bc59b", size = 1875441 } wheels = [ { url = "https://files.pythonhosted.org/packages/a9/8b/b738d3d79ee4502ca966a2a4fa6833c11f50130127bdd57729e9b29c6d2f/faker-37.0.2-py3-none-any.whl", hash = "sha256:8955706c56c28099585e9e2b6f814eb0a3a227eb36a2ee3eb9ab577c4764eacc", size = 1918397 }, ] [[package]] name = "fastapi" version = "0.115.11" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pydantic" }, { name = "starlette" }, { name = "typing-extensions" }, ] sdist = { url = "https://files.pythonhosted.org/packages/b5/28/c5d26e5860df807241909a961a37d45e10533acef95fc368066c7dd186cd/fastapi-0.115.11.tar.gz", hash = "sha256:cc81f03f688678b92600a65a5e618b93592c65005db37157147204d8924bf94f", size = 294441 } wheels = [ { url = "https://files.pythonhosted.org/packages/b3/5d/4d8bbb94f0dbc22732350c06965e40740f4a92ca560e90bb566f4f73af41/fastapi-0.115.11-py3-none-any.whl", hash = "sha256:32e1541b7b74602e4ef4a0260ecaf3aadf9d4f19590bba3e1bf2ac4666aa2c64", size = 94926 }, ] [package.optional-dependencies] all = [ { name = "email-validator" }, { name = "fastapi-cli", extra = ["standard"] }, { name = "httpx" }, { name = "itsdangerous" }, { name = "jinja2" }, { name = "orjson" }, { name = "pydantic-extra-types" }, { name = "pydantic-settings" }, { name = "python-multipart" }, { name = "pyyaml" }, { name = "ujson" }, { name = "uvicorn", extra = ["standard"] }, ] [[package]] name = "fastapi-cli" version = "0.0.7" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "rich-toolkit" }, { name = "typer" }, { name = "uvicorn", extra = ["standard"] }, ] sdist = { url = "https://files.pythonhosted.org/packages/fe/73/82a5831fbbf8ed75905bacf5b2d9d3dfd6f04d6968b29fe6f72a5ae9ceb1/fastapi_cli-0.0.7.tar.gz", hash = 
"sha256:02b3b65956f526412515907a0793c9094abd4bfb5457b389f645b0ea6ba3605e", size = 16753 } wheels = [ { url = "https://files.pythonhosted.org/packages/a1/e6/5daefc851b514ce2287d8f5d358ae4341089185f78f3217a69d0ce3a390c/fastapi_cli-0.0.7-py3-none-any.whl", hash = "sha256:d549368ff584b2804336c61f192d86ddea080c11255f375959627911944804f4", size = 10705 }, ] [package.optional-dependencies] standard = [ { name = "uvicorn", extra = ["standard"] }, ] [[package]] name = "fastnanoid" version = "0.4.1" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/fa/ba/526d8595043d479a4cc612680dabcbf03b72ec9c21551f66f49b5c1c8aa9/fastnanoid-0.4.1.tar.gz", hash = "sha256:c56185bf4da6959fe229584d526246aafc2297e9e69bd1a5886065f2bc532612", size = 7839 } wheels = [ { url = "https://files.pythonhosted.org/packages/17/4e/2884c9cfa143ad8210489f11d48c23bed6277a7793b29f716320133753a6/fastnanoid-0.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:086e7b79e8ceefc3fc18b4136af2a904288dad8e095ae1851f2ed32025b58f0f", size = 194552 }, { url = "https://files.pythonhosted.org/packages/ba/5f/f646acbfa04bba326988c029ec0f99f24b838e3b668d475df8bef976ae98/fastnanoid-0.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ee2b9d9351c2b993d44600f4e347ded762708a2cc483a59fbb042d56b650c0e", size = 233701 }, { url = "https://files.pythonhosted.org/packages/0f/08/ace3e1a07b8674415e5b1dfe51c391853b5a7674ed8cf768ec8146c5b61c/fastnanoid-0.4.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7ecf8e27a1e706ddc8babed6998bbcb28d9625a4f2857f28089bfaf398f27bf0", size = 231891 }, { url = "https://files.pythonhosted.org/packages/2c/73/926e28b89f73dcfb30b8e12464fb5de94c79fbc1f15976ae0ffb1bbb2a14/fastnanoid-0.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:77fd5042be5f15041758ddda75123145112411658e03bcd3abf79f2318f97bfc", size = 261365 }, { url = 
"https://files.pythonhosted.org/packages/0b/c0/805f94bbcf9f65b99f914a087f1fabf2437df0849b2d4749119ee6f63a01/fastnanoid-0.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7ba551afa02b73137686645537223ad4091c4a9e8e8c10286a5cf5c0c54ccb6d", size = 262593 }, { url = "https://files.pythonhosted.org/packages/9e/53/4e5713c22fff3ae9c62762cf6a3342f14ebcfc67278afd8c82cf8aa96b3b/fastnanoid-0.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:57efcd9774e59f8902ed0c6128e497e2616237092b10e7915860bc9089fc1e5f", size = 227368 }, { url = "https://files.pythonhosted.org/packages/72/71/3f5806c9f8a37e8e0c54059c2f2b8d808e207e61db7d065e3b0c74db6817/fastnanoid-0.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9abdb225dcaf94b8bec02d2877783d097ea34a3e635a39c0c7b4ab9c72f0601f", size = 234937 }, { url = "https://files.pythonhosted.org/packages/3d/85/0bd45406bc513d9fe4a63c5b9bb0e8932effd7eaeb6a5d007654c1027317/fastnanoid-0.4.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a4f94084f1d443acc3e6397a82239a9a15cb41c97e546118b7f07a71b8286331", size = 412001 }, { url = "https://files.pythonhosted.org/packages/f8/ce/5102614cea37fdd712a3da35fb64cde0cb501fc8f2f957c5e0f0114aaf8c/fastnanoid-0.4.1-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:3e4dd130f4e1140c812fe9ed5e1ff40d5cfc603a56fc92c36979b52daac79dc1", size = 494252 }, { url = "https://files.pythonhosted.org/packages/b3/28/4f0f07b4a6eec48761cca926d4f6823586795af4ecfe9e729fb78f381f81/fastnanoid-0.4.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3570cdfeb7caa33166bafbef79ae3627c785147c03c5ad3d67234921e4f833ee", size = 415651 }, { url = "https://files.pythonhosted.org/packages/a3/bd/6893d01372ec27b639ef54e1256b52ac0617e3fb14f090b6d0d8ea68347d/fastnanoid-0.4.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3d5eea924e93f2bc1e1d1400c588f2211965d82419921a0e1bbccb65a585aa12", size = 398280 }, { url = 
"https://files.pythonhosted.org/packages/63/9d/21daaaa2ec24ef990376303a2140774b908daf8c255f1b2ad6406d3ccac3/fastnanoid-0.4.1-cp310-none-win32.whl", hash = "sha256:b31ea0bf8acae7963ee14f5d5e132562bc539913308aa78b8b1ac4bb1b75011c", size = 98811 }, { url = "https://files.pythonhosted.org/packages/40/95/58ac4545349e975fd956111874f9412735eede7220a1b2840f0d850da53b/fastnanoid-0.4.1-cp310-none-win_amd64.whl", hash = "sha256:3d58ac0e249b4a8b811c8d512d98450c29d61bfa5b3184fd96f0d08b87d6053c", size = 105875 }, { url = "https://files.pythonhosted.org/packages/5b/b3/907e7c538fb8d7b187277aaab6de6f4def4f2f13cedc96d83d882ed07909/fastnanoid-0.4.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:6cd70d61365546b0d0f7d5dee32032a1e050472338cbcd727a1b8b99d8db9f2c", size = 198462 }, { url = "https://files.pythonhosted.org/packages/3d/ce/6fe33cd55d2b44ca92c8dcad188b5ca0dd6e3cc4c406ac51f9aa5057e423/fastnanoid-0.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9cd2d280ce6acaa2b2caf83e6f1f16f5ab909c98ee2ef558311c4b5c2c811b69", size = 194317 }, { url = "https://files.pythonhosted.org/packages/9d/41/26212a05cf7a9b65246d39fca75656417c78a615062e11599c8b7a704a09/fastnanoid-0.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:af1f10ad89a570196699328bbcf39fb91937d2646f44c8feb7965de5125a1830", size = 233399 }, { url = "https://files.pythonhosted.org/packages/74/81/a1477f74df79a9993455ef677265f65618c8ceb37c285f89191d77def686/fastnanoid-0.4.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f42cf1f995b72560d610eceaf4ded31d62ec520afcece557c4a25e02e1721ef1", size = 231687 }, { url = "https://files.pythonhosted.org/packages/1c/36/b0b50671dacc815e19e763f2d6e3a8f3d891509ebd21e78ab928cc1343f1/fastnanoid-0.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:03b203d3a0ef9347acdfae10a9f8c0de2542770d347a436d81f59395e73dec16", size = 261198 }, { url = 
"https://files.pythonhosted.org/packages/7f/68/f3eae6ff80921f18c0239a9319cefa3537d73dc0cdf861f407d7bc427f23/fastnanoid-0.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7ad1c9b058546ba60c94534331bbc32bfc088feb41f8fe24be6b4acc15713dc1", size = 261751 }, { url = "https://files.pythonhosted.org/packages/4f/1d/1b7449508a1ea387fb3c2cb2a73f13baaf2081633d25c4119f29fc5159f4/fastnanoid-0.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a3b309f811b8c9d0ead8a96d37e40734c258c010bea5dec7ea24e824e7934bb8", size = 227076 }, { url = "https://files.pythonhosted.org/packages/77/ae/3aead74b29a12d8e72f93e5f6f9e4d6203e56961ec5d9a9eac1b849fcf1e/fastnanoid-0.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a29ec0aa8ca23ad88bbf30a449d54946b049b7b26aec514dbc6cc31d0d2581c5", size = 234791 }, { url = "https://files.pythonhosted.org/packages/b3/f1/02803e861f4e1f7994a848ae43a618408343f7caed5ea6c4cc9e557a3d77/fastnanoid-0.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e243e291d08c7c03d700cc0e8e907c5aef88d099f446d1a2c6659cf21647ed66", size = 411720 }, { url = "https://files.pythonhosted.org/packages/da/d6/6e887cb7327d52cd6885a54b94ad1046fc2a54508fd6306a324da77c5461/fastnanoid-0.4.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:be22f19d640cb8c6aef9dd38dc08da593ccc012be0210253777bf87d982900fc", size = 493986 }, { url = "https://files.pythonhosted.org/packages/07/98/0b2bed65176ba2e0505688ca94dc396ad51072900c3e696da87a30e36f1f/fastnanoid-0.4.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4488296cd2eb87d6078cfe7fdf72393a87483241456b3ea1bde6225ca477d6d5", size = 415552 }, { url = "https://files.pythonhosted.org/packages/54/64/b57146c8770c9bf1a8ef791c5edcf1c53b7b2466cb687f6ed1f6859802d9/fastnanoid-0.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:038b1f0626d728c241dafc4eb8469c3f928297025ab5d29e1b5534060e4c9859", size = 398109 }, { url = 
"https://files.pythonhosted.org/packages/5e/50/f3bc7005eccce3565bf6132eaa625ac5226242adbb9b5d6faeef8fa077af/fastnanoid-0.4.1-cp311-none-win32.whl", hash = "sha256:0f11ada2493d38a8634762d9eb625928a9fa6ea069a9e1d98d08ca4acd75b2e7", size = 98795 }, { url = "https://files.pythonhosted.org/packages/7c/24/9be8d080143dbdbcba57e8a93ade8ae68b8adc7a17851b28f343f8efc721/fastnanoid-0.4.1-cp311-none-win_amd64.whl", hash = "sha256:bf092ab330ad8535310938111cf5d24e649a9d600d8895fa9b9a10899be2d6fb", size = 105793 }, { url = "https://files.pythonhosted.org/packages/fd/cd/e614b91c31176e50fc2beb5a99c4c027df36be7ab000b3a7c7af782a26af/fastnanoid-0.4.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:e1970288e8cb7aafbd0b64f8ac8ef947445ca0a22dbcbab490486b1d3671c761", size = 198051 }, { url = "https://files.pythonhosted.org/packages/4b/db/99ce5dbc4527a1a993612a1b941c949d73123b25b680abfc1a91f1bd5b93/fastnanoid-0.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5e833d14a5aab861399e7d7967d91883f3a389c216c1adfbacef162cada5c58b", size = 194232 }, { url = "https://files.pythonhosted.org/packages/ba/08/ab3b573c4b2301476e8177b7a68022dac24272f970c0a658008f10c42f95/fastnanoid-0.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82699d5b3353dca26ace5b587a6d95af7f6e506647c0d620a23fa32558d672a3", size = 232999 }, { url = "https://files.pythonhosted.org/packages/c6/b2/9e3de343798afb336a914a61b62a0ef18a932c6bc854981b36bece4e94b5/fastnanoid-0.4.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0924275254c0ce8514d14ed0bfd2629a7d2d180296d7c22ce6ab72590a09c2e3", size = 231391 }, { url = "https://files.pythonhosted.org/packages/01/92/9c2b7b9a5d8396e6aaba9854559870e1efbda2676806af015611416f22ed/fastnanoid-0.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c576e5096ac66b057dfea31243e8a2ec37fd92c22ac35dde4aca15eb5e54eb7d", size = 260956 }, { url = 
"https://files.pythonhosted.org/packages/af/98/eab314e6b056e9b75e80f746288f6059696393ebafbd74fa0a7a724eb504/fastnanoid-0.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:67f073167a90cab5df5a89e12f97c90b98b9e14486dce5fb8e780cc30a87031e", size = 261119 }, { url = "https://files.pythonhosted.org/packages/10/d8/6f24692866831f146255a37e28ae615ef63363b93ba1f9b2e21f7cf7c353/fastnanoid-0.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6fbe8fbcc19644ed12dbb10b76ff67bb3111b0d51f311215514562058226581", size = 226928 }, { url = "https://files.pythonhosted.org/packages/52/90/618330d6be724ea968950d42087857a4c3faeccec0d503a34bf02a2cab6a/fastnanoid-0.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5eae7c39528884001efc572b89f57093a69bb2732c1b113e5f89047e409f8795", size = 234370 }, { url = "https://files.pythonhosted.org/packages/96/eb/3b647816a1d30c6426f81ab218d15c33eeabfa02d6fef7856df93e80a3bb/fastnanoid-0.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5cf2f5d1c57c41a0de660d1f2529364f715325ea94c5d01498751f8e56758730", size = 411544 }, { url = "https://files.pythonhosted.org/packages/2a/bc/84bde22fa83195cf8edcd60c0ece60a9ca15ef5ab4dc11f7ec49e9e11a1a/fastnanoid-0.4.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:bc9773c8941174ccc60cdc73e3ac265b800f96896a93922991ade01a3017b013", size = 493623 }, { url = "https://files.pythonhosted.org/packages/26/12/276810b4c3c0383d17fce678f758c884318c0b6e32bbbe5cf8fd7c2593f8/fastnanoid-0.4.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:5c039d9c8da181283af8a3a4ded14d1a285ada3c9a5cb78ed0effb3c1748d93c", size = 415097 }, { url = "https://files.pythonhosted.org/packages/a3/0f/df4e1385d31e1e478ce0915af8fd2b880cfb0b9fe936a73d05900dfd0803/fastnanoid-0.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5c93ca137bc68d9fd1a2f189c17a6fa8df908311d07a36e9ba66123827fbfb33", size = 397969 }, { url = 
"https://files.pythonhosted.org/packages/df/87/2c77f57ff69e754f0d2271ff687e9d35ef5f71e5b7c346f38d236c625dec/fastnanoid-0.4.1-cp312-none-win32.whl", hash = "sha256:54dc50f17fa5078c7868cd12cbc9be01e7d4e40b503a98463a7dd2a01a56c39f", size = 98612 }, { url = "https://files.pythonhosted.org/packages/14/48/1131c2590dabfce1ddc28b83f906ca4bab7d39c1d904b2454c46b472a9bd/fastnanoid-0.4.1-cp312-none-win_amd64.whl", hash = "sha256:b6d12d1119fed553cdc632e38c54ccbd7cb2f82dcd0b67ebe879da19cfe0c8e1", size = 105889 }, { url = "https://files.pythonhosted.org/packages/61/f1/26f622d721c71e024cd55faf19531dd69bc25108b1ef326ebece33e9f699/fastnanoid-0.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4e930d7ac5d8512fbe83ef7de46d3ad9d91eb9315b9b68b125d242266825c390", size = 194996 }, { url = "https://files.pythonhosted.org/packages/62/0a/8f0ab7413788d7e7abd2ea9bb21af914f31b3aa1a97b5faa33eb999a742f/fastnanoid-0.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3dc5fdad8b0c748ea66df130de106b02a6fe4933e49f21454272509f534de1f3", size = 234065 }, { url = "https://files.pythonhosted.org/packages/5c/bf/097e56a91f11f1a95d66a398d64e7b528c65f9c6c96c071ccdcb2180563a/fastnanoid-0.4.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d6ba7fa984db6263bd895df90416f99e8975a6032477ad1eef326e8e9e4b1624", size = 231972 }, { url = "https://files.pythonhosted.org/packages/7c/c0/5241bfccc04174f6e836ec721e6d6428eac4e0ab1c10f7efcc10c2024bf2/fastnanoid-0.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:596e8bdd189e51af4a7860d18344f0e08d8cb1fb046063c2a135de774923e1bc", size = 261652 }, { url = "https://files.pythonhosted.org/packages/84/81/5389aed7c24914fae2a150a38e5f595ff8d4bc4d04a087f1e5e469b068e4/fastnanoid-0.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9058cf974261fa89641c88d56f3a80524a4eeafd0b9022625fe1674198912fa7", size = 262552 }, { url = 
"https://files.pythonhosted.org/packages/80/31/e4c9e7654af8e5f815d430fc255c108660ba744f8cc2364333298ec7e40f/fastnanoid-0.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f700fa3c818ccb178efc21543d33689f3deea6f954937294c97d2f1ae111fbc", size = 227269 }, { url = "https://files.pythonhosted.org/packages/6f/8e/1ef0458822a12634c02db9693e190a1dcc8edac4d8a182c1d4b2c2abf1da/fastnanoid-0.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d124937de815606098d473713f564ed3c9d776ed4592df7ae400384b3cdead76", size = 235495 }, { url = "https://files.pythonhosted.org/packages/5e/cf/e5eb22103bf032731a49b46e86d8c4ae833723774a4af1a25a136ff0037e/fastnanoid-0.4.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:a11b5af8a79e90e0d196f8f33e290b2ee491d41eafe743e71edb45878cb2b8dd", size = 411927 }, { url = "https://files.pythonhosted.org/packages/0c/0f/8046a43dd5e83f731d958a92c2c262277f9d3efdfb035fbaec6fde081ad5/fastnanoid-0.4.1-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:ccf13c32c1ef2def1684e1b424cc29d5727d7bbb1cec15af42cbde031dfe71ae", size = 494256 }, { url = "https://files.pythonhosted.org/packages/a6/af/c644f011142aafd67b065b18197f3f2da2eae8aacf5a3419056f890606ad/fastnanoid-0.4.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:9b6b75133fd461d15241648e59fcb6bfa490e86108046907a7806f4b928d55b6", size = 415748 }, { url = "https://files.pythonhosted.org/packages/03/5a/5c925d5ad87c82cc62deefb8b058c4ba9f198167d84ca645c9231774ffff/fastnanoid-0.4.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:af517e0b993ef1a1afca89160d2bd9764e87f17b2db928a8c1c2cd270cc163d5", size = 398538 }, { url = "https://files.pythonhosted.org/packages/cd/8c/76140f4102529b5a9f22333fb7390940fbd5a707859a41c253ba0ce7eb44/fastnanoid-0.4.1-cp39-none-win32.whl", hash = "sha256:491832c86d2dc0aa6a31011f428ab01fdfecbda01eae33d853a57eb8cd90d8f6", size = 98742 }, { url = 
"https://files.pythonhosted.org/packages/8d/be/d1a24ab16cb2537fd372520c78d8596fcb8f644f19e97fd8f0fe68f463d4/fastnanoid-0.4.1-cp39-none-win_amd64.whl", hash = "sha256:43e6890b71682bc610c7782b484f55dd9ac3f7b3c8c341fa90a906215e225379", size = 105862 }, { url = "https://files.pythonhosted.org/packages/9b/61/827cc586b6c4211946d90d9eaa1ec055eba9ae31f71393673c4802113831/fastnanoid-0.4.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4da9c369ad5d1fba8bc4452bfd8e2f40974087a29d66475d8eec281f0386fee9", size = 234444 }, { url = "https://files.pythonhosted.org/packages/a8/48/cce55a34782bb4ed34292d193f1aba6c3629be92f298011dce16b6dfbd8a/fastnanoid-0.4.1-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:359e6f082b348fd37d4e6492f1728948e81758e93de3015bed2a92b58c0c44c3", size = 232612 }, { url = "https://files.pythonhosted.org/packages/ff/f6/e7fea180533649c548f02d0ace16d76305fbea762026c5367d313f047793/fastnanoid-0.4.1-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:823067b5cc527597a07fdeb86d50123824b5c6f41c5bf37d6327f956a247784b", size = 261764 }, { url = "https://files.pythonhosted.org/packages/a7/3f/37a5c8b875d07d665dfa53bb1ca67d7dbce9517fe8001e4f676e81126126/fastnanoid-0.4.1-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:588722f2d43af6fa16ec9e55b9ea480ce3bc111a1c10757cdd2c148f42d12930", size = 263414 }, { url = "https://files.pythonhosted.org/packages/77/95/3c7acf630d78c82bbdaa12c5a66d7aa508d1b545efb85bbb7d1ac37d0d00/fastnanoid-0.4.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:927fbe72d03cb705890f880e20240276a22888f8e5a7da1d21d3eefd24348f47", size = 228228 }, { url = "https://files.pythonhosted.org/packages/97/ce/3f5f5924b236d08d155d3bcb794d63a92075033e2b6ecfe17cbf4614815c/fastnanoid-0.4.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:d0c2b6e3a9cd109154b9a80ae6a38d02550358147f5cf13d166f262af5b62f6b", size = 235934 }, { url = "https://files.pythonhosted.org/packages/a9/cb/3b89fc023f02fe55acbdc4c202050952876c4b118e7caedf034b153d07e2/fastnanoid-0.4.1-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:266039637cddf143a07058e139868e3a8ce3108ffcad001cd8c399f260073bec", size = 412662 }, { url = "https://files.pythonhosted.org/packages/4d/47/723cd630704c499fd7e91548c23658cd08c7f6331e5932c489341c319d19/fastnanoid-0.4.1-pp310-pypy310_pp73-musllinux_1_2_armv7l.whl", hash = "sha256:b1dc76cdcad24f00a7a427cf00f4f72b23893b19ce41fd2f0916bb6e5ecd8b22", size = 494897 }, { url = "https://files.pythonhosted.org/packages/79/87/a1ddf63fdddc17c63e196b3ab7bf997d588683176efe9e97e9a7a72466f1/fastnanoid-0.4.1-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:30911cf3ec13d065bcb1672f624237ad39b8eab57a51fa74ebb99e2765d76b35", size = 416140 }, { url = "https://files.pythonhosted.org/packages/4d/f8/a53cbab4c646f948e7e79100586dddab23cd6ec6e0be02d9a8794c3ba6e8/fastnanoid-0.4.1-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:0897ce9f40ef8483374c16a3a7ecdfe6feb949b420b1e4b69e8359c254020f03", size = 399208 }, { url = "https://files.pythonhosted.org/packages/0e/99/a1bfcfd64fd4b9e8599a634c5f42a7c1ea39d151bcd616c583a95843f041/fastnanoid-0.4.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5dcfee2a516fb701751c52c75c51b9d0b5c0eedac01a032a793e96630ef25b0d", size = 234938 }, { url = "https://files.pythonhosted.org/packages/3a/35/7c7393c06ece7cd27eca843957a71414c66227f1f88249efa78eb6d20e22/fastnanoid-0.4.1-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:24851db09a54c25fb922096cd6716871bd1f564b1f70f4b04f2ee8f71cd3ecac", size = 233149 }, { url = 
"https://files.pythonhosted.org/packages/7c/2b/2ae0a3058c775076e36203bf58417422d399cf2f0df49061d5b577de6942/fastnanoid-0.4.1-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c7fb33cf414d3120b46c5b72ad64bc51e2b81bbbc11d4f8aee2f8e1e4f5bb26e", size = 262215 }, { url = "https://files.pythonhosted.org/packages/b9/95/d67ea1b364195f579ef6c046af35c967e9a6cc33c227dc7e8c36c6028314/fastnanoid-0.4.1-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:54518cb82914e2ab2dfaa2ef18d71996f04fa026d7c7e7aa815e37dc442ec796", size = 263468 }, { url = "https://files.pythonhosted.org/packages/a3/a4/c45127919dddef26dfad4a3a6b1d0548d835647bdaf24ec1308f16a2c8db/fastnanoid-0.4.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16d13f5223ca16e31585fd526c59b05402cf6513a829d5619675badb1dc58424", size = 228442 }, { url = "https://files.pythonhosted.org/packages/1a/08/bfa9a24d903f34bbc857246bd388101ef99ed7832efa826e31a4ef4957c0/fastnanoid-0.4.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:00dda7a2f34e2d41a6600ae62367472c255573865d294b3941a725ee010c3873", size = 235962 }, { url = "https://files.pythonhosted.org/packages/a9/b8/da79a09b9ebfe7f502e308fe64acd043060988cf8a5d9322cedc5889a4ea/fastnanoid-0.4.1-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:0a6b9bed15396c2b7e0bc72350c3ee2a0573d47defe95e387bbd592dbe9d5922", size = 412938 }, { url = "https://files.pythonhosted.org/packages/bc/54/d7049f90777bf21de4d5ab74b42cedf213bcdff7f0daa318442f18ebb072/fastnanoid-0.4.1-pp39-pypy39_pp73-musllinux_1_2_armv7l.whl", hash = "sha256:e69ce0aa186e5236086b40de3814f0719037d2eed66e04bee2c0116ee4fd5501", size = 495351 }, { url = "https://files.pythonhosted.org/packages/45/03/757a093f34aea99dfa948cd0af82d10196c7b9f2c893fd9c020c9da89ea0/fastnanoid-0.4.1-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:e0ab1f5806047e484a64bae131e502bd22b8f1ef740f866adc73c645c8648d1b", size = 416558 
}, { url = "https://files.pythonhosted.org/packages/7b/cb/77d820e6050c1e669ef8661f6b94d0f203242a5e20191802140920420e2b/fastnanoid-0.4.1-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:02f427342dfa025d66253fb5176da1f332dcb2784929bc6f12a4dc0f9b9e203c", size = 399506 }, ] [[package]] name = "filelock" version = "3.18.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/0a/10/c23352565a6544bdc5353e0b15fc1c563352101f30e24bf500207a54df9a/filelock-3.18.0.tar.gz", hash = "sha256:adbc88eabb99d2fec8c9c1b229b171f18afa655400173ddc653d5d01501fb9f2", size = 18075 } wheels = [ { url = "https://files.pythonhosted.org/packages/4d/36/2a115987e2d8c300a974597416d9de88f2444426de9571f4b59b2cca3acc/filelock-3.18.0-py3-none-any.whl", hash = "sha256:c401f4f8377c4464e6db25fff06205fd89bdd83b65eb0488ed1b160f780e21de", size = 16215 }, ] [[package]] name = "flask" version = "3.1.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "blinker" }, { name = "click" }, { name = "importlib-metadata", marker = "python_full_version < '3.10'" }, { name = "itsdangerous" }, { name = "jinja2" }, { name = "werkzeug" }, ] sdist = { url = "https://files.pythonhosted.org/packages/89/50/dff6380f1c7f84135484e176e0cac8690af72fa90e932ad2a0a60e28c69b/flask-3.1.0.tar.gz", hash = "sha256:5f873c5184c897c8d9d1b05df1e3d01b14910ce69607a117bd3277098a5836ac", size = 680824 } wheels = [ { url = "https://files.pythonhosted.org/packages/af/47/93213ee66ef8fae3b93b3e29206f6b251e65c97bd91d8e1c5596ef15af0a/flask-3.1.0-py3-none-any.whl", hash = "sha256:d667207822eb83f1c4b50949b1623c8fc8d51f2341d65f72e1a1815397551136", size = 102979 }, ] [package.optional-dependencies] async = [ { name = "asgiref" }, ] [[package]] name = "flask-sqlalchemy" version = "3.1.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "flask" }, { name = "sqlalchemy" }, ] sdist = { url = 
"https://files.pythonhosted.org/packages/91/53/b0a9fcc1b1297f51e68b69ed3b7c3c40d8c45be1391d77ae198712914392/flask_sqlalchemy-3.1.1.tar.gz", hash = "sha256:e4b68bb881802dda1a7d878b2fc84c06d1ee57fb40b874d3dc97dabfa36b8312", size = 81899 } wheels = [ { url = "https://files.pythonhosted.org/packages/1d/6a/89963a5c6ecf166e8be29e0d1bf6806051ee8fe6c82e232842e3aeac9204/flask_sqlalchemy-3.1.1-py3-none-any.whl", hash = "sha256:4ba4be7f419dc72f4efd8802d69974803c37259dd42f3913b0dcf75c9447e0a0", size = 25125 }, ] [[package]] name = "google-api-core" version = "2.24.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "google-auth" }, { name = "googleapis-common-protos" }, { name = "proto-plus" }, { name = "protobuf" }, { name = "requests" }, ] sdist = { url = "https://files.pythonhosted.org/packages/09/5c/085bcb872556934bb119e5e09de54daa07873f6866b8f0303c49e72287f7/google_api_core-2.24.2.tar.gz", hash = "sha256:81718493daf06d96d6bc76a91c23874dbf2fac0adbbf542831b805ee6e974696", size = 163516 } wheels = [ { url = "https://files.pythonhosted.org/packages/46/95/f472d85adab6e538da2025dfca9e976a0d125cc0af2301f190e77b76e51c/google_api_core-2.24.2-py3-none-any.whl", hash = "sha256:810a63ac95f3c441b7c0e43d344e372887f62ce9071ba972eacf32672e072de9", size = 160061 }, ] [package.optional-dependencies] grpc = [ { name = "grpcio" }, { name = "grpcio-status" }, ] [[package]] name = "google-auth" version = "2.38.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cachetools" }, { name = "pyasn1-modules" }, { name = "rsa" }, ] sdist = { url = "https://files.pythonhosted.org/packages/c6/eb/d504ba1daf190af6b204a9d4714d457462b486043744901a6eeea711f913/google_auth-2.38.0.tar.gz", hash = "sha256:8285113607d3b80a3f1543b75962447ba8a09fe85783432a784fdeef6ac094c4", size = 270866 } wheels = [ { url = "https://files.pythonhosted.org/packages/9d/47/603554949a37bca5b7f894d51896a9c534b9eab808e2520a748e081669d0/google_auth-2.38.0-py2.py3-none-any.whl", hash 
= "sha256:e7dae6694313f434a2727bf2906f27ad259bae090d7aa896590d86feec3d9d4a", size = 210770 }, ] [[package]] name = "google-cloud-core" version = "2.4.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "google-api-core" }, { name = "google-auth" }, ] sdist = { url = "https://files.pythonhosted.org/packages/d6/b8/2b53838d2acd6ec6168fd284a990c76695e84c65deee79c9f3a4276f6b4f/google_cloud_core-2.4.3.tar.gz", hash = "sha256:1fab62d7102844b278fe6dead3af32408b1df3eb06f5c7e8634cbd40edc4da53", size = 35861 } wheels = [ { url = "https://files.pythonhosted.org/packages/40/86/bda7241a8da2d28a754aad2ba0f6776e35b67e37c36ae0c45d49370f1014/google_cloud_core-2.4.3-py2.py3-none-any.whl", hash = "sha256:5130f9f4c14b4fafdff75c79448f9495cfade0d8775facf1b09c3bf67e027f6e", size = 29348 }, ] [[package]] name = "google-cloud-spanner" version = "3.53.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "google-api-core", extra = ["grpc"] }, { name = "google-cloud-core" }, { name = "grpc-google-iam-v1" }, { name = "grpc-interceptor" }, { name = "proto-plus" }, { name = "protobuf" }, { name = "sqlparse" }, ] sdist = { url = "https://files.pythonhosted.org/packages/27/eb/cef9263b4ac61a9e967d3d6846c4469151160a1cced891791ce1c6ccddee/google_cloud_spanner-3.53.0.tar.gz", hash = "sha256:0c7be3134b74928cf928d1f73b58c722fc2014346de1240a0cc8ffdd3222f606", size = 659445 } wheels = [ { url = "https://files.pythonhosted.org/packages/d6/96/52997fc187611a2cf0c64df747fa70ffc0b469f0a367f39bdd078c43db52/google_cloud_spanner-3.53.0-py2.py3-none-any.whl", hash = "sha256:be863394521b44df3c5a118c00c4b7c978d4437adb49e359e39b3d76362a7e60", size = 483101 }, ] [[package]] name = "googleapis-common-protos" version = "1.69.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "protobuf" }, ] sdist = { url = 
"https://files.pythonhosted.org/packages/1b/d7/ee9d56af4e6dbe958562b5020f46263c8a4628e7952070241fc0e9b182ae/googleapis_common_protos-1.69.2.tar.gz", hash = "sha256:3e1b904a27a33c821b4b749fd31d334c0c9c30e6113023d495e48979a3dc9c5f", size = 144496 } wheels = [ { url = "https://files.pythonhosted.org/packages/f9/53/d35476d547a286506f0a6a634ccf1e5d288fffd53d48f0bd5fef61d68684/googleapis_common_protos-1.69.2-py3-none-any.whl", hash = "sha256:0b30452ff9c7a27d80bfc5718954063e8ab53dd3697093d3bc99581f5fd24212", size = 293215 }, ] [package.optional-dependencies] grpc = [ { name = "grpcio" }, ] [[package]] name = "greenlet" version = "3.1.1" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/2f/ff/df5fede753cc10f6a5be0931204ea30c35fa2f2ea7a35b25bdaf4fe40e46/greenlet-3.1.1.tar.gz", hash = "sha256:4ce3ac6cdb6adf7946475d7ef31777c26d94bccc377e070a7986bd2d5c515467", size = 186022 } wheels = [ { url = "https://files.pythonhosted.org/packages/25/90/5234a78dc0ef6496a6eb97b67a42a8e96742a56f7dc808cb954a85390448/greenlet-3.1.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:0bbae94a29c9e5c7e4a2b7f0aae5c17e8e90acbfd3bf6270eeba60c39fce3563", size = 271235 }, { url = "https://files.pythonhosted.org/packages/7c/16/cd631fa0ab7d06ef06387135b7549fdcc77d8d859ed770a0d28e47b20972/greenlet-3.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fde093fb93f35ca72a556cf72c92ea3ebfda3d79fc35bb19fbe685853869a83", size = 637168 }, { url = "https://files.pythonhosted.org/packages/2f/b1/aed39043a6fec33c284a2c9abd63ce191f4f1a07319340ffc04d2ed3256f/greenlet-3.1.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:36b89d13c49216cadb828db8dfa6ce86bbbc476a82d3a6c397f0efae0525bdd0", size = 648826 }, { url = "https://files.pythonhosted.org/packages/76/25/40e0112f7f3ebe54e8e8ed91b2b9f970805143efef16d043dfc15e70f44b/greenlet-3.1.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:94b6150a85e1b33b40b1464a3f9988dcc5251d6ed06842abff82e42632fac120", size = 644443 }, { url = "https://files.pythonhosted.org/packages/fb/2f/3850b867a9af519794784a7eeed1dd5bc68ffbcc5b28cef703711025fd0a/greenlet-3.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93147c513fac16385d1036b7e5b102c7fbbdb163d556b791f0f11eada7ba65dc", size = 643295 }, { url = "https://files.pythonhosted.org/packages/cf/69/79e4d63b9387b48939096e25115b8af7cd8a90397a304f92436bcb21f5b2/greenlet-3.1.1-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:da7a9bff22ce038e19bf62c4dd1ec8391062878710ded0a845bcf47cc0200617", size = 599544 }, { url = "https://files.pythonhosted.org/packages/46/1d/44dbcb0e6c323bd6f71b8c2f4233766a5faf4b8948873225d34a0b7efa71/greenlet-3.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b2795058c23988728eec1f36a4e5e4ebad22f8320c85f3587b539b9ac84128d7", size = 1125456 }, { url = "https://files.pythonhosted.org/packages/e0/1d/a305dce121838d0278cee39d5bb268c657f10a5363ae4b726848f833f1bb/greenlet-3.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ed10eac5830befbdd0c32f83e8aa6288361597550ba669b04c48f0f9a2c843c6", size = 1149111 }, { url = "https://files.pythonhosted.org/packages/96/28/d62835fb33fb5652f2e98d34c44ad1a0feacc8b1d3f1aecab035f51f267d/greenlet-3.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:77c386de38a60d1dfb8e55b8c1101d68c79dfdd25c7095d51fec2dd800892b80", size = 298392 }, { url = "https://files.pythonhosted.org/packages/28/62/1c2665558618553c42922ed47a4e6d6527e2fa3516a8256c2f431c5d0441/greenlet-3.1.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:e4d333e558953648ca09d64f13e6d8f0523fa705f51cae3f03b5983489958c70", size = 272479 }, { url = "https://files.pythonhosted.org/packages/76/9d/421e2d5f07285b6e4e3a676b016ca781f63cfe4a0cd8eaecf3fd6f7a71ae/greenlet-3.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:09fc016b73c94e98e29af67ab7b9a879c307c6731a2c9da0db5a7d9b7edd1159", size = 640404 }, { url = "https://files.pythonhosted.org/packages/e5/de/6e05f5c59262a584e502dd3d261bbdd2c97ab5416cc9c0b91ea38932a901/greenlet-3.1.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d5e975ca70269d66d17dd995dafc06f1b06e8cb1ec1e9ed54c1d1e4a7c4cf26e", size = 652813 }, { url = "https://files.pythonhosted.org/packages/49/93/d5f93c84241acdea15a8fd329362c2c71c79e1a507c3f142a5d67ea435ae/greenlet-3.1.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b2813dc3de8c1ee3f924e4d4227999285fd335d1bcc0d2be6dc3f1f6a318ec1", size = 648517 }, { url = "https://files.pythonhosted.org/packages/15/85/72f77fc02d00470c86a5c982b8daafdf65d38aefbbe441cebff3bf7037fc/greenlet-3.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e347b3bfcf985a05e8c0b7d462ba6f15b1ee1c909e2dcad795e49e91b152c383", size = 647831 }, { url = "https://files.pythonhosted.org/packages/f7/4b/1c9695aa24f808e156c8f4813f685d975ca73c000c2a5056c514c64980f6/greenlet-3.1.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e8f8c9cb53cdac7ba9793c276acd90168f416b9ce36799b9b885790f8ad6c0a", size = 602413 }, { url = "https://files.pythonhosted.org/packages/76/70/ad6e5b31ef330f03b12559d19fda2606a522d3849cde46b24f223d6d1619/greenlet-3.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:62ee94988d6b4722ce0028644418d93a52429e977d742ca2ccbe1c4f4a792511", size = 1129619 }, { url = "https://files.pythonhosted.org/packages/f4/fb/201e1b932e584066e0f0658b538e73c459b34d44b4bd4034f682423bc801/greenlet-3.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1776fd7f989fc6b8d8c8cb8da1f6b82c5814957264d1f6cf818d475ec2bf6395", size = 1155198 }, { url = "https://files.pythonhosted.org/packages/12/da/b9ed5e310bb8b89661b80cbcd4db5a067903bbcd7fc854923f5ebb4144f0/greenlet-3.1.1-cp311-cp311-win_amd64.whl", hash = 
"sha256:48ca08c771c268a768087b408658e216133aecd835c0ded47ce955381105ba39", size = 298930 }, { url = "https://files.pythonhosted.org/packages/7d/ec/bad1ac26764d26aa1353216fcbfa4670050f66d445448aafa227f8b16e80/greenlet-3.1.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:4afe7ea89de619adc868e087b4d2359282058479d7cfb94970adf4b55284574d", size = 274260 }, { url = "https://files.pythonhosted.org/packages/66/d4/c8c04958870f482459ab5956c2942c4ec35cac7fe245527f1039837c17a9/greenlet-3.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f406b22b7c9a9b4f8aa9d2ab13d6ae0ac3e85c9a809bd590ad53fed2bf70dc79", size = 649064 }, { url = "https://files.pythonhosted.org/packages/51/41/467b12a8c7c1303d20abcca145db2be4e6cd50a951fa30af48b6ec607581/greenlet-3.1.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c3a701fe5a9695b238503ce5bbe8218e03c3bcccf7e204e455e7462d770268aa", size = 663420 }, { url = "https://files.pythonhosted.org/packages/27/8f/2a93cd9b1e7107d5c7b3b7816eeadcac2ebcaf6d6513df9abaf0334777f6/greenlet-3.1.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2846930c65b47d70b9d178e89c7e1a69c95c1f68ea5aa0a58646b7a96df12441", size = 658035 }, { url = "https://files.pythonhosted.org/packages/57/5c/7c6f50cb12be092e1dccb2599be5a942c3416dbcfb76efcf54b3f8be4d8d/greenlet-3.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99cfaa2110534e2cf3ba31a7abcac9d328d1d9f1b95beede58294a60348fba36", size = 660105 }, { url = "https://files.pythonhosted.org/packages/f1/66/033e58a50fd9ec9df00a8671c74f1f3a320564c6415a4ed82a1c651654ba/greenlet-3.1.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1443279c19fca463fc33e65ef2a935a5b09bb90f978beab37729e1c3c6c25fe9", size = 613077 }, { url = "https://files.pythonhosted.org/packages/19/c5/36384a06f748044d06bdd8776e231fadf92fc896bd12cb1c9f5a1bda9578/greenlet-3.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = 
"sha256:b7cede291382a78f7bb5f04a529cb18e068dd29e0fb27376074b6d0317bf4dd0", size = 1135975 }, { url = "https://files.pythonhosted.org/packages/38/f9/c0a0eb61bdf808d23266ecf1d63309f0e1471f284300ce6dac0ae1231881/greenlet-3.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:23f20bb60ae298d7d8656c6ec6db134bca379ecefadb0b19ce6f19d1f232a942", size = 1163955 }, { url = "https://files.pythonhosted.org/packages/43/21/a5d9df1d21514883333fc86584c07c2b49ba7c602e670b174bd73cfc9c7f/greenlet-3.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:7124e16b4c55d417577c2077be379514321916d5790fa287c9ed6f23bd2ffd01", size = 299655 }, { url = "https://files.pythonhosted.org/packages/f3/57/0db4940cd7bb461365ca8d6fd53e68254c9dbbcc2b452e69d0d41f10a85e/greenlet-3.1.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:05175c27cb459dcfc05d026c4232f9de8913ed006d42713cb8a5137bd49375f1", size = 272990 }, { url = "https://files.pythonhosted.org/packages/1c/ec/423d113c9f74e5e402e175b157203e9102feeb7088cee844d735b28ef963/greenlet-3.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:935e943ec47c4afab8965954bf49bfa639c05d4ccf9ef6e924188f762145c0ff", size = 649175 }, { url = "https://files.pythonhosted.org/packages/a9/46/ddbd2db9ff209186b7b7c621d1432e2f21714adc988703dbdd0e65155c77/greenlet-3.1.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:667a9706c970cb552ede35aee17339a18e8f2a87a51fba2ed39ceeeb1004798a", size = 663425 }, { url = "https://files.pythonhosted.org/packages/bc/f9/9c82d6b2b04aa37e38e74f0c429aece5eeb02bab6e3b98e7db89b23d94c6/greenlet-3.1.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b8a678974d1f3aa55f6cc34dc480169d58f2e6d8958895d68845fa4ab566509e", size = 657736 }, { url = "https://files.pythonhosted.org/packages/d9/42/b87bc2a81e3a62c3de2b0d550bf91a86939442b7ff85abb94eec3fc0e6aa/greenlet-3.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:efc0f674aa41b92da8c49e0346318c6075d734994c3c4e4430b1c3f853e498e4", size = 660347 }, { url = "https://files.pythonhosted.org/packages/37/fa/71599c3fd06336cdc3eac52e6871cfebab4d9d70674a9a9e7a482c318e99/greenlet-3.1.1-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0153404a4bb921f0ff1abeb5ce8a5131da56b953eda6e14b88dc6bbc04d2049e", size = 615583 }, { url = "https://files.pythonhosted.org/packages/4e/96/e9ef85de031703ee7a4483489b40cf307f93c1824a02e903106f2ea315fe/greenlet-3.1.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:275f72decf9932639c1c6dd1013a1bc266438eb32710016a1c742df5da6e60a1", size = 1133039 }, { url = "https://files.pythonhosted.org/packages/87/76/b2b6362accd69f2d1889db61a18c94bc743e961e3cab344c2effaa4b4a25/greenlet-3.1.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c4aab7f6381f38a4b42f269057aee279ab0fc7bf2e929e3d4abfae97b682a12c", size = 1160716 }, { url = "https://files.pythonhosted.org/packages/1f/1b/54336d876186920e185066d8c3024ad55f21d7cc3683c856127ddb7b13ce/greenlet-3.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:b42703b1cf69f2aa1df7d1030b9d77d3e584a70755674d60e710f0af570f3761", size = 299490 }, { url = "https://files.pythonhosted.org/packages/5f/17/bea55bf36990e1638a2af5ba10c1640273ef20f627962cf97107f1e5d637/greenlet-3.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1695e76146579f8c06c1509c7ce4dfe0706f49c6831a817ac04eebb2fd02011", size = 643731 }, { url = "https://files.pythonhosted.org/packages/78/d2/aa3d2157f9ab742a08e0fd8f77d4699f37c22adfbfeb0c610a186b5f75e0/greenlet-3.1.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7876452af029456b3f3549b696bb36a06db7c90747740c5302f74a9e9fa14b13", size = 649304 }, { url = "https://files.pythonhosted.org/packages/f1/8e/d0aeffe69e53ccff5a28fa86f07ad1d2d2d6537a9506229431a2a02e2f15/greenlet-3.1.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:4ead44c85f8ab905852d3de8d86f6f8baf77109f9da589cb4fa142bd3b57b475", size = 646537 }, { url = "https://files.pythonhosted.org/packages/05/79/e15408220bbb989469c8871062c97c6c9136770657ba779711b90870d867/greenlet-3.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8320f64b777d00dd7ccdade271eaf0cad6636343293a25074cc5566160e4de7b", size = 642506 }, { url = "https://files.pythonhosted.org/packages/18/87/470e01a940307796f1d25f8167b551a968540fbe0551c0ebb853cb527dd6/greenlet-3.1.1-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6510bf84a6b643dabba74d3049ead221257603a253d0a9873f55f6a59a65f822", size = 602753 }, { url = "https://files.pythonhosted.org/packages/e2/72/576815ba674eddc3c25028238f74d7b8068902b3968cbe456771b166455e/greenlet-3.1.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:04b013dc07c96f83134b1e99888e7a79979f1a247e2a9f59697fa14b5862ed01", size = 1122731 }, { url = "https://files.pythonhosted.org/packages/ac/38/08cc303ddddc4b3d7c628c3039a61a3aae36c241ed01393d00c2fd663473/greenlet-3.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:411f015496fec93c1c8cd4e5238da364e1da7a124bcb293f085bf2860c32c6f6", size = 1142112 }, { url = "https://files.pythonhosted.org/packages/8c/82/8051e82af6d6b5150aacb6789a657a8afd48f0a44d8e91cb72aaaf28553a/greenlet-3.1.1-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:396979749bd95f018296af156201d6211240e7a23090f50a8d5d18c370084dc3", size = 270027 }, { url = "https://files.pythonhosted.org/packages/f9/74/f66de2785880293780eebd18a2958aeea7cbe7814af1ccef634f4701f846/greenlet-3.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca9d0ff5ad43e785350894d97e13633a66e2b50000e8a183a50a88d834752d42", size = 634822 }, { url = "https://files.pythonhosted.org/packages/68/23/acd9ca6bc412b02b8aa755e47b16aafbe642dde0ad2f929f836e57a7949c/greenlet-3.1.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:f6ff3b14f2df4c41660a7dec01045a045653998784bf8cfcb5a525bdffffbc8f", size = 646866 }, { url = "https://files.pythonhosted.org/packages/a9/ab/562beaf8a53dc9f6b2459f200e7bc226bb07e51862a66351d8b7817e3efd/greenlet-3.1.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94ebba31df2aa506d7b14866fed00ac141a867e63143fe5bca82a8e503b36437", size = 641985 }, { url = "https://files.pythonhosted.org/packages/03/d3/1006543621f16689f6dc75f6bcf06e3c23e044c26fe391c16c253623313e/greenlet-3.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73aaad12ac0ff500f62cebed98d8789198ea0e6f233421059fa68a5aa7220145", size = 641268 }, { url = "https://files.pythonhosted.org/packages/2f/c1/ad71ce1b5f61f900593377b3f77b39408bce5dc96754790311b49869e146/greenlet-3.1.1-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:63e4844797b975b9af3a3fb8f7866ff08775f5426925e1e0bbcfe7932059a12c", size = 597376 }, { url = "https://files.pythonhosted.org/packages/f7/ff/183226685b478544d61d74804445589e069d00deb8ddef042699733950c7/greenlet-3.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7939aa3ca7d2a1593596e7ac6d59391ff30281ef280d8632fa03d81f7c5f955e", size = 1123359 }, { url = "https://files.pythonhosted.org/packages/c0/8b/9b3b85a89c22f55f315908b94cd75ab5fed5973f7393bbef000ca8b2c5c1/greenlet-3.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d0028e725ee18175c6e422797c407874da24381ce0690d6b9396c204c7f7276e", size = 1147458 }, { url = "https://files.pythonhosted.org/packages/b8/1c/248fadcecd1790b0ba793ff81fa2375c9ad6442f4c748bf2cc2e6563346a/greenlet-3.1.1-cp39-cp39-win32.whl", hash = "sha256:5e06afd14cbaf9e00899fae69b24a32f2196c19de08fcb9f4779dd4f004e5e7c", size = 281131 }, { url = "https://files.pythonhosted.org/packages/ae/02/e7d0aef2354a38709b764df50b2b83608f0621493e47f47694eb80922822/greenlet-3.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:3319aa75e0e0639bc15ff54ca327e8dc7a6fe404003496e3c6925cd3142e0e22", size = 298306 
}, ] [[package]] name = "grpc-google-iam-v1" version = "0.14.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "googleapis-common-protos", extra = ["grpc"] }, { name = "grpcio" }, { name = "protobuf" }, ] sdist = { url = "https://files.pythonhosted.org/packages/b9/4e/8d0ca3b035e41fe0b3f31ebbb638356af720335e5a11154c330169b40777/grpc_google_iam_v1-0.14.2.tar.gz", hash = "sha256:b3e1fc387a1a329e41672197d0ace9de22c78dd7d215048c4c78712073f7bd20", size = 16259 } wheels = [ { url = "https://files.pythonhosted.org/packages/66/6f/dd9b178aee7835b96c2e63715aba6516a9d50f6bebbd1cc1d32c82a2a6c3/grpc_google_iam_v1-0.14.2-py3-none-any.whl", hash = "sha256:a3171468459770907926d56a440b2bb643eec1d7ba215f48f3ecece42b4d8351", size = 19242 }, ] [[package]] name = "grpc-interceptor" version = "0.15.4" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "grpcio" }, ] sdist = { url = "https://files.pythonhosted.org/packages/9f/28/57449d5567adf4c1d3e216aaca545913fbc21a915f2da6790d6734aac76e/grpc-interceptor-0.15.4.tar.gz", hash = "sha256:1f45c0bcb58b6f332f37c637632247c9b02bc6af0fdceb7ba7ce8d2ebbfb0926", size = 19322 } wheels = [ { url = "https://files.pythonhosted.org/packages/15/ac/8d53f230a7443401ce81791ec50a3b0e54924bf615ad287654fa4a2f5cdc/grpc_interceptor-0.15.4-py3-none-any.whl", hash = "sha256:0035f33228693ed3767ee49d937bac424318db173fef4d2d0170b3215f254d9d", size = 20848 }, ] [[package]] name = "grpcio" version = "1.71.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/1c/95/aa11fc09a85d91fbc7dd405dcb2a1e0256989d67bf89fa65ae24b3ba105a/grpcio-1.71.0.tar.gz", hash = "sha256:2b85f7820475ad3edec209d3d89a7909ada16caab05d3f2e08a7e8ae3200a55c", size = 12549828 } wheels = [ { url = "https://files.pythonhosted.org/packages/7c/c5/ef610b3f988cc0cc67b765f72b8e2db06a1db14e65acb5ae7810a6b7042e/grpcio-1.71.0-cp310-cp310-linux_armv7l.whl", hash = 
"sha256:c200cb6f2393468142eb50ab19613229dcc7829b5ccee8b658a36005f6669fdd", size = 5210643 }, { url = "https://files.pythonhosted.org/packages/bf/de/c84293c961622df302c0d5d07ec6e2d4cd3874ea42f602be2df09c4ad44f/grpcio-1.71.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:b2266862c5ad664a380fbbcdbdb8289d71464c42a8c29053820ee78ba0119e5d", size = 11308962 }, { url = "https://files.pythonhosted.org/packages/7c/38/04c9e0dc8c904570c80faa1f1349b190b63e45d6b2782ec8567b050efa9d/grpcio-1.71.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:0ab8b2864396663a5b0b0d6d79495657ae85fa37dcb6498a2669d067c65c11ea", size = 5699236 }, { url = "https://files.pythonhosted.org/packages/95/96/e7be331d1298fa605ea7c9ceafc931490edd3d5b33c4f695f1a0667f3491/grpcio-1.71.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c30f393f9d5ff00a71bb56de4aa75b8fe91b161aeb61d39528db6b768d7eac69", size = 6339767 }, { url = "https://files.pythonhosted.org/packages/5d/b7/7e7b7bb6bb18baf156fd4f2f5b254150dcdd6cbf0def1ee427a2fb2bfc4d/grpcio-1.71.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f250ff44843d9a0615e350c77f890082102a0318d66a99540f54769c8766ab73", size = 5943028 }, { url = "https://files.pythonhosted.org/packages/13/aa/5fb756175995aeb47238d706530772d9a7ac8e73bcca1b47dc145d02c95f/grpcio-1.71.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e6d8de076528f7c43a2f576bc311799f89d795aa6c9b637377cc2b1616473804", size = 6031841 }, { url = "https://files.pythonhosted.org/packages/54/93/172783e01eed61f7f180617b7fa4470f504e383e32af2587f664576a7101/grpcio-1.71.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:9b91879d6da1605811ebc60d21ab6a7e4bae6c35f6b63a061d61eb818c8168f6", size = 6651039 }, { url = "https://files.pythonhosted.org/packages/6f/99/62654b220a27ed46d3313252214f4bc66261143dc9b58004085cd0646753/grpcio-1.71.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:f71574afdf944e6652203cd1badcda195b2a27d9c83e6d88dc1ce3cfb73b31a5", size = 6198465 }, { url = "https://files.pythonhosted.org/packages/68/35/96116de833b330abe4412cc94edc68f99ed2fa3e39d8713ff307b3799e81/grpcio-1.71.0-cp310-cp310-win32.whl", hash = "sha256:8997d6785e93308f277884ee6899ba63baafa0dfb4729748200fcc537858a509", size = 3620382 }, { url = "https://files.pythonhosted.org/packages/b7/09/f32ef637e386f3f2c02effac49699229fa560ce9007682d24e9e212d2eb4/grpcio-1.71.0-cp310-cp310-win_amd64.whl", hash = "sha256:7d6ac9481d9d0d129224f6d5934d5832c4b1cddb96b59e7eba8416868909786a", size = 4280302 }, { url = "https://files.pythonhosted.org/packages/63/04/a085f3ad4133426f6da8c1becf0749872a49feb625a407a2e864ded3fb12/grpcio-1.71.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:d6aa986318c36508dc1d5001a3ff169a15b99b9f96ef5e98e13522c506b37eef", size = 5210453 }, { url = "https://files.pythonhosted.org/packages/b4/d5/0bc53ed33ba458de95020970e2c22aa8027b26cc84f98bea7fcad5d695d1/grpcio-1.71.0-cp311-cp311-macosx_10_14_universal2.whl", hash = "sha256:d2c170247315f2d7e5798a22358e982ad6eeb68fa20cf7a820bb74c11f0736e7", size = 11347567 }, { url = "https://files.pythonhosted.org/packages/e3/6d/ce334f7e7a58572335ccd61154d808fe681a4c5e951f8a1ff68f5a6e47ce/grpcio-1.71.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:e6f83a583ed0a5b08c5bc7a3fe860bb3c2eac1f03f1f63e0bc2091325605d2b7", size = 5696067 }, { url = "https://files.pythonhosted.org/packages/05/4a/80befd0b8b1dc2b9ac5337e57473354d81be938f87132e147c4a24a581bd/grpcio-1.71.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4be74ddeeb92cc87190e0e376dbc8fc7736dbb6d3d454f2fa1f5be1dee26b9d7", size = 6348377 }, { url = "https://files.pythonhosted.org/packages/c7/67/cbd63c485051eb78663355d9efd1b896cfb50d4a220581ec2cb9a15cd750/grpcio-1.71.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4dd0dfbe4d5eb1fcfec9490ca13f82b089a309dc3678e2edabc144051270a66e", size = 5940407 }, { url = 
"https://files.pythonhosted.org/packages/98/4b/7a11aa4326d7faa499f764eaf8a9b5a0eb054ce0988ee7ca34897c2b02ae/grpcio-1.71.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a2242d6950dc892afdf9e951ed7ff89473aaf744b7d5727ad56bdaace363722b", size = 6030915 }, { url = "https://files.pythonhosted.org/packages/eb/a2/cdae2d0e458b475213a011078b0090f7a1d87f9a68c678b76f6af7c6ac8c/grpcio-1.71.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:0fa05ee31a20456b13ae49ad2e5d585265f71dd19fbd9ef983c28f926d45d0a7", size = 6648324 }, { url = "https://files.pythonhosted.org/packages/27/df/f345c8daaa8d8574ce9869f9b36ca220c8845923eb3087e8f317eabfc2a8/grpcio-1.71.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3d081e859fb1ebe176de33fc3adb26c7d46b8812f906042705346b314bde32c3", size = 6197839 }, { url = "https://files.pythonhosted.org/packages/f2/2c/cd488dc52a1d0ae1bad88b0d203bc302efbb88b82691039a6d85241c5781/grpcio-1.71.0-cp311-cp311-win32.whl", hash = "sha256:d6de81c9c00c8a23047136b11794b3584cdc1460ed7cbc10eada50614baa1444", size = 3619978 }, { url = "https://files.pythonhosted.org/packages/ee/3f/cf92e7e62ccb8dbdf977499547dfc27133124d6467d3a7d23775bcecb0f9/grpcio-1.71.0-cp311-cp311-win_amd64.whl", hash = "sha256:24e867651fc67717b6f896d5f0cac0ec863a8b5fb7d6441c2ab428f52c651c6b", size = 4282279 }, { url = "https://files.pythonhosted.org/packages/4c/83/bd4b6a9ba07825bd19c711d8b25874cd5de72c2a3fbf635c3c344ae65bd2/grpcio-1.71.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:0ff35c8d807c1c7531d3002be03221ff9ae15712b53ab46e2a0b4bb271f38537", size = 5184101 }, { url = "https://files.pythonhosted.org/packages/31/ea/2e0d90c0853568bf714693447f5c73272ea95ee8dad107807fde740e595d/grpcio-1.71.0-cp312-cp312-macosx_10_14_universal2.whl", hash = "sha256:b78a99cd1ece4be92ab7c07765a0b038194ded2e0a26fd654591ee136088d8d7", size = 11310927 }, { url = 
"https://files.pythonhosted.org/packages/ac/bc/07a3fd8af80467390af491d7dc66882db43884128cdb3cc8524915e0023c/grpcio-1.71.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:dc1a1231ed23caac1de9f943d031f1bc38d0f69d2a3b243ea0d664fc1fbd7fec", size = 5654280 }, { url = "https://files.pythonhosted.org/packages/16/af/21f22ea3eed3d0538b6ef7889fce1878a8ba4164497f9e07385733391e2b/grpcio-1.71.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e6beeea5566092c5e3c4896c6d1d307fb46b1d4bdf3e70c8340b190a69198594", size = 6312051 }, { url = "https://files.pythonhosted.org/packages/49/9d/e12ddc726dc8bd1aa6cba67c85ce42a12ba5b9dd75d5042214a59ccf28ce/grpcio-1.71.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5170929109450a2c031cfe87d6716f2fae39695ad5335d9106ae88cc32dc84c", size = 5910666 }, { url = "https://files.pythonhosted.org/packages/d9/e9/38713d6d67aedef738b815763c25f092e0454dc58e77b1d2a51c9d5b3325/grpcio-1.71.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:5b08d03ace7aca7b2fadd4baf291139b4a5f058805a8327bfe9aece7253b6d67", size = 6012019 }, { url = "https://files.pythonhosted.org/packages/80/da/4813cd7adbae6467724fa46c952d7aeac5e82e550b1c62ed2aeb78d444ae/grpcio-1.71.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:f903017db76bf9cc2b2d8bdd37bf04b505bbccad6be8a81e1542206875d0e9db", size = 6637043 }, { url = "https://files.pythonhosted.org/packages/52/ca/c0d767082e39dccb7985c73ab4cf1d23ce8613387149e9978c70c3bf3b07/grpcio-1.71.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:469f42a0b410883185eab4689060a20488a1a0a00f8bbb3cbc1061197b4c5a79", size = 6186143 }, { url = "https://files.pythonhosted.org/packages/00/61/7b2c8ec13303f8fe36832c13d91ad4d4ba57204b1c723ada709c346b2271/grpcio-1.71.0-cp312-cp312-win32.whl", hash = "sha256:ad9f30838550695b5eb302add33f21f7301b882937460dd24f24b3cc5a95067a", size = 3604083 }, { url = 
"https://files.pythonhosted.org/packages/fd/7c/1e429c5fb26122055d10ff9a1d754790fb067d83c633ff69eddcf8e3614b/grpcio-1.71.0-cp312-cp312-win_amd64.whl", hash = "sha256:652350609332de6dac4ece254e5d7e1ff834e203d6afb769601f286886f6f3a8", size = 4272191 }, { url = "https://files.pythonhosted.org/packages/04/dd/b00cbb45400d06b26126dcfdbdb34bb6c4f28c3ebbd7aea8228679103ef6/grpcio-1.71.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:cebc1b34ba40a312ab480ccdb396ff3c529377a2fce72c45a741f7215bfe8379", size = 5184138 }, { url = "https://files.pythonhosted.org/packages/ed/0a/4651215983d590ef53aac40ba0e29dda941a02b097892c44fa3357e706e5/grpcio-1.71.0-cp313-cp313-macosx_10_14_universal2.whl", hash = "sha256:85da336e3649a3d2171e82f696b5cad2c6231fdd5bad52616476235681bee5b3", size = 11310747 }, { url = "https://files.pythonhosted.org/packages/57/a3/149615b247f321e13f60aa512d3509d4215173bdb982c9098d78484de216/grpcio-1.71.0-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:f9a412f55bb6e8f3bb000e020dbc1e709627dcb3a56f6431fa7076b4c1aab0db", size = 5653991 }, { url = "https://files.pythonhosted.org/packages/ca/56/29432a3e8d951b5e4e520a40cd93bebaa824a14033ea8e65b0ece1da6167/grpcio-1.71.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:47be9584729534660416f6d2a3108aaeac1122f6b5bdbf9fd823e11fe6fbaa29", size = 6312781 }, { url = "https://files.pythonhosted.org/packages/a3/f8/286e81a62964ceb6ac10b10925261d4871a762d2a763fbf354115f9afc98/grpcio-1.71.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c9c80ac6091c916db81131d50926a93ab162a7e97e4428ffc186b6e80d6dda4", size = 5910479 }, { url = "https://files.pythonhosted.org/packages/35/67/d1febb49ec0f599b9e6d4d0d44c2d4afdbed9c3e80deb7587ec788fcf252/grpcio-1.71.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:789d5e2a3a15419374b7b45cd680b1e83bbc1e52b9086e49308e2c0b5bbae6e3", size = 6013262 }, { url = 
"https://files.pythonhosted.org/packages/a1/04/f9ceda11755f0104a075ad7163fc0d96e2e3a9fe25ef38adfc74c5790daf/grpcio-1.71.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:1be857615e26a86d7363e8a163fade914595c81fec962b3d514a4b1e8760467b", size = 6643356 }, { url = "https://files.pythonhosted.org/packages/fb/ce/236dbc3dc77cf9a9242adcf1f62538734ad64727fabf39e1346ad4bd5c75/grpcio-1.71.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:a76d39b5fafd79ed604c4be0a869ec3581a172a707e2a8d7a4858cb05a5a7637", size = 6186564 }, { url = "https://files.pythonhosted.org/packages/10/fd/b3348fce9dd4280e221f513dd54024e765b21c348bc475516672da4218e9/grpcio-1.71.0-cp313-cp313-win32.whl", hash = "sha256:74258dce215cb1995083daa17b379a1a5a87d275387b7ffe137f1d5131e2cfbb", size = 3601890 }, { url = "https://files.pythonhosted.org/packages/be/f8/db5d5f3fc7e296166286c2a397836b8b042f7ad1e11028d82b061701f0f7/grpcio-1.71.0-cp313-cp313-win_amd64.whl", hash = "sha256:22c3bc8d488c039a199f7a003a38cb7635db6656fa96437a8accde8322ce2366", size = 4273308 }, { url = "https://files.pythonhosted.org/packages/c8/e3/22cb31bbb42de95b35b8f0fb691d8da6e0579e658bb37b86efe2999c702b/grpcio-1.71.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:c6a0a28450c16809f94e0b5bfe52cabff63e7e4b97b44123ebf77f448534d07d", size = 5210667 }, { url = "https://files.pythonhosted.org/packages/f6/5e/4970fb231e57aad8f41682292343551f58fec5c7a07e261294def3cb8bb6/grpcio-1.71.0-cp39-cp39-macosx_10_14_universal2.whl", hash = "sha256:a371e6b6a5379d3692cc4ea1cb92754d2a47bdddeee755d3203d1f84ae08e03e", size = 11336193 }, { url = "https://files.pythonhosted.org/packages/7f/a4/dd71a5540d5e86526b39c23060b7d3195f3144af3fe291947b30c3fcbdad/grpcio-1.71.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:39983a9245d37394fd59de71e88c4b295eb510a3555e0a847d9965088cdbd033", size = 5699572 }, { url = 
"https://files.pythonhosted.org/packages/d0/69/3e3522d7c2c525a60f4bbf811891925ac7594b768b1ac8e6c9d955a72c45/grpcio-1.71.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9182e0063112e55e74ee7584769ec5a0b4f18252c35787f48738627e23a62b97", size = 6339648 }, { url = "https://files.pythonhosted.org/packages/32/f2/9d864ca8f3949bf507db9c6a18532c150fc03910dd3d3e17fd4bc5d3e462/grpcio-1.71.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:693bc706c031aeb848849b9d1c6b63ae6bcc64057984bb91a542332b75aa4c3d", size = 5943469 }, { url = "https://files.pythonhosted.org/packages/9b/58/aec6ce541b7fb2a9efa15d968db5897c2700bd2da6fb159c1d27515f120c/grpcio-1.71.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:20e8f653abd5ec606be69540f57289274c9ca503ed38388481e98fa396ed0b41", size = 6030255 }, { url = "https://files.pythonhosted.org/packages/f7/4f/7356b7edd1f622d49e72faaea75a5d6ac7bdde8f4c14dd19bcfbafd56f4c/grpcio-1.71.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8700a2a57771cc43ea295296330daaddc0d93c088f0a35cc969292b6db959bf3", size = 6651120 }, { url = "https://files.pythonhosted.org/packages/54/10/c1bb13137dc8d1637e2373a85904aa57991e65ef429791bfb8a64a60d5bd/grpcio-1.71.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d35a95f05a8a2cbe8e02be137740138b3b2ea5f80bd004444e4f9a1ffc511e32", size = 6197989 }, { url = "https://files.pythonhosted.org/packages/0e/dc/0fd537831501df786bc2f9ec5ac1724528a344cd146f6335f7991763eb2b/grpcio-1.71.0-cp39-cp39-win32.whl", hash = "sha256:f9c30c464cb2ddfbc2ddf9400287701270fdc0f14be5f08a1e3939f1e749b455", size = 3620173 }, { url = "https://files.pythonhosted.org/packages/97/22/b1535291aaa9c046c79a9dc4db125f6b9974d41de154221b72da4e8a005c/grpcio-1.71.0-cp39-cp39-win_amd64.whl", hash = "sha256:63e41b91032f298b3e973b3fa4093cbbc620c875e2da7b93e249d4728b54559a", size = 4280941 }, ] [[package]] name = "grpcio-status" version = "1.71.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { 
name = "googleapis-common-protos" }, { name = "grpcio" }, { name = "protobuf" }, ] sdist = { url = "https://files.pythonhosted.org/packages/d7/53/a911467bece076020456401f55a27415d2d70d3bc2c37af06b44ea41fc5c/grpcio_status-1.71.0.tar.gz", hash = "sha256:11405fed67b68f406b3f3c7c5ae5104a79d2d309666d10d61b152e91d28fb968", size = 13669 } wheels = [ { url = "https://files.pythonhosted.org/packages/ad/d6/31fbc43ff097d8c4c9fc3df741431b8018f67bf8dfbe6553a555f6e5f675/grpcio_status-1.71.0-py3-none-any.whl", hash = "sha256:843934ef8c09e3e858952887467f8256aac3910c55f077a359a65b2b3cde3e68", size = 14424 }, ] [[package]] name = "h11" version = "0.14.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/f5/38/3af3d3633a34a3316095b39c8e8fb4853a28a536e55d347bd8d8e9a14b03/h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d", size = 100418 } wheels = [ { url = "https://files.pythonhosted.org/packages/95/04/ff642e65ad6b90db43e668d70ffb6736436c7ce41fcc549f4e9472234127/h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761", size = 58259 }, ] [[package]] name = "html5lib" version = "1.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "six" }, { name = "webencodings" }, ] sdist = { url = "https://files.pythonhosted.org/packages/ac/b6/b55c3f49042f1df3dcd422b7f224f939892ee94f22abcf503a9b7339eaf2/html5lib-1.1.tar.gz", hash = "sha256:b2e5b40261e20f354d198eae92afc10d750afb487ed5e50f9c4eaf07c184146f", size = 272215 } wheels = [ { url = "https://files.pythonhosted.org/packages/6c/dd/a834df6482147d48e225a49515aabc28974ad5a4ca3215c18a882565b028/html5lib-1.1-py2.py3-none-any.whl", hash = "sha256:0d78f8fde1c230e99fe37986a60526d7049ed4bf8a9fadbad5f00e22e58e041d", size = 112173 }, ] [[package]] name = "html5tagger" version = "1.3.0" source = { registry = "https://pypi.org/simple" } sdist = { url = 
"https://files.pythonhosted.org/packages/9e/02/2ae5f46d517a2c1d4a17f2b1e4834c2c7cc0fb3a69c92389172fa16ab389/html5tagger-1.3.0.tar.gz", hash = "sha256:84fa3dfb49e5c83b79bbd856ab7b1de8e2311c3bb46a8be925f119e3880a8da9", size = 14196 } wheels = [ { url = "https://files.pythonhosted.org/packages/9b/12/2f5d43ee912ea14a6baba4b3db6d309b02d932e3b7074c3339b4aded98ff/html5tagger-1.3.0-py3-none-any.whl", hash = "sha256:ce14313515edffec8ed8a36c5890d023922641171b4e6e5774ad1a74998f5351", size = 10956 }, ] [[package]] name = "httpcore" version = "1.0.7" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "certifi" }, { name = "h11" }, ] sdist = { url = "https://files.pythonhosted.org/packages/6a/41/d7d0a89eb493922c37d343b607bc1b5da7f5be7e383740b4753ad8943e90/httpcore-1.0.7.tar.gz", hash = "sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c", size = 85196 } wheels = [ { url = "https://files.pythonhosted.org/packages/87/f5/72347bc88306acb359581ac4d52f23c0ef445b57157adedb9aee0cd689d2/httpcore-1.0.7-py3-none-any.whl", hash = "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd", size = 78551 }, ] [[package]] name = "httptools" version = "0.6.4" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/a7/9a/ce5e1f7e131522e6d3426e8e7a490b3a01f39a6696602e1c4f33f9e94277/httptools-0.6.4.tar.gz", hash = "sha256:4e93eee4add6493b59a5c514da98c939b244fce4a0d8879cd3f466562f4b7d5c", size = 240639 } wheels = [ { url = "https://files.pythonhosted.org/packages/3b/6f/972f8eb0ea7d98a1c6be436e2142d51ad2a64ee18e02b0e7ff1f62171ab1/httptools-0.6.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3c73ce323711a6ffb0d247dcd5a550b8babf0f757e86a52558fe5b86d6fefcc0", size = 198780 }, { url = "https://files.pythonhosted.org/packages/6a/b0/17c672b4bc5c7ba7f201eada4e96c71d0a59fbc185e60e42580093a86f21/httptools-0.6.4-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:345c288418f0944a6fe67be8e6afa9262b18c7626c3ef3c28adc5eabc06a68da", size = 103297 }, { url = "https://files.pythonhosted.org/packages/92/5e/b4a826fe91971a0b68e8c2bd4e7db3e7519882f5a8ccdb1194be2b3ab98f/httptools-0.6.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:deee0e3343f98ee8047e9f4c5bc7cedbf69f5734454a94c38ee829fb2d5fa3c1", size = 443130 }, { url = "https://files.pythonhosted.org/packages/b0/51/ce61e531e40289a681a463e1258fa1e05e0be54540e40d91d065a264cd8f/httptools-0.6.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca80b7485c76f768a3bc83ea58373f8db7b015551117375e4918e2aa77ea9b50", size = 442148 }, { url = "https://files.pythonhosted.org/packages/ea/9e/270b7d767849b0c96f275c695d27ca76c30671f8eb8cc1bab6ced5c5e1d0/httptools-0.6.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:90d96a385fa941283ebd231464045187a31ad932ebfa541be8edf5b3c2328959", size = 415949 }, { url = "https://files.pythonhosted.org/packages/81/86/ced96e3179c48c6f656354e106934e65c8963d48b69be78f355797f0e1b3/httptools-0.6.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:59e724f8b332319e2875efd360e61ac07f33b492889284a3e05e6d13746876f4", size = 417591 }, { url = "https://files.pythonhosted.org/packages/75/73/187a3f620ed3175364ddb56847d7a608a6fc42d551e133197098c0143eca/httptools-0.6.4-cp310-cp310-win_amd64.whl", hash = "sha256:c26f313951f6e26147833fc923f78f95604bbec812a43e5ee37f26dc9e5a686c", size = 88344 }, { url = "https://files.pythonhosted.org/packages/7b/26/bb526d4d14c2774fe07113ca1db7255737ffbb119315839af2065abfdac3/httptools-0.6.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f47f8ed67cc0ff862b84a1189831d1d33c963fb3ce1ee0c65d3b0cbe7b711069", size = 199029 }, { url = "https://files.pythonhosted.org/packages/a6/17/3e0d3e9b901c732987a45f4f94d4e2c62b89a041d93db89eafb262afd8d5/httptools-0.6.4-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:0614154d5454c21b6410fdf5262b4a3ddb0f53f1e1721cfd59d55f32138c578a", size = 103492 }, { url = "https://files.pythonhosted.org/packages/b7/24/0fe235d7b69c42423c7698d086d4db96475f9b50b6ad26a718ef27a0bce6/httptools-0.6.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8787367fbdfccae38e35abf7641dafc5310310a5987b689f4c32cc8cc3ee975", size = 462891 }, { url = "https://files.pythonhosted.org/packages/b1/2f/205d1f2a190b72da6ffb5f41a3736c26d6fa7871101212b15e9b5cd8f61d/httptools-0.6.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40b0f7fe4fd38e6a507bdb751db0379df1e99120c65fbdc8ee6c1d044897a636", size = 459788 }, { url = "https://files.pythonhosted.org/packages/6e/4c/d09ce0eff09057a206a74575ae8f1e1e2f0364d20e2442224f9e6612c8b9/httptools-0.6.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:40a5ec98d3f49904b9fe36827dcf1aadfef3b89e2bd05b0e35e94f97c2b14721", size = 433214 }, { url = "https://files.pythonhosted.org/packages/3e/d2/84c9e23edbccc4a4c6f96a1b8d99dfd2350289e94f00e9ccc7aadde26fb5/httptools-0.6.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:dacdd3d10ea1b4ca9df97a0a303cbacafc04b5cd375fa98732678151643d4988", size = 434120 }, { url = "https://files.pythonhosted.org/packages/d0/46/4d8e7ba9581416de1c425b8264e2cadd201eb709ec1584c381f3e98f51c1/httptools-0.6.4-cp311-cp311-win_amd64.whl", hash = "sha256:288cd628406cc53f9a541cfaf06041b4c71d751856bab45e3702191f931ccd17", size = 88565 }, { url = "https://files.pythonhosted.org/packages/bb/0e/d0b71465c66b9185f90a091ab36389a7352985fe857e352801c39d6127c8/httptools-0.6.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:df017d6c780287d5c80601dafa31f17bddb170232d85c066604d8558683711a2", size = 200683 }, { url = "https://files.pythonhosted.org/packages/e2/b8/412a9bb28d0a8988de3296e01efa0bd62068b33856cdda47fe1b5e890954/httptools-0.6.4-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:85071a1e8c2d051b507161f6c3e26155b5c790e4e28d7f236422dbacc2a9cc44", size = 104337 }, { url = "https://files.pythonhosted.org/packages/9b/01/6fb20be3196ffdc8eeec4e653bc2a275eca7f36634c86302242c4fbb2760/httptools-0.6.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69422b7f458c5af875922cdb5bd586cc1f1033295aa9ff63ee196a87519ac8e1", size = 508796 }, { url = "https://files.pythonhosted.org/packages/f7/d8/b644c44acc1368938317d76ac991c9bba1166311880bcc0ac297cb9d6bd7/httptools-0.6.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16e603a3bff50db08cd578d54f07032ca1631450ceb972c2f834c2b860c28ea2", size = 510837 }, { url = "https://files.pythonhosted.org/packages/52/d8/254d16a31d543073a0e57f1c329ca7378d8924e7e292eda72d0064987486/httptools-0.6.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ec4f178901fa1834d4a060320d2f3abc5c9e39766953d038f1458cb885f47e81", size = 485289 }, { url = "https://files.pythonhosted.org/packages/5f/3c/4aee161b4b7a971660b8be71a92c24d6c64372c1ab3ae7f366b3680df20f/httptools-0.6.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f9eb89ecf8b290f2e293325c646a211ff1c2493222798bb80a530c5e7502494f", size = 489779 }, { url = "https://files.pythonhosted.org/packages/12/b7/5cae71a8868e555f3f67a50ee7f673ce36eac970f029c0c5e9d584352961/httptools-0.6.4-cp312-cp312-win_amd64.whl", hash = "sha256:db78cb9ca56b59b016e64b6031eda5653be0589dba2b1b43453f6e8b405a0970", size = 88634 }, { url = "https://files.pythonhosted.org/packages/94/a3/9fe9ad23fd35f7de6b91eeb60848986058bd8b5a5c1e256f5860a160cc3e/httptools-0.6.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ade273d7e767d5fae13fa637f4d53b6e961fb7fd93c7797562663f0171c26660", size = 197214 }, { url = "https://files.pythonhosted.org/packages/ea/d9/82d5e68bab783b632023f2fa31db20bebb4e89dfc4d2293945fd68484ee4/httptools-0.6.4-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:856f4bc0478ae143bad54a4242fccb1f3f86a6e1be5548fecfd4102061b3a083", size = 102431 }, { url = "https://files.pythonhosted.org/packages/96/c1/cb499655cbdbfb57b577734fde02f6fa0bbc3fe9fb4d87b742b512908dff/httptools-0.6.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:322d20ea9cdd1fa98bd6a74b77e2ec5b818abdc3d36695ab402a0de8ef2865a3", size = 473121 }, { url = "https://files.pythonhosted.org/packages/af/71/ee32fd358f8a3bb199b03261f10921716990808a675d8160b5383487a317/httptools-0.6.4-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d87b29bd4486c0093fc64dea80231f7c7f7eb4dc70ae394d70a495ab8436071", size = 473805 }, { url = "https://files.pythonhosted.org/packages/8a/0a/0d4df132bfca1507114198b766f1737d57580c9ad1cf93c1ff673e3387be/httptools-0.6.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:342dd6946aa6bda4b8f18c734576106b8a31f2fe31492881a9a160ec84ff4bd5", size = 448858 }, { url = "https://files.pythonhosted.org/packages/1e/6a/787004fdef2cabea27bad1073bf6a33f2437b4dbd3b6fb4a9d71172b1c7c/httptools-0.6.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4b36913ba52008249223042dca46e69967985fb4051951f94357ea681e1f5dc0", size = 452042 }, { url = "https://files.pythonhosted.org/packages/4d/dc/7decab5c404d1d2cdc1bb330b1bf70e83d6af0396fd4fc76fc60c0d522bf/httptools-0.6.4-cp313-cp313-win_amd64.whl", hash = "sha256:28908df1b9bb8187393d5b5db91435ccc9c8e891657f9cbb42a2541b44c82fc8", size = 87682 }, { url = "https://files.pythonhosted.org/packages/51/b1/4fc6f52afdf93b7c4304e21f6add9e981e4f857c2fa622a55dfe21b6059e/httptools-0.6.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:85797e37e8eeaa5439d33e556662cc370e474445d5fab24dcadc65a8ffb04003", size = 201123 }, { url = "https://files.pythonhosted.org/packages/c2/01/e6ecb40ac8fdfb76607c7d3b74a41b464458d5c8710534d8f163b0c15f29/httptools-0.6.4-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:db353d22843cf1028f43c3651581e4bb49374d85692a85f95f7b9a130e1b2cab", size = 104507 }, { url = "https://files.pythonhosted.org/packages/dc/24/c70c34119d209bf08199d938dc9c69164f585ed3029237b4bdb90f673cb9/httptools-0.6.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1ffd262a73d7c28424252381a5b854c19d9de5f56f075445d33919a637e3547", size = 449615 }, { url = "https://files.pythonhosted.org/packages/2b/62/e7f317fed3703bd81053840cacba4e40bcf424b870e4197f94bd1cf9fe7a/httptools-0.6.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:703c346571fa50d2e9856a37d7cd9435a25e7fd15e236c397bf224afaa355fe9", size = 448819 }, { url = "https://files.pythonhosted.org/packages/2a/13/68337d3be6b023260139434c49d7aa466aaa98f9aee7ed29270ac7dde6a2/httptools-0.6.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:aafe0f1918ed07b67c1e838f950b1c1fabc683030477e60b335649b8020e1076", size = 422093 }, { url = "https://files.pythonhosted.org/packages/fc/b3/3a1bc45be03dda7a60c7858e55b6cd0489a81613c1908fb81cf21d34ae50/httptools-0.6.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:0e563e54979e97b6d13f1bbc05a96109923e76b901f786a5eae36e99c01237bd", size = 423898 }, { url = "https://files.pythonhosted.org/packages/05/72/2ddc2ae5f7ace986f7e68a326215b2e7c32e32fd40e6428fa8f1d8065c7e/httptools-0.6.4-cp39-cp39-win_amd64.whl", hash = "sha256:b799de31416ecc589ad79dd85a0b2657a8fe39327944998dea368c1d4c9e55e6", size = 89552 }, ] [[package]] name = "httpx" version = "0.28.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, { name = "certifi" }, { name = "httpcore" }, { name = "idna" }, ] sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406 } wheels = [ { url = 
"https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517 }, ] [[package]] name = "identify" version = "2.6.9" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/9b/98/a71ab060daec766acc30fb47dfca219d03de34a70d616a79a38c6066c5bf/identify-2.6.9.tar.gz", hash = "sha256:d40dfe3142a1421d8518e3d3985ef5ac42890683e32306ad614a29490abeb6bf", size = 99249 } wheels = [ { url = "https://files.pythonhosted.org/packages/07/ce/0845144ed1f0e25db5e7a79c2354c1da4b5ce392b8966449d5db8dca18f1/identify-2.6.9-py2.py3-none-any.whl", hash = "sha256:c98b4322da415a8e5a70ff6e51fbc2d2932c015532d77e9f8537b4ba7813b150", size = 99101 }, ] [[package]] name = "idna" version = "3.10" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490 } wheels = [ { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442 }, ] [[package]] name = "imagesize" version = "1.4.1" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/a7/84/62473fb57d61e31fef6e36d64a179c8781605429fd927b5dd608c997be31/imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a", size = 1280026 } wheels = [ { url = "https://files.pythonhosted.org/packages/ff/62/85c4c919272577931d407be5ba5d71c20f0b616d31a0befe0ae45bb79abd/imagesize-1.4.1-py2.py3-none-any.whl", hash = 
"sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b", size = 8769 }, ] [[package]] name = "importlib-metadata" version = "8.6.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "zipp", marker = "python_full_version < '3.10'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/33/08/c1395a292bb23fd03bdf572a1357c5a733d3eecbab877641ceacab23db6e/importlib_metadata-8.6.1.tar.gz", hash = "sha256:310b41d755445d74569f993ccfc22838295d9fe005425094fad953d7f15c8580", size = 55767 } wheels = [ { url = "https://files.pythonhosted.org/packages/79/9d/0fb148dc4d6fa4a7dd1d8378168d9b4cd8d4560a6fbf6f0121c5fc34eb68/importlib_metadata-8.6.1-py3-none-any.whl", hash = "sha256:02a89390c1e15fdfdc0d7c6b25cb3e62650d0494005c97d6f148bf5b9787525e", size = 26971 }, ] [[package]] name = "iniconfig" version = "2.1.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/f2/97/ebf4da567aa6827c909642694d71c9fcf53e5b504f2d96afea02718862f3/iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7", size = 4793 } wheels = [ { url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050 }, ] [[package]] name = "itsdangerous" version = "2.2.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/9c/cb/8ac0172223afbccb63986cc25049b154ecfb5e85932587206f42317be31d/itsdangerous-2.2.0.tar.gz", hash = "sha256:e0050c0b7da1eea53ffaf149c0cfbb5c6e2e2b69c4bef22c81fa6eb73e5f6173", size = 54410 } wheels = [ { url = "https://files.pythonhosted.org/packages/04/96/92447566d16df59b2a776c0fb82dbc4d9e07cd95062562af01e408583fc4/itsdangerous-2.2.0-py3-none-any.whl", hash = 
"sha256:c6242fc49e35958c8b15141343aa660db5fc54d4f13a1db01a3f5891b98700ef", size = 16234 }, ] [[package]] name = "jinja2" version = "3.1.6" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "markupsafe" }, ] sdist = { url = "https://files.pythonhosted.org/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d", size = 245115 } wheels = [ { url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899 }, ] [[package]] name = "jsbeautifier" version = "1.15.4" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "editorconfig" }, { name = "six" }, ] sdist = { url = "https://files.pythonhosted.org/packages/ea/98/d6cadf4d5a1c03b2136837a435682418c29fdeb66be137128544cecc5b7a/jsbeautifier-1.15.4.tar.gz", hash = "sha256:5bb18d9efb9331d825735fbc5360ee8f1aac5e52780042803943aa7f854f7592", size = 75257 } wheels = [ { url = "https://files.pythonhosted.org/packages/2d/14/1c65fccf8413d5f5c6e8425f84675169654395098000d8bddc4e9d3390e1/jsbeautifier-1.15.4-py3-none-any.whl", hash = "sha256:72f65de312a3f10900d7685557f84cb61a9733c50dcc27271a39f5b0051bf528", size = 94707 }, ] [[package]] name = "litestar" version = "2.15.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, { name = "click" }, { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, { name = "httpx" }, { name = "importlib-metadata", marker = "python_full_version < '3.10'" }, { name = "litestar-htmx" }, { name = "msgspec" }, { name = "multidict" }, { name = "multipart" }, { name = "polyfactory" }, { name = "pyyaml" }, { name = "rich" }, { name = "rich-click" }, { name = "typing-extensions" }, ] sdist = { url = 
"https://files.pythonhosted.org/packages/9b/7c/099962c10b6f96d8ee7530b12eac48b162a1abbf75ac1388e07f0be306bf/litestar-2.15.1.tar.gz", hash = "sha256:9458ba9c3397c0bc566e649baa5c461145f0c24f4c54451a64ad8adce57cf9de", size = 397383 } wheels = [ { url = "https://files.pythonhosted.org/packages/72/bd/d52d441222b9b7d9efc36c0f119e62483ef0e55a6b0c5aba546ed743cb24/litestar-2.15.1-py3-none-any.whl", hash = "sha256:3791437e31691eadf8079f70180f3186c1db245e093ad3ff21f5cdbfc7e9df3e", size = 571006 }, ] [package.optional-dependencies] cli = [ { name = "jsbeautifier" }, { name = "uvicorn", extra = ["standard"] }, { name = "uvloop", marker = "sys_platform != 'win32'" }, ] [[package]] name = "litestar-htmx" version = "0.4.1" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/c9/0c/06ab03ee497d207dd8cb7588d1940be0b373a8ffdc7be3ec6d7e91c17ae2/litestar_htmx-0.4.1.tar.gz", hash = "sha256:ba2537008eb8cc18bfc8bee5cecb280924c7818bb1c066d79eae4b221696ca08", size = 101877 } wheels = [ { url = "https://files.pythonhosted.org/packages/9d/99/3ea64a79a2f4fea5225ccd0128201a3b8eab5e216b8fba8b778b8c462f29/litestar_htmx-0.4.1-py3-none-any.whl", hash = "sha256:ba2a8ff1e210f21980735b9cde13d239a2b7c3627cb4aeb425d66f4a314d1a59", size = 9970 }, ] [[package]] name = "mako" version = "1.3.9" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "markupsafe" }, ] sdist = { url = "https://files.pythonhosted.org/packages/62/4f/ddb1965901bc388958db9f0c991255b2c469349a741ae8c9cd8a562d70a6/mako-1.3.9.tar.gz", hash = "sha256:b5d65ff3462870feec922dbccf38f6efb44e5714d7b593a656be86663d8600ac", size = 392195 } wheels = [ { url = "https://files.pythonhosted.org/packages/cd/83/de0a49e7de540513f53ab5d2e105321dedeb08a8f5850f0208decf4390ec/Mako-1.3.9-py3-none-any.whl", hash = "sha256:95920acccb578427a9aa38e37a186b1e43156c87260d7ba18ca63aa4c7cbd3a1", size = 78456 }, ] [[package]] name = "markdown-it-py" version = "3.0.0" source = { registry = 
"https://pypi.org/simple" } dependencies = [ { name = "mdurl" }, ] sdist = { url = "https://files.pythonhosted.org/packages/38/71/3b932df36c1a044d397a1f92d1cf91ee0a503d91e470cbd670aa66b07ed0/markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb", size = 74596 } wheels = [ { url = "https://files.pythonhosted.org/packages/42/d7/1ec15b46af6af88f19b8e5ffea08fa375d433c998b8a7639e76935c14f1f/markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1", size = 87528 }, ] [[package]] name = "markupsafe" version = "3.0.2" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/b2/97/5d42485e71dfc078108a86d6de8fa46db44a1a9295e89c5d6d4a06e23a62/markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0", size = 20537 } wheels = [ { url = "https://files.pythonhosted.org/packages/04/90/d08277ce111dd22f77149fd1a5d4653eeb3b3eaacbdfcbae5afb2600eebd/MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8", size = 14357 }, { url = "https://files.pythonhosted.org/packages/04/e1/6e2194baeae0bca1fae6629dc0cbbb968d4d941469cbab11a3872edff374/MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158", size = 12393 }, { url = "https://files.pythonhosted.org/packages/1d/69/35fa85a8ece0a437493dc61ce0bb6d459dcba482c34197e3efc829aa357f/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579", size = 21732 }, { url = "https://files.pythonhosted.org/packages/22/35/137da042dfb4720b638d2937c38a9c2df83fe32d20e8c8f3185dbfef05f7/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d", size = 20866 }, { url = "https://files.pythonhosted.org/packages/29/28/6d029a903727a1b62edb51863232152fd335d602def598dade38996887f0/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb", size = 20964 }, { url = "https://files.pythonhosted.org/packages/cc/cd/07438f95f83e8bc028279909d9c9bd39e24149b0d60053a97b2bc4f8aa51/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b", size = 21977 }, { url = "https://files.pythonhosted.org/packages/29/01/84b57395b4cc062f9c4c55ce0df7d3108ca32397299d9df00fedd9117d3d/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c", size = 21366 }, { url = "https://files.pythonhosted.org/packages/bd/6e/61ebf08d8940553afff20d1fb1ba7294b6f8d279df9fd0c0db911b4bbcfd/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171", size = 21091 }, { url = "https://files.pythonhosted.org/packages/11/23/ffbf53694e8c94ebd1e7e491de185124277964344733c45481f32ede2499/MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50", size = 15065 }, { url = "https://files.pythonhosted.org/packages/44/06/e7175d06dd6e9172d4a69a72592cb3f7a996a9c396eee29082826449bbc3/MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a", size = 15514 }, { url = "https://files.pythonhosted.org/packages/6b/28/bbf83e3f76936960b850435576dd5e67034e200469571be53f69174a2dfd/MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d", size = 
14353 }, { url = "https://files.pythonhosted.org/packages/6c/30/316d194b093cde57d448a4c3209f22e3046c5bb2fb0820b118292b334be7/MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93", size = 12392 }, { url = "https://files.pythonhosted.org/packages/f2/96/9cdafba8445d3a53cae530aaf83c38ec64c4d5427d975c974084af5bc5d2/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832", size = 23984 }, { url = "https://files.pythonhosted.org/packages/f1/a4/aefb044a2cd8d7334c8a47d3fb2c9f328ac48cb349468cc31c20b539305f/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84", size = 23120 }, { url = "https://files.pythonhosted.org/packages/8d/21/5e4851379f88f3fad1de30361db501300d4f07bcad047d3cb0449fc51f8c/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca", size = 23032 }, { url = "https://files.pythonhosted.org/packages/00/7b/e92c64e079b2d0d7ddf69899c98842f3f9a60a1ae72657c89ce2655c999d/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798", size = 24057 }, { url = "https://files.pythonhosted.org/packages/f9/ac/46f960ca323037caa0a10662ef97d0a4728e890334fc156b9f9e52bcc4ca/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e", size = 23359 }, { url = "https://files.pythonhosted.org/packages/69/84/83439e16197337b8b14b6a5b9c2105fff81d42c2a7c5b58ac7b62ee2c3b1/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4", size = 23306 }, { url 
= "https://files.pythonhosted.org/packages/9a/34/a15aa69f01e2181ed8d2b685c0d2f6655d5cca2c4db0ddea775e631918cd/MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d", size = 15094 }, { url = "https://files.pythonhosted.org/packages/da/b8/3a3bd761922d416f3dc5d00bfbed11f66b1ab89a0c2b6e887240a30b0f6b/MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b", size = 15521 }, { url = "https://files.pythonhosted.org/packages/22/09/d1f21434c97fc42f09d290cbb6350d44eb12f09cc62c9476effdb33a18aa/MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf", size = 14274 }, { url = "https://files.pythonhosted.org/packages/6b/b0/18f76bba336fa5aecf79d45dcd6c806c280ec44538b3c13671d49099fdd0/MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225", size = 12348 }, { url = "https://files.pythonhosted.org/packages/e0/25/dd5c0f6ac1311e9b40f4af06c78efde0f3b5cbf02502f8ef9501294c425b/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028", size = 24149 }, { url = "https://files.pythonhosted.org/packages/f3/f0/89e7aadfb3749d0f52234a0c8c7867877876e0a20b60e2188e9850794c17/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8", size = 23118 }, { url = "https://files.pythonhosted.org/packages/d5/da/f2eeb64c723f5e3777bc081da884b414671982008c47dcc1873d81f625b6/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c", size = 22993 }, { url = 
"https://files.pythonhosted.org/packages/da/0e/1f32af846df486dce7c227fe0f2398dc7e2e51d4a370508281f3c1c5cddc/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557", size = 24178 }, { url = "https://files.pythonhosted.org/packages/c4/f6/bb3ca0532de8086cbff5f06d137064c8410d10779c4c127e0e47d17c0b71/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22", size = 23319 }, { url = "https://files.pythonhosted.org/packages/a2/82/8be4c96ffee03c5b4a034e60a31294daf481e12c7c43ab8e34a1453ee48b/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48", size = 23352 }, { url = "https://files.pythonhosted.org/packages/51/ae/97827349d3fcffee7e184bdf7f41cd6b88d9919c80f0263ba7acd1bbcb18/MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30", size = 15097 }, { url = "https://files.pythonhosted.org/packages/c1/80/a61f99dc3a936413c3ee4e1eecac96c0da5ed07ad56fd975f1a9da5bc630/MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87", size = 15601 }, { url = "https://files.pythonhosted.org/packages/83/0e/67eb10a7ecc77a0c2bbe2b0235765b98d164d81600746914bebada795e97/MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd", size = 14274 }, { url = "https://files.pythonhosted.org/packages/2b/6d/9409f3684d3335375d04e5f05744dfe7e9f120062c9857df4ab490a1031a/MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430", size = 12352 }, { url = 
"https://files.pythonhosted.org/packages/d2/f5/6eadfcd3885ea85fe2a7c128315cc1bb7241e1987443d78c8fe712d03091/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094", size = 24122 }, { url = "https://files.pythonhosted.org/packages/0c/91/96cf928db8236f1bfab6ce15ad070dfdd02ed88261c2afafd4b43575e9e9/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396", size = 23085 }, { url = "https://files.pythonhosted.org/packages/c2/cf/c9d56af24d56ea04daae7ac0940232d31d5a8354f2b457c6d856b2057d69/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79", size = 22978 }, { url = "https://files.pythonhosted.org/packages/2a/9f/8619835cd6a711d6272d62abb78c033bda638fdc54c4e7f4272cf1c0962b/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a", size = 24208 }, { url = "https://files.pythonhosted.org/packages/f9/bf/176950a1792b2cd2102b8ffeb5133e1ed984547b75db47c25a67d3359f77/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca", size = 23357 }, { url = "https://files.pythonhosted.org/packages/ce/4f/9a02c1d335caabe5c4efb90e1b6e8ee944aa245c1aaaab8e8a618987d816/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c", size = 23344 }, { url = "https://files.pythonhosted.org/packages/ee/55/c271b57db36f748f0e04a759ace9f8f759ccf22b4960c270c78a394f58be/MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1", size = 15101 }, { url = 
"https://files.pythonhosted.org/packages/29/88/07df22d2dd4df40aba9f3e402e6dc1b8ee86297dddbad4872bd5e7b0094f/MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f", size = 15603 }, { url = "https://files.pythonhosted.org/packages/62/6a/8b89d24db2d32d433dffcd6a8779159da109842434f1dd2f6e71f32f738c/MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c", size = 14510 }, { url = "https://files.pythonhosted.org/packages/7a/06/a10f955f70a2e5a9bf78d11a161029d278eeacbd35ef806c3fd17b13060d/MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb", size = 12486 }, { url = "https://files.pythonhosted.org/packages/34/cf/65d4a571869a1a9078198ca28f39fba5fbb910f952f9dbc5220afff9f5e6/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c", size = 25480 }, { url = "https://files.pythonhosted.org/packages/0c/e3/90e9651924c430b885468b56b3d597cabf6d72be4b24a0acd1fa0e12af67/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d", size = 23914 }, { url = "https://files.pythonhosted.org/packages/66/8c/6c7cf61f95d63bb866db39085150df1f2a5bd3335298f14a66b48e92659c/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe", size = 23796 }, { url = "https://files.pythonhosted.org/packages/bb/35/cbe9238ec3f47ac9a7c8b3df7a808e7cb50fe149dc7039f5f454b3fba218/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5", size = 25473 }, { url = 
"https://files.pythonhosted.org/packages/e6/32/7621a4382488aa283cc05e8984a9c219abad3bca087be9ec77e89939ded9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a", size = 24114 }, { url = "https://files.pythonhosted.org/packages/0d/80/0985960e4b89922cb5a0bac0ed39c5b96cbc1a536a99f30e8c220a996ed9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9", size = 24098 }, { url = "https://files.pythonhosted.org/packages/82/78/fedb03c7d5380df2427038ec8d973587e90561b2d90cd472ce9254cf348b/MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6", size = 15208 }, { url = "https://files.pythonhosted.org/packages/4f/65/6079a46068dfceaeabb5dcad6d674f5f5c61a6fa5673746f42a9f4c233b3/MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f", size = 15739 }, { url = "https://files.pythonhosted.org/packages/a7/ea/9b1530c3fdeeca613faeb0fb5cbcf2389d816072fab72a71b45749ef6062/MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a", size = 14344 }, { url = "https://files.pythonhosted.org/packages/4b/c2/fbdbfe48848e7112ab05e627e718e854d20192b674952d9042ebd8c9e5de/MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff", size = 12389 }, { url = "https://files.pythonhosted.org/packages/f0/25/7a7c6e4dbd4f867d95d94ca15449e91e52856f6ed1905d58ef1de5e211d0/MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13", size = 21607 }, { url = 
"https://files.pythonhosted.org/packages/53/8f/f339c98a178f3c1e545622206b40986a4c3307fe39f70ccd3d9df9a9e425/MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144", size = 20728 }, { url = "https://files.pythonhosted.org/packages/1a/03/8496a1a78308456dbd50b23a385c69b41f2e9661c67ea1329849a598a8f9/MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29", size = 20826 }, { url = "https://files.pythonhosted.org/packages/e6/cf/0a490a4bd363048c3022f2f475c8c05582179bb179defcee4766fb3dcc18/MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0", size = 21843 }, { url = "https://files.pythonhosted.org/packages/19/a3/34187a78613920dfd3cdf68ef6ce5e99c4f3417f035694074beb8848cd77/MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0", size = 21219 }, { url = "https://files.pythonhosted.org/packages/17/d8/5811082f85bb88410ad7e452263af048d685669bbbfb7b595e8689152498/MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178", size = 20946 }, { url = "https://files.pythonhosted.org/packages/7c/31/bd635fb5989440d9365c5e3c47556cfea121c7803f5034ac843e8f37c2f2/MarkupSafe-3.0.2-cp39-cp39-win32.whl", hash = "sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f", size = 15063 }, { url = "https://files.pythonhosted.org/packages/b3/73/085399401383ce949f727afec55ec3abd76648d04b9f22e1c0e99cb4bec3/MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a", size = 15506 }, ] [[package]] name = "mdit-py-plugins" version = "0.4.2" source = { registry = 
"https://pypi.org/simple" } dependencies = [ { name = "markdown-it-py" }, ] sdist = { url = "https://files.pythonhosted.org/packages/19/03/a2ecab526543b152300717cf232bb4bb8605b6edb946c845016fa9c9c9fd/mdit_py_plugins-0.4.2.tar.gz", hash = "sha256:5f2cd1fdb606ddf152d37ec30e46101a60512bc0e5fa1a7002c36647b09e26b5", size = 43542 } wheels = [ { url = "https://files.pythonhosted.org/packages/a7/f7/7782a043553ee469c1ff49cfa1cdace2d6bf99a1f333cf38676b3ddf30da/mdit_py_plugins-0.4.2-py3-none-any.whl", hash = "sha256:0c673c3f889399a33b95e88d2f0d111b4447bdfea7f237dab2d488f459835636", size = 55316 }, ] [[package]] name = "mdurl" version = "0.1.2" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729 } wheels = [ { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979 }, ] [[package]] name = "more-itertools" version = "10.6.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/88/3b/7fa1fe835e2e93fd6d7b52b2f95ae810cf5ba133e1845f726f5a992d62c2/more-itertools-10.6.0.tar.gz", hash = "sha256:2cd7fad1009c31cc9fb6a035108509e6547547a7a738374f10bd49a09eb3ee3b", size = 125009 } wheels = [ { url = "https://files.pythonhosted.org/packages/23/62/0fe302c6d1be1c777cab0616e6302478251dfbf9055ad426f5d0def75c89/more_itertools-10.6.0-py3-none-any.whl", hash = "sha256:6eb054cb4b6db1473f6e15fcc676a08e4732548acd47c708f0e179c2c7c01e89", size = 63038 }, ] [[package]] name = "msgpack" version = "1.1.0" source = { registry = "https://pypi.org/simple" } sdist = { url = 
"https://files.pythonhosted.org/packages/cb/d0/7555686ae7ff5731205df1012ede15dd9d927f6227ea151e901c7406af4f/msgpack-1.1.0.tar.gz", hash = "sha256:dd432ccc2c72b914e4cb77afce64aab761c1137cc698be3984eee260bcb2896e", size = 167260 } wheels = [ { url = "https://files.pythonhosted.org/packages/4b/f9/a892a6038c861fa849b11a2bb0502c07bc698ab6ea53359e5771397d883b/msgpack-1.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7ad442d527a7e358a469faf43fda45aaf4ac3249c8310a82f0ccff9164e5dccd", size = 150428 }, { url = "https://files.pythonhosted.org/packages/df/7a/d174cc6a3b6bb85556e6a046d3193294a92f9a8e583cdbd46dc8a1d7e7f4/msgpack-1.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:74bed8f63f8f14d75eec75cf3d04ad581da6b914001b474a5d3cd3372c8cc27d", size = 84131 }, { url = "https://files.pythonhosted.org/packages/08/52/bf4fbf72f897a23a56b822997a72c16de07d8d56d7bf273242f884055682/msgpack-1.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:914571a2a5b4e7606997e169f64ce53a8b1e06f2cf2c3a7273aa106236d43dd5", size = 81215 }, { url = "https://files.pythonhosted.org/packages/02/95/dc0044b439b518236aaf012da4677c1b8183ce388411ad1b1e63c32d8979/msgpack-1.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c921af52214dcbb75e6bdf6a661b23c3e6417f00c603dd2070bccb5c3ef499f5", size = 371229 }, { url = "https://files.pythonhosted.org/packages/ff/75/09081792db60470bef19d9c2be89f024d366b1e1973c197bb59e6aabc647/msgpack-1.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8ce0b22b890be5d252de90d0e0d119f363012027cf256185fc3d474c44b1b9e", size = 378034 }, { url = "https://files.pythonhosted.org/packages/32/d3/c152e0c55fead87dd948d4b29879b0f14feeeec92ef1fd2ec21b107c3f49/msgpack-1.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:73322a6cc57fcee3c0c57c4463d828e9428275fb85a27aa2aa1a92fdc42afd7b", size = 363070 }, { url = 
"https://files.pythonhosted.org/packages/d9/2c/82e73506dd55f9e43ac8aa007c9dd088c6f0de2aa19e8f7330e6a65879fc/msgpack-1.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e1f3c3d21f7cf67bcf2da8e494d30a75e4cf60041d98b3f79875afb5b96f3a3f", size = 359863 }, { url = "https://files.pythonhosted.org/packages/cb/a0/3d093b248837094220e1edc9ec4337de3443b1cfeeb6e0896af8ccc4cc7a/msgpack-1.1.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:64fc9068d701233effd61b19efb1485587560b66fe57b3e50d29c5d78e7fef68", size = 368166 }, { url = "https://files.pythonhosted.org/packages/e4/13/7646f14f06838b406cf5a6ddbb7e8dc78b4996d891ab3b93c33d1ccc8678/msgpack-1.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:42f754515e0f683f9c79210a5d1cad631ec3d06cea5172214d2176a42e67e19b", size = 370105 }, { url = "https://files.pythonhosted.org/packages/67/fa/dbbd2443e4578e165192dabbc6a22c0812cda2649261b1264ff515f19f15/msgpack-1.1.0-cp310-cp310-win32.whl", hash = "sha256:3df7e6b05571b3814361e8464f9304c42d2196808e0119f55d0d3e62cd5ea044", size = 68513 }, { url = "https://files.pythonhosted.org/packages/24/ce/c2c8fbf0ded750cb63cbcbb61bc1f2dfd69e16dca30a8af8ba80ec182dcd/msgpack-1.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:685ec345eefc757a7c8af44a3032734a739f8c45d1b0ac45efc5d8977aa4720f", size = 74687 }, { url = "https://files.pythonhosted.org/packages/b7/5e/a4c7154ba65d93be91f2f1e55f90e76c5f91ccadc7efc4341e6f04c8647f/msgpack-1.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3d364a55082fb2a7416f6c63ae383fbd903adb5a6cf78c5b96cc6316dc1cedc7", size = 150803 }, { url = "https://files.pythonhosted.org/packages/60/c2/687684164698f1d51c41778c838d854965dd284a4b9d3a44beba9265c931/msgpack-1.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:79ec007767b9b56860e0372085f8504db5d06bd6a327a335449508bbee9648fa", size = 84343 }, { url = 
"https://files.pythonhosted.org/packages/42/ae/d3adea9bb4a1342763556078b5765e666f8fdf242e00f3f6657380920972/msgpack-1.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6ad622bf7756d5a497d5b6836e7fc3752e2dd6f4c648e24b1803f6048596f701", size = 81408 }, { url = "https://files.pythonhosted.org/packages/dc/17/6313325a6ff40ce9c3207293aee3ba50104aed6c2c1559d20d09e5c1ff54/msgpack-1.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e59bca908d9ca0de3dc8684f21ebf9a690fe47b6be93236eb40b99af28b6ea6", size = 396096 }, { url = "https://files.pythonhosted.org/packages/a8/a1/ad7b84b91ab5a324e707f4c9761633e357820b011a01e34ce658c1dda7cc/msgpack-1.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e1da8f11a3dd397f0a32c76165cf0c4eb95b31013a94f6ecc0b280c05c91b59", size = 403671 }, { url = "https://files.pythonhosted.org/packages/bb/0b/fd5b7c0b308bbf1831df0ca04ec76fe2f5bf6319833646b0a4bd5e9dc76d/msgpack-1.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:452aff037287acb1d70a804ffd022b21fa2bb7c46bee884dbc864cc9024128a0", size = 387414 }, { url = "https://files.pythonhosted.org/packages/f0/03/ff8233b7c6e9929a1f5da3c7860eccd847e2523ca2de0d8ef4878d354cfa/msgpack-1.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8da4bf6d54ceed70e8861f833f83ce0814a2b72102e890cbdfe4b34764cdd66e", size = 383759 }, { url = "https://files.pythonhosted.org/packages/1f/1b/eb82e1fed5a16dddd9bc75f0854b6e2fe86c0259c4353666d7fab37d39f4/msgpack-1.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:41c991beebf175faf352fb940bf2af9ad1fb77fd25f38d9142053914947cdbf6", size = 394405 }, { url = "https://files.pythonhosted.org/packages/90/2e/962c6004e373d54ecf33d695fb1402f99b51832631e37c49273cc564ffc5/msgpack-1.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a52a1f3a5af7ba1c9ace055b659189f6c669cf3657095b50f9602af3a3ba0fe5", size = 396041 }, { url = 
"https://files.pythonhosted.org/packages/f8/20/6e03342f629474414860c48aeffcc2f7f50ddaf351d95f20c3f1c67399a8/msgpack-1.1.0-cp311-cp311-win32.whl", hash = "sha256:58638690ebd0a06427c5fe1a227bb6b8b9fdc2bd07701bec13c2335c82131a88", size = 68538 }, { url = "https://files.pythonhosted.org/packages/aa/c4/5a582fc9a87991a3e6f6800e9bb2f3c82972912235eb9539954f3e9997c7/msgpack-1.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:fd2906780f25c8ed5d7b323379f6138524ba793428db5d0e9d226d3fa6aa1788", size = 74871 }, { url = "https://files.pythonhosted.org/packages/e1/d6/716b7ca1dbde63290d2973d22bbef1b5032ca634c3ff4384a958ec3f093a/msgpack-1.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:d46cf9e3705ea9485687aa4001a76e44748b609d260af21c4ceea7f2212a501d", size = 152421 }, { url = "https://files.pythonhosted.org/packages/70/da/5312b067f6773429cec2f8f08b021c06af416bba340c912c2ec778539ed6/msgpack-1.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5dbad74103df937e1325cc4bfeaf57713be0b4f15e1c2da43ccdd836393e2ea2", size = 85277 }, { url = "https://files.pythonhosted.org/packages/28/51/da7f3ae4462e8bb98af0d5bdf2707f1b8c65a0d4f496e46b6afb06cbc286/msgpack-1.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:58dfc47f8b102da61e8949708b3eafc3504509a5728f8b4ddef84bd9e16ad420", size = 82222 }, { url = "https://files.pythonhosted.org/packages/33/af/dc95c4b2a49cff17ce47611ca9ba218198806cad7796c0b01d1e332c86bb/msgpack-1.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4676e5be1b472909b2ee6356ff425ebedf5142427842aa06b4dfd5117d1ca8a2", size = 392971 }, { url = "https://files.pythonhosted.org/packages/f1/54/65af8de681fa8255402c80eda2a501ba467921d5a7a028c9c22a2c2eedb5/msgpack-1.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17fb65dd0bec285907f68b15734a993ad3fc94332b5bb21b0435846228de1f39", size = 401403 }, { url = 
"https://files.pythonhosted.org/packages/97/8c/e333690777bd33919ab7024269dc3c41c76ef5137b211d776fbb404bfead/msgpack-1.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a51abd48c6d8ac89e0cfd4fe177c61481aca2d5e7ba42044fd218cfd8ea9899f", size = 385356 }, { url = "https://files.pythonhosted.org/packages/57/52/406795ba478dc1c890559dd4e89280fa86506608a28ccf3a72fbf45df9f5/msgpack-1.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2137773500afa5494a61b1208619e3871f75f27b03bcfca7b3a7023284140247", size = 383028 }, { url = "https://files.pythonhosted.org/packages/e7/69/053b6549bf90a3acadcd8232eae03e2fefc87f066a5b9fbb37e2e608859f/msgpack-1.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:398b713459fea610861c8a7b62a6fec1882759f308ae0795b5413ff6a160cf3c", size = 391100 }, { url = "https://files.pythonhosted.org/packages/23/f0/d4101d4da054f04274995ddc4086c2715d9b93111eb9ed49686c0f7ccc8a/msgpack-1.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:06f5fd2f6bb2a7914922d935d3b8bb4a7fff3a9a91cfce6d06c13bc42bec975b", size = 394254 }, { url = "https://files.pythonhosted.org/packages/1c/12/cf07458f35d0d775ff3a2dc5559fa2e1fcd06c46f1ef510e594ebefdca01/msgpack-1.1.0-cp312-cp312-win32.whl", hash = "sha256:ad33e8400e4ec17ba782f7b9cf868977d867ed784a1f5f2ab46e7ba53b6e1e1b", size = 69085 }, { url = "https://files.pythonhosted.org/packages/73/80/2708a4641f7d553a63bc934a3eb7214806b5b39d200133ca7f7afb0a53e8/msgpack-1.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:115a7af8ee9e8cddc10f87636767857e7e3717b7a2e97379dc2054712693e90f", size = 75347 }, { url = "https://files.pythonhosted.org/packages/c8/b0/380f5f639543a4ac413e969109978feb1f3c66e931068f91ab6ab0f8be00/msgpack-1.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:071603e2f0771c45ad9bc65719291c568d4edf120b44eb36324dcb02a13bfddf", size = 151142 }, { url = 
"https://files.pythonhosted.org/packages/c8/ee/be57e9702400a6cb2606883d55b05784fada898dfc7fd12608ab1fdb054e/msgpack-1.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0f92a83b84e7c0749e3f12821949d79485971f087604178026085f60ce109330", size = 84523 }, { url = "https://files.pythonhosted.org/packages/7e/3a/2919f63acca3c119565449681ad08a2f84b2171ddfcff1dba6959db2cceb/msgpack-1.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4a1964df7b81285d00a84da4e70cb1383f2e665e0f1f2a7027e683956d04b734", size = 81556 }, { url = "https://files.pythonhosted.org/packages/7c/43/a11113d9e5c1498c145a8925768ea2d5fce7cbab15c99cda655aa09947ed/msgpack-1.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59caf6a4ed0d164055ccff8fe31eddc0ebc07cf7326a2aaa0dbf7a4001cd823e", size = 392105 }, { url = "https://files.pythonhosted.org/packages/2d/7b/2c1d74ca6c94f70a1add74a8393a0138172207dc5de6fc6269483519d048/msgpack-1.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0907e1a7119b337971a689153665764adc34e89175f9a34793307d9def08e6ca", size = 399979 }, { url = "https://files.pythonhosted.org/packages/82/8c/cf64ae518c7b8efc763ca1f1348a96f0e37150061e777a8ea5430b413a74/msgpack-1.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:65553c9b6da8166e819a6aa90ad15288599b340f91d18f60b2061f402b9a4915", size = 383816 }, { url = "https://files.pythonhosted.org/packages/69/86/a847ef7a0f5ef3fa94ae20f52a4cacf596a4e4a010197fbcc27744eb9a83/msgpack-1.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7a946a8992941fea80ed4beae6bff74ffd7ee129a90b4dd5cf9c476a30e9708d", size = 380973 }, { url = "https://files.pythonhosted.org/packages/aa/90/c74cf6e1126faa93185d3b830ee97246ecc4fe12cf9d2d31318ee4246994/msgpack-1.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:4b51405e36e075193bc051315dbf29168d6141ae2500ba8cd80a522964e31434", size = 387435 }, { url = 
"https://files.pythonhosted.org/packages/7a/40/631c238f1f338eb09f4acb0f34ab5862c4e9d7eda11c1b685471a4c5ea37/msgpack-1.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b4c01941fd2ff87c2a934ee6055bda4ed353a7846b8d4f341c428109e9fcde8c", size = 399082 }, { url = "https://files.pythonhosted.org/packages/e9/1b/fa8a952be252a1555ed39f97c06778e3aeb9123aa4cccc0fd2acd0b4e315/msgpack-1.1.0-cp313-cp313-win32.whl", hash = "sha256:7c9a35ce2c2573bada929e0b7b3576de647b0defbd25f5139dcdaba0ae35a4cc", size = 69037 }, { url = "https://files.pythonhosted.org/packages/b6/bc/8bd826dd03e022153bfa1766dcdec4976d6c818865ed54223d71f07862b3/msgpack-1.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:bce7d9e614a04d0883af0b3d4d501171fbfca038f12c77fa838d9f198147a23f", size = 75140 }, { url = "https://files.pythonhosted.org/packages/f7/3b/544a5c5886042b80e1f4847a4757af3430f60d106d8d43bb7be72c9e9650/msgpack-1.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:53258eeb7a80fc46f62fd59c876957a2d0e15e6449a9e71842b6d24419d88ca1", size = 150713 }, { url = "https://files.pythonhosted.org/packages/93/af/d63f25bcccd3d6f06fd518ba4a321f34a4370c67b579ca5c70b4a37721b4/msgpack-1.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7e7b853bbc44fb03fbdba34feb4bd414322180135e2cb5164f20ce1c9795ee48", size = 84277 }, { url = "https://files.pythonhosted.org/packages/92/9b/5c0dfb0009b9f96328664fecb9f8e4e9c8a1ae919e6d53986c1b813cb493/msgpack-1.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3e9b4936df53b970513eac1758f3882c88658a220b58dcc1e39606dccaaf01c", size = 81357 }, { url = "https://files.pythonhosted.org/packages/d1/7c/3a9ee6ec9fc3e47681ad39b4d344ee04ff20a776b594fba92d88d8b68356/msgpack-1.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:46c34e99110762a76e3911fc923222472c9d681f1094096ac4102c18319e6468", size = 371256 }, { url = 
"https://files.pythonhosted.org/packages/f7/0a/8a213cecea7b731c540f25212ba5f9a818f358237ac51a44d448bd753690/msgpack-1.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a706d1e74dd3dea05cb54580d9bd8b2880e9264856ce5068027eed09680aa74", size = 377868 }, { url = "https://files.pythonhosted.org/packages/1b/94/a82b0db0981e9586ed5af77d6cfb343da05d7437dceaae3b35d346498110/msgpack-1.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:534480ee5690ab3cbed89d4c8971a5c631b69a8c0883ecfea96c19118510c846", size = 363370 }, { url = "https://files.pythonhosted.org/packages/93/fc/6c7f0dcc1c913e14861e16eaf494c07fc1dde454ec726ff8cebcf348ae53/msgpack-1.1.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8cf9e8c3a2153934a23ac160cc4cba0ec035f6867c8013cc6077a79823370346", size = 358970 }, { url = "https://files.pythonhosted.org/packages/1f/c6/e4a04c0089deace870dabcdef5c9f12798f958e2e81d5012501edaff342f/msgpack-1.1.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3180065ec2abbe13a4ad37688b61b99d7f9e012a535b930e0e683ad6bc30155b", size = 366358 }, { url = "https://files.pythonhosted.org/packages/b6/54/7d8317dac590cf16b3e08e3fb74d2081e5af44eb396f0effa13f17777f30/msgpack-1.1.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c5a91481a3cc573ac8c0d9aace09345d989dc4a0202b7fcb312c88c26d4e71a8", size = 370336 }, { url = "https://files.pythonhosted.org/packages/dc/6f/a5a1f43b6566831e9630e5bc5d86034a8884386297302be128402555dde1/msgpack-1.1.0-cp39-cp39-win32.whl", hash = "sha256:f80bc7d47f76089633763f952e67f8214cb7b3ee6bfa489b3cb6a84cfac114cd", size = 68683 }, { url = "https://files.pythonhosted.org/packages/5f/e8/2162621e18dbc36e2bc8492fd0e97b3975f5d89fe0472ae6d5f7fbdd8cf7/msgpack-1.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:4d1b7ff2d6146e16e8bd665ac726a89c74163ef8cd39fa8c1087d4e52d3a2325", size = 74787 }, ] [[package]] name = "msgspec" version = "0.19.0" source = { registry = "https://pypi.org/simple" } 
sdist = { url = "https://files.pythonhosted.org/packages/cf/9b/95d8ce458462b8b71b8a70fa94563b2498b89933689f3a7b8911edfae3d7/msgspec-0.19.0.tar.gz", hash = "sha256:604037e7cd475345848116e89c553aa9a233259733ab51986ac924ab1b976f8e", size = 216934 } wheels = [ { url = "https://files.pythonhosted.org/packages/13/40/817282b42f58399762267b30deb8ac011d8db373f8da0c212c85fbe62b8f/msgspec-0.19.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d8dd848ee7ca7c8153462557655570156c2be94e79acec3561cf379581343259", size = 190019 }, { url = "https://files.pythonhosted.org/packages/92/99/bd7ed738c00f223a8119928661167a89124140792af18af513e6519b0d54/msgspec-0.19.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0553bbc77662e5708fe66aa75e7bd3e4b0f209709c48b299afd791d711a93c36", size = 183680 }, { url = "https://files.pythonhosted.org/packages/e5/27/322badde18eb234e36d4a14122b89edd4e2973cdbc3da61ca7edf40a1ccd/msgspec-0.19.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe2c4bf29bf4e89790b3117470dea2c20b59932772483082c468b990d45fb947", size = 209334 }, { url = "https://files.pythonhosted.org/packages/c6/65/080509c5774a1592b2779d902a70b5fe008532759927e011f068145a16cb/msgspec-0.19.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00e87ecfa9795ee5214861eab8326b0e75475c2e68a384002aa135ea2a27d909", size = 211551 }, { url = "https://files.pythonhosted.org/packages/6f/2e/1c23c6b4ca6f4285c30a39def1054e2bee281389e4b681b5e3711bd5a8c9/msgspec-0.19.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3c4ec642689da44618f68c90855a10edbc6ac3ff7c1d94395446c65a776e712a", size = 215099 }, { url = "https://files.pythonhosted.org/packages/83/fe/95f9654518879f3359d1e76bc41189113aa9102452170ab7c9a9a4ee52f6/msgspec-0.19.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:2719647625320b60e2d8af06b35f5b12d4f4d281db30a15a1df22adb2295f633", size = 218211 }, { url = 
"https://files.pythonhosted.org/packages/79/f6/71ca7e87a1fb34dfe5efea8156c9ef59dd55613aeda2ca562f122cd22012/msgspec-0.19.0-cp310-cp310-win_amd64.whl", hash = "sha256:695b832d0091edd86eeb535cd39e45f3919f48d997685f7ac31acb15e0a2ed90", size = 186174 }, { url = "https://files.pythonhosted.org/packages/24/d4/2ec2567ac30dab072cce3e91fb17803c52f0a37aab6b0c24375d2b20a581/msgspec-0.19.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:aa77046904db764b0462036bc63ef71f02b75b8f72e9c9dd4c447d6da1ed8f8e", size = 187939 }, { url = "https://files.pythonhosted.org/packages/2b/c0/18226e4328897f4f19875cb62bb9259fe47e901eade9d9376ab5f251a929/msgspec-0.19.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:047cfa8675eb3bad68722cfe95c60e7afabf84d1bd8938979dd2b92e9e4a9551", size = 182202 }, { url = "https://files.pythonhosted.org/packages/81/25/3a4b24d468203d8af90d1d351b77ea3cffb96b29492855cf83078f16bfe4/msgspec-0.19.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e78f46ff39a427e10b4a61614a2777ad69559cc8d603a7c05681f5a595ea98f7", size = 209029 }, { url = "https://files.pythonhosted.org/packages/85/2e/db7e189b57901955239f7689b5dcd6ae9458637a9c66747326726c650523/msgspec-0.19.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c7adf191e4bd3be0e9231c3b6dc20cf1199ada2af523885efc2ed218eafd011", size = 210682 }, { url = "https://files.pythonhosted.org/packages/03/97/7c8895c9074a97052d7e4a1cc1230b7b6e2ca2486714eb12c3f08bb9d284/msgspec-0.19.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:f04cad4385e20be7c7176bb8ae3dca54a08e9756cfc97bcdb4f18560c3042063", size = 214003 }, { url = "https://files.pythonhosted.org/packages/61/61/e892997bcaa289559b4d5869f066a8021b79f4bf8e955f831b095f47a4cd/msgspec-0.19.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:45c8fb410670b3b7eb884d44a75589377c341ec1392b778311acdbfa55187716", size = 216833 }, { url = 
"https://files.pythonhosted.org/packages/ce/3d/71b2dffd3a1c743ffe13296ff701ee503feaebc3f04d0e75613b6563c374/msgspec-0.19.0-cp311-cp311-win_amd64.whl", hash = "sha256:70eaef4934b87193a27d802534dc466778ad8d536e296ae2f9334e182ac27b6c", size = 186184 }, { url = "https://files.pythonhosted.org/packages/b2/5f/a70c24f075e3e7af2fae5414c7048b0e11389685b7f717bb55ba282a34a7/msgspec-0.19.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f98bd8962ad549c27d63845b50af3f53ec468b6318400c9f1adfe8b092d7b62f", size = 190485 }, { url = "https://files.pythonhosted.org/packages/89/b0/1b9763938cfae12acf14b682fcf05c92855974d921a5a985ecc197d1c672/msgspec-0.19.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:43bbb237feab761b815ed9df43b266114203f53596f9b6e6f00ebd79d178cdf2", size = 183910 }, { url = "https://files.pythonhosted.org/packages/87/81/0c8c93f0b92c97e326b279795f9c5b956c5a97af28ca0fbb9fd86c83737a/msgspec-0.19.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4cfc033c02c3e0aec52b71710d7f84cb3ca5eb407ab2ad23d75631153fdb1f12", size = 210633 }, { url = "https://files.pythonhosted.org/packages/d0/ef/c5422ce8af73928d194a6606f8ae36e93a52fd5e8df5abd366903a5ca8da/msgspec-0.19.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d911c442571605e17658ca2b416fd8579c5050ac9adc5e00c2cb3126c97f73bc", size = 213594 }, { url = "https://files.pythonhosted.org/packages/19/2b/4137bc2ed45660444842d042be2cf5b18aa06efd2cda107cff18253b9653/msgspec-0.19.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:757b501fa57e24896cf40a831442b19a864f56d253679f34f260dcb002524a6c", size = 214053 }, { url = "https://files.pythonhosted.org/packages/9d/e6/8ad51bdc806aac1dc501e8fe43f759f9ed7284043d722b53323ea421c360/msgspec-0.19.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5f0f65f29b45e2816d8bded36e6b837a4bf5fb60ec4bc3c625fa2c6da4124537", size = 219081 }, { url = 
"https://files.pythonhosted.org/packages/b1/ef/27dd35a7049c9a4f4211c6cd6a8c9db0a50647546f003a5867827ec45391/msgspec-0.19.0-cp312-cp312-win_amd64.whl", hash = "sha256:067f0de1c33cfa0b6a8206562efdf6be5985b988b53dd244a8e06f993f27c8c0", size = 187467 }, { url = "https://files.pythonhosted.org/packages/3c/cb/2842c312bbe618d8fefc8b9cedce37f773cdc8fa453306546dba2c21fd98/msgspec-0.19.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f12d30dd6266557aaaf0aa0f9580a9a8fbeadfa83699c487713e355ec5f0bd86", size = 190498 }, { url = "https://files.pythonhosted.org/packages/58/95/c40b01b93465e1a5f3b6c7d91b10fb574818163740cc3acbe722d1e0e7e4/msgspec-0.19.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:82b2c42c1b9ebc89e822e7e13bbe9d17ede0c23c187469fdd9505afd5a481314", size = 183950 }, { url = "https://files.pythonhosted.org/packages/e8/f0/5b764e066ce9aba4b70d1db8b087ea66098c7c27d59b9dd8a3532774d48f/msgspec-0.19.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:19746b50be214a54239aab822964f2ac81e38b0055cca94808359d779338c10e", size = 210647 }, { url = "https://files.pythonhosted.org/packages/9d/87/bc14f49bc95c4cb0dd0a8c56028a67c014ee7e6818ccdce74a4862af259b/msgspec-0.19.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60ef4bdb0ec8e4ad62e5a1f95230c08efb1f64f32e6e8dd2ced685bcc73858b5", size = 213563 }, { url = "https://files.pythonhosted.org/packages/53/2f/2b1c2b056894fbaa975f68f81e3014bb447516a8b010f1bed3fb0e016ed7/msgspec-0.19.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac7f7c377c122b649f7545810c6cd1b47586e3aa3059126ce3516ac7ccc6a6a9", size = 213996 }, { url = "https://files.pythonhosted.org/packages/aa/5a/4cd408d90d1417e8d2ce6a22b98a6853c1b4d7cb7669153e4424d60087f6/msgspec-0.19.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a5bc1472223a643f5ffb5bf46ccdede7f9795078194f14edd69e3aab7020d327", size = 219087 }, { url = 
"https://files.pythonhosted.org/packages/23/d8/f15b40611c2d5753d1abb0ca0da0c75348daf1252220e5dda2867bd81062/msgspec-0.19.0-cp313-cp313-win_amd64.whl", hash = "sha256:317050bc0f7739cb30d257ff09152ca309bf5a369854bbf1e57dffc310c1f20f", size = 187432 }, { url = "https://files.pythonhosted.org/packages/ea/d0/323f867eaec1f2236ba30adf613777b1c97a7e8698e2e881656b21871fa4/msgspec-0.19.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:15c1e86fff77184c20a2932cd9742bf33fe23125fa3fcf332df9ad2f7d483044", size = 189926 }, { url = "https://files.pythonhosted.org/packages/a8/37/c3e1b39bdae90a7258d77959f5f5e36ad44b40e2be91cff83eea33c54d43/msgspec-0.19.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3b5541b2b3294e5ffabe31a09d604e23a88533ace36ac288fa32a420aa38d229", size = 183873 }, { url = "https://files.pythonhosted.org/packages/cb/a2/48f2c15c7644668e51f4dce99d5f709bd55314e47acb02e90682f5880f35/msgspec-0.19.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f5c043ace7962ef188746e83b99faaa9e3e699ab857ca3f367b309c8e2c6b12", size = 209272 }, { url = "https://files.pythonhosted.org/packages/25/3c/aa339cf08b990c3f07e67b229a3a8aa31bf129ed974b35e5daa0df7d9d56/msgspec-0.19.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca06aa08e39bf57e39a258e1996474f84d0dd8130d486c00bec26d797b8c5446", size = 211396 }, { url = "https://files.pythonhosted.org/packages/c7/00/c7fb9d524327c558b2803973cc3f988c5100a1708879970a9e377bdf6f4f/msgspec-0.19.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:e695dad6897896e9384cf5e2687d9ae9feaef50e802f93602d35458e20d1fb19", size = 215002 }, { url = "https://files.pythonhosted.org/packages/3f/bf/d9f9fff026c1248cde84a5ce62b3742e8a63a3c4e811f99f00c8babf7615/msgspec-0.19.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:3be5c02e1fee57b54130316a08fe40cca53af92999a302a6054cd451700ea7db", size = 218132 }, { url = 
"https://files.pythonhosted.org/packages/00/03/b92011210f79794958167a3a3ea64a71135d9a2034cfb7597b545a42606d/msgspec-0.19.0-cp39-cp39-win_amd64.whl", hash = "sha256:0684573a821be3c749912acf5848cce78af4298345cb2d7a8b8948a0a5a27cfe", size = 186301 }, ] [[package]] name = "multidict" version = "6.2.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions", marker = "python_full_version < '3.11'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/82/4a/7874ca44a1c9b23796c767dd94159f6c17e31c0e7d090552a1c623247d82/multidict-6.2.0.tar.gz", hash = "sha256:0085b0afb2446e57050140240a8595846ed64d1cbd26cef936bfab3192c673b8", size = 71066 } wheels = [ { url = "https://files.pythonhosted.org/packages/2d/ca/3ae4d9c9ba78e7bcb63e3f12974b8fa16b9a20de44e9785f5d291ccb823c/multidict-6.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b9f6392d98c0bd70676ae41474e2eecf4c7150cb419237a41f8f96043fcb81d1", size = 49238 }, { url = "https://files.pythonhosted.org/packages/25/a4/55e595d2df586e442c85b2610542d1e14def4c6f641761125d35fb38f87c/multidict-6.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3501621d5e86f1a88521ea65d5cad0a0834c77b26f193747615b7c911e5422d2", size = 29748 }, { url = "https://files.pythonhosted.org/packages/35/6f/09bc361a34bbf953e9897f69823f9c4b46aec0aaed6ec94ce63093ede317/multidict-6.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:32ed748ff9ac682eae7859790d3044b50e3076c7d80e17a44239683769ff485e", size = 30026 }, { url = "https://files.pythonhosted.org/packages/b6/c7/5b51816f7c38049fc50786f46e63c009e6fecd1953fbbafa8bfe4e2eb39d/multidict-6.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc826b9a8176e686b67aa60fd6c6a7047b0461cae5591ea1dc73d28f72332a8a", size = 132393 }, { url = "https://files.pythonhosted.org/packages/1a/21/c51aca665afa93b397d2c47369f6c267193977611a55a7c9d8683dc095bc/multidict-6.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:214207dcc7a6221d9942f23797fe89144128a71c03632bf713d918db99bd36de", size = 139237 }, { url = "https://files.pythonhosted.org/packages/2e/9b/a7b91f8ed63314e7a3c276b4ca90ae5d0267a584ca2e42106baa728622d6/multidict-6.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:05fefbc3cddc4e36da209a5e49f1094bbece9a581faa7f3589201fd95df40e5d", size = 134920 }, { url = "https://files.pythonhosted.org/packages/c8/84/4b590a121b1009fe79d1ae5875b4aa9339d37d23e368dd3bcf5e36d27452/multidict-6.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e851e6363d0dbe515d8de81fd544a2c956fdec6f8a049739562286727d4a00c3", size = 129764 }, { url = "https://files.pythonhosted.org/packages/b8/de/831be406b5ab0dc0d25430ddf597c6ce1a2e23a4991363f1ca48f16fb817/multidict-6.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32c9b4878f48be3e75808ea7e499d6223b1eea6d54c487a66bc10a1871e3dc6a", size = 122121 }, { url = "https://files.pythonhosted.org/packages/fa/2f/892334f4d3efc7cd11e3a64dc922a85611627380ee2de3d0627ac159a975/multidict-6.2.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7243c5a6523c5cfeca76e063efa5f6a656d1d74c8b1fc64b2cd1e84e507f7e2a", size = 135640 }, { url = "https://files.pythonhosted.org/packages/6c/53/bf91c5fdede9406247dcbceaa9d7e7fa08e4d0e27fa3c76a0dab126bc6b2/multidict-6.2.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0e5a644e50ef9fb87878d4d57907f03a12410d2aa3b93b3acdf90a741df52c49", size = 129655 }, { url = "https://files.pythonhosted.org/packages/d4/7a/f98e1c5d14c1bbbb83025a69da9a37344f7556c09fef39979cf62b464d60/multidict-6.2.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:0dc25a3293c50744796e87048de5e68996104d86d940bb24bc3ec31df281b191", size = 140691 }, { url = "https://files.pythonhosted.org/packages/dd/c9/af0ab78b53d5b769bc1fa751e53cc7356cef422bd1cf38ed653985a46ddf/multidict-6.2.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = 
"sha256:a49994481b99cd7dedde07f2e7e93b1d86c01c0fca1c32aded18f10695ae17eb", size = 135254 }, { url = "https://files.pythonhosted.org/packages/c9/53/28cc971b17e25487a089bcf720fe284478f264a6fc619427ddf7145fcb2b/multidict-6.2.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:641cf2e3447c9ecff2f7aa6e9eee9eaa286ea65d57b014543a4911ff2799d08a", size = 133620 }, { url = "https://files.pythonhosted.org/packages/b6/9a/d7637fbe1d5928b9f6a33ce36c2ff37e0aab9aa22f5fc9552fd75fe7f364/multidict-6.2.0-cp310-cp310-win32.whl", hash = "sha256:0c383d28857f66f5aebe3e91d6cf498da73af75fbd51cedbe1adfb85e90c0460", size = 27044 }, { url = "https://files.pythonhosted.org/packages/4e/11/04758cc18a51227dbb350a8a25c7db0620d63fb23db5b8d1f87762f05cbe/multidict-6.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:a33273a541f1e1a8219b2a4ed2de355848ecc0254264915b9290c8d2de1c74e1", size = 29149 }, { url = "https://files.pythonhosted.org/packages/97/aa/879cf5581bd56c19f1bd2682ee4ecfd4085a404668d4ee5138b0a08eaf2a/multidict-6.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:84e87a7d75fa36839a3a432286d719975362d230c70ebfa0948549cc38bd5b46", size = 49125 }, { url = "https://files.pythonhosted.org/packages/9e/d8/e6d47c166c13c48be8efb9720afe0f5cdc4da4687547192cbc3c03903041/multidict-6.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8de4d42dffd5ced9117af2ce66ba8722402541a3aa98ffdf78dde92badb68932", size = 29689 }, { url = "https://files.pythonhosted.org/packages/a4/20/f3f0a2ca142c81100b6d4cbf79505961b54181d66157615bba3955304442/multidict-6.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e7d91a230c7f8af86c904a5a992b8c064b66330544693fd6759c3d6162382ecf", size = 29975 }, { url = "https://files.pythonhosted.org/packages/ab/2d/1724972c7aeb7aa1916a3276cb32f9c39e186456ee7ed621504e7a758322/multidict-6.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f6cad071960ba1914fa231677d21b1b4a3acdcce463cee41ea30bc82e6040cf", size = 135688 }, { url = 
"https://files.pythonhosted.org/packages/1a/08/ea54e7e245aaf0bb1c758578e5afba394ffccb8bd80d229a499b9b83f2b1/multidict-6.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0f74f2fc51555f4b037ef278efc29a870d327053aba5cb7d86ae572426c7cccc", size = 142703 }, { url = "https://files.pythonhosted.org/packages/97/76/960dee0424f38c71eda54101ee1ca7bb47c5250ed02f7b3e8e50b1ce0603/multidict-6.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:14ed9ed1bfedd72a877807c71113deac292bf485159a29025dfdc524c326f3e1", size = 138559 }, { url = "https://files.pythonhosted.org/packages/d0/35/969fd792e2e72801d80307f0a14f5b19c066d4a51d34dded22c71401527d/multidict-6.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ac3fcf9a2d369bd075b2c2965544036a27ccd277fc3c04f708338cc57533081", size = 133312 }, { url = "https://files.pythonhosted.org/packages/a4/b8/f96657a2f744d577cfda5a7edf9da04a731b80d3239eafbfe7ca4d944695/multidict-6.2.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2fc6af8e39f7496047c7876314f4317736eac82bf85b54c7c76cf1a6f8e35d98", size = 125652 }, { url = "https://files.pythonhosted.org/packages/35/9d/97696d052297d8e2e08195a25c7aae873a6186c147b7635f979edbe3acde/multidict-6.2.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5f8cb1329f42fadfb40d6211e5ff568d71ab49be36e759345f91c69d1033d633", size = 139015 }, { url = "https://files.pythonhosted.org/packages/31/a0/5c106e28d42f20288c10049bc6647364287ba049dc00d6ae4f1584eb1bd1/multidict-6.2.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5389445f0173c197f4a3613713b5fb3f3879df1ded2a1a2e4bc4b5b9c5441b7e", size = 132437 }, { url = "https://files.pythonhosted.org/packages/55/57/d5c60c075fef73422ae3b8f914221485b9ff15000b2db657c03bd190aee0/multidict-6.2.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:94a7bb972178a8bfc4055db80c51efd24baefaced5e51c59b0d598a004e8305d", size = 144037 
}, { url = "https://files.pythonhosted.org/packages/eb/56/a23f599c697a455bf65ecb0f69a5b052d6442c567d380ed423f816246824/multidict-6.2.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:da51d8928ad8b4244926fe862ba1795f0b6e68ed8c42cd2f822d435db9c2a8f4", size = 138535 }, { url = "https://files.pythonhosted.org/packages/34/3a/a06ff9b5899090f4bbdbf09e237964c76cecfe75d2aa921e801356314017/multidict-6.2.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:063be88bd684782a0715641de853e1e58a2f25b76388538bd62d974777ce9bc2", size = 136885 }, { url = "https://files.pythonhosted.org/packages/d6/28/489c0eca1df3800cb5d0a66278d5dd2a4deae747a41d1cf553e6a4c0a984/multidict-6.2.0-cp311-cp311-win32.whl", hash = "sha256:52b05e21ff05729fbea9bc20b3a791c3c11da61649ff64cce8257c82a020466d", size = 27044 }, { url = "https://files.pythonhosted.org/packages/d0/b5/c7cd5ba9581add40bc743980f82426b90d9f42db0b56502011f1b3c929df/multidict-6.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:1e2a2193d3aa5cbf5758f6d5680a52aa848e0cf611da324f71e5e48a9695cc86", size = 29145 }, { url = "https://files.pythonhosted.org/packages/a4/e2/0153a8db878aef9b2397be81e62cbc3b32ca9b94e0f700b103027db9d506/multidict-6.2.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:437c33561edb6eb504b5a30203daf81d4a9b727e167e78b0854d9a4e18e8950b", size = 49204 }, { url = "https://files.pythonhosted.org/packages/bb/9d/5ccb3224a976d1286f360bb4e89e67b7cdfb87336257fc99be3c17f565d7/multidict-6.2.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9f49585f4abadd2283034fc605961f40c638635bc60f5162276fec075f2e37a4", size = 29807 }, { url = "https://files.pythonhosted.org/packages/62/32/ef20037f51b84b074a89bab5af46d4565381c3f825fc7cbfc19c1ee156be/multidict-6.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5dd7106d064d05896ce28c97da3f46caa442fe5a43bc26dfb258e90853b39b44", size = 30000 }, { url = 
"https://files.pythonhosted.org/packages/97/81/b0a7560bfc3ec72606232cd7e60159e09b9cf29e66014d770c1315868fa2/multidict-6.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e25b11a0417475f093d0f0809a149aff3943c2c56da50fdf2c3c88d57fe3dfbd", size = 131820 }, { url = "https://files.pythonhosted.org/packages/49/3b/768bfc0e41179fbccd3a22925329a11755b7fdd53bec66dbf6b8772f0bce/multidict-6.2.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac380cacdd3b183338ba63a144a34e9044520a6fb30c58aa14077157a033c13e", size = 136272 }, { url = "https://files.pythonhosted.org/packages/71/ac/fd2be3fe98ff54e7739448f771ba730d42036de0870737db9ae34bb8efe9/multidict-6.2.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:61d5541f27533f803a941d3a3f8a3d10ed48c12cf918f557efcbf3cd04ef265c", size = 135233 }, { url = "https://files.pythonhosted.org/packages/93/76/1657047da771315911a927b364a32dafce4135b79b64208ce4ac69525c56/multidict-6.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:facaf11f21f3a4c51b62931feb13310e6fe3475f85e20d9c9fdce0d2ea561b87", size = 132861 }, { url = "https://files.pythonhosted.org/packages/19/a5/9f07ffb9bf68b8aaa406c2abee27ad87e8b62a60551587b8e59ee91aea84/multidict-6.2.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:095a2eabe8c43041d3e6c2cb8287a257b5f1801c2d6ebd1dd877424f1e89cf29", size = 122166 }, { url = "https://files.pythonhosted.org/packages/95/23/b5ce3318d9d6c8f105c3679510f9d7202980545aad8eb4426313bd8da3ee/multidict-6.2.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a0cc398350ef31167e03f3ca7c19313d4e40a662adcb98a88755e4e861170bdd", size = 136052 }, { url = "https://files.pythonhosted.org/packages/ce/5c/02cffec58ffe120873dce520af593415b91cc324be0345f534ad3637da4e/multidict-6.2.0-cp312-cp312-musllinux_1_2_i686.whl", hash = 
"sha256:7c611345bbe7cb44aabb877cb94b63e86f2d0db03e382667dbd037866d44b4f8", size = 130094 }, { url = "https://files.pythonhosted.org/packages/49/f3/3b19a83f4ebf53a3a2a0435f3e447aa227b242ba3fd96a92404b31fb3543/multidict-6.2.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:8cd1a0644ccaf27e9d2f6d9c9474faabee21f0578fe85225cc5af9a61e1653df", size = 140962 }, { url = "https://files.pythonhosted.org/packages/cc/1a/c916b54fb53168c24cb6a3a0795fd99d0a59a0ea93fa9f6edeff5565cb20/multidict-6.2.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:89b3857652183b8206a891168af47bac10b970d275bba1f6ee46565a758c078d", size = 138082 }, { url = "https://files.pythonhosted.org/packages/ef/1a/dcb7fb18f64b3727c61f432c1e1a0d52b3924016124e4bbc8a7d2e4fa57b/multidict-6.2.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:125dd82b40f8c06d08d87b3510beaccb88afac94e9ed4a6f6c71362dc7dbb04b", size = 136019 }, { url = "https://files.pythonhosted.org/packages/fb/02/7695485375106f5c542574f70e1968c391f86fa3efc9f1fd76aac0af7237/multidict-6.2.0-cp312-cp312-win32.whl", hash = "sha256:76b34c12b013d813e6cb325e6bd4f9c984db27758b16085926bbe7ceeaace626", size = 26676 }, { url = "https://files.pythonhosted.org/packages/3c/f5/f147000fe1f4078160157b15b0790fff0513646b0f9b7404bf34007a9b44/multidict-6.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:0b183a959fb88ad1be201de2c4bdf52fa8e46e6c185d76201286a97b6f5ee65c", size = 28899 }, { url = "https://files.pythonhosted.org/packages/a4/6c/5df5590b1f9a821154589df62ceae247537b01ab26b0aa85997c35ca3d9e/multidict-6.2.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:5c5e7d2e300d5cb3b2693b6d60d3e8c8e7dd4ebe27cd17c9cb57020cac0acb80", size = 49151 }, { url = "https://files.pythonhosted.org/packages/d5/ca/c917fbf1be989cd7ea9caa6f87e9c33844ba8d5fbb29cd515d4d2833b84c/multidict-6.2.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:256d431fe4583c5f1e0f2e9c4d9c22f3a04ae96009b8cfa096da3a8723db0a16", size = 29803 }, { url = 
"https://files.pythonhosted.org/packages/22/19/d97086fc96f73acf36d4dbe65c2c4175911969df49c4e94ef082be59d94e/multidict-6.2.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a3c0ff89fe40a152e77b191b83282c9664357dce3004032d42e68c514ceff27e", size = 29947 }, { url = "https://files.pythonhosted.org/packages/e3/3b/203476b6e915c3f51616d5f87230c556e2f24b168c14818a3d8dae242b1b/multidict-6.2.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef7d48207926edbf8b16b336f779c557dd8f5a33035a85db9c4b0febb0706817", size = 130369 }, { url = "https://files.pythonhosted.org/packages/c6/4f/67470007cf03b2bb6df8ae6d716a8eeb0a7d19e0c8dba4e53fa338883bca/multidict-6.2.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1f3c099d3899b14e1ce52262eb82a5f5cb92157bb5106bf627b618c090a0eadc", size = 135231 }, { url = "https://files.pythonhosted.org/packages/6d/f5/7a5ce64dc9a3fecc7d67d0b5cb9c262c67e0b660639e5742c13af63fd80f/multidict-6.2.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e16e7297f29a544f49340012d6fc08cf14de0ab361c9eb7529f6a57a30cbfda1", size = 133634 }, { url = "https://files.pythonhosted.org/packages/05/93/ab2931907e318c0437a4cd156c9cfff317ffb33d99ebbfe2d64200a870f7/multidict-6.2.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:042028348dc5a1f2be6c666437042a98a5d24cee50380f4c0902215e5ec41844", size = 131349 }, { url = "https://files.pythonhosted.org/packages/54/aa/ab8eda83a6a85f5b4bb0b1c28e62b18129b14519ef2e0d4cfd5f360da73c/multidict-6.2.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:08549895e6a799bd551cf276f6e59820aa084f0f90665c0f03dd3a50db5d3c48", size = 120861 }, { url = "https://files.pythonhosted.org/packages/15/2f/7d08ea7c5d9f45786893b4848fad59ec8ea567367d4234691a721e4049a1/multidict-6.2.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:4ccfd74957ef53fa7380aaa1c961f523d582cd5e85a620880ffabd407f8202c0", size = 134611 }, { url = "https://files.pythonhosted.org/packages/8b/07/387047bb1eac563981d397a7f85c75b306df1fff3c20b90da5a6cf6e487e/multidict-6.2.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:83b78c680d4b15d33042d330c2fa31813ca3974197bddb3836a5c635a5fd013f", size = 128955 }, { url = "https://files.pythonhosted.org/packages/8d/6e/7ae18f764a5282c2d682f1c90c6b2a0f6490327730170139a7a63bf3bb20/multidict-6.2.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:b4c153863dd6569f6511845922c53e39c8d61f6e81f228ad5443e690fca403de", size = 139759 }, { url = "https://files.pythonhosted.org/packages/b6/f4/c1b3b087b9379b9e56229bcf6570b9a963975c205a5811ac717284890598/multidict-6.2.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:98aa8325c7f47183b45588af9c434533196e241be0a4e4ae2190b06d17675c02", size = 136426 }, { url = "https://files.pythonhosted.org/packages/a2/0e/ef7b39b161ffd40f9e25dd62e59644b2ccaa814c64e9573f9bc721578419/multidict-6.2.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9e658d1373c424457ddf6d55ec1db93c280b8579276bebd1f72f113072df8a5d", size = 134648 }, { url = "https://files.pythonhosted.org/packages/37/5c/7905acd0ca411c97bcae62ab167d9922f0c5a1d316b6d3af875d4bda3551/multidict-6.2.0-cp313-cp313-win32.whl", hash = "sha256:3157126b028c074951839233647bd0e30df77ef1fedd801b48bdcad242a60f4e", size = 26680 }, { url = "https://files.pythonhosted.org/packages/89/36/96b071d1dad6ac44fe517e4250329e753787bb7a63967ef44bb9b3a659f6/multidict-6.2.0-cp313-cp313-win_amd64.whl", hash = "sha256:2e87f1926e91855ae61769ba3e3f7315120788c099677e0842e697b0bfb659f2", size = 28942 }, { url = "https://files.pythonhosted.org/packages/f5/05/d686cd2a12d648ecd434675ee8daa2901a80f477817e89ab3b160de5b398/multidict-6.2.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:2529ddbdaa424b2c6c2eb668ea684dd6b75b839d0ad4b21aad60c168269478d7", size = 50807 }, { url = 
"https://files.pythonhosted.org/packages/4c/1f/c7db5aac8fea129fa4c5a119e3d279da48d769138ae9624d1234aa01a06f/multidict-6.2.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:13551d0e2d7201f0959725a6a769b6f7b9019a168ed96006479c9ac33fe4096b", size = 30474 }, { url = "https://files.pythonhosted.org/packages/e5/f1/1fb27514f4d73cea165429dcb7d90cdc4a45445865832caa0c50dd545420/multidict-6.2.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:d1996ee1330e245cd3aeda0887b4409e3930524c27642b046e4fae88ffa66c5e", size = 30841 }, { url = "https://files.pythonhosted.org/packages/d6/6b/9487169e549a23c8958edbb332afaf1ab55d61f0c03cb758ee07ff8f74fb/multidict-6.2.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c537da54ce4ff7c15e78ab1292e5799d0d43a2108e006578a57f531866f64025", size = 148658 }, { url = "https://files.pythonhosted.org/packages/d7/22/79ebb2e4f70857c94999ce195db76886ae287b1b6102da73df24dcad4903/multidict-6.2.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0f249badb360b0b4d694307ad40f811f83df4da8cef7b68e429e4eea939e49dd", size = 151988 }, { url = "https://files.pythonhosted.org/packages/49/5d/63b17f3c1a2861587d26705923a94eb6b2600e5222d6b0d513bce5a78720/multidict-6.2.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48d39b1824b8d6ea7de878ef6226efbe0773f9c64333e1125e0efcfdd18a24c7", size = 148432 }, { url = "https://files.pythonhosted.org/packages/a3/22/55204eec45c4280fa431c11494ad64d6da0dc89af76282fc6467432360a0/multidict-6.2.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b99aac6bb2c37db336fa03a39b40ed4ef2818bf2dfb9441458165ebe88b793af", size = 143161 }, { url = "https://files.pythonhosted.org/packages/97/e6/202b2cf5af161228767acab8bc49e73a91f4a7de088c9c71f3c02950a030/multidict-6.2.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:07bfa8bc649783e703263f783f73e27fef8cd37baaad4389816cf6a133141331", size = 136820 }, { url = "https://files.pythonhosted.org/packages/7d/16/dbedae0e94c7edc48fddef0c39483f2313205d9bc566fd7f11777b168616/multidict-6.2.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b2c00ad31fbc2cbac85d7d0fcf90853b2ca2e69d825a2d3f3edb842ef1544a2c", size = 150875 }, { url = "https://files.pythonhosted.org/packages/f3/04/38ccf25d4bf8beef76a22bad7d9833fd088b4594c9765fe6fede39aa6c89/multidict-6.2.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:0d57a01a2a9fa00234aace434d8c131f0ac6e0ac6ef131eda5962d7e79edfb5b", size = 142050 }, { url = "https://files.pythonhosted.org/packages/9e/89/4f6b43386e7b79a4aad560d751981a0a282a1943c312ac72f940d7cf8f9f/multidict-6.2.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:abf5b17bc0cf626a8a497d89ac691308dbd825d2ac372aa990b1ca114e470151", size = 154117 }, { url = "https://files.pythonhosted.org/packages/24/e3/3dde5b193f86d30ad6400bd50e116b0df1da3f0c7d419661e3bd79e5ad86/multidict-6.2.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:f7716f7e7138252d88607228ce40be22660d6608d20fd365d596e7ca0738e019", size = 149408 }, { url = "https://files.pythonhosted.org/packages/df/b2/ec1e27e8e3da12fcc9053e1eae2f6b50faa8708064d83ea25aa7fb77ffd2/multidict-6.2.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:d5a36953389f35f0a4e88dc796048829a2f467c9197265504593f0e420571547", size = 145767 }, { url = "https://files.pythonhosted.org/packages/3a/8e/c07a648a9d592fa9f3a19d1c7e1c7738ba95aff90db967a5a09cff1e1f37/multidict-6.2.0-cp313-cp313t-win32.whl", hash = "sha256:e653d36b1bf48fa78c7fcebb5fa679342e025121ace8c87ab05c1cefd33b34fc", size = 28950 }, { url = "https://files.pythonhosted.org/packages/dc/a9/bebb5485b94d7c09831638a4df9a1a924c32431a750723f0bf39cd16a787/multidict-6.2.0-cp313-cp313t-win_amd64.whl", hash = "sha256:ca23db5fb195b5ef4fd1f77ce26cadefdf13dba71dab14dadd29b34d457d7c44", size = 32001 }, { url = 
"https://files.pythonhosted.org/packages/ec/a3/8c8eeac0e6080ffe89f53f239cab98b576dd584960f78add84803fbafda8/multidict-6.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b4f3d66dd0354b79761481fc15bdafaba0b9d9076f1f42cc9ce10d7fcbda205a", size = 48972 }, { url = "https://files.pythonhosted.org/packages/05/1e/0ad3ab9ef09b73f78af3f509e27f668814beab05d7fb838134b4f140b6a7/multidict-6.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6e2a2d6749e1ff2c9c76a72c6530d5baa601205b14e441e6d98011000f47a7ac", size = 29610 }, { url = "https://files.pythonhosted.org/packages/76/1f/ec8a90383d2ce4fdb14ba3f752b280096a6c2e1353d3fcd309d9af47c1b8/multidict-6.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cca83a629f77402cfadd58352e394d79a61c8015f1694b83ab72237ec3941f88", size = 29983 }, { url = "https://files.pythonhosted.org/packages/f0/9b/851be91f031007549fe9778926acbab3322081bba7c944cb588eb4765593/multidict-6.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:781b5dd1db18c9e9eacc419027b0acb5073bdec9de1675c0be25ceb10e2ad133", size = 131833 }, { url = "https://files.pythonhosted.org/packages/81/e4/4239b907135687b754cf5fbe7dda9015048c36b2bc9910a06fa69ce9e23a/multidict-6.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cf8d370b2fea27fb300825ec3984334f7dd54a581bde6456799ba3776915a656", size = 138790 }, { url = "https://files.pythonhosted.org/packages/96/5d/24dda76145c688c3d1b2241a01c07d608feb999e70fc92db246ba5380b8d/multidict-6.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:25bb96338512e2f46f615a2bb7c6012fe92a4a5ebd353e5020836a7e33120349", size = 134529 }, { url = "https://files.pythonhosted.org/packages/c6/67/12bfd2a023bdb3c3d0ad181c83d79688fa34b4d60a230d4d55ad78fe2595/multidict-6.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:19e2819b0b468174de25c0ceed766606a07cedeab132383f1e83b9a4e96ccb4f", size = 129330 }, { url = 
"https://files.pythonhosted.org/packages/fa/a0/c02509b31ff325b49a07d5d0e21f066dedfd3f5317936e193d23677ae375/multidict-6.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6aed763b6a1b28c46c055692836879328f0b334a6d61572ee4113a5d0c859872", size = 121826 }, { url = "https://files.pythonhosted.org/packages/85/e7/d9857dd6264574129a402cc4bdecd42a091c44eba2815c6b4f7ca20ca3cc/multidict-6.2.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:a1133414b771619aa3c3000701c11b2e4624a7f492f12f256aedde97c28331a2", size = 135088 }, { url = "https://files.pythonhosted.org/packages/c4/c2/1b1f9ba409dcbe38f4f83a9de28946e8cbc70420813bf9ecec19ea98561a/multidict-6.2.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:639556758c36093b35e2e368ca485dada6afc2bd6a1b1207d85ea6dfc3deab27", size = 129212 }, { url = "https://files.pythonhosted.org/packages/89/c2/2f6d1cb16e8102da94cfe8871b17d7455d2aa3c70e16a1789f1b4cebe956/multidict-6.2.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:163f4604e76639f728d127293d24c3e208b445b463168af3d031b92b0998bb90", size = 140263 }, { url = "https://files.pythonhosted.org/packages/36/17/65288873b0663c885ee1477895d3187142fdc7e9549f68b9930f2b983342/multidict-6.2.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:2325105e16d434749e1be8022f942876a936f9bece4ec41ae244e3d7fae42aaf", size = 134892 }, { url = "https://files.pythonhosted.org/packages/92/3d/c59cfc4fa26bfe170f0e6c4fcab31a1fbc09960975a4423a6e3e26465815/multidict-6.2.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:e4371591e621579cb6da8401e4ea405b33ff25a755874a3567c4075ca63d56e2", size = 133227 }, { url = "https://files.pythonhosted.org/packages/75/da/a38874073671c55853ed74ef114f3983f5a443fae546a99ed1721cef854a/multidict-6.2.0-cp39-cp39-win32.whl", hash = "sha256:d1175b0e0d6037fab207f05774a176d71210ebd40b1c51f480a04b65ec5c786d", size = 27025 }, { url = 
"https://files.pythonhosted.org/packages/4f/f0/e16ba06acf9aed61fcf152a19c8c55739e74744d31dd49319e5cab7404d4/multidict-6.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:ad81012b24b88aad4c70b2cbc2dad84018783221b7f923e926f4690ff8569da3", size = 29158 }, { url = "https://files.pythonhosted.org/packages/9c/fd/b247aec6add5601956d440488b7f23151d8343747e82c038af37b28d6098/multidict-6.2.0-py3-none-any.whl", hash = "sha256:5d26547423e5e71dcc562c4acdc134b900640a39abd9066d7326a7cc2324c530", size = 10266 }, ] [[package]] name = "multipart" version = "1.2.1" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/df/91/6c93b6a95e6a99ef929a99d019fbf5b5f7fd3368389a0b1ec7ce0a23565b/multipart-1.2.1.tar.gz", hash = "sha256:829b909b67bc1ad1c6d4488fcdc6391c2847842b08323addf5200db88dbe9480", size = 36507 } wheels = [ { url = "https://files.pythonhosted.org/packages/cc/d1/3598d1e73385baaab427392856f915487db7aa10abadd436f8f2d3e3b0f9/multipart-1.2.1-py3-none-any.whl", hash = "sha256:c03dc203bc2e67f6b46a599467ae0d87cf71d7530504b2c1ff4a9ea21d8b8c8c", size = 13730 }, ] [[package]] name = "mypy" version = "1.15.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "mypy-extensions" }, { name = "tomli", marker = "python_full_version < '3.11'" }, { name = "typing-extensions" }, ] sdist = { url = "https://files.pythonhosted.org/packages/ce/43/d5e49a86afa64bd3839ea0d5b9c7103487007d728e1293f52525d6d5486a/mypy-1.15.0.tar.gz", hash = "sha256:404534629d51d3efea5c800ee7c42b72a6554d6c400e6a79eafe15d11341fd43", size = 3239717 } wheels = [ { url = "https://files.pythonhosted.org/packages/68/f8/65a7ce8d0e09b6329ad0c8d40330d100ea343bd4dd04c4f8ae26462d0a17/mypy-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:979e4e1a006511dacf628e36fadfecbcc0160a8af6ca7dad2f5025529e082c13", size = 10738433 }, { url = 
"https://files.pythonhosted.org/packages/b4/95/9c0ecb8eacfe048583706249439ff52105b3f552ea9c4024166c03224270/mypy-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c4bb0e1bd29f7d34efcccd71cf733580191e9a264a2202b0239da95984c5b559", size = 9861472 }, { url = "https://files.pythonhosted.org/packages/84/09/9ec95e982e282e20c0d5407bc65031dfd0f0f8ecc66b69538296e06fcbee/mypy-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:be68172e9fd9ad8fb876c6389f16d1c1b5f100ffa779f77b1fb2176fcc9ab95b", size = 11611424 }, { url = "https://files.pythonhosted.org/packages/78/13/f7d14e55865036a1e6a0a69580c240f43bc1f37407fe9235c0d4ef25ffb0/mypy-1.15.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c7be1e46525adfa0d97681432ee9fcd61a3964c2446795714699a998d193f1a3", size = 12365450 }, { url = "https://files.pythonhosted.org/packages/48/e1/301a73852d40c241e915ac6d7bcd7fedd47d519246db2d7b86b9d7e7a0cb/mypy-1.15.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:2e2c2e6d3593f6451b18588848e66260ff62ccca522dd231cd4dd59b0160668b", size = 12551765 }, { url = "https://files.pythonhosted.org/packages/77/ba/c37bc323ae5fe7f3f15a28e06ab012cd0b7552886118943e90b15af31195/mypy-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:6983aae8b2f653e098edb77f893f7b6aca69f6cffb19b2cc7443f23cce5f4828", size = 9274701 }, { url = "https://files.pythonhosted.org/packages/03/bc/f6339726c627bd7ca1ce0fa56c9ae2d0144604a319e0e339bdadafbbb599/mypy-1.15.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2922d42e16d6de288022e5ca321cd0618b238cfc5570e0263e5ba0a77dbef56f", size = 10662338 }, { url = "https://files.pythonhosted.org/packages/e2/90/8dcf506ca1a09b0d17555cc00cd69aee402c203911410136cd716559efe7/mypy-1.15.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2ee2d57e01a7c35de00f4634ba1bbf015185b219e4dc5909e281016df43f5ee5", size = 9787540 }, { url = 
"https://files.pythonhosted.org/packages/05/05/a10f9479681e5da09ef2f9426f650d7b550d4bafbef683b69aad1ba87457/mypy-1.15.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:973500e0774b85d9689715feeffcc980193086551110fd678ebe1f4342fb7c5e", size = 11538051 }, { url = "https://files.pythonhosted.org/packages/e9/9a/1f7d18b30edd57441a6411fcbc0c6869448d1a4bacbaee60656ac0fc29c8/mypy-1.15.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5a95fb17c13e29d2d5195869262f8125dfdb5c134dc8d9a9d0aecf7525b10c2c", size = 12286751 }, { url = "https://files.pythonhosted.org/packages/72/af/19ff499b6f1dafcaf56f9881f7a965ac2f474f69f6f618b5175b044299f5/mypy-1.15.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1905f494bfd7d85a23a88c5d97840888a7bd516545fc5aaedff0267e0bb54e2f", size = 12421783 }, { url = "https://files.pythonhosted.org/packages/96/39/11b57431a1f686c1aed54bf794870efe0f6aeca11aca281a0bd87a5ad42c/mypy-1.15.0-cp311-cp311-win_amd64.whl", hash = "sha256:c9817fa23833ff189db061e6d2eff49b2f3b6ed9856b4a0a73046e41932d744f", size = 9265618 }, { url = "https://files.pythonhosted.org/packages/98/3a/03c74331c5eb8bd025734e04c9840532226775c47a2c39b56a0c8d4f128d/mypy-1.15.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:aea39e0583d05124836ea645f412e88a5c7d0fd77a6d694b60d9b6b2d9f184fd", size = 10793981 }, { url = "https://files.pythonhosted.org/packages/f0/1a/41759b18f2cfd568848a37c89030aeb03534411eef981df621d8fad08a1d/mypy-1.15.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2f2147ab812b75e5b5499b01ade1f4a81489a147c01585cda36019102538615f", size = 9749175 }, { url = "https://files.pythonhosted.org/packages/12/7e/873481abf1ef112c582db832740f4c11b2bfa510e829d6da29b0ab8c3f9c/mypy-1.15.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ce436f4c6d218a070048ed6a44c0bbb10cd2cc5e272b29e7845f6a2f57ee4464", size = 11455675 }, { url = 
"https://files.pythonhosted.org/packages/b3/d0/92ae4cde706923a2d3f2d6c39629134063ff64b9dedca9c1388363da072d/mypy-1.15.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8023ff13985661b50a5928fc7a5ca15f3d1affb41e5f0a9952cb68ef090b31ee", size = 12410020 }, { url = "https://files.pythonhosted.org/packages/46/8b/df49974b337cce35f828ba6fda228152d6db45fed4c86ba56ffe442434fd/mypy-1.15.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1124a18bc11a6a62887e3e137f37f53fbae476dc36c185d549d4f837a2a6a14e", size = 12498582 }, { url = "https://files.pythonhosted.org/packages/13/50/da5203fcf6c53044a0b699939f31075c45ae8a4cadf538a9069b165c1050/mypy-1.15.0-cp312-cp312-win_amd64.whl", hash = "sha256:171a9ca9a40cd1843abeca0e405bc1940cd9b305eaeea2dda769ba096932bb22", size = 9366614 }, { url = "https://files.pythonhosted.org/packages/6a/9b/fd2e05d6ffff24d912f150b87db9e364fa8282045c875654ce7e32fffa66/mypy-1.15.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:93faf3fdb04768d44bf28693293f3904bbb555d076b781ad2530214ee53e3445", size = 10788592 }, { url = "https://files.pythonhosted.org/packages/74/37/b246d711c28a03ead1fd906bbc7106659aed7c089d55fe40dd58db812628/mypy-1.15.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:811aeccadfb730024c5d3e326b2fbe9249bb7413553f15499a4050f7c30e801d", size = 9753611 }, { url = "https://files.pythonhosted.org/packages/a6/ac/395808a92e10cfdac8003c3de9a2ab6dc7cde6c0d2a4df3df1b815ffd067/mypy-1.15.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:98b7b9b9aedb65fe628c62a6dc57f6d5088ef2dfca37903a7d9ee374d03acca5", size = 11438443 }, { url = "https://files.pythonhosted.org/packages/d2/8b/801aa06445d2de3895f59e476f38f3f8d610ef5d6908245f07d002676cbf/mypy-1.15.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c43a7682e24b4f576d93072216bf56eeff70d9140241f9edec0c104d0c515036", size = 12402541 }, { url = 
"https://files.pythonhosted.org/packages/c7/67/5a4268782eb77344cc613a4cf23540928e41f018a9a1ec4c6882baf20ab8/mypy-1.15.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:baefc32840a9f00babd83251560e0ae1573e2f9d1b067719479bfb0e987c6357", size = 12494348 }, { url = "https://files.pythonhosted.org/packages/83/3e/57bb447f7bbbfaabf1712d96f9df142624a386d98fb026a761532526057e/mypy-1.15.0-cp313-cp313-win_amd64.whl", hash = "sha256:b9378e2c00146c44793c98b8d5a61039a048e31f429fb0eb546d93f4b000bedf", size = 9373648 }, { url = "https://files.pythonhosted.org/packages/5a/fa/79cf41a55b682794abe71372151dbbf856e3008f6767057229e6649d294a/mypy-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e601a7fa172c2131bff456bb3ee08a88360760d0d2f8cbd7a75a65497e2df078", size = 10737129 }, { url = "https://files.pythonhosted.org/packages/d3/33/dd8feb2597d648de29e3da0a8bf4e1afbda472964d2a4a0052203a6f3594/mypy-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:712e962a6357634fef20412699a3655c610110e01cdaa6180acec7fc9f8513ba", size = 9856335 }, { url = "https://files.pythonhosted.org/packages/e4/b5/74508959c1b06b96674b364ffeb7ae5802646b32929b7701fc6b18447592/mypy-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f95579473af29ab73a10bada2f9722856792a36ec5af5399b653aa28360290a5", size = 11611935 }, { url = "https://files.pythonhosted.org/packages/6c/53/da61b9d9973efcd6507183fdad96606996191657fe79701b2c818714d573/mypy-1.15.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8f8722560a14cde92fdb1e31597760dc35f9f5524cce17836c0d22841830fd5b", size = 12365827 }, { url = "https://files.pythonhosted.org/packages/c1/72/965bd9ee89540c79a25778cc080c7e6ef40aa1eeac4d52cec7eae6eb5228/mypy-1.15.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1fbb8da62dc352133d7d7ca90ed2fb0e9d42bb1a32724c287d3c76c58cbaa9c2", size = 12541924 }, { url = 
"https://files.pythonhosted.org/packages/46/d0/f41645c2eb263e6c77ada7d76f894c580c9ddb20d77f0c24d34273a4dab2/mypy-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:d10d994b41fb3497719bbf866f227b3489048ea4bbbb5015357db306249f7980", size = 9271176 }, { url = "https://files.pythonhosted.org/packages/09/4e/a7d65c7322c510de2c409ff3828b03354a7c43f5a8ed458a7a131b41c7b9/mypy-1.15.0-py3-none-any.whl", hash = "sha256:5469affef548bd1895d86d3bf10ce2b44e33d86923c29e4d675b3e323437ea3e", size = 2221777 }, ] [[package]] name = "mypy-extensions" version = "1.0.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/98/a4/1ab47638b92648243faf97a5aeb6ea83059cc3624972ab6b8d2316078d3f/mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782", size = 4433 } wheels = [ { url = "https://files.pythonhosted.org/packages/2a/e2/5d3f6ada4297caebe1a2add3b126fe800c96f56dbe5d1988a2cbe0b267aa/mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d", size = 4695 }, ] [[package]] name = "myst-parser" version = "3.0.1" source = { registry = "https://pypi.org/simple" } resolution-markers = [ "python_full_version < '3.10'", ] dependencies = [ { name = "docutils", marker = "python_full_version < '3.10'" }, { name = "jinja2", marker = "python_full_version < '3.10'" }, { name = "markdown-it-py", marker = "python_full_version < '3.10'" }, { name = "mdit-py-plugins", marker = "python_full_version < '3.10'" }, { name = "pyyaml", marker = "python_full_version < '3.10'" }, { name = "sphinx", version = "7.4.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/49/64/e2f13dac02f599980798c01156393b781aec983b52a6e4057ee58f07c43a/myst_parser-3.0.1.tar.gz", hash = "sha256:88f0cb406cb363b077d176b51c476f62d60604d68a8dcdf4832e080441301a87", size = 
92392 } wheels = [ { url = "https://files.pythonhosted.org/packages/e2/de/21aa8394f16add8f7427f0a1326ccd2b3a2a8a3245c9252bc5ac034c6155/myst_parser-3.0.1-py3-none-any.whl", hash = "sha256:6457aaa33a5d474aca678b8ead9b3dc298e89c68e67012e73146ea6fd54babf1", size = 83163 }, ] [[package]] name = "myst-parser" version = "4.0.1" source = { registry = "https://pypi.org/simple" } resolution-markers = [ "python_full_version >= '3.13'", "python_full_version >= '3.11' and python_full_version < '3.13'", "python_full_version == '3.10.*'", ] dependencies = [ { name = "docutils", marker = "python_full_version >= '3.10'" }, { name = "jinja2", marker = "python_full_version >= '3.10'" }, { name = "markdown-it-py", marker = "python_full_version >= '3.10'" }, { name = "mdit-py-plugins", marker = "python_full_version >= '3.10'" }, { name = "pyyaml", marker = "python_full_version >= '3.10'" }, { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, { name = "sphinx", version = "8.2.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/66/a5/9626ba4f73555b3735ad86247a8077d4603aa8628537687c839ab08bfe44/myst_parser-4.0.1.tar.gz", hash = "sha256:5cfea715e4f3574138aecbf7d54132296bfd72bb614d31168f48c477a830a7c4", size = 93985 } wheels = [ { url = "https://files.pythonhosted.org/packages/5f/df/76d0321c3797b54b60fef9ec3bd6f4cfd124b9e422182156a1dd418722cf/myst_parser-4.0.1-py3-none-any.whl", hash = "sha256:9134e88959ec3b5780aedf8a99680ea242869d012e8821db3126d427edc9c95d", size = 84579 }, ] [[package]] name = "natsort" version = "8.4.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/e2/a9/a0c57aee75f77794adaf35322f8b6404cbd0f89ad45c87197a937764b7d0/natsort-8.4.0.tar.gz", hash = "sha256:45312c4a0e5507593da193dedd04abb1469253b601ecaf63445ad80f0a1ea581", size = 
76575 } wheels = [ { url = "https://files.pythonhosted.org/packages/ef/82/7a9d0550484a62c6da82858ee9419f3dd1ccc9aa1c26a1e43da3ecd20b0d/natsort-8.4.0-py3-none-any.whl", hash = "sha256:4732914fb471f56b5cce04d7bae6f164a592c7712e1c85f9ef585e197299521c", size = 38268 }, ] [[package]] name = "nodeenv" version = "1.9.1" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/43/16/fc88b08840de0e0a72a2f9d8c6bae36be573e475a6326ae854bcc549fc45/nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f", size = 47437 } wheels = [ { url = "https://files.pythonhosted.org/packages/d2/1d/1b658dbd2b9fa9c4c9f32accbfc0205d532c8c6194dc0f2a4c0428e7128a/nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9", size = 22314 }, ] [[package]] name = "oracledb" version = "3.0.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cryptography" }, ] sdist = { url = "https://files.pythonhosted.org/packages/bf/39/712f797b75705c21148fa1d98651f63c2e5cc6876e509a0a9e2f5b406572/oracledb-3.0.0.tar.gz", hash = "sha256:64dc86ee5c032febc556798b06e7b000ef6828bb0252084f6addacad3363db85", size = 840431 } wheels = [ { url = "https://files.pythonhosted.org/packages/76/0f/d480889c09de20f9588829b88e6ce482de9e6131de368008c5754fc4fc75/oracledb-3.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d4fbe19765c489176558bfa2c5145a4e6e960a80b0a451b3f5af368a835623cd", size = 4270186 }, { url = "https://files.pythonhosted.org/packages/b1/25/a7a172d1233ed5d8425b6689411c09dfb701b004152140fe943f0b9daefa/oracledb-3.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:dec5489116cda6c742f75263bc04333575412775a39a6fea22a0b37f6f9e7021", size = 2655606 }, { url = 
"https://files.pythonhosted.org/packages/fd/60/b7e6997ed896569e7df57d1b670ca14e6252f472b4b1488d6edb650f86c2/oracledb-3.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0ca6fd65ed3dbc78ce25930b179e97754779eb5cb35eeb97dff8b5fc4db75746", size = 2862555 }, { url = "https://files.pythonhosted.org/packages/d5/61/08fac4c848d3b6a6f8b95df3a9a3739f180c187ce6a43507e854e581b910/oracledb-3.0.0-cp310-cp310-win32.whl", hash = "sha256:04e3f935aca72efa8108b2ae2d98e0f65b59b00edfe2f83bc9b0261a68cd5205", size = 1750099 }, { url = "https://files.pythonhosted.org/packages/8e/47/18cd87bb525d77b44d2509b78781cb1c1807bf5478e8098cd416d9a3bc3a/oracledb-3.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:19903b34cee9c434df32b3e7b04ca0b1c7b81e50d1ea172d70eae59716787bb3", size = 2091866 }, { url = "https://files.pythonhosted.org/packages/fa/bf/d872c4b3fc15cd3261fe0ea72b21d181700c92dbc050160e161654987062/oracledb-3.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:52daa9141c63dfa75c07d445e9bb7f69f43bfb3c5a173ecc48c798fe50288d26", size = 4312963 }, { url = "https://files.pythonhosted.org/packages/b1/ea/01ee29e76a610a53bb34fdc1030f04b7669c3f80b25f661e07850fc6160e/oracledb-3.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:af98941789df4c6aaaf4338f5b5f6b7f2c8c3fe6f8d6a9382f177f350868747a", size = 2661536 }, { url = "https://files.pythonhosted.org/packages/3d/8e/ad380e34a46819224423b4773e58c350bc6269643c8969604097ced8c3bc/oracledb-3.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9812bb48865aaec35d73af54cd1746679f2a8a13cbd1412ab371aba2e39b3943", size = 2867461 }, { url = "https://files.pythonhosted.org/packages/96/09/ecc4384a27fd6e1e4de824ae9c160e4ad3aaebdaade5b4bdcf56a4d1ff63/oracledb-3.0.0-cp311-cp311-win32.whl", hash = "sha256:6c27fe0de64f2652e949eb05b3baa94df9b981a4a45fa7f8a991e1afb450c8e2", size = 1752046 }, { url = 
"https://files.pythonhosted.org/packages/62/e8/f34bde24050c6e55eeba46b23b2291f2dd7fd272fa8b322dcbe71be55778/oracledb-3.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:f922709672002f0b40997456f03a95f03e5712a86c61159951c5ce09334325e0", size = 2101210 }, { url = "https://files.pythonhosted.org/packages/6f/fc/24590c3a3d41e58494bd3c3b447a62835138e5f9b243d9f8da0cfb5da8dc/oracledb-3.0.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:acd0e747227dea01bebe627b07e958bf36588a337539f24db629dc3431d3f7eb", size = 4351993 }, { url = "https://files.pythonhosted.org/packages/b7/b6/1f3b0b7bb94d53e8857d77b2e8dbdf6da091dd7e377523e24b79dac4fd71/oracledb-3.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f8b402f77c22af031cd0051aea2472ecd0635c1b452998f511aa08b7350c90a4", size = 2532640 }, { url = "https://files.pythonhosted.org/packages/72/1a/1815f6c086ab49c00921cf155ff5eede5267fb29fcec37cb246339a5ce4d/oracledb-3.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:378a27782e9a37918bd07a5a1427a77cb6f777d0a5a8eac9c070d786f50120ef", size = 2765949 }, { url = "https://files.pythonhosted.org/packages/33/8d/208900f8d372909792ee70b2daad3f7361181e55f2217c45ed9dff658b54/oracledb-3.0.0-cp312-cp312-win32.whl", hash = "sha256:54a28c2cb08316a527cd1467740a63771cc1c1164697c932aa834c0967dc4efc", size = 1709373 }, { url = "https://files.pythonhosted.org/packages/0c/5e/c21754f19c896102793c3afec2277e2180aa7d505e4d7fcca24b52d14e4f/oracledb-3.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:8289bad6d103ce42b140e40576cf0c81633e344d56e2d738b539341eacf65624", size = 2056452 }, { url = "https://files.pythonhosted.org/packages/d9/8b/1db854789d6583b284961ddb290dc5d6f3d8259911e5ad7dc9b7dc9b6fd7/oracledb-3.0.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:1dcec2916441492e6d6f03be52f06ee9f4814dece672be49f972219ff18fe2c1", size = 4311779 }, { url = 
"https://files.pythonhosted.org/packages/1e/df/71eb3e5db8c2baa3247b5a9687aa8efdc8fc553ab62351078407fd101892/oracledb-3.0.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4e5963d72f2bf6f6707649cd490c26fc8cc4314e84dd74a1313ecf1c70c93531", size = 2517621 }, { url = "https://files.pythonhosted.org/packages/ee/48/10d6f519e718d0db7894615783d70e475c0285ac99e66f5800c7165e34ea/oracledb-3.0.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5da57c328a994985bae5936af7974a5c505cf93178d2e3882d96f3ec8363682b", size = 2746897 }, { url = "https://files.pythonhosted.org/packages/cc/0a/dd53849391547858467a76d4d51f498f7a8f54bdfe97d4b0fbac9957cdd9/oracledb-3.0.0-cp313-cp313-win32.whl", hash = "sha256:2358ffacf5209b6d9c5aaaf34d9754d491b20a141dc305fe21b6cb1ff23fc12a", size = 1704828 }, { url = "https://files.pythonhosted.org/packages/68/0e/cd88200ded018fd88f5ef168605126e4ac7c5f8ccf925c6cb18966e23f05/oracledb-3.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:f6b66fddb9ae440b662ae9b8f1e0f618caaf2c3e44a46bbd1521c3ca11f40b0f", size = 2053858 }, { url = "https://files.pythonhosted.org/packages/3a/11/01bacfef5078b39aef4576c1070b23d62f2dfbd88321317a0324d65e4712/oracledb-3.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b5df2724d262e3f459c42d03a3df58fcfb9c5e9a96a18739048ecd01aadc94e5", size = 4274085 }, { url = "https://files.pythonhosted.org/packages/6c/77/851fc8d18bdb80d22ed0fadf2133f53441eb7614eddae47ace78fc67e11f/oracledb-3.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d5087062e55cca4e7ee699e05f53b56a08386d16c7160637a087475a1132567a", size = 2660923 }, { url = "https://files.pythonhosted.org/packages/e2/fa/f9b2459e5143477268f950bae90910a65ef46cf62629305e7c3252a3f9a5/oracledb-3.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:b6c8bc126a876633ecfcff8e568b3c7711ffe80935eaa5e97d6aed189131d4ad", size = 2861218 }, { url = "https://files.pythonhosted.org/packages/9e/79/2bc37b5f5806f3dace56c3c0b07a457f1ae215c346da4cfba28c84660075/oracledb-3.0.0-cp39-cp39-win32.whl", hash = "sha256:2526ffd052fe2d916e04328d5f1db25d8fd0aea3f2a9f4c60bd578e3d0c76f93", size = 1752063 }, { url = "https://files.pythonhosted.org/packages/52/af/792828c3b01a0b9cf7e840bd6a10ceed360b4198056aae85383f85fadadf/oracledb-3.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:3b56e74a92f7e8961c5a10103cb97cbcdeac778230db9c2ec2546fe20e3871ca", size = 2094349 }, ] [[package]] name = "orjson" version = "3.10.15" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/ae/f9/5dea21763eeff8c1590076918a446ea3d6140743e0e36f58f369928ed0f4/orjson-3.10.15.tar.gz", hash = "sha256:05ca7fe452a2e9d8d9d706a2984c95b9c2ebc5db417ce0b7a49b91d50642a23e", size = 5282482 } wheels = [ { url = "https://files.pythonhosted.org/packages/52/09/e5ff18ad009e6f97eb7edc5f67ef98b3ce0c189da9c3eaca1f9587cd4c61/orjson-3.10.15-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:552c883d03ad185f720d0c09583ebde257e41b9521b74ff40e08b7dec4559c04", size = 249532 }, { url = "https://files.pythonhosted.org/packages/bd/b8/a75883301fe332bd433d9b0ded7d2bb706ccac679602c3516984f8814fb5/orjson-3.10.15-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:616e3e8d438d02e4854f70bfdc03a6bcdb697358dbaa6bcd19cbe24d24ece1f8", size = 125229 }, { url = "https://files.pythonhosted.org/packages/83/4b/22f053e7a364cc9c685be203b1e40fc5f2b3f164a9b2284547504eec682e/orjson-3.10.15-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7c2c79fa308e6edb0ffab0a31fd75a7841bf2a79a20ef08a3c6e3b26814c8ca8", size = 150148 }, { url = 
"https://files.pythonhosted.org/packages/63/64/1b54fc75ca328b57dd810541a4035fe48c12a161d466e3cf5b11a8c25649/orjson-3.10.15-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73cb85490aa6bf98abd20607ab5c8324c0acb48d6da7863a51be48505646c814", size = 139748 }, { url = "https://files.pythonhosted.org/packages/5e/ff/ff0c5da781807bb0a5acd789d9a7fbcb57f7b0c6e1916595da1f5ce69f3c/orjson-3.10.15-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:763dadac05e4e9d2bc14938a45a2d0560549561287d41c465d3c58aec818b164", size = 154559 }, { url = "https://files.pythonhosted.org/packages/4e/9a/11e2974383384ace8495810d4a2ebef5f55aacfc97b333b65e789c9d362d/orjson-3.10.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a330b9b4734f09a623f74a7490db713695e13b67c959713b78369f26b3dee6bf", size = 130349 }, { url = "https://files.pythonhosted.org/packages/2d/c4/dd9583aea6aefee1b64d3aed13f51d2aadb014028bc929fe52936ec5091f/orjson-3.10.15-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a61a4622b7ff861f019974f73d8165be1bd9a0855e1cad18ee167acacabeb061", size = 138514 }, { url = "https://files.pythonhosted.org/packages/53/3e/dcf1729230654f5c5594fc752de1f43dcf67e055ac0d300c8cdb1309269a/orjson-3.10.15-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:acd271247691574416b3228db667b84775c497b245fa275c6ab90dc1ffbbd2b3", size = 130940 }, { url = "https://files.pythonhosted.org/packages/e8/2b/b9759fe704789937705c8a56a03f6c03e50dff7df87d65cba9a20fec5282/orjson-3.10.15-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:e4759b109c37f635aa5c5cc93a1b26927bfde24b254bcc0e1149a9fada253d2d", size = 414713 }, { url = "https://files.pythonhosted.org/packages/a7/6b/b9dfdbd4b6e20a59238319eb203ae07c3f6abf07eef909169b7a37ae3bba/orjson-3.10.15-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:9e992fd5cfb8b9f00bfad2fd7a05a4299db2bbe92e6440d9dd2fab27655b3182", size = 141028 }, { url = 
"https://files.pythonhosted.org/packages/7c/b5/40f5bbea619c7caf75eb4d652a9821875a8ed04acc45fe3d3ef054ca69fb/orjson-3.10.15-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f95fb363d79366af56c3f26b71df40b9a583b07bbaaf5b317407c4d58497852e", size = 129715 }, { url = "https://files.pythonhosted.org/packages/38/60/2272514061cbdf4d672edbca6e59c7e01cd1c706e881427d88f3c3e79761/orjson-3.10.15-cp310-cp310-win32.whl", hash = "sha256:f9875f5fea7492da8ec2444839dcc439b0ef298978f311103d0b7dfd775898ab", size = 142473 }, { url = "https://files.pythonhosted.org/packages/11/5d/be1490ff7eafe7fef890eb4527cf5bcd8cfd6117f3efe42a3249ec847b60/orjson-3.10.15-cp310-cp310-win_amd64.whl", hash = "sha256:17085a6aa91e1cd70ca8533989a18b5433e15d29c574582f76f821737c8d5806", size = 133564 }, { url = "https://files.pythonhosted.org/packages/7a/a2/21b25ce4a2c71dbb90948ee81bd7a42b4fbfc63162e57faf83157d5540ae/orjson-3.10.15-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:c4cc83960ab79a4031f3119cc4b1a1c627a3dc09df125b27c4201dff2af7eaa6", size = 249533 }, { url = "https://files.pythonhosted.org/packages/b2/85/2076fc12d8225698a51278009726750c9c65c846eda741e77e1761cfef33/orjson-3.10.15-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ddbeef2481d895ab8be5185f2432c334d6dec1f5d1933a9c83014d188e102cef", size = 125230 }, { url = "https://files.pythonhosted.org/packages/06/df/a85a7955f11274191eccf559e8481b2be74a7c6d43075d0a9506aa80284d/orjson-3.10.15-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9e590a0477b23ecd5b0ac865b1b907b01b3c5535f5e8a8f6ab0e503efb896334", size = 150148 }, { url = "https://files.pythonhosted.org/packages/37/b3/94c55625a29b8767c0eed194cb000b3787e3c23b4cdd13be17bae6ccbb4b/orjson-3.10.15-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a6be38bd103d2fd9bdfa31c2720b23b5d47c6796bcb1d1b598e3924441b4298d", size = 139749 }, { url = 
"https://files.pythonhosted.org/packages/53/ba/c608b1e719971e8ddac2379f290404c2e914cf8e976369bae3cad88768b1/orjson-3.10.15-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ff4f6edb1578960ed628a3b998fa54d78d9bb3e2eb2cfc5c2a09732431c678d0", size = 154558 }, { url = "https://files.pythonhosted.org/packages/b2/c4/c1fb835bb23ad788a39aa9ebb8821d51b1c03588d9a9e4ca7de5b354fdd5/orjson-3.10.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b0482b21d0462eddd67e7fce10b89e0b6ac56570424662b685a0d6fccf581e13", size = 130349 }, { url = "https://files.pythonhosted.org/packages/78/14/bb2b48b26ab3c570b284eb2157d98c1ef331a8397f6c8bd983b270467f5c/orjson-3.10.15-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bb5cc3527036ae3d98b65e37b7986a918955f85332c1ee07f9d3f82f3a6899b5", size = 138513 }, { url = "https://files.pythonhosted.org/packages/4a/97/d5b353a5fe532e92c46467aa37e637f81af8468aa894cd77d2ec8a12f99e/orjson-3.10.15-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d569c1c462912acdd119ccbf719cf7102ea2c67dd03b99edcb1a3048651ac96b", size = 130942 }, { url = "https://files.pythonhosted.org/packages/b5/5d/a067bec55293cca48fea8b9928cfa84c623be0cce8141d47690e64a6ca12/orjson-3.10.15-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:1e6d33efab6b71d67f22bf2962895d3dc6f82a6273a965fab762e64fa90dc399", size = 414717 }, { url = "https://files.pythonhosted.org/packages/6f/9a/1485b8b05c6b4c4db172c438cf5db5dcfd10e72a9bc23c151a1137e763e0/orjson-3.10.15-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:c33be3795e299f565681d69852ac8c1bc5c84863c0b0030b2b3468843be90388", size = 141033 }, { url = "https://files.pythonhosted.org/packages/f8/d2/fc67523656e43a0c7eaeae9007c8b02e86076b15d591e9be11554d3d3138/orjson-3.10.15-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:eea80037b9fae5339b214f59308ef0589fc06dc870578b7cce6d71eb2096764c", size = 129720 }, { url = 
"https://files.pythonhosted.org/packages/79/42/f58c7bd4e5b54da2ce2ef0331a39ccbbaa7699b7f70206fbf06737c9ed7d/orjson-3.10.15-cp311-cp311-win32.whl", hash = "sha256:d5ac11b659fd798228a7adba3e37c010e0152b78b1982897020a8e019a94882e", size = 142473 }, { url = "https://files.pythonhosted.org/packages/00/f8/bb60a4644287a544ec81df1699d5b965776bc9848d9029d9f9b3402ac8bb/orjson-3.10.15-cp311-cp311-win_amd64.whl", hash = "sha256:cf45e0214c593660339ef63e875f32ddd5aa3b4adc15e662cdb80dc49e194f8e", size = 133570 }, { url = "https://files.pythonhosted.org/packages/66/85/22fe737188905a71afcc4bf7cc4c79cd7f5bbe9ed1fe0aac4ce4c33edc30/orjson-3.10.15-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:9d11c0714fc85bfcf36ada1179400862da3288fc785c30e8297844c867d7505a", size = 249504 }, { url = "https://files.pythonhosted.org/packages/48/b7/2622b29f3afebe938a0a9037e184660379797d5fd5234e5998345d7a5b43/orjson-3.10.15-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dba5a1e85d554e3897fa9fe6fbcff2ed32d55008973ec9a2b992bd9a65d2352d", size = 125080 }, { url = "https://files.pythonhosted.org/packages/ce/8f/0b72a48f4403d0b88b2a41450c535b3e8989e8a2d7800659a967efc7c115/orjson-3.10.15-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7723ad949a0ea502df656948ddd8b392780a5beaa4c3b5f97e525191b102fff0", size = 150121 }, { url = "https://files.pythonhosted.org/packages/06/ec/acb1a20cd49edb2000be5a0404cd43e3c8aad219f376ac8c60b870518c03/orjson-3.10.15-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6fd9bc64421e9fe9bd88039e7ce8e58d4fead67ca88e3a4014b143cec7684fd4", size = 139796 }, { url = "https://files.pythonhosted.org/packages/33/e1/f7840a2ea852114b23a52a1c0b2bea0a1ea22236efbcdb876402d799c423/orjson-3.10.15-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dadba0e7b6594216c214ef7894c4bd5f08d7c0135f4dd0145600be4fbcc16767", size = 154636 }, { url = 
"https://files.pythonhosted.org/packages/fa/da/31543337febd043b8fa80a3b67de627669b88c7b128d9ad4cc2ece005b7a/orjson-3.10.15-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b48f59114fe318f33bbaee8ebeda696d8ccc94c9e90bc27dbe72153094e26f41", size = 130621 }, { url = "https://files.pythonhosted.org/packages/ed/78/66115dc9afbc22496530d2139f2f4455698be444c7c2475cb48f657cefc9/orjson-3.10.15-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:035fb83585e0f15e076759b6fedaf0abb460d1765b6a36f48018a52858443514", size = 138516 }, { url = "https://files.pythonhosted.org/packages/22/84/cd4f5fb5427ffcf823140957a47503076184cb1ce15bcc1165125c26c46c/orjson-3.10.15-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d13b7fe322d75bf84464b075eafd8e7dd9eae05649aa2a5354cfa32f43c59f17", size = 130762 }, { url = "https://files.pythonhosted.org/packages/93/1f/67596b711ba9f56dd75d73b60089c5c92057f1130bb3a25a0f53fb9a583b/orjson-3.10.15-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:7066b74f9f259849629e0d04db6609db4cf5b973248f455ba5d3bd58a4daaa5b", size = 414700 }, { url = "https://files.pythonhosted.org/packages/7c/0c/6a3b3271b46443d90efb713c3e4fe83fa8cd71cda0d11a0f69a03f437c6e/orjson-3.10.15-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:88dc3f65a026bd3175eb157fea994fca6ac7c4c8579fc5a86fc2114ad05705b7", size = 141077 }, { url = "https://files.pythonhosted.org/packages/3b/9b/33c58e0bfc788995eccd0d525ecd6b84b40d7ed182dd0751cd4c1322ac62/orjson-3.10.15-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b342567e5465bd99faa559507fe45e33fc76b9fb868a63f1642c6bc0735ad02a", size = 129898 }, { url = "https://files.pythonhosted.org/packages/01/c1/d577ecd2e9fa393366a1ea0a9267f6510d86e6c4bb1cdfb9877104cac44c/orjson-3.10.15-cp312-cp312-win32.whl", hash = "sha256:0a4f27ea5617828e6b58922fdbec67b0aa4bb844e2d363b9244c47fa2180e665", size = 142566 }, { url = 
"https://files.pythonhosted.org/packages/ed/eb/a85317ee1732d1034b92d56f89f1de4d7bf7904f5c8fb9dcdd5b1c83917f/orjson-3.10.15-cp312-cp312-win_amd64.whl", hash = "sha256:ef5b87e7aa9545ddadd2309efe6824bd3dd64ac101c15dae0f2f597911d46eaa", size = 133732 }, { url = "https://files.pythonhosted.org/packages/06/10/fe7d60b8da538e8d3d3721f08c1b7bff0491e8fa4dd3bf11a17e34f4730e/orjson-3.10.15-cp313-cp313-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:bae0e6ec2b7ba6895198cd981b7cca95d1487d0147c8ed751e5632ad16f031a6", size = 249399 }, { url = "https://files.pythonhosted.org/packages/6b/83/52c356fd3a61abd829ae7e4366a6fe8e8863c825a60d7ac5156067516edf/orjson-3.10.15-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f93ce145b2db1252dd86af37d4165b6faa83072b46e3995ecc95d4b2301b725a", size = 125044 }, { url = "https://files.pythonhosted.org/packages/55/b2/d06d5901408e7ded1a74c7c20d70e3a127057a6d21355f50c90c0f337913/orjson-3.10.15-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7c203f6f969210128af3acae0ef9ea6aab9782939f45f6fe02d05958fe761ef9", size = 150066 }, { url = "https://files.pythonhosted.org/packages/75/8c/60c3106e08dc593a861755781c7c675a566445cc39558677d505878d879f/orjson-3.10.15-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8918719572d662e18b8af66aef699d8c21072e54b6c82a3f8f6404c1f5ccd5e0", size = 139737 }, { url = "https://files.pythonhosted.org/packages/6a/8c/ae00d7d0ab8a4490b1efeb01ad4ab2f1982e69cc82490bf8093407718ff5/orjson-3.10.15-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f71eae9651465dff70aa80db92586ad5b92df46a9373ee55252109bb6b703307", size = 154804 }, { url = "https://files.pythonhosted.org/packages/22/86/65dc69bd88b6dd254535310e97bc518aa50a39ef9c5a2a5d518e7a223710/orjson-3.10.15-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e117eb299a35f2634e25ed120c37c641398826c2f5a3d3cc39f5993b96171b9e", 
size = 130583 }, { url = "https://files.pythonhosted.org/packages/bb/00/6fe01ededb05d52be42fabb13d93a36e51f1fd9be173bd95707d11a8a860/orjson-3.10.15-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:13242f12d295e83c2955756a574ddd6741c81e5b99f2bef8ed8d53e47a01e4b7", size = 138465 }, { url = "https://files.pythonhosted.org/packages/db/2f/4cc151c4b471b0cdc8cb29d3eadbce5007eb0475d26fa26ed123dca93b33/orjson-3.10.15-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7946922ada8f3e0b7b958cc3eb22cfcf6c0df83d1fe5521b4a100103e3fa84c8", size = 130742 }, { url = "https://files.pythonhosted.org/packages/9f/13/8a6109e4b477c518498ca37963d9c0eb1508b259725553fb53d53b20e2ea/orjson-3.10.15-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:b7155eb1623347f0f22c38c9abdd738b287e39b9982e1da227503387b81b34ca", size = 414669 }, { url = "https://files.pythonhosted.org/packages/22/7b/1d229d6d24644ed4d0a803de1b0e2df832032d5beda7346831c78191b5b2/orjson-3.10.15-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:208beedfa807c922da4e81061dafa9c8489c6328934ca2a562efa707e049e561", size = 141043 }, { url = "https://files.pythonhosted.org/packages/cc/d3/6dc91156cf12ed86bed383bcb942d84d23304a1e57b7ab030bf60ea130d6/orjson-3.10.15-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eca81f83b1b8c07449e1d6ff7074e82e3fd6777e588f1a6632127f286a968825", size = 129826 }, { url = "https://files.pythonhosted.org/packages/b3/38/c47c25b86f6996f1343be721b6ea4367bc1c8bc0fc3f6bbcd995d18cb19d/orjson-3.10.15-cp313-cp313-win32.whl", hash = "sha256:c03cd6eea1bd3b949d0d007c8d57049aa2b39bd49f58b4b2af571a5d3833d890", size = 142542 }, { url = "https://files.pythonhosted.org/packages/27/f1/1d7ec15b20f8ce9300bc850de1e059132b88990e46cd0ccac29cbf11e4f9/orjson-3.10.15-cp313-cp313-win_amd64.whl", hash = "sha256:fd56a26a04f6ba5fb2045b0acc487a63162a958ed837648c5781e1fe3316cfbf", size = 133444 }, { url = 
"https://files.pythonhosted.org/packages/56/39/b2123d8d98a62ee89626dc7ecb782d9b60a5edb0b5721bc894ee3470df5a/orjson-3.10.15-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:ffe19f3e8d68111e8644d4f4e267a069ca427926855582ff01fc012496d19969", size = 250031 }, { url = "https://files.pythonhosted.org/packages/65/4d/a058dc6476713cbd5647e5fd0be8d40c27e9ed77d37a788b594c424caa0e/orjson-3.10.15-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d433bf32a363823863a96561a555227c18a522a8217a6f9400f00ddc70139ae2", size = 125021 }, { url = "https://files.pythonhosted.org/packages/3d/cb/4d1450bb2c3276f8bf9524df6b01af4d01f55e9a9772555cf119275eb1d0/orjson-3.10.15-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:da03392674f59a95d03fa5fb9fe3a160b0511ad84b7a3914699ea5a1b3a38da2", size = 149957 }, { url = "https://files.pythonhosted.org/packages/93/7b/d1fae6d4393a9fa8f5d3fb173f0a9c778135569c50e5390811b74c45b4b3/orjson-3.10.15-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3a63bb41559b05360ded9132032239e47983a39b151af1201f07ec9370715c82", size = 139515 }, { url = "https://files.pythonhosted.org/packages/7f/b2/e0c0b8197c709983093700f9a59aa64478d80edc55fe620bceadb92004e3/orjson-3.10.15-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3766ac4702f8f795ff3fa067968e806b4344af257011858cc3d6d8721588b53f", size = 154314 }, { url = "https://files.pythonhosted.org/packages/db/94/eeb94ca3aa7564f753fe352101bcfc8179febaa1888f55ba3cad25b05f71/orjson-3.10.15-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a1c73dcc8fadbd7c55802d9aa093b36878d34a3b3222c41052ce6b0fc65f8e8", size = 130145 }, { url = "https://files.pythonhosted.org/packages/ca/10/54c0118a38eaa5ae832c27306834bdc13954bd0a443b80da63faebf17ffe/orjson-3.10.15-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:b299383825eafe642cbab34be762ccff9fd3408d72726a6b2a4506d410a71ab3", size = 138344 }, { url = "https://files.pythonhosted.org/packages/78/87/3c15eeb315171aa27f96bcca87ed54ee292b72d755973a66e3a6800e8ae9/orjson-3.10.15-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:abc7abecdbf67a173ef1316036ebbf54ce400ef2300b4e26a7b843bd446c2480", size = 130730 }, { url = "https://files.pythonhosted.org/packages/8a/dc/522430fb24445b9cc8301a5954f80ce8ee244c5159ba913578acc36b078f/orjson-3.10.15-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:3614ea508d522a621384c1d6639016a5a2e4f027f3e4a1c93a51867615d28829", size = 414482 }, { url = "https://files.pythonhosted.org/packages/c8/01/83b2e80b9c96ca9753d06e01d325037b2f3e404b14c7a8e875b2f2b7c171/orjson-3.10.15-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:295c70f9dc154307777ba30fe29ff15c1bcc9dfc5c48632f37d20a607e9ba85a", size = 140792 }, { url = "https://files.pythonhosted.org/packages/96/40/f211084b0e0267b6b515f05967048d8957839d80ff534bde0dc7f9df9ae0/orjson-3.10.15-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:63309e3ff924c62404923c80b9e2048c1f74ba4b615e7584584389ada50ed428", size = 129536 }, { url = "https://files.pythonhosted.org/packages/b2/8c/014d96f5c6446adcd2403fe2d4007ff582f8867f5028b0cd994f0174d61c/orjson-3.10.15-cp39-cp39-win32.whl", hash = "sha256:a2f708c62d026fb5340788ba94a55c23df4e1869fec74be455e0b2f5363b8507", size = 142302 }, { url = "https://files.pythonhosted.org/packages/47/bd/81da73ef8e66434c51a4ea7db45e3a0b62bff2c3e7ebc723aa4eeead2feb/orjson-3.10.15-cp39-cp39-win_amd64.whl", hash = "sha256:efcf6c735c3d22ef60c4aa27a5238f1a477df85e9b15f2142f9d669beb2d13fd", size = 133401 }, ] [[package]] name = "packaging" version = "24.2" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/d0/63/68dbb6eb2de9cb10ee4c9c14a0148804425e13c4fb20d61cce69f53106da/packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f", 
size = 163950 } wheels = [ { url = "https://files.pythonhosted.org/packages/88/ef/eb23f262cca3c0c4eb7ab1933c3b1f03d021f2c48f54763065b6f0e321be/packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759", size = 65451 }, ] [[package]] name = "platformdirs" version = "4.3.7" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/b6/2d/7d512a3913d60623e7eb945c6d1b4f0bddf1d0b7ada5225274c87e5b53d1/platformdirs-4.3.7.tar.gz", hash = "sha256:eb437d586b6a0986388f0d6f74aa0cde27b48d0e3d66843640bfb6bdcdb6e351", size = 21291 } wheels = [ { url = "https://files.pythonhosted.org/packages/6d/45/59578566b3275b8fd9157885918fcd0c4d74162928a5310926887b856a51/platformdirs-4.3.7-py3-none-any.whl", hash = "sha256:a03875334331946f13c549dbd8f4bac7a13a50a895a0eb1e8c6a8ace80d40a94", size = 18499 }, ] [[package]] name = "pluggy" version = "1.5.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/96/2d/02d4312c973c6050a18b314a5ad0b3210edb65a906f868e31c111dede4a6/pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1", size = 67955 } wheels = [ { url = "https://files.pythonhosted.org/packages/88/5f/e351af9a41f866ac3f1fac4ca0613908d9a41741cfcf2228f4ad853b697d/pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669", size = 20556 }, ] [[package]] name = "polyfactory" version = "2.19.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "faker" }, { name = "typing-extensions" }, ] sdist = { url = "https://files.pythonhosted.org/packages/77/e8/81f5bd98329c8ca9f5a6ecbddfc9e8f4b4b512ffbd4e81f5886694e20887/polyfactory-2.19.0.tar.gz", hash = "sha256:6d4273fb1f23e1fccc7aa7c64e28ddc3c20105cc499df32ebc478465daa7fa72", size = 241199 } wheels = [ { url = 
"https://files.pythonhosted.org/packages/7c/5e/d3512a46d54b94f007583118d7e084631e034131aa4199b8d4314784ae8b/polyfactory-2.19.0-py3-none-any.whl", hash = "sha256:0137f5eaf1bc31c62c16ccbab9467e96a7352748ca426ef363bd081c149a3e3f", size = 59770 }, ] [[package]] name = "pre-commit" version = "4.2.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cfgv" }, { name = "identify" }, { name = "nodeenv" }, { name = "pyyaml" }, { name = "virtualenv" }, ] sdist = { url = "https://files.pythonhosted.org/packages/08/39/679ca9b26c7bb2999ff122d50faa301e49af82ca9c066ec061cfbc0c6784/pre_commit-4.2.0.tar.gz", hash = "sha256:601283b9757afd87d40c4c4a9b2b5de9637a8ea02eaff7adc2d0fb4e04841146", size = 193424 } wheels = [ { url = "https://files.pythonhosted.org/packages/88/74/a88bf1b1efeae488a0c0b7bdf71429c313722d1fc0f377537fbe554e6180/pre_commit-4.2.0-py2.py3-none-any.whl", hash = "sha256:a009ca7205f1eb497d10b845e52c838a98b6cdd2102a6c8e4540e94ee75c58bd", size = 220707 }, ] [[package]] name = "prompt-toolkit" version = "3.0.50" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "wcwidth" }, ] sdist = { url = "https://files.pythonhosted.org/packages/a1/e1/bd15cb8ffdcfeeb2bdc215de3c3cffca11408d829e4b8416dcfe71ba8854/prompt_toolkit-3.0.50.tar.gz", hash = "sha256:544748f3860a2623ca5cd6d2795e7a14f3d0e1c3c9728359013f79877fc89bab", size = 429087 } wheels = [ { url = "https://files.pythonhosted.org/packages/e4/ea/d836f008d33151c7a1f62caf3d8dd782e4d15f6a43897f64480c2b8de2ad/prompt_toolkit-3.0.50-py3-none-any.whl", hash = "sha256:9b6427eb19e479d98acff65196a307c555eb567989e6d88ebbb1b509d9779198", size = 387816 }, ] [[package]] name = "proto-plus" version = "1.26.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "protobuf" }, ] sdist = { url = "https://files.pythonhosted.org/packages/f4/ac/87285f15f7cce6d4a008f33f1757fb5a13611ea8914eb58c3d0d26243468/proto_plus-1.26.1.tar.gz", hash = 
"sha256:21a515a4c4c0088a773899e23c7bbade3d18f9c66c73edd4c7ee3816bc96a012", size = 56142 } wheels = [ { url = "https://files.pythonhosted.org/packages/4e/6d/280c4c2ce28b1593a19ad5239c8b826871fc6ec275c21afc8e1820108039/proto_plus-1.26.1-py3-none-any.whl", hash = "sha256:13285478c2dcf2abb829db158e1047e2f1e8d63a077d94263c2b88b043c75a66", size = 50163 }, ] [[package]] name = "protobuf" version = "5.29.4" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/17/7d/b9dca7365f0e2c4fa7c193ff795427cfa6290147e5185ab11ece280a18e7/protobuf-5.29.4.tar.gz", hash = "sha256:4f1dfcd7997b31ef8f53ec82781ff434a28bf71d9102ddde14d076adcfc78c99", size = 424902 } wheels = [ { url = "https://files.pythonhosted.org/packages/9a/b2/043a1a1a20edd134563699b0e91862726a0dc9146c090743b6c44d798e75/protobuf-5.29.4-cp310-abi3-win32.whl", hash = "sha256:13eb236f8eb9ec34e63fc8b1d6efd2777d062fa6aaa68268fb67cf77f6839ad7", size = 422709 }, { url = "https://files.pythonhosted.org/packages/79/fc/2474b59570daa818de6124c0a15741ee3e5d6302e9d6ce0bdfd12e98119f/protobuf-5.29.4-cp310-abi3-win_amd64.whl", hash = "sha256:bcefcdf3976233f8a502d265eb65ea740c989bacc6c30a58290ed0e519eb4b8d", size = 434506 }, { url = "https://files.pythonhosted.org/packages/46/de/7c126bbb06aa0f8a7b38aaf8bd746c514d70e6a2a3f6dd460b3b7aad7aae/protobuf-5.29.4-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:307ecba1d852ec237e9ba668e087326a67564ef83e45a0189a772ede9e854dd0", size = 417826 }, { url = "https://files.pythonhosted.org/packages/a2/b5/bade14ae31ba871a139aa45e7a8183d869efe87c34a4850c87b936963261/protobuf-5.29.4-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:aec4962f9ea93c431d5714ed1be1c93f13e1a8618e70035ba2b0564d9e633f2e", size = 319574 }, { url = "https://files.pythonhosted.org/packages/46/88/b01ed2291aae68b708f7d334288ad5fb3e7aa769a9c309c91a0d55cb91b0/protobuf-5.29.4-cp38-abi3-manylinux2014_x86_64.whl", hash = 
"sha256:d7d3f7d1d5a66ed4942d4fefb12ac4b14a29028b209d4bfb25c68ae172059922", size = 319672 }, { url = "https://files.pythonhosted.org/packages/8a/b8/c3847343ebd9c7ae0b762de1e173b110689fd334ac8dcf1697ffd9316861/protobuf-5.29.4-cp39-cp39-win32.whl", hash = "sha256:fd32223020cb25a2cc100366f1dedc904e2d71d9322403224cdde5fdced0dabe", size = 422675 }, { url = "https://files.pythonhosted.org/packages/f0/74/e23e1ab05b27ce0b55f70be90df82076a5c18924d98679110459c52bacd9/protobuf-5.29.4-cp39-cp39-win_amd64.whl", hash = "sha256:678974e1e3a9b975b8bc2447fca458db5f93a2fb6b0c8db46b6675b5b5346812", size = 434594 }, { url = "https://files.pythonhosted.org/packages/12/fb/a586e0c973c95502e054ac5f81f88394f24ccc7982dac19c515acd9e2c93/protobuf-5.29.4-py3-none-any.whl", hash = "sha256:3fde11b505e1597f71b875ef2fc52062b6a9740e5f7c8997ce878b6009145862", size = 172551 }, ] [[package]] name = "psycopg" version = "3.2.6" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions", marker = "python_full_version < '3.13'" }, { name = "tzdata", marker = "sys_platform == 'win32'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/67/97/eea08f74f1c6dd2a02ee81b4ebfe5b558beb468ebbd11031adbf58d31be0/psycopg-3.2.6.tar.gz", hash = "sha256:16fa094efa2698f260f2af74f3710f781e4a6f226efe9d1fd0c37f384639ed8a", size = 156322 } wheels = [ { url = "https://files.pythonhosted.org/packages/d7/7d/0ba52deff71f65df8ec8038adad86ba09368c945424a9bd8145d679a2c6a/psycopg-3.2.6-py3-none-any.whl", hash = "sha256:f3ff5488525890abb0566c429146add66b329e20d6d4835662b920cbbf90ac58", size = 199077 }, ] [package.optional-dependencies] binary = [ { name = "psycopg-binary", marker = "implementation_name != 'pypy'" }, ] pool = [ { name = "psycopg-pool" }, ] [[package]] name = "psycopg-binary" version = "3.2.6" source = { registry = "https://pypi.org/simple" } wheels = [ { url = 
"https://files.pythonhosted.org/packages/4b/7b/48afdcb14bf828c4006f573845fbbd98df701bff9043fbb0b8caab261b6f/psycopg_binary-3.2.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1b639acb3e24243c23f75700bf6e3af7b76da92523ec7c3196a13aaf0b578453", size = 3868985 }, { url = "https://files.pythonhosted.org/packages/de/45/9e777c61ef3ac5e7fb42618afbd9f41464c1c396ec85c79c48086ace437a/psycopg_binary-3.2.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1b5c359173726b38d7acbb9f73270f269591d8031d099c1a70dd3f3d22b0e8a8", size = 3938244 }, { url = "https://files.pythonhosted.org/packages/d6/93/e48962aca19af1f3d2cb0c2ff890ca305c51d1759a2e89c90a527228cf1d/psycopg_binary-3.2.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3434efe7c00f505f4c1e531519dac6c701df738ba7a1328eac81118d80019132", size = 4523096 }, { url = "https://files.pythonhosted.org/packages/fe/52/21f4a9bb7123e07e06a712338eb6cc5794a23a56813deb4a8cd3de8ec91c/psycopg_binary-3.2.6-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bca8d9643191b13193940bbf84d51ac5a747e965c230177258fb02b8043fb7a", size = 4329659 }, { url = "https://files.pythonhosted.org/packages/9e/72/8da1c98b4e0d4c3649f037101b70ae52e4f821597919dabc72c889e60ca9/psycopg_binary-3.2.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:55fa40f11d37e6e5149a282a5fd7e0734ce55c623673bfba638480914fd1414c", size = 4575359 }, { url = "https://files.pythonhosted.org/packages/83/5a/a85c98a5b2b3f771d7478ac0081b48749d4c07ce41d51f89f592f87cfbeb/psycopg_binary-3.2.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0690ac1061c655b1bcbe9284d07bf5276bc9c0d788a6c74aaf3b042e64984b83", size = 4287138 }, { url = "https://files.pythonhosted.org/packages/b0/c3/0abafd3f300e5ff952dd9b3be95b4e2527ae1e2ea7fd7a7421e6bc1c0e37/psycopg_binary-3.2.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e9a4a9967ff650d2821d5fad6bec7b15f4c2072603e9fa3f89a39f351ade1fd3", size = 
3872142 }, { url = "https://files.pythonhosted.org/packages/0f/16/029aa400c4b7f4b7042307d8a854768463a65326d061ad2145f7b3989ca5/psycopg_binary-3.2.6-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:d6f2894cc7aee8a15fe591e8536911d9c015cb404432cf7bdac2797e54cb2ba8", size = 3340033 }, { url = "https://files.pythonhosted.org/packages/cd/a1/28e86b832d696ba5fd79c4d704b8ca46b827428f7ea063063ca280a678a4/psycopg_binary-3.2.6-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:05560c81312d7c2bee95a9860cd25198677f2320fb4a3527bc04e8cae7fcfb64", size = 3438823 }, { url = "https://files.pythonhosted.org/packages/93/31/73546c999725b397bb7e7fd55f83a9c78787c6fe7fe457e4179d19a115dc/psycopg_binary-3.2.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:4269cd23a485d6dd6eb6b10841c94551a53091cf0b1b6d5247a6a341f53f0d95", size = 3464031 }, { url = "https://files.pythonhosted.org/packages/85/38/957bd4bdde976c9a38d61896bf9d2c8f5752b98d8f4d879a7902588a8583/psycopg_binary-3.2.6-cp310-cp310-win_amd64.whl", hash = "sha256:7942f35a6f314608720116bcd9de240110ceadffd2ac5c34f68f74a31e52e46a", size = 2792159 }, { url = "https://files.pythonhosted.org/packages/5a/71/5bfa1ffc4d59f0454b114ce0d017eca269b079ca2753a96302c2117067c7/psycopg_binary-3.2.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7afe181f6b3eb714362e9b6a2dc2a589bff60471a1d8639fd231a4e426e01523", size = 3876608 }, { url = "https://files.pythonhosted.org/packages/7e/07/1724d842b876af7bef442f0853d6cbf951264229414e4d0a57b8e3787847/psycopg_binary-3.2.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:34bb0fceba0773dc0bfb53224bb2c0b19dc97ea0a997a223615484cf02cae55c", size = 3942785 }, { url = "https://files.pythonhosted.org/packages/09/51/a251a356f10c7947bcc2285ebf1541e1c2d851b8db20eb8f29ed3a5974bf/psycopg_binary-3.2.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54120122d2779dcd307f49e1f921d757fe5dacdced27deab37f277eef0c52a5b", size = 4519448 }, { url = 
"https://files.pythonhosted.org/packages/6e/cf/0c92ab1270664a1341e52f5794ecc636b1f4ac67bf1743075091795151f8/psycopg_binary-3.2.6-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:816aa556f63b2303e66ba6c8888a8b3f3e6e4e47049ec7a4d62c84ac60b091ca", size = 4324382 }, { url = "https://files.pythonhosted.org/packages/bf/2b/6921bd4a57fe19d4618798a8a8648e1a516c92563c37b2073639fffac5d5/psycopg_binary-3.2.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d19a0ba351eda9a59babf8c7c9d89c7bbc5b26bf096bc349b096bd0dd2482088", size = 4578720 }, { url = "https://files.pythonhosted.org/packages/5c/30/1034d164e2be09f650a86eccc93625e51568e307c855bf6f94759c298303/psycopg_binary-3.2.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d6e197e01290ef818a092c877025fc28096adbb6d0743e313491a21aab31bd96", size = 4281871 }, { url = "https://files.pythonhosted.org/packages/c4/d0/67fdf0174c334a9a85a9672590d7da83e85d9cedfc35f473a557e310a1ca/psycopg_binary-3.2.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:274794b4b29ef426e09086404446b61a146f5e756da71366c5a6d57abec31f7d", size = 3870582 }, { url = "https://files.pythonhosted.org/packages/9f/4e/3a4fd2d1fd715b11e7287023edde916e1174b58b37873c531f782a49803b/psycopg_binary-3.2.6-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:69845bdc0db519e1dfc27932cd3d5b1ecb3f72950af52a1987508ab0b52b3b55", size = 3334464 }, { url = "https://files.pythonhosted.org/packages/4a/22/90a8032276fa5b215ce26cefb44abafa8fb09de396c6dc6f62a5e53fe2ad/psycopg_binary-3.2.6-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:66c3bed2caf0d1cabcb9365064de183b5209a7cbeaa131e79e68f350c9c963c2", size = 3431945 }, { url = "https://files.pythonhosted.org/packages/e7/b0/e547e9a851ab19c79869c1d84a533e225d284e70c222720fed4529fcda60/psycopg_binary-3.2.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e3ae3201fe85c7f901349a2cf52f02ceca4cb97a5e2e2ac8b8a1c9a6eb747bed", size = 3463278 }, { url = 
"https://files.pythonhosted.org/packages/e7/ce/e555bd8dd6fce8b34bbc3856125600f0842c85a8364702ebe0dc39372087/psycopg_binary-3.2.6-cp311-cp311-win_amd64.whl", hash = "sha256:58f443b4df2adb59937c96775fadf4967f93d952fbcc82394446985faec11041", size = 2795094 }, { url = "https://files.pythonhosted.org/packages/a3/c7/220b1273f0befb2cd9fe83d379b3484ae029a88798a90bc0d36f10bea5df/psycopg_binary-3.2.6-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f27a46ff0497e882e8c0286e8833c785b4d1a80f23e1bf606f4c90e5f9f3ce75", size = 3857986 }, { url = "https://files.pythonhosted.org/packages/8a/d8/30176532826cf87c608a6f79dd668bf9aff0cdf8eb80209eddf4c5aa7229/psycopg_binary-3.2.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b30ee4821ded7de48b8048b14952512588e7c5477b0a5965221e1798afba61a1", size = 3940060 }, { url = "https://files.pythonhosted.org/packages/54/7c/fa7cd1f057f33f7ae483d6bc5a03ec6eff111f8aa5c678d9aaef92705247/psycopg_binary-3.2.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e57edf3b1f5427f39660225b01f8e7b97f5cfab132092f014bf1638bc85d81d2", size = 4499082 }, { url = "https://files.pythonhosted.org/packages/b8/81/1606966f6146187c273993ea6f88f2151b26741df8f4e01349a625983be9/psycopg_binary-3.2.6-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6c5172ce3e4ae7a4fd450070210f801e2ce6bc0f11d1208d29268deb0cda34de", size = 4307509 }, { url = "https://files.pythonhosted.org/packages/69/ad/01c87aab17a4b89128b8036800d11ab296c7c2c623940cc7e6f2668f375a/psycopg_binary-3.2.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bcfab3804c43571a6615e559cdc4c4115785d258a4dd71a721be033f5f5f378d", size = 4547813 }, { url = "https://files.pythonhosted.org/packages/65/30/f93a193846ee738ffe5d2a4837e7ddeb7279707af81d088cee96cae853a0/psycopg_binary-3.2.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8fa1c920cce16f1205f37b20c685c58b9656b170b8b4c93629100d342d0d118e", size = 4259847 }, { 
url = "https://files.pythonhosted.org/packages/8e/73/65c4ae71be86675a62154407c92af4b917146f9ff3baaf0e4166c0734aeb/psycopg_binary-3.2.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2e118d818101c1608c6b5ba52a6c977614d8f05aa89467501172ba4d10588e11", size = 3846550 }, { url = "https://files.pythonhosted.org/packages/53/cc/a24626cac3f208c776bb22e15e9a5e483aa81145221e6427e50381f40811/psycopg_binary-3.2.6-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:763319a8bfeca77d31512da71f5a33459b9568a7621c481c3828c62f9c38f351", size = 3320269 }, { url = "https://files.pythonhosted.org/packages/55/e6/68c76fb9d6c53d5e4170a0c9216c7aa6c2903808f626d84d002b47a16931/psycopg_binary-3.2.6-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:2fbc05819560389dbece046966bc88e0f2ea77673497e274c4293b8b4c1d0703", size = 3399365 }, { url = "https://files.pythonhosted.org/packages/b4/2c/55b140f5a2c582dae42ef38502c45ef69c938274242a40bd04c143081029/psycopg_binary-3.2.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5a57f99bb953b4bd6f32d0a9844664e7f6ca5ead9ba40e96635be3cd30794813", size = 3438908 }, { url = "https://files.pythonhosted.org/packages/ae/f6/589c95cceccee2ab408b6b2e16f1ed6db4536fb24f2f5c9ce568cf43270c/psycopg_binary-3.2.6-cp312-cp312-win_amd64.whl", hash = "sha256:5de6809e19a465dcb9c269675bded46a135f2d600cd99f0735afbb21ddad2af4", size = 2782886 }, { url = "https://files.pythonhosted.org/packages/bf/32/3d06c478fd3070ac25a49c2e8ca46b6d76b0048fa9fa255b99ee32f32312/psycopg_binary-3.2.6-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:54af3fbf871baa2eb19df96fd7dc0cbd88e628a692063c3d1ab5cdd00aa04322", size = 3852672 }, { url = "https://files.pythonhosted.org/packages/34/97/e581030e279500ede3096adb510f0e6071874b97cfc047a9a87b7d71fc77/psycopg_binary-3.2.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ad5da1e4636776c21eaeacdec42f25fa4612631a12f25cd9ab34ddf2c346ffb9", size = 3936562 }, { url = 
"https://files.pythonhosted.org/packages/74/b6/6a8df4cb23c3d327403a83406c06c9140f311cb56c4e4d720ee7abf6fddc/psycopg_binary-3.2.6-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7956b9ea56f79cd86eddcfbfc65ae2af1e4fe7932fa400755005d903c709370", size = 4499167 }, { url = "https://files.pythonhosted.org/packages/e4/5b/950eafef61e5e0b8ddb5afc5b6b279756411aa4bf70a346a6f091ad679bb/psycopg_binary-3.2.6-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e2efb763188008cf2914820dcb9fb23c10fe2be0d2c97ef0fac7cec28e281d8", size = 4311651 }, { url = "https://files.pythonhosted.org/packages/72/b9/b366c49afc854c26b3053d4d35376046eea9aebdc48ded18ea249ea1f80c/psycopg_binary-3.2.6-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b3aab3451679f1e7932270e950259ed48c3b79390022d3f660491c0e65e4838", size = 4547852 }, { url = "https://files.pythonhosted.org/packages/ab/d4/0e047360e2ea387dc7171ca017ffcee5214a0762f74b9dd982035f2e52fb/psycopg_binary-3.2.6-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:849a370ac4e125f55f2ad37f928e588291a67ccf91fa33d0b1e042bb3ee1f986", size = 4261725 }, { url = "https://files.pythonhosted.org/packages/e3/ea/a1b969804250183900959ebe845d86be7fed2cbd9be58f64cd0fc24b2892/psycopg_binary-3.2.6-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:566d4ace928419d91f1eb3227fc9ef7b41cf0ad22e93dd2c3368d693cf144408", size = 3850073 }, { url = "https://files.pythonhosted.org/packages/e5/71/ec2907342f0675092b76aea74365b56f38d960c4c635984dcfe25d8178c8/psycopg_binary-3.2.6-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:f1981f13b10de2f11cfa2f99a8738b35b3f0a0f3075861446894a8d3042430c0", size = 3320323 }, { url = "https://files.pythonhosted.org/packages/d7/d7/0d2cb4b42f231e2efe8ea1799ce917973d47486212a2c4d33cd331e7ac28/psycopg_binary-3.2.6-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:36f598300b55b3c983ae8df06473ad27333d2fd9f3e2cfdb913b3a5aaa3a8bcf", size 
= 3402335 }, { url = "https://files.pythonhosted.org/packages/66/92/7050c372f78e53eba14695cec6c3a91b2d9ca56feaf0bfe95fe90facf730/psycopg_binary-3.2.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:0f4699fa5fe1fffb0d6b2d14b31fd8c29b7ea7375f89d5989f002aaf21728b21", size = 3440442 }, { url = "https://files.pythonhosted.org/packages/5f/4c/bebcaf754189283b2f3d457822a3d9b233d08ff50973d8f1e8d51f4d35ed/psycopg_binary-3.2.6-cp313-cp313-win_amd64.whl", hash = "sha256:afe697b8b0071f497c5d4c0f41df9e038391534f5614f7fb3a8c1ca32d66e860", size = 2783465 }, { url = "https://files.pythonhosted.org/packages/be/ac/5023320c46e0d233faff6cd2d902a60d6c09034f136e7cc0876463a97d50/psycopg_binary-3.2.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:260c43c329e668606388cee78ec0dab083a25c2c6e6f9cf74a130fd5a27b0f87", size = 3870121 }, { url = "https://files.pythonhosted.org/packages/8d/2d/63cdd41112c954582d6bb8905970abe5e5a69b53d6dd2a38fc7ec624a6b4/psycopg_binary-3.2.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9870e51fad4684dbdec057fa757d65e61cb2acb16236836e9360044c2a1ec880", size = 3939150 }, { url = "https://files.pythonhosted.org/packages/a8/c6/0a0fe3b5506d30d597c82b16b8c1e023393a10701109e917c4cb529ccc71/psycopg_binary-3.2.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:030e9c3082a931e972b029b3cef085784a3bf7f8e18367ae50d5b809aa6e1d87", size = 4525092 }, { url = "https://files.pythonhosted.org/packages/0d/66/66c3c05a0535c022b1b8af501aec05161084e6a4ab44edebd99ff82de976/psycopg_binary-3.2.6-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b60c9ed291fbd5e777c2c630dcfd10b7a87d68512b0757d5e7406d9c4895a82a", size = 4330350 }, { url = "https://files.pythonhosted.org/packages/90/f3/15ce9800b54c50ff01af1c39d0a727083bb3640067a54b58631ca6666b24/psycopg_binary-3.2.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e0f4a17a9c376c195e403b4826c18f325bd28f425231d36d1036258bf893e23", size = 4577917 }, { url = 
"https://files.pythonhosted.org/packages/a0/aa/ffe622452a43376193d3d1e63b479e0f90ce9732c91366c05a16ce49e513/psycopg_binary-3.2.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac46da609624b16d961f604b3cbc3233ef43211ef1456a188f8c427109c9c3e1", size = 4286328 }, { url = "https://files.pythonhosted.org/packages/bf/bf/53da912ba8ace3136318718112c5db85bf0ed4f079f92849536a78384875/psycopg_binary-3.2.6-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:e77949b8e7014b85cee0bf6e9e041bcae7719b2693ebf59236368fb0b2a08814", size = 3874534 }, { url = "https://files.pythonhosted.org/packages/1a/b5/e659e96ffc6846d3f90c4af785c84898e625f974c7f67255688addda6167/psycopg_binary-3.2.6-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:532322d9ef6e7d178a4f344970b017110633bcc3dc1c3403efcef55aad612517", size = 3339634 }, { url = "https://files.pythonhosted.org/packages/9b/d6/92858290a8d710a27caf1b92183f4fa2ef3b99c9b6959fc73fd9639b8412/psycopg_binary-3.2.6-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:880c5fd76dcb50bdcc8f87359e5a6c7eb416697cc9aa02854c91223bd999c045", size = 3437930 }, { url = "https://files.pythonhosted.org/packages/f7/a5/60b5a6519f58b2deca261d2e4566bce4beeeb30f6e7c3f72e4d8f204dcf6/psycopg_binary-3.2.6-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:3c0cddc7458b8416d77cd8829d0192466502f31d1fb853d58613cf13ac64f41c", size = 3464810 }, { url = "https://files.pythonhosted.org/packages/27/b6/e48cafe90e46b82b1393276ce6d92ddb7650c39aba8aa0256affbaa94c36/psycopg_binary-3.2.6-cp39-cp39-win_amd64.whl", hash = "sha256:ea158665676f42b19585dfe948071d3c5f28276f84a97522fb2e82c1d9194563", size = 2793902 }, ] [[package]] name = "psycopg-pool" version = "3.2.6" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] sdist = { url = "https://files.pythonhosted.org/packages/cf/13/1e7850bb2c69a63267c3dbf37387d3f71a00fd0e2fa55c5db14d64ba1af4/psycopg_pool-3.2.6.tar.gz", hash = 
"sha256:0f92a7817719517212fbfe2fd58b8c35c1850cdd2a80d36b581ba2085d9148e5", size = 29770 } wheels = [ { url = "https://files.pythonhosted.org/packages/47/fd/4feb52a55c1a4bd748f2acaed1903ab54a723c47f6d0242780f4d97104d4/psycopg_pool-3.2.6-py3-none-any.whl", hash = "sha256:5887318a9f6af906d041a0b1dc1c60f8f0dda8340c2572b74e10907b51ed5da7", size = 38252 }, ] [[package]] name = "psycopg2-binary" version = "2.9.10" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/cb/0e/bdc8274dc0585090b4e3432267d7be4dfbfd8971c0fa59167c711105a6bf/psycopg2-binary-2.9.10.tar.gz", hash = "sha256:4b3df0e6990aa98acda57d983942eff13d824135fe2250e6522edaa782a06de2", size = 385764 } wheels = [ { url = "https://files.pythonhosted.org/packages/7a/81/331257dbf2801cdb82105306042f7a1637cc752f65f2bb688188e0de5f0b/psycopg2_binary-2.9.10-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:0ea8e3d0ae83564f2fc554955d327fa081d065c8ca5cc6d2abb643e2c9c1200f", size = 3043397 }, { url = "https://files.pythonhosted.org/packages/e7/9a/7f4f2f031010bbfe6a02b4a15c01e12eb6b9b7b358ab33229f28baadbfc1/psycopg2_binary-2.9.10-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:3e9c76f0ac6f92ecfc79516a8034a544926430f7b080ec5a0537bca389ee0906", size = 3274806 }, { url = "https://files.pythonhosted.org/packages/e5/57/8ddd4b374fa811a0b0a0f49b6abad1cde9cb34df73ea3348cc283fcd70b4/psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ad26b467a405c798aaa1458ba09d7e2b6e5f96b1ce0ac15d82fd9f95dc38a92", size = 2851361 }, { url = "https://files.pythonhosted.org/packages/f9/66/d1e52c20d283f1f3a8e7e5c1e06851d432f123ef57b13043b4f9b21ffa1f/psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:270934a475a0e4b6925b5f804e3809dd5f90f8613621d062848dd82f9cd62007", size = 3080836 }, { url = 
"https://files.pythonhosted.org/packages/a0/cb/592d44a9546aba78f8a1249021fe7c59d3afb8a0ba51434d6610cc3462b6/psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:48b338f08d93e7be4ab2b5f1dbe69dc5e9ef07170fe1f86514422076d9c010d0", size = 3264552 }, { url = "https://files.pythonhosted.org/packages/64/33/c8548560b94b7617f203d7236d6cdf36fe1a5a3645600ada6efd79da946f/psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f4152f8f76d2023aac16285576a9ecd2b11a9895373a1f10fd9db54b3ff06b4", size = 3019789 }, { url = "https://files.pythonhosted.org/packages/b0/0e/c2da0db5bea88a3be52307f88b75eec72c4de62814cbe9ee600c29c06334/psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:32581b3020c72d7a421009ee1c6bf4a131ef5f0a968fab2e2de0c9d2bb4577f1", size = 2871776 }, { url = "https://files.pythonhosted.org/packages/15/d7/774afa1eadb787ddf41aab52d4c62785563e29949613c958955031408ae6/psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:2ce3e21dc3437b1d960521eca599d57408a695a0d3c26797ea0f72e834c7ffe5", size = 2820959 }, { url = "https://files.pythonhosted.org/packages/5e/ed/440dc3f5991a8c6172a1cde44850ead0e483a375277a1aef7cfcec00af07/psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:e984839e75e0b60cfe75e351db53d6db750b00de45644c5d1f7ee5d1f34a1ce5", size = 2919329 }, { url = "https://files.pythonhosted.org/packages/03/be/2cc8f4282898306732d2ae7b7378ae14e8df3c1231b53579efa056aae887/psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3c4745a90b78e51d9ba06e2088a2fe0c693ae19cc8cb051ccda44e8df8a6eb53", size = 2957659 }, { url = "https://files.pythonhosted.org/packages/d0/12/fb8e4f485d98c570e00dad5800e9a2349cfe0f71a767c856857160d343a5/psycopg2_binary-2.9.10-cp310-cp310-win32.whl", hash = "sha256:e5720a5d25e3b99cd0dc5c8a440570469ff82659bb09431c1439b92caf184d3b", size = 1024605 }, { url = 
"https://files.pythonhosted.org/packages/22/4f/217cd2471ecf45d82905dd09085e049af8de6cfdc008b6663c3226dc1c98/psycopg2_binary-2.9.10-cp310-cp310-win_amd64.whl", hash = "sha256:3c18f74eb4386bf35e92ab2354a12c17e5eb4d9798e4c0ad3a00783eae7cd9f1", size = 1163817 }, { url = "https://files.pythonhosted.org/packages/9c/8f/9feb01291d0d7a0a4c6a6bab24094135c2b59c6a81943752f632c75896d6/psycopg2_binary-2.9.10-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:04392983d0bb89a8717772a193cfaac58871321e3ec69514e1c4e0d4957b5aff", size = 3043397 }, { url = "https://files.pythonhosted.org/packages/15/30/346e4683532011561cd9c8dfeac6a8153dd96452fee0b12666058ab7893c/psycopg2_binary-2.9.10-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:1a6784f0ce3fec4edc64e985865c17778514325074adf5ad8f80636cd029ef7c", size = 3274806 }, { url = "https://files.pythonhosted.org/packages/66/6e/4efebe76f76aee7ec99166b6c023ff8abdc4e183f7b70913d7c047701b79/psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5f86c56eeb91dc3135b3fd8a95dc7ae14c538a2f3ad77a19645cf55bab1799c", size = 2851370 }, { url = "https://files.pythonhosted.org/packages/7f/fd/ff83313f86b50f7ca089b161b8e0a22bb3c319974096093cd50680433fdb/psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b3d2491d4d78b6b14f76881905c7a8a8abcf974aad4a8a0b065273a0ed7a2cb", size = 3080780 }, { url = "https://files.pythonhosted.org/packages/e6/c4/bfadd202dcda8333a7ccafdc51c541dbdfce7c2c7cda89fa2374455d795f/psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2286791ececda3a723d1910441c793be44625d86d1a4e79942751197f4d30341", size = 3264583 }, { url = "https://files.pythonhosted.org/packages/5d/f1/09f45ac25e704ac954862581f9f9ae21303cc5ded3d0b775532b407f0e90/psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:512d29bb12608891e349af6a0cccedce51677725a921c07dba6342beaf576f9a", size = 
3019831 }, { url = "https://files.pythonhosted.org/packages/9e/2e/9beaea078095cc558f215e38f647c7114987d9febfc25cb2beed7c3582a5/psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5a507320c58903967ef7384355a4da7ff3f28132d679aeb23572753cbf2ec10b", size = 2871822 }, { url = "https://files.pythonhosted.org/packages/01/9e/ef93c5d93f3dc9fc92786ffab39e323b9aed066ba59fdc34cf85e2722271/psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:6d4fa1079cab9018f4d0bd2db307beaa612b0d13ba73b5c6304b9fe2fb441ff7", size = 2820975 }, { url = "https://files.pythonhosted.org/packages/a5/f0/049e9631e3268fe4c5a387f6fc27e267ebe199acf1bc1bc9cbde4bd6916c/psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:851485a42dbb0bdc1edcdabdb8557c09c9655dfa2ca0460ff210522e073e319e", size = 2919320 }, { url = "https://files.pythonhosted.org/packages/dc/9a/bcb8773b88e45fb5a5ea8339e2104d82c863a3b8558fbb2aadfe66df86b3/psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:35958ec9e46432d9076286dda67942ed6d968b9c3a6a2fd62b48939d1d78bf68", size = 2957617 }, { url = "https://files.pythonhosted.org/packages/e2/6b/144336a9bf08a67d217b3af3246abb1d027095dab726f0687f01f43e8c03/psycopg2_binary-2.9.10-cp311-cp311-win32.whl", hash = "sha256:ecced182e935529727401b24d76634a357c71c9275b356efafd8a2a91ec07392", size = 1024618 }, { url = "https://files.pythonhosted.org/packages/61/69/3b3d7bd583c6d3cbe5100802efa5beacaacc86e37b653fc708bf3d6853b8/psycopg2_binary-2.9.10-cp311-cp311-win_amd64.whl", hash = "sha256:ee0e8c683a7ff25d23b55b11161c2663d4b099770f6085ff0a20d4505778d6b4", size = 1163816 }, { url = "https://files.pythonhosted.org/packages/49/7d/465cc9795cf76f6d329efdafca74693714556ea3891813701ac1fee87545/psycopg2_binary-2.9.10-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:880845dfe1f85d9d5f7c412efea7a08946a46894537e4e5d091732eb1d34d9a0", size = 3044771 }, { url = 
"https://files.pythonhosted.org/packages/8b/31/6d225b7b641a1a2148e3ed65e1aa74fc86ba3fee850545e27be9e1de893d/psycopg2_binary-2.9.10-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:9440fa522a79356aaa482aa4ba500b65f28e5d0e63b801abf6aa152a29bd842a", size = 3275336 }, { url = "https://files.pythonhosted.org/packages/30/b7/a68c2b4bff1cbb1728e3ec864b2d92327c77ad52edcd27922535a8366f68/psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3923c1d9870c49a2d44f795df0c889a22380d36ef92440ff618ec315757e539", size = 2851637 }, { url = "https://files.pythonhosted.org/packages/0b/b1/cfedc0e0e6f9ad61f8657fd173b2f831ce261c02a08c0b09c652b127d813/psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b2c956c028ea5de47ff3a8d6b3cc3330ab45cf0b7c3da35a2d6ff8420896526", size = 3082097 }, { url = "https://files.pythonhosted.org/packages/18/ed/0a8e4153c9b769f59c02fb5e7914f20f0b2483a19dae7bf2db54b743d0d0/psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f758ed67cab30b9a8d2833609513ce4d3bd027641673d4ebc9c067e4d208eec1", size = 3264776 }, { url = "https://files.pythonhosted.org/packages/10/db/d09da68c6a0cdab41566b74e0a6068a425f077169bed0946559b7348ebe9/psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cd9b4f2cfab88ed4a9106192de509464b75a906462fb846b936eabe45c2063e", size = 3020968 }, { url = "https://files.pythonhosted.org/packages/94/28/4d6f8c255f0dfffb410db2b3f9ac5218d959a66c715c34cac31081e19b95/psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dc08420625b5a20b53551c50deae6e231e6371194fa0651dbe0fb206452ae1f", size = 2872334 }, { url = "https://files.pythonhosted.org/packages/05/f7/20d7bf796593c4fea95e12119d6cc384ff1f6141a24fbb7df5a668d29d29/psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_i686.whl", hash = 
"sha256:d7cd730dfa7c36dbe8724426bf5612798734bff2d3c3857f36f2733f5bfc7c00", size = 2822722 }, { url = "https://files.pythonhosted.org/packages/4d/e4/0c407ae919ef626dbdb32835a03b6737013c3cc7240169843965cada2bdf/psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:155e69561d54d02b3c3209545fb08938e27889ff5a10c19de8d23eb5a41be8a5", size = 2920132 }, { url = "https://files.pythonhosted.org/packages/2d/70/aa69c9f69cf09a01da224909ff6ce8b68faeef476f00f7ec377e8f03be70/psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3cc28a6fd5a4a26224007712e79b81dbaee2ffb90ff406256158ec4d7b52b47", size = 2959312 }, { url = "https://files.pythonhosted.org/packages/d3/bd/213e59854fafe87ba47814bf413ace0dcee33a89c8c8c814faca6bc7cf3c/psycopg2_binary-2.9.10-cp312-cp312-win32.whl", hash = "sha256:ec8a77f521a17506a24a5f626cb2aee7850f9b69a0afe704586f63a464f3cd64", size = 1025191 }, { url = "https://files.pythonhosted.org/packages/92/29/06261ea000e2dc1e22907dbbc483a1093665509ea586b29b8986a0e56733/psycopg2_binary-2.9.10-cp312-cp312-win_amd64.whl", hash = "sha256:18c5ee682b9c6dd3696dad6e54cc7ff3a1a9020df6a5c0f861ef8bfd338c3ca0", size = 1164031 }, { url = "https://files.pythonhosted.org/packages/3e/30/d41d3ba765609c0763505d565c4d12d8f3c79793f0d0f044ff5a28bf395b/psycopg2_binary-2.9.10-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:26540d4a9a4e2b096f1ff9cce51253d0504dca5a85872c7f7be23be5a53eb18d", size = 3044699 }, { url = "https://files.pythonhosted.org/packages/35/44/257ddadec7ef04536ba71af6bc6a75ec05c5343004a7ec93006bee66c0bc/psycopg2_binary-2.9.10-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:e217ce4d37667df0bc1c397fdcd8de5e81018ef305aed9415c3b093faaeb10fb", size = 3275245 }, { url = "https://files.pythonhosted.org/packages/1b/11/48ea1cd11de67f9efd7262085588790a95d9dfcd9b8a687d46caf7305c1a/psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:245159e7ab20a71d989da00f280ca57da7641fa2cdcf71749c193cea540a74f7", size = 2851631 }, { url = "https://files.pythonhosted.org/packages/62/e0/62ce5ee650e6c86719d621a761fe4bc846ab9eff8c1f12b1ed5741bf1c9b/psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c4ded1a24b20021ebe677b7b08ad10bf09aac197d6943bfe6fec70ac4e4690d", size = 3082140 }, { url = "https://files.pythonhosted.org/packages/27/ce/63f946c098611f7be234c0dd7cb1ad68b0b5744d34f68062bb3c5aa510c8/psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3abb691ff9e57d4a93355f60d4f4c1dd2d68326c968e7db17ea96df3c023ef73", size = 3264762 }, { url = "https://files.pythonhosted.org/packages/43/25/c603cd81402e69edf7daa59b1602bd41eb9859e2824b8c0855d748366ac9/psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8608c078134f0b3cbd9f89b34bd60a943b23fd33cc5f065e8d5f840061bd0673", size = 3020967 }, { url = "https://files.pythonhosted.org/packages/5f/d6/8708d8c6fca531057fa170cdde8df870e8b6a9b136e82b361c65e42b841e/psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:230eeae2d71594103cd5b93fd29d1ace6420d0b86f4778739cb1a5a32f607d1f", size = 2872326 }, { url = "https://files.pythonhosted.org/packages/ce/ac/5b1ea50fc08a9df82de7e1771537557f07c2632231bbab652c7e22597908/psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:bb89f0a835bcfc1d42ccd5f41f04870c1b936d8507c6df12b7737febc40f0909", size = 2822712 }, { url = "https://files.pythonhosted.org/packages/c4/fc/504d4503b2abc4570fac3ca56eb8fed5e437bf9c9ef13f36b6621db8ef00/psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f0c2d907a1e102526dd2986df638343388b94c33860ff3bbe1384130828714b1", size = 2920155 }, { url = 
"https://files.pythonhosted.org/packages/b2/d1/323581e9273ad2c0dbd1902f3fb50c441da86e894b6e25a73c3fda32c57e/psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f8157bed2f51db683f31306aa497311b560f2265998122abe1dce6428bd86567", size = 2959356 }, { url = "https://files.pythonhosted.org/packages/08/50/d13ea0a054189ae1bc21af1d85b6f8bb9bbc5572991055d70ad9006fe2d6/psycopg2_binary-2.9.10-cp313-cp313-win_amd64.whl", hash = "sha256:27422aa5f11fbcd9b18da48373eb67081243662f9b46e6fd07c3eb46e4535142", size = 2569224 }, { url = "https://files.pythonhosted.org/packages/a2/bc/e77648009b6e61af327c607543f65fdf25bcfb4100f5a6f3bdb62ddac03c/psycopg2_binary-2.9.10-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:7a813c8bdbaaaab1f078014b9b0b13f5de757e2b5d9be6403639b298a04d218b", size = 3043437 }, { url = "https://files.pythonhosted.org/packages/e0/e8/5a12211a1f5b959f3e3ccd342eace60c1f26422f53e06d687821dc268780/psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d00924255d7fc916ef66e4bf22f354a940c67179ad3fd7067d7a0a9c84d2fbfc", size = 2851340 }, { url = "https://files.pythonhosted.org/packages/47/ed/5932b0458a7fc61237b653df050513c8d18a6f4083cc7f90dcef967f7bce/psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7559bce4b505762d737172556a4e6ea8a9998ecac1e39b5233465093e8cee697", size = 3080905 }, { url = "https://files.pythonhosted.org/packages/71/df/8047d85c3d23864aca4613c3be1ea0fe61dbe4e050a89ac189f9dce4403e/psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e8b58f0a96e7a1e341fc894f62c1177a7c83febebb5ff9123b579418fdc8a481", size = 3264640 }, { url = "https://files.pythonhosted.org/packages/f3/de/6157e4ef242920e8f2749f7708d5cc8815414bdd4a27a91996e7cd5c80df/psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b269105e59ac96aba877c1707c600ae55711d9dcd3fc4b5012e4af68e30c648", size = 3019812 
}, { url = "https://files.pythonhosted.org/packages/25/f9/0fc49efd2d4d6db3a8d0a3f5749b33a0d3fdd872cad49fbf5bfce1c50027/psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:79625966e176dc97ddabc142351e0409e28acf4660b88d1cf6adb876d20c490d", size = 2871933 }, { url = "https://files.pythonhosted.org/packages/57/bc/2ed1bd182219065692ed458d218d311b0b220b20662d25d913bc4e8d3549/psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:8aabf1c1a04584c168984ac678a668094d831f152859d06e055288fa515e4d30", size = 2820990 }, { url = "https://files.pythonhosted.org/packages/71/2a/43f77a9b8ee0b10e2de784d97ddc099d9fe0d9eec462a006e4d2cc74756d/psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:19721ac03892001ee8fdd11507e6a2e01f4e37014def96379411ca99d78aeb2c", size = 2919352 }, { url = "https://files.pythonhosted.org/packages/57/86/d2943df70469e6afab3b5b8e1367fccc61891f46de436b24ddee6f2c8404/psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7f5d859928e635fa3ce3477704acee0f667b3a3d3e4bb109f2b18d4005f38287", size = 2957614 }, { url = "https://files.pythonhosted.org/packages/85/21/195d69371330983aa16139e60ba855d0a18164c9295f3a3696be41bbcd54/psycopg2_binary-2.9.10-cp39-cp39-win32.whl", hash = "sha256:3216ccf953b3f267691c90c6fe742e45d890d8272326b4a8b20850a03d05b7b8", size = 1025341 }, { url = "https://files.pythonhosted.org/packages/ad/53/73196ebc19d6fbfc22427b982fbc98698b7b9c361e5e7707e3a3247cf06d/psycopg2_binary-2.9.10-cp39-cp39-win_amd64.whl", hash = "sha256:30e34c4e97964805f715206c7b789d54a78b70f3ff19fbe590104b71c45600e5", size = 1163958 }, ] [[package]] name = "pyasn1" version = "0.6.1" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/ba/e9/01f1a64245b89f039897cb0130016d79f77d52669aae6ee7b159a6c4c018/pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034", size = 145322 } wheels = [ { url = 
"https://files.pythonhosted.org/packages/c8/f1/d6a797abb14f6283c0ddff96bbdd46937f64122b8c925cab503dd37f8214/pyasn1-0.6.1-py3-none-any.whl", hash = "sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629", size = 83135 }, ] [[package]] name = "pyasn1-modules" version = "0.4.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pyasn1" }, ] sdist = { url = "https://files.pythonhosted.org/packages/1d/67/6afbf0d507f73c32d21084a79946bfcfca5fbc62a72057e9c23797a737c9/pyasn1_modules-0.4.1.tar.gz", hash = "sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c", size = 310028 } wheels = [ { url = "https://files.pythonhosted.org/packages/77/89/bc88a6711935ba795a679ea6ebee07e128050d6382eaa35a0a47c8032bdc/pyasn1_modules-0.4.1-py3-none-any.whl", hash = "sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd", size = 181537 }, ] [[package]] name = "pycparser" version = "2.22" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/1d/b2/31537cf4b1ca988837256c910a668b553fceb8f069bedc4b1c826024b52c/pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6", size = 172736 } wheels = [ { url = "https://files.pythonhosted.org/packages/13/a3/a812df4e2dd5696d1f351d58b8fe16a405b234ad2886a0dab9183fb78109/pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc", size = 117552 }, ] [[package]] name = "pydantic" version = "2.10.6" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "annotated-types" }, { name = "pydantic-core" }, { name = "typing-extensions" }, ] sdist = { url = "https://files.pythonhosted.org/packages/b7/ae/d5220c5c52b158b1de7ca89fc5edb72f304a70a4c540c84c8844bf4008de/pydantic-2.10.6.tar.gz", hash = "sha256:ca5daa827cce33de7a42be142548b0096bf05a7e7b365aebfa5f8eeec7128236", size = 761681 } wheels = [ { url = 
"https://files.pythonhosted.org/packages/f4/3c/8cc1cc84deffa6e25d2d0c688ebb80635dfdbf1dbea3e30c541c8cf4d860/pydantic-2.10.6-py3-none-any.whl", hash = "sha256:427d664bf0b8a2b34ff5dd0f5a18df00591adcee7198fbd71981054cef37b584", size = 431696 }, ] [[package]] name = "pydantic-core" version = "2.27.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] sdist = { url = "https://files.pythonhosted.org/packages/fc/01/f3e5ac5e7c25833db5eb555f7b7ab24cd6f8c322d3a3ad2d67a952dc0abc/pydantic_core-2.27.2.tar.gz", hash = "sha256:eb026e5a4c1fee05726072337ff51d1efb6f59090b7da90d30ea58625b1ffb39", size = 413443 } wheels = [ { url = "https://files.pythonhosted.org/packages/3a/bc/fed5f74b5d802cf9a03e83f60f18864e90e3aed7223adaca5ffb7a8d8d64/pydantic_core-2.27.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2d367ca20b2f14095a8f4fa1210f5a7b78b8a20009ecced6b12818f455b1e9fa", size = 1895938 }, { url = "https://files.pythonhosted.org/packages/71/2a/185aff24ce844e39abb8dd680f4e959f0006944f4a8a0ea372d9f9ae2e53/pydantic_core-2.27.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:491a2b73db93fab69731eaee494f320faa4e093dbed776be1a829c2eb222c34c", size = 1815684 }, { url = "https://files.pythonhosted.org/packages/c3/43/fafabd3d94d159d4f1ed62e383e264f146a17dd4d48453319fd782e7979e/pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7969e133a6f183be60e9f6f56bfae753585680f3b7307a8e555a948d443cc05a", size = 1829169 }, { url = "https://files.pythonhosted.org/packages/a2/d1/f2dfe1a2a637ce6800b799aa086d079998959f6f1215eb4497966efd2274/pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3de9961f2a346257caf0aa508a4da705467f53778e9ef6fe744c038119737ef5", size = 1867227 }, { url = 
"https://files.pythonhosted.org/packages/7d/39/e06fcbcc1c785daa3160ccf6c1c38fea31f5754b756e34b65f74e99780b5/pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e2bb4d3e5873c37bb3dd58714d4cd0b0e6238cebc4177ac8fe878f8b3aa8e74c", size = 2037695 }, { url = "https://files.pythonhosted.org/packages/7a/67/61291ee98e07f0650eb756d44998214231f50751ba7e13f4f325d95249ab/pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:280d219beebb0752699480fe8f1dc61ab6615c2046d76b7ab7ee38858de0a4e7", size = 2741662 }, { url = "https://files.pythonhosted.org/packages/32/90/3b15e31b88ca39e9e626630b4c4a1f5a0dfd09076366f4219429e6786076/pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47956ae78b6422cbd46f772f1746799cbb862de838fd8d1fbd34a82e05b0983a", size = 1993370 }, { url = "https://files.pythonhosted.org/packages/ff/83/c06d333ee3a67e2e13e07794995c1535565132940715931c1c43bfc85b11/pydantic_core-2.27.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:14d4a5c49d2f009d62a2a7140d3064f686d17a5d1a268bc641954ba181880236", size = 1996813 }, { url = "https://files.pythonhosted.org/packages/7c/f7/89be1c8deb6e22618a74f0ca0d933fdcb8baa254753b26b25ad3acff8f74/pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:337b443af21d488716f8d0b6164de833e788aa6bd7e3a39c005febc1284f4962", size = 2005287 }, { url = "https://files.pythonhosted.org/packages/b7/7d/8eb3e23206c00ef7feee17b83a4ffa0a623eb1a9d382e56e4aa46fd15ff2/pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:03d0f86ea3184a12f41a2d23f7ccb79cdb5a18e06993f8a45baa8dfec746f0e9", size = 2128414 }, { url = "https://files.pythonhosted.org/packages/4e/99/fe80f3ff8dd71a3ea15763878d464476e6cb0a2db95ff1c5c554133b6b83/pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7041c36f5680c6e0f08d922aed302e98b3745d97fe1589db0a3eebf6624523af", size = 
2155301 }, { url = "https://files.pythonhosted.org/packages/2b/a3/e50460b9a5789ca1451b70d4f52546fa9e2b420ba3bfa6100105c0559238/pydantic_core-2.27.2-cp310-cp310-win32.whl", hash = "sha256:50a68f3e3819077be2c98110c1f9dcb3817e93f267ba80a2c05bb4f8799e2ff4", size = 1816685 }, { url = "https://files.pythonhosted.org/packages/57/4c/a8838731cb0f2c2a39d3535376466de6049034d7b239c0202a64aaa05533/pydantic_core-2.27.2-cp310-cp310-win_amd64.whl", hash = "sha256:e0fd26b16394ead34a424eecf8a31a1f5137094cabe84a1bcb10fa6ba39d3d31", size = 1982876 }, { url = "https://files.pythonhosted.org/packages/c2/89/f3450af9d09d44eea1f2c369f49e8f181d742f28220f88cc4dfaae91ea6e/pydantic_core-2.27.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:8e10c99ef58cfdf2a66fc15d66b16c4a04f62bca39db589ae8cba08bc55331bc", size = 1893421 }, { url = "https://files.pythonhosted.org/packages/9e/e3/71fe85af2021f3f386da42d291412e5baf6ce7716bd7101ea49c810eda90/pydantic_core-2.27.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:26f32e0adf166a84d0cb63be85c562ca8a6fa8de28e5f0d92250c6b7e9e2aff7", size = 1814998 }, { url = "https://files.pythonhosted.org/packages/a6/3c/724039e0d848fd69dbf5806894e26479577316c6f0f112bacaf67aa889ac/pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c19d1ea0673cd13cc2f872f6c9ab42acc4e4f492a7ca9d3795ce2b112dd7e15", size = 1826167 }, { url = "https://files.pythonhosted.org/packages/2b/5b/1b29e8c1fb5f3199a9a57c1452004ff39f494bbe9bdbe9a81e18172e40d3/pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5e68c4446fe0810e959cdff46ab0a41ce2f2c86d227d96dc3847af0ba7def306", size = 1865071 }, { url = "https://files.pythonhosted.org/packages/89/6c/3985203863d76bb7d7266e36970d7e3b6385148c18a68cc8915fd8c84d57/pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d9640b0059ff4f14d1f37321b94061c6db164fbe49b334b31643e0528d100d99", size = 2036244 }, { url = 
"https://files.pythonhosted.org/packages/0e/41/f15316858a246b5d723f7d7f599f79e37493b2e84bfc789e58d88c209f8a/pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:40d02e7d45c9f8af700f3452f329ead92da4c5f4317ca9b896de7ce7199ea459", size = 2737470 }, { url = "https://files.pythonhosted.org/packages/a8/7c/b860618c25678bbd6d1d99dbdfdf0510ccb50790099b963ff78a124b754f/pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c1fd185014191700554795c99b347d64f2bb637966c4cfc16998a0ca700d048", size = 1992291 }, { url = "https://files.pythonhosted.org/packages/bf/73/42c3742a391eccbeab39f15213ecda3104ae8682ba3c0c28069fbcb8c10d/pydantic_core-2.27.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d81d2068e1c1228a565af076598f9e7451712700b673de8f502f0334f281387d", size = 1994613 }, { url = "https://files.pythonhosted.org/packages/94/7a/941e89096d1175d56f59340f3a8ebaf20762fef222c298ea96d36a6328c5/pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1a4207639fb02ec2dbb76227d7c751a20b1a6b4bc52850568e52260cae64ca3b", size = 2002355 }, { url = "https://files.pythonhosted.org/packages/6e/95/2359937a73d49e336a5a19848713555605d4d8d6940c3ec6c6c0ca4dcf25/pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:3de3ce3c9ddc8bbd88f6e0e304dea0e66d843ec9de1b0042b0911c1663ffd474", size = 2126661 }, { url = "https://files.pythonhosted.org/packages/2b/4c/ca02b7bdb6012a1adef21a50625b14f43ed4d11f1fc237f9d7490aa5078c/pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:30c5f68ded0c36466acede341551106821043e9afaad516adfb6e8fa80a4e6a6", size = 2153261 }, { url = "https://files.pythonhosted.org/packages/72/9d/a241db83f973049a1092a079272ffe2e3e82e98561ef6214ab53fe53b1c7/pydantic_core-2.27.2-cp311-cp311-win32.whl", hash = "sha256:c70c26d2c99f78b125a3459f8afe1aed4d9687c24fd677c6a4436bc042e50d6c", size = 1812361 }, { url = 
"https://files.pythonhosted.org/packages/e8/ef/013f07248041b74abd48a385e2110aa3a9bbfef0fbd97d4e6d07d2f5b89a/pydantic_core-2.27.2-cp311-cp311-win_amd64.whl", hash = "sha256:08e125dbdc505fa69ca7d9c499639ab6407cfa909214d500897d02afb816e7cc", size = 1982484 }, { url = "https://files.pythonhosted.org/packages/10/1c/16b3a3e3398fd29dca77cea0a1d998d6bde3902fa2706985191e2313cc76/pydantic_core-2.27.2-cp311-cp311-win_arm64.whl", hash = "sha256:26f0d68d4b235a2bae0c3fc585c585b4ecc51382db0e3ba402a22cbc440915e4", size = 1867102 }, { url = "https://files.pythonhosted.org/packages/d6/74/51c8a5482ca447871c93e142d9d4a92ead74de6c8dc5e66733e22c9bba89/pydantic_core-2.27.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9e0c8cfefa0ef83b4da9588448b6d8d2a2bf1a53c3f1ae5fca39eb3061e2f0b0", size = 1893127 }, { url = "https://files.pythonhosted.org/packages/d3/f3/c97e80721735868313c58b89d2de85fa80fe8dfeeed84dc51598b92a135e/pydantic_core-2.27.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:83097677b8e3bd7eaa6775720ec8e0405f1575015a463285a92bfdfe254529ef", size = 1811340 }, { url = "https://files.pythonhosted.org/packages/9e/91/840ec1375e686dbae1bd80a9e46c26a1e0083e1186abc610efa3d9a36180/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:172fce187655fece0c90d90a678424b013f8fbb0ca8b036ac266749c09438cb7", size = 1822900 }, { url = "https://files.pythonhosted.org/packages/f6/31/4240bc96025035500c18adc149aa6ffdf1a0062a4b525c932065ceb4d868/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:519f29f5213271eeeeb3093f662ba2fd512b91c5f188f3bb7b27bc5973816934", size = 1869177 }, { url = "https://files.pythonhosted.org/packages/fa/20/02fbaadb7808be578317015c462655c317a77a7c8f0ef274bc016a784c54/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05e3a55d124407fffba0dd6b0c0cd056d10e983ceb4e5dbd10dda135c31071d6", size = 2038046 }, { url = 
"https://files.pythonhosted.org/packages/06/86/7f306b904e6c9eccf0668248b3f272090e49c275bc488a7b88b0823444a4/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c3ed807c7b91de05e63930188f19e921d1fe90de6b4f5cd43ee7fcc3525cb8c", size = 2685386 }, { url = "https://files.pythonhosted.org/packages/8d/f0/49129b27c43396581a635d8710dae54a791b17dfc50c70164866bbf865e3/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fb4aadc0b9a0c063206846d603b92030eb6f03069151a625667f982887153e2", size = 1997060 }, { url = "https://files.pythonhosted.org/packages/0d/0f/943b4af7cd416c477fd40b187036c4f89b416a33d3cc0ab7b82708a667aa/pydantic_core-2.27.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:28ccb213807e037460326424ceb8b5245acb88f32f3d2777427476e1b32c48c4", size = 2004870 }, { url = "https://files.pythonhosted.org/packages/35/40/aea70b5b1a63911c53a4c8117c0a828d6790483f858041f47bab0b779f44/pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:de3cd1899e2c279b140adde9357c4495ed9d47131b4a4eaff9052f23398076b3", size = 1999822 }, { url = "https://files.pythonhosted.org/packages/f2/b3/807b94fd337d58effc5498fd1a7a4d9d59af4133e83e32ae39a96fddec9d/pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:220f892729375e2d736b97d0e51466252ad84c51857d4d15f5e9692f9ef12be4", size = 2130364 }, { url = "https://files.pythonhosted.org/packages/fc/df/791c827cd4ee6efd59248dca9369fb35e80a9484462c33c6649a8d02b565/pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a0fcd29cd6b4e74fe8ddd2c90330fd8edf2e30cb52acda47f06dd615ae72da57", size = 2158303 }, { url = "https://files.pythonhosted.org/packages/9b/67/4e197c300976af185b7cef4c02203e175fb127e414125916bf1128b639a9/pydantic_core-2.27.2-cp312-cp312-win32.whl", hash = "sha256:1e2cb691ed9834cd6a8be61228471d0a503731abfb42f82458ff27be7b2186fc", size = 1834064 }, { url = 
"https://files.pythonhosted.org/packages/1f/ea/cd7209a889163b8dcca139fe32b9687dd05249161a3edda62860430457a5/pydantic_core-2.27.2-cp312-cp312-win_amd64.whl", hash = "sha256:cc3f1a99a4f4f9dd1de4fe0312c114e740b5ddead65bb4102884b384c15d8bc9", size = 1989046 }, { url = "https://files.pythonhosted.org/packages/bc/49/c54baab2f4658c26ac633d798dab66b4c3a9bbf47cff5284e9c182f4137a/pydantic_core-2.27.2-cp312-cp312-win_arm64.whl", hash = "sha256:3911ac9284cd8a1792d3cb26a2da18f3ca26c6908cc434a18f730dc0db7bfa3b", size = 1885092 }, { url = "https://files.pythonhosted.org/packages/41/b1/9bc383f48f8002f99104e3acff6cba1231b29ef76cfa45d1506a5cad1f84/pydantic_core-2.27.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7d14bd329640e63852364c306f4d23eb744e0f8193148d4044dd3dacdaacbd8b", size = 1892709 }, { url = "https://files.pythonhosted.org/packages/10/6c/e62b8657b834f3eb2961b49ec8e301eb99946245e70bf42c8817350cbefc/pydantic_core-2.27.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:82f91663004eb8ed30ff478d77c4d1179b3563df6cdb15c0817cd1cdaf34d154", size = 1811273 }, { url = "https://files.pythonhosted.org/packages/ba/15/52cfe49c8c986e081b863b102d6b859d9defc63446b642ccbbb3742bf371/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71b24c7d61131bb83df10cc7e687433609963a944ccf45190cfc21e0887b08c9", size = 1823027 }, { url = "https://files.pythonhosted.org/packages/b1/1c/b6f402cfc18ec0024120602bdbcebc7bdd5b856528c013bd4d13865ca473/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fa8e459d4954f608fa26116118bb67f56b93b209c39b008277ace29937453dc9", size = 1868888 }, { url = "https://files.pythonhosted.org/packages/bd/7b/8cb75b66ac37bc2975a3b7de99f3c6f355fcc4d89820b61dffa8f1e81677/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce8918cbebc8da707ba805b7fd0b382816858728ae7fe19a942080c24e5b7cd1", size = 2037738 }, { url = 
"https://files.pythonhosted.org/packages/c8/f1/786d8fe78970a06f61df22cba58e365ce304bf9b9f46cc71c8c424e0c334/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eda3f5c2a021bbc5d976107bb302e0131351c2ba54343f8a496dc8783d3d3a6a", size = 2685138 }, { url = "https://files.pythonhosted.org/packages/a6/74/d12b2cd841d8724dc8ffb13fc5cef86566a53ed358103150209ecd5d1999/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd8086fa684c4775c27f03f062cbb9eaa6e17f064307e86b21b9e0abc9c0f02e", size = 1997025 }, { url = "https://files.pythonhosted.org/packages/a0/6e/940bcd631bc4d9a06c9539b51f070b66e8f370ed0933f392db6ff350d873/pydantic_core-2.27.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8d9b3388db186ba0c099a6d20f0604a44eabdeef1777ddd94786cdae158729e4", size = 2004633 }, { url = "https://files.pythonhosted.org/packages/50/cc/a46b34f1708d82498c227d5d80ce615b2dd502ddcfd8376fc14a36655af1/pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7a66efda2387de898c8f38c0cf7f14fca0b51a8ef0b24bfea5849f1b3c95af27", size = 1999404 }, { url = "https://files.pythonhosted.org/packages/ca/2d/c365cfa930ed23bc58c41463bae347d1005537dc8db79e998af8ba28d35e/pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:18a101c168e4e092ab40dbc2503bdc0f62010e95d292b27827871dc85450d7ee", size = 2130130 }, { url = "https://files.pythonhosted.org/packages/f4/d7/eb64d015c350b7cdb371145b54d96c919d4db516817f31cd1c650cae3b21/pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ba5dd002f88b78a4215ed2f8ddbdf85e8513382820ba15ad5ad8955ce0ca19a1", size = 2157946 }, { url = "https://files.pythonhosted.org/packages/a4/99/bddde3ddde76c03b65dfd5a66ab436c4e58ffc42927d4ff1198ffbf96f5f/pydantic_core-2.27.2-cp313-cp313-win32.whl", hash = "sha256:1ebaf1d0481914d004a573394f4be3a7616334be70261007e47c2a6fe7e50130", size = 1834387 }, { url = 
"https://files.pythonhosted.org/packages/71/47/82b5e846e01b26ac6f1893d3c5f9f3a2eb6ba79be26eef0b759b4fe72946/pydantic_core-2.27.2-cp313-cp313-win_amd64.whl", hash = "sha256:953101387ecf2f5652883208769a79e48db18c6df442568a0b5ccd8c2723abee", size = 1990453 }, { url = "https://files.pythonhosted.org/packages/51/b2/b2b50d5ecf21acf870190ae5d093602d95f66c9c31f9d5de6062eb329ad1/pydantic_core-2.27.2-cp313-cp313-win_arm64.whl", hash = "sha256:ac4dbfd1691affb8f48c2c13241a2e3b60ff23247cbcf981759c768b6633cf8b", size = 1885186 }, { url = "https://files.pythonhosted.org/packages/27/97/3aef1ddb65c5ccd6eda9050036c956ff6ecbfe66cb7eb40f280f121a5bb0/pydantic_core-2.27.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:c10eb4f1659290b523af58fa7cffb452a61ad6ae5613404519aee4bfbf1df993", size = 1896475 }, { url = "https://files.pythonhosted.org/packages/ad/d3/5668da70e373c9904ed2f372cb52c0b996426f302e0dee2e65634c92007d/pydantic_core-2.27.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ef592d4bad47296fb11f96cd7dc898b92e795032b4894dfb4076cfccd43a9308", size = 1772279 }, { url = "https://files.pythonhosted.org/packages/8a/9e/e44b8cb0edf04a2f0a1f6425a65ee089c1d6f9c4c2dcab0209127b6fdfc2/pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c61709a844acc6bf0b7dce7daae75195a10aac96a596ea1b776996414791ede4", size = 1829112 }, { url = "https://files.pythonhosted.org/packages/1c/90/1160d7ac700102effe11616e8119e268770f2a2aa5afb935f3ee6832987d/pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c5f762659e47fdb7b16956c71598292f60a03aa92f8b6351504359dbdba6cf", size = 1866780 }, { url = "https://files.pythonhosted.org/packages/ee/33/13983426df09a36d22c15980008f8d9c77674fc319351813b5a2739b70f3/pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c9775e339e42e79ec99c441d9730fccf07414af63eac2f0e48e08fd38a64d76", size = 2037943 }, { url = 
"https://files.pythonhosted.org/packages/01/d7/ced164e376f6747e9158c89988c293cd524ab8d215ae4e185e9929655d5c/pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57762139821c31847cfb2df63c12f725788bd9f04bc2fb392790959b8f70f118", size = 2740492 }, { url = "https://files.pythonhosted.org/packages/8b/1f/3dc6e769d5b7461040778816aab2b00422427bcaa4b56cc89e9c653b2605/pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d1e85068e818c73e048fe28cfc769040bb1f475524f4745a5dc621f75ac7630", size = 1995714 }, { url = "https://files.pythonhosted.org/packages/07/d7/a0bd09bc39283530b3f7c27033a814ef254ba3bd0b5cfd040b7abf1fe5da/pydantic_core-2.27.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:097830ed52fd9e427942ff3b9bc17fab52913b2f50f2880dc4a5611446606a54", size = 1997163 }, { url = "https://files.pythonhosted.org/packages/2d/bb/2db4ad1762e1c5699d9b857eeb41959191980de6feb054e70f93085e1bcd/pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:044a50963a614ecfae59bb1eaf7ea7efc4bc62f49ed594e18fa1e5d953c40e9f", size = 2005217 }, { url = "https://files.pythonhosted.org/packages/53/5f/23a5a3e7b8403f8dd8fc8a6f8b49f6b55c7d715b77dcf1f8ae919eeb5628/pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:4e0b4220ba5b40d727c7f879eac379b822eee5d8fff418e9d3381ee45b3b0362", size = 2127899 }, { url = "https://files.pythonhosted.org/packages/c2/ae/aa38bb8dd3d89c2f1d8362dd890ee8f3b967330821d03bbe08fa01ce3766/pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5e4f4bb20d75e9325cc9696c6802657b58bc1dbbe3022f32cc2b2b632c3fbb96", size = 2155726 }, { url = "https://files.pythonhosted.org/packages/98/61/4f784608cc9e98f70839187117ce840480f768fed5d386f924074bf6213c/pydantic_core-2.27.2-cp39-cp39-win32.whl", hash = "sha256:cca63613e90d001b9f2f9a9ceb276c308bfa2a43fafb75c8031c4f66039e8c6e", size = 1817219 }, { url = 
"https://files.pythonhosted.org/packages/57/82/bb16a68e4a1a858bb3768c2c8f1ff8d8978014e16598f001ea29a25bf1d1/pydantic_core-2.27.2-cp39-cp39-win_amd64.whl", hash = "sha256:77d1bca19b0f7021b3a982e6f903dcd5b2b06076def36a652e3907f596e29f67", size = 1985382 }, { url = "https://files.pythonhosted.org/packages/46/72/af70981a341500419e67d5cb45abe552a7c74b66326ac8877588488da1ac/pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:2bf14caea37e91198329b828eae1618c068dfb8ef17bb33287a7ad4b61ac314e", size = 1891159 }, { url = "https://files.pythonhosted.org/packages/ad/3d/c5913cccdef93e0a6a95c2d057d2c2cba347815c845cda79ddd3c0f5e17d/pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b0cb791f5b45307caae8810c2023a184c74605ec3bcbb67d13846c28ff731ff8", size = 1768331 }, { url = "https://files.pythonhosted.org/packages/f6/f0/a3ae8fbee269e4934f14e2e0e00928f9346c5943174f2811193113e58252/pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:688d3fd9fcb71f41c4c015c023d12a79d1c4c0732ec9eb35d96e3388a120dcf3", size = 1822467 }, { url = "https://files.pythonhosted.org/packages/d7/7a/7bbf241a04e9f9ea24cd5874354a83526d639b02674648af3f350554276c/pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d591580c34f4d731592f0e9fe40f9cc1b430d297eecc70b962e93c5c668f15f", size = 1979797 }, { url = "https://files.pythonhosted.org/packages/4f/5f/4784c6107731f89e0005a92ecb8a2efeafdb55eb992b8e9d0a2be5199335/pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:82f986faf4e644ffc189a7f1aafc86e46ef70372bb153e7001e8afccc6e54133", size = 1987839 }, { url = "https://files.pythonhosted.org/packages/6d/a7/61246562b651dff00de86a5f01b6e4befb518df314c54dec187a78d81c84/pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:bec317a27290e2537f922639cafd54990551725fc844249e64c523301d0822fc", size = 
1998861 }, { url = "https://files.pythonhosted.org/packages/86/aa/837821ecf0c022bbb74ca132e117c358321e72e7f9702d1b6a03758545e2/pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:0296abcb83a797db256b773f45773da397da75a08f5fcaef41f2044adec05f50", size = 2116582 }, { url = "https://files.pythonhosted.org/packages/81/b0/5e74656e95623cbaa0a6278d16cf15e10a51f6002e3ec126541e95c29ea3/pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:0d75070718e369e452075a6017fbf187f788e17ed67a3abd47fa934d001863d9", size = 2151985 }, { url = "https://files.pythonhosted.org/packages/63/37/3e32eeb2a451fddaa3898e2163746b0cffbbdbb4740d38372db0490d67f3/pydantic_core-2.27.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7e17b560be3c98a8e3aa66ce828bdebb9e9ac6ad5466fba92eb74c4c95cb1151", size = 2004715 }, { url = "https://files.pythonhosted.org/packages/29/0e/dcaea00c9dbd0348b723cae82b0e0c122e0fa2b43fa933e1622fd237a3ee/pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c33939a82924da9ed65dab5a65d427205a73181d8098e79b6b426bdf8ad4e656", size = 1891733 }, { url = "https://files.pythonhosted.org/packages/86/d3/e797bba8860ce650272bda6383a9d8cad1d1c9a75a640c9d0e848076f85e/pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:00bad2484fa6bda1e216e7345a798bd37c68fb2d97558edd584942aa41b7d278", size = 1768375 }, { url = "https://files.pythonhosted.org/packages/41/f7/f847b15fb14978ca2b30262548f5fc4872b2724e90f116393eb69008299d/pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c817e2b40aba42bac6f457498dacabc568c3b7a986fc9ba7c8d9d260b71485fb", size = 1822307 }, { url = "https://files.pythonhosted.org/packages/9c/63/ed80ec8255b587b2f108e514dc03eed1546cd00f0af281e699797f373f38/pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:251136cdad0cb722e93732cb45ca5299fb56e1344a833640bf93b2803f8d1bfd", 
size = 1979971 }, { url = "https://files.pythonhosted.org/packages/a9/6d/6d18308a45454a0de0e975d70171cadaf454bc7a0bf86b9c7688e313f0bb/pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d2088237af596f0a524d3afc39ab3b036e8adb054ee57cbb1dcf8e09da5b29cc", size = 1987616 }, { url = "https://files.pythonhosted.org/packages/82/8a/05f8780f2c1081b800a7ca54c1971e291c2d07d1a50fb23c7e4aef4ed403/pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d4041c0b966a84b4ae7a09832eb691a35aec90910cd2dbe7a208de59be77965b", size = 1998943 }, { url = "https://files.pythonhosted.org/packages/5e/3e/fe5b6613d9e4c0038434396b46c5303f5ade871166900b357ada4766c5b7/pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:8083d4e875ebe0b864ffef72a4304827015cff328a1be6e22cc850753bfb122b", size = 2116654 }, { url = "https://files.pythonhosted.org/packages/db/ad/28869f58938fad8cc84739c4e592989730bfb69b7c90a8fff138dff18e1e/pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f141ee28a0ad2123b6611b6ceff018039df17f32ada8b534e6aa039545a3efb2", size = 2152292 }, { url = "https://files.pythonhosted.org/packages/a1/0c/c5c5cd3689c32ed1fe8c5d234b079c12c281c051759770c05b8bed6412b5/pydantic_core-2.27.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7d0c8399fcc1848491f00e0314bd59fb34a9c008761bcb422a057670c3f65e35", size = 2004961 }, ] [[package]] name = "pydantic-extra-types" version = "2.10.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pydantic" }, { name = "typing-extensions" }, ] sdist = { url = "https://files.pythonhosted.org/packages/53/fa/6b268a47839f8af46ffeb5bb6aee7bded44fbad54e6bf826c11f17aef91a/pydantic_extra_types-2.10.3.tar.gz", hash = "sha256:dcc0a7b90ac9ef1b58876c9b8fdede17fbdde15420de9d571a9fccde2ae175bb", size = 95128 } wheels = [ { url = 
"https://files.pythonhosted.org/packages/38/0a/f6f8e5f79d188e2f3fa9ecfccfa72538b685985dd5c7c2886c67af70e685/pydantic_extra_types-2.10.3-py3-none-any.whl", hash = "sha256:e8b372752b49019cd8249cc192c62a820d8019f5382a8789d0f887338a59c0f3", size = 37175 }, ] [[package]] name = "pydantic-settings" version = "2.8.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pydantic" }, { name = "python-dotenv" }, ] sdist = { url = "https://files.pythonhosted.org/packages/88/82/c79424d7d8c29b994fb01d277da57b0a9b09cc03c3ff875f9bd8a86b2145/pydantic_settings-2.8.1.tar.gz", hash = "sha256:d5c663dfbe9db9d5e1c646b2e161da12f0d734d422ee56f567d0ea2cee4e8585", size = 83550 } wheels = [ { url = "https://files.pythonhosted.org/packages/0b/53/a64f03044927dc47aafe029c42a5b7aabc38dfb813475e0e1bf71c4a59d0/pydantic_settings-2.8.1-py3-none-any.whl", hash = "sha256:81942d5ac3d905f7f3ee1a70df5dfb62d5569c12f51a5a647defc1c3d9ee2e9c", size = 30839 }, ] [[package]] name = "pygments" version = "2.19.1" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/7c/2d/c3338d48ea6cc0feb8446d8e6937e1408088a72a39937982cc6111d17f84/pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f", size = 4968581 } wheels = [ { url = "https://files.pythonhosted.org/packages/8a/0b/9fcc47d19c48b59121088dd6da2488a49d5f72dacf8262e2790a1d2c7d15/pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c", size = 1225293 }, ] [[package]] name = "pyodbc" version = "5.2.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/a0/36/a1ac7d23a1611e7ccd4d27df096f3794e8d1e7faa040260d9d41b6fc3185/pyodbc-5.2.0.tar.gz", hash = "sha256:de8be39809c8ddeeee26a4b876a6463529cd487a60d1393eb2a93e9bcd44a8f5", size = 116908 } wheels = [ { url = 
"https://files.pythonhosted.org/packages/30/01/05c4a4ec122c4a8a37fa1be5bdbf6fb23724a2ee3b1b771bb46f710158a9/pyodbc-5.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eb0850e3e3782f57457feed297e220bb20c3e8fd7550d7a6b6bb96112bd9b6fe", size = 72483 }, { url = "https://files.pythonhosted.org/packages/73/22/ba718cc5508bdfbb53e1906018d7f597be37241c769dda8a48f52af96fe3/pyodbc-5.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0dae0fb86078c87acf135dbe5afd3c7d15d52ab0db5965c44159e84058c3e2fb", size = 71794 }, { url = "https://files.pythonhosted.org/packages/24/e4/9d859ea3642059c10a6644a00ccb1f8b8e02c1e4f49ab34250db1273c2c5/pyodbc-5.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6493b9c7506ca964b80ad638d0dc82869df7058255d71f04fdd1405e88bcb36b", size = 332850 }, { url = "https://files.pythonhosted.org/packages/b9/a7/98c3555c10cfeb343ec7eea69ecb17476aa3ace72131ea8a4a1f8250318c/pyodbc-5.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e04de873607fb960e71953c164c83e8e5d9291ce0d69e688e54947b254b04902", size = 336009 }, { url = "https://files.pythonhosted.org/packages/24/c1/d5b16dd62eb70f281bc90cdc1e3c46af7acda3f0f6afb34553206506ccb2/pyodbc-5.2.0-cp310-cp310-win32.whl", hash = "sha256:74135cb10c1dcdbd99fe429c61539c232140e62939fa7c69b0a373cc552e4a08", size = 62407 }, { url = "https://files.pythonhosted.org/packages/f5/12/22c83669abee4ca5915aa89172cf1673b58ca05f44dabeb8b0bac9b7fecc/pyodbc-5.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:d287121eeaa562b9ab3d4c52fa77c793dfedd127049273eb882a05d3d67a8ce8", size = 68874 }, { url = "https://files.pythonhosted.org/packages/8f/a2/5907ce319a571eb1e271d6a475920edfeacd92da1021bb2a15ed1b7f6ac1/pyodbc-5.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4627779f0a608b51ce2d2fe6d1d395384e65ca36248bf9dbb6d7cf2c8fda1cab", size = 72536 }, { url = 
"https://files.pythonhosted.org/packages/e1/b8/bd438ab2bb9481615142784b0c9778079a87ae1bca7a0fe8aabfc088aa9f/pyodbc-5.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4d997d3b6551273647825c734158ca8a6f682df269f6b3975f2499c01577ddec", size = 71825 }, { url = "https://files.pythonhosted.org/packages/8b/82/cf71ae99b511a7f20c380ce470de233a0291fa3798afa74e0adc8fad1675/pyodbc-5.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5102007a8c78dd2fc1c1b6f6147de8cfc020f81013e4b46c33e66aaa7d1bf7b1", size = 342304 }, { url = "https://files.pythonhosted.org/packages/43/ea/03fe042f4a390df05e753ddd21c6cab006baae1eee71ce230f6e2a883944/pyodbc-5.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e3cbc7075a46c411b531ada557c4aef13d034060a70077717124cabc1717e2d", size = 346186 }, { url = "https://files.pythonhosted.org/packages/f9/80/48178bb50990147adb72ec9e377e94517a0dfaf2f2a6e3fe477d9a33671f/pyodbc-5.2.0-cp311-cp311-win32.whl", hash = "sha256:de1ee7ec2eb326b7be5e2c4ce20d472c5ef1a6eb838d126d1d26779ff5486e49", size = 62418 }, { url = "https://files.pythonhosted.org/packages/7c/6b/f0ad7d8a535d58f35f375ffbf367c68d0ec54452a431d23b0ebee4cd44c6/pyodbc-5.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:113f904b9852c12f10c7a3288f5a3563ecdbbefe3ccc829074a9eb8255edcd29", size = 68871 }, { url = "https://files.pythonhosted.org/packages/26/26/104525b728fedfababd3143426b9d0008c70f0d604a3bf5d4773977d83f4/pyodbc-5.2.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:be43d1ece4f2cf4d430996689d89a1a15aeb3a8da8262527e5ced5aee27e89c3", size = 73014 }, { url = "https://files.pythonhosted.org/packages/4f/7d/bb632488b603bcd2a6753b858e8bc7dd56146dd19bd72003cc09ae6e3fc0/pyodbc-5.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9f7badd0055221a744d76c11440c0856fd2846ed53b6555cf8f0a8893a3e4b03", size = 72515 }, { url = 
"https://files.pythonhosted.org/packages/ab/38/a1b9bfe5a7062672268553c2d6ff93676173b0fb4bd583e8c4f74a0e296f/pyodbc-5.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad633c52f4f4e7691daaa2278d6e6ebb2fe4ae7709e610e22c7dd1a1d620cf8b", size = 348561 }, { url = "https://files.pythonhosted.org/packages/71/82/ddb1c41c682550116f391aa6cab2052910046a30d63014bbe6d09c4958f4/pyodbc-5.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97d086a8f7a302b74c9c2e77bedf954a603b19168af900d4d3a97322e773df63", size = 353962 }, { url = "https://files.pythonhosted.org/packages/e5/29/fec0e739d0c1cab155843ed71d0717f5e1694effe3f28d397168f48bcd92/pyodbc-5.2.0-cp312-cp312-win32.whl", hash = "sha256:0e4412f8e608db2a4be5bcc75f9581f386ed6a427dbcb5eac795049ba6fc205e", size = 63050 }, { url = "https://files.pythonhosted.org/packages/21/7f/3a47e022a97b017ffb73351a1061e4401bcb5aa4fc0162d04f4e5452e4fc/pyodbc-5.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:b1f5686b142759c5b2bdbeaa0692622c2ebb1f10780eb3c174b85f5607fbcf55", size = 69485 }, { url = "https://files.pythonhosted.org/packages/90/be/e5f8022ec57a7ea6aa3717a3f307a44c3b012fce7ad6ec91aad3e2a56978/pyodbc-5.2.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:26844d780045bbc3514d5c2f0d89e7fda7df7db0bd24292eb6902046f5730885", size = 72982 }, { url = "https://files.pythonhosted.org/packages/5c/0e/71111e4f53936b0b99731d9b6acfc8fc95660533a1421447a63d6e519112/pyodbc-5.2.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:26d2d8fd53b71204c755abc53b0379df4e23fd9a40faf211e1cb87e8a32470f0", size = 72515 }, { url = "https://files.pythonhosted.org/packages/a5/09/3c06bbc1ebb9ae15f53cefe10774809b67da643883287ba1c44ba053816a/pyodbc-5.2.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a27996b6d27e275dfb5fe8a34087ba1cacadfd1439e636874ef675faea5149d9", size = 347470 }, { url = 
"https://files.pythonhosted.org/packages/a4/35/1c7efd4665e7983169d20175014f68578e0edfcbc4602b0bafcefa522c4a/pyodbc-5.2.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaf42c4bd323b8fd01f1cd900cca2d09232155f9b8f0b9bcd0be66763588ce64", size = 353025 }, { url = "https://files.pythonhosted.org/packages/6d/c9/736d07fa33572abdc50d858fd9e527d2c8281f3acbb90dff4999a3662edd/pyodbc-5.2.0-cp313-cp313-win32.whl", hash = "sha256:207f16b7e9bf09c591616429ebf2b47127e879aad21167ac15158910dc9bbcda", size = 63052 }, { url = "https://files.pythonhosted.org/packages/73/2a/3219c8b7fa3788fc9f27b5fc2244017223cf070e5ab370f71c519adf9120/pyodbc-5.2.0-cp313-cp313-win_amd64.whl", hash = "sha256:96d3127f28c0dacf18da7ae009cd48eac532d3dcc718a334b86a3c65f6a5ef5c", size = 69486 }, { url = "https://files.pythonhosted.org/packages/7c/1a/bec4dd9f65a7c0c1a75641119351f0f402c721bbcea3c4eb684868259467/pyodbc-5.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9e8f4ee2c523bbe85124540ffad62a3b62ae481f012e390ef93e0602b6302e5e", size = 72440 }, { url = "https://files.pythonhosted.org/packages/df/2f/62cce82e4547dc8c4ac3174403e24ed31bdb10bb69ad30e1bb362b960877/pyodbc-5.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:057b8ede91b21d9f0ef58210d1ca1aad704e641ca68ac6b02f109d86b61d7402", size = 71902 }, { url = "https://files.pythonhosted.org/packages/f0/1a/54d9595f0471c15b1de4766ec3436763aeef980740d484d629afa778c506/pyodbc-5.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f0ecbc7067467df95c9b8bd38fb2682c4a13a3402d77dccaddf1e145cea8cc0", size = 329596 }, { url = "https://files.pythonhosted.org/packages/2d/3a/88bc3bb8c15aefaf98bfadd51dae2fe492486daeb04911d8cf0a6d8dd884/pyodbc-5.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26b7f8324fa01c09fe4843ad8adb0b131299ef263a1fb9e63830c9cd1d5c45e4", size = 333575 }, { url = 
"https://files.pythonhosted.org/packages/60/75/aedf6d10f66b22302dc3f0181cbef0cc5789f2c2a658343f10ae72f51190/pyodbc-5.2.0-cp39-cp39-win32.whl", hash = "sha256:600ef6f562f609f5612ffaa8a93827249150aa3030c867937c87b24a1608967e", size = 62379 }, { url = "https://files.pythonhosted.org/packages/d9/9c/b1e367b07904a52f22b8707979bcbda1b5a6056c46e67e0a66241d8138aa/pyodbc-5.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:b77556349746fb90416a48bd114cd7323f7e2559a4b263dada935f9b406ba59b", size = 68951 }, ] [[package]] name = "pyright" version = "1.1.397" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "nodeenv" }, { name = "typing-extensions" }, ] sdist = { url = "https://files.pythonhosted.org/packages/92/23/cefa10c9cb198e0858ed0b9233371d62bca880337f628e58f50dfdfb12f0/pyright-1.1.397.tar.gz", hash = "sha256:07530fd65a449e4b0b28dceef14be0d8e0995a7a5b1bb2f3f897c3e548451ce3", size = 3818998 } wheels = [ { url = "https://files.pythonhosted.org/packages/01/b5/98ec41e1e0ad5576ecd42c90ec363560f7b389a441722ea3c7207682dec7/pyright-1.1.397-py3-none-any.whl", hash = "sha256:2e93fba776e714a82b085d68f8345b01f91ba43e1ab9d513e79b70fc85906257", size = 5693631 }, ] [[package]] name = "pytest" version = "8.3.5" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "colorama", marker = "sys_platform == 'win32'" }, { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, { name = "iniconfig" }, { name = "packaging" }, { name = "pluggy" }, { name = "tomli", marker = "python_full_version < '3.11'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/ae/3c/c9d525a414d506893f0cd8a8d0de7706446213181570cdbd766691164e40/pytest-8.3.5.tar.gz", hash = "sha256:f4efe70cc14e511565ac476b57c279e12a855b11f48f212af1080ef2263d3845", size = 1450891 } wheels = [ { url = "https://files.pythonhosted.org/packages/30/3d/64ad57c803f1fa1e963a7946b6e0fea4a70df53c1a7fed304586539c2bac/pytest-8.3.5-py3-none-any.whl", hash = 
"sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820", size = 343634 }, ] [[package]] name = "pytest-asyncio" version = "0.25.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pytest" }, ] sdist = { url = "https://files.pythonhosted.org/packages/f2/a8/ecbc8ede70921dd2f544ab1cadd3ff3bf842af27f87bbdea774c7baa1d38/pytest_asyncio-0.25.3.tar.gz", hash = "sha256:fc1da2cf9f125ada7e710b4ddad05518d4cee187ae9412e9ac9271003497f07a", size = 54239 } wheels = [ { url = "https://files.pythonhosted.org/packages/67/17/3493c5624e48fd97156ebaec380dcaafee9506d7e2c46218ceebbb57d7de/pytest_asyncio-0.25.3-py3-none-any.whl", hash = "sha256:9e89518e0f9bd08928f97a3482fdc4e244df17529460bc038291ccaf8f85c7c3", size = 19467 }, ] [[package]] name = "pytest-click" version = "1.1.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "click" }, { name = "pytest" }, ] sdist = { url = "https://files.pythonhosted.org/packages/ec/ec/bca3cd29ba2b025ae41666b851f6ff05fb77cb4c13719baaeda6a757772a/pytest_click-1.1.0.tar.gz", hash = "sha256:fdd9f6721f877dda021e7c5dc73e70aecd37e5ed23ec6820f8a7b3fd7b4f8d30", size = 5054 } wheels = [ { url = "https://files.pythonhosted.org/packages/72/1a/eb53371999b94b3c995c00117f3a232dbf6f56c7152a52cf3e3777e7d49d/pytest_click-1.1.0-py3-none-any.whl", hash = "sha256:eade4742c2f02c345e78a32534a43e8db04acf98d415090539dacc880b7cd0e9", size = 4110 }, ] [[package]] name = "pytest-cov" version = "6.0.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "coverage", extra = ["toml"] }, { name = "pytest" }, ] sdist = { url = "https://files.pythonhosted.org/packages/be/45/9b538de8cef30e17c7b45ef42f538a94889ed6a16f2387a6c89e73220651/pytest-cov-6.0.0.tar.gz", hash = "sha256:fde0b595ca248bb8e2d76f020b465f3b107c9632e6a1d1705f17834c89dcadc0", size = 66945 } wheels = [ { url = 
"https://files.pythonhosted.org/packages/36/3b/48e79f2cd6a61dbbd4807b4ed46cb564b4fd50a76166b1c4ea5c1d9e2371/pytest_cov-6.0.0-py3-none-any.whl", hash = "sha256:eee6f1b9e61008bd34975a4d5bab25801eb31898b032dd55addc93e96fcaaa35", size = 22949 }, ] [[package]] name = "pytest-databases" version = "0.11.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "docker" }, { name = "filelock" }, { name = "pytest" }, ] sdist = { url = "https://files.pythonhosted.org/packages/47/d4/42ceccede9b34f266549a0fabdb8b754db84603844a2878082996bfbf8e4/pytest_databases-0.11.1.tar.gz", hash = "sha256:03e7f44b272d369fcf393da155debd4f2f989d3c1e50871a7360825834bee80d", size = 183451 } wheels = [ { url = "https://files.pythonhosted.org/packages/f6/d6/35be903fbab9ee61b1ac546b36792a66fadd911ab47436312a53ff575a8a/pytest_databases-0.11.1-py3-none-any.whl", hash = "sha256:a1f06ef0c7a602c50b7824bdbca7f8155dac34e890dfcc9a9fc945b619048b77", size = 25792 }, ] [[package]] name = "pytest-lazy-fixtures" version = "1.1.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pytest" }, ] sdist = { url = "https://files.pythonhosted.org/packages/11/b9/81537997a5f887f8b9a379df0925ca53d7718bcbb7d1c754bd9699499ec4/pytest_lazy_fixtures-1.1.2.tar.gz", hash = "sha256:14ba4154dab52066c1c4d6547cc40bbf60726445cb16f1d2dcc9485cdf38964d", size = 7231 } wheels = [ { url = "https://files.pythonhosted.org/packages/f8/c8/280dac9595059c3703f6c0aff7bfd7f0c6b39cdafd437bf3c37047193d6a/pytest_lazy_fixtures-1.1.2-py3-none-any.whl", hash = "sha256:e04c164d2caa5a6b9c846fa7559aad2ce0980a944daf4b75579f39920ae961e4", size = 7012 }, ] [[package]] name = "pytest-mock" version = "3.14.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pytest" }, ] sdist = { url = "https://files.pythonhosted.org/packages/c6/90/a955c3ab35ccd41ad4de556596fa86685bf4fc5ffcc62d22d856cfd4e29a/pytest-mock-3.14.0.tar.gz", hash = 
"sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0", size = 32814 } wheels = [ { url = "https://files.pythonhosted.org/packages/f2/3b/b26f90f74e2986a82df6e7ac7e319b8ea7ccece1caec9f8ab6104dc70603/pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f", size = 9863 }, ] [[package]] name = "pytest-rerunfailures" version = "15.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "packaging" }, { name = "pytest" }, ] sdist = { url = "https://files.pythonhosted.org/packages/26/47/ec4e12f45f4b9fac027a41ccaabb353ed4f23695aae860258ba11a84ed9b/pytest-rerunfailures-15.0.tar.gz", hash = "sha256:2d9ac7baf59f4c13ac730b47f6fa80e755d1ba0581da45ce30b72fb3542b4474", size = 21816 } wheels = [ { url = "https://files.pythonhosted.org/packages/89/37/54e5ffc7c0cebee7cf30a3ac5915faa7e7abf8bdfdf3228c277f7c192489/pytest_rerunfailures-15.0-py3-none-any.whl", hash = "sha256:dd150c4795c229ef44320adc9a0c0532c51b78bb7a6843a8c53556b9a611df1a", size = 13017 }, ] [[package]] name = "pytest-sugar" version = "1.0.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "packaging" }, { name = "pytest" }, { name = "termcolor" }, ] sdist = { url = "https://files.pythonhosted.org/packages/f5/ac/5754f5edd6d508bc6493bc37d74b928f102a5fff82d9a80347e180998f08/pytest-sugar-1.0.0.tar.gz", hash = "sha256:6422e83258f5b0c04ce7c632176c7732cab5fdb909cb39cca5c9139f81276c0a", size = 14992 } wheels = [ { url = "https://files.pythonhosted.org/packages/92/fb/889f1b69da2f13691de09a111c16c4766a433382d44aa0ecf221deded44a/pytest_sugar-1.0.0-py3-none-any.whl", hash = "sha256:70ebcd8fc5795dc457ff8b69d266a4e2e8a74ae0c3edc749381c64b5246c8dfd", size = 10171 }, ] [[package]] name = "pytest-xdist" version = "3.6.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "execnet" }, { name = "pytest" }, ] sdist = { url = 
"https://files.pythonhosted.org/packages/41/c4/3c310a19bc1f1e9ef50075582652673ef2bfc8cd62afef9585683821902f/pytest_xdist-3.6.1.tar.gz", hash = "sha256:ead156a4db231eec769737f57668ef58a2084a34b2e55c4a8fa20d861107300d", size = 84060 } wheels = [ { url = "https://files.pythonhosted.org/packages/6d/82/1d96bf03ee4c0fdc3c0cbe61470070e659ca78dc0086fb88b66c185e2449/pytest_xdist-3.6.1-py3-none-any.whl", hash = "sha256:9ed4adfb68a016610848639bb7e02c9352d5d9f03d04809919e2dafc3be4cca7", size = 46108 }, ] [[package]] name = "python-dateutil" version = "2.9.0.post0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "six" }, ] sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432 } wheels = [ { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892 }, ] [[package]] name = "python-dotenv" version = "1.0.1" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/bc/57/e84d88dfe0aec03b7a2d4327012c1627ab5f03652216c63d49846d7a6c58/python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca", size = 39115 } wheels = [ { url = "https://files.pythonhosted.org/packages/6a/3e/b68c118422ec867fa7ab88444e1274aa40681c606d59ac27de5a5588f082/python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a", size = 19863 }, ] [[package]] name = "python-multipart" version = "0.0.20" source = { registry = "https://pypi.org/simple" } sdist = { url = 
"https://files.pythonhosted.org/packages/f3/87/f44d7c9f274c7ee665a29b885ec97089ec5dc034c7f3fafa03da9e39a09e/python_multipart-0.0.20.tar.gz", hash = "sha256:8dd0cab45b8e23064ae09147625994d090fa46f5b0d1e13af944c331a7fa9d13", size = 37158 } wheels = [ { url = "https://files.pythonhosted.org/packages/45/58/38b5afbc1a800eeea951b9285d3912613f2603bdf897a4ab0f4bd7f405fc/python_multipart-0.0.20-py3-none-any.whl", hash = "sha256:8a62d3a8335e06589fe01f2a3e178cdcc632f3fbe0d492ad9ee0ec35aab1f104", size = 24546 }, ] [[package]] name = "pytz" version = "2025.1" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/5f/57/df1c9157c8d5a05117e455d66fd7cf6dbc46974f832b1058ed4856785d8a/pytz-2025.1.tar.gz", hash = "sha256:c2db42be2a2518b28e65f9207c4d05e6ff547d1efa4086469ef855e4ab70178e", size = 319617 } wheels = [ { url = "https://files.pythonhosted.org/packages/eb/38/ac33370d784287baa1c3d538978b5e2ea064d4c1b93ffbd12826c190dd10/pytz-2025.1-py2.py3-none-any.whl", hash = "sha256:89dd22dca55b46eac6eda23b2d72721bf1bdfef212645d81513ef5d03038de57", size = 507930 }, ] [[package]] name = "pywin32" version = "310" source = { registry = "https://pypi.org/simple" } wheels = [ { url = "https://files.pythonhosted.org/packages/95/da/a5f38fffbba2fb99aa4aa905480ac4b8e83ca486659ac8c95bce47fb5276/pywin32-310-cp310-cp310-win32.whl", hash = "sha256:6dd97011efc8bf51d6793a82292419eba2c71cf8e7250cfac03bba284454abc1", size = 8848240 }, { url = "https://files.pythonhosted.org/packages/aa/fe/d873a773324fa565619ba555a82c9dabd677301720f3660a731a5d07e49a/pywin32-310-cp310-cp310-win_amd64.whl", hash = "sha256:c3e78706e4229b915a0821941a84e7ef420bf2b77e08c9dae3c76fd03fd2ae3d", size = 9601854 }, { url = "https://files.pythonhosted.org/packages/3c/84/1a8e3d7a15490d28a5d816efa229ecb4999cdc51a7c30dd8914f669093b8/pywin32-310-cp310-cp310-win_arm64.whl", hash = "sha256:33babed0cf0c92a6f94cc6cc13546ab24ee13e3e800e61ed87609ab91e4c8213", size = 8522963 }, { url = 
"https://files.pythonhosted.org/packages/f7/b1/68aa2986129fb1011dabbe95f0136f44509afaf072b12b8f815905a39f33/pywin32-310-cp311-cp311-win32.whl", hash = "sha256:1e765f9564e83011a63321bb9d27ec456a0ed90d3732c4b2e312b855365ed8bd", size = 8784284 }, { url = "https://files.pythonhosted.org/packages/b3/bd/d1592635992dd8db5bb8ace0551bc3a769de1ac8850200cfa517e72739fb/pywin32-310-cp311-cp311-win_amd64.whl", hash = "sha256:126298077a9d7c95c53823934f000599f66ec9296b09167810eb24875f32689c", size = 9520748 }, { url = "https://files.pythonhosted.org/packages/90/b1/ac8b1ffce6603849eb45a91cf126c0fa5431f186c2e768bf56889c46f51c/pywin32-310-cp311-cp311-win_arm64.whl", hash = "sha256:19ec5fc9b1d51c4350be7bb00760ffce46e6c95eaf2f0b2f1150657b1a43c582", size = 8455941 }, { url = "https://files.pythonhosted.org/packages/6b/ec/4fdbe47932f671d6e348474ea35ed94227fb5df56a7c30cbbb42cd396ed0/pywin32-310-cp312-cp312-win32.whl", hash = "sha256:8a75a5cc3893e83a108c05d82198880704c44bbaee4d06e442e471d3c9ea4f3d", size = 8796239 }, { url = "https://files.pythonhosted.org/packages/e3/e5/b0627f8bb84e06991bea89ad8153a9e50ace40b2e1195d68e9dff6b03d0f/pywin32-310-cp312-cp312-win_amd64.whl", hash = "sha256:bf5c397c9a9a19a6f62f3fb821fbf36cac08f03770056711f765ec1503972060", size = 9503839 }, { url = "https://files.pythonhosted.org/packages/1f/32/9ccf53748df72301a89713936645a664ec001abd35ecc8578beda593d37d/pywin32-310-cp312-cp312-win_arm64.whl", hash = "sha256:2349cc906eae872d0663d4d6290d13b90621eaf78964bb1578632ff20e152966", size = 8459470 }, { url = "https://files.pythonhosted.org/packages/1c/09/9c1b978ffc4ae53999e89c19c77ba882d9fce476729f23ef55211ea1c034/pywin32-310-cp313-cp313-win32.whl", hash = "sha256:5d241a659c496ada3253cd01cfaa779b048e90ce4b2b38cd44168ad555ce74ab", size = 8794384 }, { url = "https://files.pythonhosted.org/packages/45/3c/b4640f740ffebadd5d34df35fecba0e1cfef8fde9f3e594df91c28ad9b50/pywin32-310-cp313-cp313-win_amd64.whl", hash = 
"sha256:667827eb3a90208ddbdcc9e860c81bde63a135710e21e4cb3348968e4bd5249e", size = 9503039 }, { url = "https://files.pythonhosted.org/packages/b4/f4/f785020090fb050e7fb6d34b780f2231f302609dc964672f72bfaeb59a28/pywin32-310-cp313-cp313-win_arm64.whl", hash = "sha256:e308f831de771482b7cf692a1f308f8fca701b2d8f9dde6cc440c7da17e47b33", size = 8458152 }, { url = "https://files.pythonhosted.org/packages/a2/cd/d09d434630edb6a0c44ad5079611279a67530296cfe0451e003de7f449ff/pywin32-310-cp39-cp39-win32.whl", hash = "sha256:851c8d927af0d879221e616ae1f66145253537bbdd321a77e8ef701b443a9a1a", size = 8848099 }, { url = "https://files.pythonhosted.org/packages/93/ff/2a8c10315ffbdee7b3883ac0d1667e267ca8b3f6f640d81d43b87a82c0c7/pywin32-310-cp39-cp39-win_amd64.whl", hash = "sha256:96867217335559ac619f00ad70e513c0fcf84b8a3af9fc2bba3b59b97da70475", size = 9602031 }, ] [[package]] name = "pyyaml" version = "6.0.2" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631 } wheels = [ { url = "https://files.pythonhosted.org/packages/9b/95/a3fac87cb7158e231b5a6012e438c647e1a87f09f8e0d123acec8ab8bf71/PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086", size = 184199 }, { url = "https://files.pythonhosted.org/packages/c7/7a/68bd47624dab8fd4afbfd3c48e3b79efe09098ae941de5b58abcbadff5cb/PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf", size = 171758 }, { url = "https://files.pythonhosted.org/packages/49/ee/14c54df452143b9ee9f0f29074d7ca5516a36edb0b4cc40c3f280131656f/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237", size = 718463 }, { url = "https://files.pythonhosted.org/packages/4d/61/de363a97476e766574650d742205be468921a7b532aa2499fcd886b62530/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b", size = 719280 }, { url = "https://files.pythonhosted.org/packages/6b/4e/1523cb902fd98355e2e9ea5e5eb237cbc5f3ad5f3075fa65087aa0ecb669/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed", size = 751239 }, { url = "https://files.pythonhosted.org/packages/b7/33/5504b3a9a4464893c32f118a9cc045190a91637b119a9c881da1cf6b7a72/PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180", size = 695802 }, { url = "https://files.pythonhosted.org/packages/5c/20/8347dcabd41ef3a3cdc4f7b7a2aff3d06598c8779faa189cdbf878b626a4/PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68", size = 720527 }, { url = "https://files.pythonhosted.org/packages/be/aa/5afe99233fb360d0ff37377145a949ae258aaab831bde4792b32650a4378/PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99", size = 144052 }, { url = "https://files.pythonhosted.org/packages/b5/84/0fa4b06f6d6c958d207620fc60005e241ecedceee58931bb20138e1e5776/PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e", size = 161774 }, { url = "https://files.pythonhosted.org/packages/f8/aa/7af4e81f7acba21a4c6be026da38fd2b872ca46226673c89a758ebdc4fd2/PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774", size = 184612 }, { url = 
"https://files.pythonhosted.org/packages/8b/62/b9faa998fd185f65c1371643678e4d58254add437edb764a08c5a98fb986/PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee", size = 172040 }, { url = "https://files.pythonhosted.org/packages/ad/0c/c804f5f922a9a6563bab712d8dcc70251e8af811fce4524d57c2c0fd49a4/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c", size = 736829 }, { url = "https://files.pythonhosted.org/packages/51/16/6af8d6a6b210c8e54f1406a6b9481febf9c64a3109c541567e35a49aa2e7/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317", size = 764167 }, { url = "https://files.pythonhosted.org/packages/75/e4/2c27590dfc9992f73aabbeb9241ae20220bd9452df27483b6e56d3975cc5/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85", size = 762952 }, { url = "https://files.pythonhosted.org/packages/9b/97/ecc1abf4a823f5ac61941a9c00fe501b02ac3ab0e373c3857f7d4b83e2b6/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4", size = 735301 }, { url = "https://files.pythonhosted.org/packages/45/73/0f49dacd6e82c9430e46f4a027baa4ca205e8b0a9dce1397f44edc23559d/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e", size = 756638 }, { url = "https://files.pythonhosted.org/packages/22/5f/956f0f9fc65223a58fbc14459bf34b4cc48dec52e00535c79b8db361aabd/PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5", size = 143850 }, { url = 
"https://files.pythonhosted.org/packages/ed/23/8da0bbe2ab9dcdd11f4f4557ccaf95c10b9811b13ecced089d43ce59c3c8/PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44", size = 161980 }, { url = "https://files.pythonhosted.org/packages/86/0c/c581167fc46d6d6d7ddcfb8c843a4de25bdd27e4466938109ca68492292c/PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab", size = 183873 }, { url = "https://files.pythonhosted.org/packages/a8/0c/38374f5bb272c051e2a69281d71cba6fdb983413e6758b84482905e29a5d/PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725", size = 173302 }, { url = "https://files.pythonhosted.org/packages/c3/93/9916574aa8c00aa06bbac729972eb1071d002b8e158bd0e83a3b9a20a1f7/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5", size = 739154 }, { url = "https://files.pythonhosted.org/packages/95/0f/b8938f1cbd09739c6da569d172531567dbcc9789e0029aa070856f123984/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425", size = 766223 }, { url = "https://files.pythonhosted.org/packages/b9/2b/614b4752f2e127db5cc206abc23a8c19678e92b23c3db30fc86ab731d3bd/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476", size = 767542 }, { url = "https://files.pythonhosted.org/packages/d4/00/dd137d5bcc7efea1836d6264f049359861cf548469d18da90cd8216cf05f/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48", size = 731164 }, { url = 
"https://files.pythonhosted.org/packages/c9/1f/4f998c900485e5c0ef43838363ba4a9723ac0ad73a9dc42068b12aaba4e4/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b", size = 756611 }, { url = "https://files.pythonhosted.org/packages/df/d1/f5a275fdb252768b7a11ec63585bc38d0e87c9e05668a139fea92b80634c/PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4", size = 140591 }, { url = "https://files.pythonhosted.org/packages/0c/e8/4f648c598b17c3d06e8753d7d13d57542b30d56e6c2dedf9c331ae56312e/PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8", size = 156338 }, { url = "https://files.pythonhosted.org/packages/ef/e3/3af305b830494fa85d95f6d95ef7fa73f2ee1cc8ef5b495c7c3269fb835f/PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba", size = 181309 }, { url = "https://files.pythonhosted.org/packages/45/9f/3b1c20a0b7a3200524eb0076cc027a970d320bd3a6592873c85c92a08731/PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1", size = 171679 }, { url = "https://files.pythonhosted.org/packages/7c/9a/337322f27005c33bcb656c655fa78325b730324c78620e8328ae28b64d0c/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133", size = 733428 }, { url = "https://files.pythonhosted.org/packages/a3/69/864fbe19e6c18ea3cc196cbe5d392175b4cf3d5d0ac1403ec3f2d237ebb5/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484", size = 763361 }, { url = 
"https://files.pythonhosted.org/packages/04/24/b7721e4845c2f162d26f50521b825fb061bc0a5afcf9a386840f23ea19fa/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5", size = 759523 }, { url = "https://files.pythonhosted.org/packages/2b/b2/e3234f59ba06559c6ff63c4e10baea10e5e7df868092bf9ab40e5b9c56b6/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc", size = 726660 }, { url = "https://files.pythonhosted.org/packages/fe/0f/25911a9f080464c59fab9027482f822b86bf0608957a5fcc6eaac85aa515/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652", size = 751597 }, { url = "https://files.pythonhosted.org/packages/14/0d/e2c3b43bbce3cf6bd97c840b46088a3031085179e596d4929729d8d68270/PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183", size = 140527 }, { url = "https://files.pythonhosted.org/packages/fa/de/02b54f42487e3d3c6efb3f89428677074ca7bf43aae402517bc7cca949f3/PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563", size = 156446 }, { url = "https://files.pythonhosted.org/packages/65/d8/b7a1db13636d7fb7d4ff431593c510c8b8fca920ade06ca8ef20015493c5/PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d", size = 184777 }, { url = "https://files.pythonhosted.org/packages/0a/02/6ec546cd45143fdf9840b2c6be8d875116a64076218b61d68e12548e5839/PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f", size = 172318 }, { url = 
"https://files.pythonhosted.org/packages/0e/9a/8cc68be846c972bda34f6c2a93abb644fb2476f4dcc924d52175786932c9/PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290", size = 720891 }, { url = "https://files.pythonhosted.org/packages/e9/6c/6e1b7f40181bc4805e2e07f4abc10a88ce4648e7e95ff1abe4ae4014a9b2/PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12", size = 722614 }, { url = "https://files.pythonhosted.org/packages/3d/32/e7bd8535d22ea2874cef6a81021ba019474ace0d13a4819c2a4bce79bd6a/PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19", size = 737360 }, { url = "https://files.pythonhosted.org/packages/d7/12/7322c1e30b9be969670b672573d45479edef72c9a0deac3bb2868f5d7469/PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e", size = 699006 }, { url = "https://files.pythonhosted.org/packages/82/72/04fcad41ca56491995076630c3ec1e834be241664c0c09a64c9a2589b507/PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725", size = 723577 }, { url = "https://files.pythonhosted.org/packages/ed/5e/46168b1f2757f1fcd442bc3029cd8767d88a98c9c05770d8b420948743bb/PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631", size = 144593 }, { url = "https://files.pythonhosted.org/packages/19/87/5124b1c1f2412bb95c59ec481eaf936cd32f0fe2a7b16b97b81c4c017a6a/PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8", size = 162312 }, ] [[package]] name = "questionary" version = "2.1.0" source = { registry = "https://pypi.org/simple" } 
dependencies = [ { name = "prompt-toolkit" }, ] sdist = { url = "https://files.pythonhosted.org/packages/a8/b8/d16eb579277f3de9e56e5ad25280fab52fc5774117fb70362e8c2e016559/questionary-2.1.0.tar.gz", hash = "sha256:6302cdd645b19667d8f6e6634774e9538bfcd1aad9be287e743d96cacaf95587", size = 26775 } wheels = [ { url = "https://files.pythonhosted.org/packages/ad/3f/11dd4cd4f39e05128bfd20138faea57bec56f9ffba6185d276e3107ba5b2/questionary-2.1.0-py3-none-any.whl", hash = "sha256:44174d237b68bc828e4878c763a9ad6790ee61990e0ae72927694ead57bab8ec", size = 36747 }, ] [[package]] name = "requests" version = "2.32.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "certifi" }, { name = "charset-normalizer" }, { name = "idna" }, { name = "urllib3" }, ] sdist = { url = "https://files.pythonhosted.org/packages/63/70/2bf7780ad2d390a8d301ad0b550f1581eadbd9a20f896afe06353c2a2913/requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760", size = 131218 } wheels = [ { url = "https://files.pythonhosted.org/packages/f9/9b/335f9764261e915ed497fcdeb11df5dfd6f7bf257d4a6a2a686d80da4d54/requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6", size = 64928 }, ] [[package]] name = "rich" version = "13.9.4" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "markdown-it-py" }, { name = "pygments" }, { name = "typing-extensions", marker = "python_full_version < '3.11'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/ab/3a/0316b28d0761c6734d6bc14e770d85506c986c85ffb239e688eeaab2c2bc/rich-13.9.4.tar.gz", hash = "sha256:439594978a49a09530cff7ebc4b5c7103ef57baf48d5ea3184f21d9a2befa098", size = 223149 } wheels = [ { url = "https://files.pythonhosted.org/packages/19/71/39c7c0d87f8d4e6c020a393182060eaefeeae6c01dab6a84ec346f2567df/rich-13.9.4-py3-none-any.whl", hash = 
"sha256:6049d5e6ec054bf2779ab3358186963bac2ea89175919d699e378b99738c2a90", size = 242424 }, ] [[package]] name = "rich-click" version = "1.8.8" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "click" }, { name = "rich" }, { name = "typing-extensions" }, ] sdist = { url = "https://files.pythonhosted.org/packages/a6/7a/4b78c5997f2a799a8c5c07f3b2145bbcda40115c4d35c76fbadd418a3c89/rich_click-1.8.8.tar.gz", hash = "sha256:547c618dea916620af05d4a6456da797fbde904c97901f44d2f32f89d85d6c84", size = 39066 } wheels = [ { url = "https://files.pythonhosted.org/packages/fa/69/963f0bf44a654f6465bdb66fb5a91051b0d7af9f742b5bd7202607165036/rich_click-1.8.8-py3-none-any.whl", hash = "sha256:205aabd5a98e64ab2c105dee9e368be27480ba004c7dfa2accd0ed44f9f1550e", size = 35747 }, ] [[package]] name = "rich-toolkit" version = "0.13.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "click" }, { name = "rich" }, { name = "typing-extensions" }, ] sdist = { url = "https://files.pythonhosted.org/packages/5b/8a/71cfbf6bf6257ea785d1f030c22468f763eea1b3e5417620f2ba9abd6dca/rich_toolkit-0.13.2.tar.gz", hash = "sha256:fea92557530de7c28f121cbed572ad93d9e0ddc60c3ca643f1b831f2f56b95d3", size = 72288 } wheels = [ { url = "https://files.pythonhosted.org/packages/7e/1b/1c2f43af46456050b27810a7a013af8a7e12bc545a0cdc00eb0df55eb769/rich_toolkit-0.13.2-py3-none-any.whl", hash = "sha256:f3f6c583e5283298a2f7dbd3c65aca18b7f818ad96174113ab5bec0b0e35ed61", size = 13566 }, ] [[package]] name = "roman-numerals-py" version = "3.1.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/30/76/48fd56d17c5bdbdf65609abbc67288728a98ed4c02919428d4f52d23b24b/roman_numerals_py-3.1.0.tar.gz", hash = "sha256:be4bf804f083a4ce001b5eb7e3c0862479d10f94c936f6c4e5f250aa5ff5bd2d", size = 9017 } wheels = [ { url = 
"https://files.pythonhosted.org/packages/53/97/d2cbbaa10c9b826af0e10fdf836e1bf344d9f0abb873ebc34d1f49642d3f/roman_numerals_py-3.1.0-py3-none-any.whl", hash = "sha256:9da2ad2fb670bcf24e81070ceb3be72f6c11c440d73bd579fbeca1e9f330954c", size = 7742 }, ] [[package]] name = "rsa" version = "4.9" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pyasn1" }, ] sdist = { url = "https://files.pythonhosted.org/packages/aa/65/7d973b89c4d2351d7fb232c2e452547ddfa243e93131e7cfa766da627b52/rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21", size = 29711 } wheels = [ { url = "https://files.pythonhosted.org/packages/49/97/fa78e3d2f65c02c8e1268b9aba606569fe97f6c8f7c2d74394553347c145/rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7", size = 34315 }, ] [[package]] name = "ruamel-yaml" version = "0.18.10" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "ruamel-yaml-clib", marker = "python_full_version < '3.13' and platform_python_implementation == 'CPython'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/ea/46/f44d8be06b85bc7c4d8c95d658be2b68f27711f279bf9dd0612a5e4794f5/ruamel.yaml-0.18.10.tar.gz", hash = "sha256:20c86ab29ac2153f80a428e1254a8adf686d3383df04490514ca3b79a362db58", size = 143447 } wheels = [ { url = "https://files.pythonhosted.org/packages/c2/36/dfc1ebc0081e6d39924a2cc53654497f967a084a436bb64402dfce4254d9/ruamel.yaml-0.18.10-py3-none-any.whl", hash = "sha256:30f22513ab2301b3d2b577adc121c6471f28734d3d9728581245f1e76468b4f1", size = 117729 }, ] [[package]] name = "ruamel-yaml-clib" version = "0.2.12" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/20/84/80203abff8ea4993a87d823a5f632e4d92831ef75d404c9fc78d0176d2b5/ruamel.yaml.clib-0.2.12.tar.gz", hash = "sha256:6c8fbb13ec503f99a91901ab46e0b07ae7941cd527393187039aec586fdfd36f", size = 225315 } 
wheels = [ { url = "https://files.pythonhosted.org/packages/70/57/40a958e863e299f0c74ef32a3bde9f2d1ea8d69669368c0c502a0997f57f/ruamel.yaml.clib-0.2.12-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:11f891336688faf5156a36293a9c362bdc7c88f03a8a027c2c1d8e0bcde998e5", size = 131301 }, { url = "https://files.pythonhosted.org/packages/98/a8/29a3eb437b12b95f50a6bcc3d7d7214301c6c529d8fdc227247fa84162b5/ruamel.yaml.clib-0.2.12-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:a606ef75a60ecf3d924613892cc603b154178ee25abb3055db5062da811fd969", size = 633728 }, { url = "https://files.pythonhosted.org/packages/35/6d/ae05a87a3ad540259c3ad88d71275cbd1c0f2d30ae04c65dcbfb6dcd4b9f/ruamel.yaml.clib-0.2.12-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd5415dded15c3822597455bc02bcd66e81ef8b7a48cb71a33628fc9fdde39df", size = 722230 }, { url = "https://files.pythonhosted.org/packages/7f/b7/20c6f3c0b656fe609675d69bc135c03aac9e3865912444be6339207b6648/ruamel.yaml.clib-0.2.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f66efbc1caa63c088dead1c4170d148eabc9b80d95fb75b6c92ac0aad2437d76", size = 686712 }, { url = "https://files.pythonhosted.org/packages/cd/11/d12dbf683471f888d354dac59593873c2b45feb193c5e3e0f2ebf85e68b9/ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:22353049ba4181685023b25b5b51a574bce33e7f51c759371a7422dcae5402a6", size = 663936 }, { url = "https://files.pythonhosted.org/packages/72/14/4c268f5077db5c83f743ee1daeb236269fa8577133a5cfa49f8b382baf13/ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:932205970b9f9991b34f55136be327501903f7c66830e9760a8ffb15b07f05cd", size = 696580 }, { url = "https://files.pythonhosted.org/packages/30/fc/8cd12f189c6405a4c1cf37bd633aa740a9538c8e40497c231072d0fef5cf/ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_2_aarch64.whl", hash = 
"sha256:a52d48f4e7bf9005e8f0a89209bf9a73f7190ddf0489eee5eb51377385f59f2a", size = 663393 }, { url = "https://files.pythonhosted.org/packages/80/29/c0a017b704aaf3cbf704989785cd9c5d5b8ccec2dae6ac0c53833c84e677/ruamel.yaml.clib-0.2.12-cp310-cp310-win32.whl", hash = "sha256:3eac5a91891ceb88138c113f9db04f3cebdae277f5d44eaa3651a4f573e6a5da", size = 100326 }, { url = "https://files.pythonhosted.org/packages/3a/65/fa39d74db4e2d0cd252355732d966a460a41cd01c6353b820a0952432839/ruamel.yaml.clib-0.2.12-cp310-cp310-win_amd64.whl", hash = "sha256:ab007f2f5a87bd08ab1499bdf96f3d5c6ad4dcfa364884cb4549aa0154b13a28", size = 118079 }, { url = "https://files.pythonhosted.org/packages/fb/8f/683c6ad562f558cbc4f7c029abcd9599148c51c54b5ef0f24f2638da9fbb/ruamel.yaml.clib-0.2.12-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:4a6679521a58256a90b0d89e03992c15144c5f3858f40d7c18886023d7943db6", size = 132224 }, { url = "https://files.pythonhosted.org/packages/3c/d2/b79b7d695e2f21da020bd44c782490578f300dd44f0a4c57a92575758a76/ruamel.yaml.clib-0.2.12-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:d84318609196d6bd6da0edfa25cedfbabd8dbde5140a0a23af29ad4b8f91fb1e", size = 641480 }, { url = "https://files.pythonhosted.org/packages/68/6e/264c50ce2a31473a9fdbf4fa66ca9b2b17c7455b31ef585462343818bd6c/ruamel.yaml.clib-0.2.12-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb43a269eb827806502c7c8efb7ae7e9e9d0573257a46e8e952f4d4caba4f31e", size = 739068 }, { url = "https://files.pythonhosted.org/packages/86/29/88c2567bc893c84d88b4c48027367c3562ae69121d568e8a3f3a8d363f4d/ruamel.yaml.clib-0.2.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:811ea1594b8a0fb466172c384267a4e5e367298af6b228931f273b111f17ef52", size = 703012 }, { url = "https://files.pythonhosted.org/packages/11/46/879763c619b5470820f0cd6ca97d134771e502776bc2b844d2adb6e37753/ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_1_i686.whl", hash = 
"sha256:cf12567a7b565cbf65d438dec6cfbe2917d3c1bdddfce84a9930b7d35ea59642", size = 704352 }, { url = "https://files.pythonhosted.org/packages/02/80/ece7e6034256a4186bbe50dee28cd032d816974941a6abf6a9d65e4228a7/ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7dd5adc8b930b12c8fc5b99e2d535a09889941aa0d0bd06f4749e9a9397c71d2", size = 737344 }, { url = "https://files.pythonhosted.org/packages/f0/ca/e4106ac7e80efbabdf4bf91d3d32fc424e41418458251712f5672eada9ce/ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1492a6051dab8d912fc2adeef0e8c72216b24d57bd896ea607cb90bb0c4981d3", size = 714498 }, { url = "https://files.pythonhosted.org/packages/67/58/b1f60a1d591b771298ffa0428237afb092c7f29ae23bad93420b1eb10703/ruamel.yaml.clib-0.2.12-cp311-cp311-win32.whl", hash = "sha256:bd0a08f0bab19093c54e18a14a10b4322e1eacc5217056f3c063bd2f59853ce4", size = 100205 }, { url = "https://files.pythonhosted.org/packages/b4/4f/b52f634c9548a9291a70dfce26ca7ebce388235c93588a1068028ea23fcc/ruamel.yaml.clib-0.2.12-cp311-cp311-win_amd64.whl", hash = "sha256:a274fb2cb086c7a3dea4322ec27f4cb5cc4b6298adb583ab0e211a4682f241eb", size = 118185 }, { url = "https://files.pythonhosted.org/packages/48/41/e7a405afbdc26af961678474a55373e1b323605a4f5e2ddd4a80ea80f628/ruamel.yaml.clib-0.2.12-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:20b0f8dc160ba83b6dcc0e256846e1a02d044e13f7ea74a3d1d56ede4e48c632", size = 133433 }, { url = "https://files.pythonhosted.org/packages/ec/b0/b850385604334c2ce90e3ee1013bd911aedf058a934905863a6ea95e9eb4/ruamel.yaml.clib-0.2.12-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:943f32bc9dedb3abff9879edc134901df92cfce2c3d5c9348f172f62eb2d771d", size = 647362 }, { url = "https://files.pythonhosted.org/packages/44/d0/3f68a86e006448fb6c005aee66565b9eb89014a70c491d70c08de597f8e4/ruamel.yaml.clib-0.2.12-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:95c3829bb364fdb8e0332c9931ecf57d9be3519241323c5274bd82f709cebc0c", size = 754118 }, { url = "https://files.pythonhosted.org/packages/52/a9/d39f3c5ada0a3bb2870d7db41901125dbe2434fa4f12ca8c5b83a42d7c53/ruamel.yaml.clib-0.2.12-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:749c16fcc4a2b09f28843cda5a193e0283e47454b63ec4b81eaa2242f50e4ccd", size = 706497 }, { url = "https://files.pythonhosted.org/packages/b0/fa/097e38135dadd9ac25aecf2a54be17ddf6e4c23e43d538492a90ab3d71c6/ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bf165fef1f223beae7333275156ab2022cffe255dcc51c27f066b4370da81e31", size = 698042 }, { url = "https://files.pythonhosted.org/packages/ec/d5/a659ca6f503b9379b930f13bc6b130c9f176469b73b9834296822a83a132/ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:32621c177bbf782ca5a18ba4d7af0f1082a3f6e517ac2a18b3974d4edf349680", size = 745831 }, { url = "https://files.pythonhosted.org/packages/db/5d/36619b61ffa2429eeaefaab4f3374666adf36ad8ac6330d855848d7d36fd/ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b82a7c94a498853aa0b272fd5bc67f29008da798d4f93a2f9f289feb8426a58d", size = 715692 }, { url = "https://files.pythonhosted.org/packages/b1/82/85cb92f15a4231c89b95dfe08b09eb6adca929ef7df7e17ab59902b6f589/ruamel.yaml.clib-0.2.12-cp312-cp312-win32.whl", hash = "sha256:e8c4ebfcfd57177b572e2040777b8abc537cdef58a2120e830124946aa9b42c5", size = 98777 }, { url = "https://files.pythonhosted.org/packages/d7/8f/c3654f6f1ddb75daf3922c3d8fc6005b1ab56671ad56ffb874d908bfa668/ruamel.yaml.clib-0.2.12-cp312-cp312-win_amd64.whl", hash = "sha256:0467c5965282c62203273b838ae77c0d29d7638c8a4e3a1c8bdd3602c10904e4", size = 115523 }, { url = "https://files.pythonhosted.org/packages/29/00/4864119668d71a5fa45678f380b5923ff410701565821925c69780356ffa/ruamel.yaml.clib-0.2.12-cp313-cp313-macosx_14_0_arm64.whl", hash = 
"sha256:4c8c5d82f50bb53986a5e02d1b3092b03622c02c2eb78e29bec33fd9593bae1a", size = 132011 }, { url = "https://files.pythonhosted.org/packages/7f/5e/212f473a93ae78c669ffa0cb051e3fee1139cb2d385d2ae1653d64281507/ruamel.yaml.clib-0.2.12-cp313-cp313-manylinux2014_aarch64.whl", hash = "sha256:e7e3736715fbf53e9be2a79eb4db68e4ed857017344d697e8b9749444ae57475", size = 642488 }, { url = "https://files.pythonhosted.org/packages/1f/8f/ecfbe2123ade605c49ef769788f79c38ddb1c8fa81e01f4dbf5cf1a44b16/ruamel.yaml.clib-0.2.12-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b7e75b4965e1d4690e93021adfcecccbca7d61c7bddd8e22406ef2ff20d74ef", size = 745066 }, { url = "https://files.pythonhosted.org/packages/e2/a9/28f60726d29dfc01b8decdb385de4ced2ced9faeb37a847bd5cf26836815/ruamel.yaml.clib-0.2.12-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96777d473c05ee3e5e3c3e999f5d23c6f4ec5b0c38c098b3a5229085f74236c6", size = 701785 }, { url = "https://files.pythonhosted.org/packages/84/7e/8e7ec45920daa7f76046578e4f677a3215fe8f18ee30a9cb7627a19d9b4c/ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:3bc2a80e6420ca8b7d3590791e2dfc709c88ab9152c00eeb511c9875ce5778bf", size = 693017 }, { url = "https://files.pythonhosted.org/packages/c5/b3/d650eaade4ca225f02a648321e1ab835b9d361c60d51150bac49063b83fa/ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:e188d2699864c11c36cdfdada94d781fd5d6b0071cd9c427bceb08ad3d7c70e1", size = 741270 }, { url = "https://files.pythonhosted.org/packages/87/b8/01c29b924dcbbed75cc45b30c30d565d763b9c4d540545a0eeecffb8f09c/ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4f6f3eac23941b32afccc23081e1f50612bdbe4e982012ef4f5797986828cd01", size = 709059 }, { url = "https://files.pythonhosted.org/packages/30/8c/ed73f047a73638257aa9377ad356bea4d96125b305c34a28766f4445cc0f/ruamel.yaml.clib-0.2.12-cp313-cp313-win32.whl", 
hash = "sha256:6442cb36270b3afb1b4951f060eccca1ce49f3d087ca1ca4563a6eb479cb3de6", size = 98583 }, { url = "https://files.pythonhosted.org/packages/b0/85/e8e751d8791564dd333d5d9a4eab0a7a115f7e349595417fd50ecae3395c/ruamel.yaml.clib-0.2.12-cp313-cp313-win_amd64.whl", hash = "sha256:e5b8daf27af0b90da7bb903a876477a9e6d7270be6146906b276605997c7e9a3", size = 115190 }, { url = "https://files.pythonhosted.org/packages/e5/46/ccdef7a84ad745c37cb3d9a81790f28fbc9adf9c237dba682017b123294e/ruamel.yaml.clib-0.2.12-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:fc4b630cd3fa2cf7fce38afa91d7cfe844a9f75d7f0f36393fa98815e911d987", size = 131834 }, { url = "https://files.pythonhosted.org/packages/29/09/932360f30ad1b7b79f08757e0a6fb8c5392a52cdcc182779158fe66d25ac/ruamel.yaml.clib-0.2.12-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:bc5f1e1c28e966d61d2519f2a3d451ba989f9ea0f2307de7bc45baa526de9e45", size = 636120 }, { url = "https://files.pythonhosted.org/packages/a2/2a/5b27602e7a4344c1334e26bf4739746206b7a60a8acdba33a61473468b73/ruamel.yaml.clib-0.2.12-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a0e060aace4c24dcaf71023bbd7d42674e3b230f7e7b97317baf1e953e5b519", size = 724914 }, { url = "https://files.pythonhosted.org/packages/da/1c/23497017c554fc06ff5701b29355522cff850f626337fff35d9ab352cb18/ruamel.yaml.clib-0.2.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2f1c3765db32be59d18ab3953f43ab62a761327aafc1594a2a1fbe038b8b8a7", size = 689072 }, { url = "https://files.pythonhosted.org/packages/68/e6/f3d4ff3223f9ea49c3b7169ec0268e42bd49f87c70c0e3e853895e4a7ae2/ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d85252669dc32f98ebcd5d36768f5d4faeaeaa2d655ac0473be490ecdae3c285", size = 667091 }, { url = "https://files.pythonhosted.org/packages/84/62/ead07043527642491e5011b143f44b81ef80f1025a96069b7210e0f2f0f3/ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:e143ada795c341b56de9418c58d028989093ee611aa27ffb9b7f609c00d813ed", size = 699111 }, { url = "https://files.pythonhosted.org/packages/52/b3/fe4d84446f7e4887e3bea7ceff0a7df23790b5ed625f830e79ace88ebefb/ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2c59aa6170b990d8d2719323e628aaf36f3bfbc1c26279c0eeeb24d05d2d11c7", size = 666365 }, { url = "https://files.pythonhosted.org/packages/6e/b3/7feb99a00bfaa5c6868617bb7651308afde85e5a0b23cd187fe5de65feeb/ruamel.yaml.clib-0.2.12-cp39-cp39-win32.whl", hash = "sha256:beffaed67936fbbeffd10966a4eb53c402fafd3d6833770516bf7314bc6ffa12", size = 100863 }, { url = "https://files.pythonhosted.org/packages/93/07/de635108684b7a5bb06e432b0930c5a04b6c59efe73bd966d8db3cc208f2/ruamel.yaml.clib-0.2.12-cp39-cp39-win_amd64.whl", hash = "sha256:040ae85536960525ea62868b642bdb0c2cc6021c9f9d507810c0c604e66f5a7b", size = 118653 }, ] [[package]] name = "ruff" version = "0.11.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/77/2b/7ca27e854d92df5e681e6527dc0f9254c9dc06c8408317893cf96c851cdd/ruff-0.11.0.tar.gz", hash = "sha256:e55c620690a4a7ee6f1cccb256ec2157dc597d109400ae75bbf944fc9d6462e2", size = 3799407 } wheels = [ { url = "https://files.pythonhosted.org/packages/48/40/3d0340a9e5edc77d37852c0cd98c5985a5a8081fc3befaeb2ae90aaafd2b/ruff-0.11.0-py3-none-linux_armv6l.whl", hash = "sha256:dc67e32bc3b29557513eb7eeabb23efdb25753684b913bebb8a0c62495095acb", size = 10098158 }, { url = "https://files.pythonhosted.org/packages/ec/a9/d8f5abb3b87b973b007649ac7bf63665a05b2ae2b2af39217b09f52abbbf/ruff-0.11.0-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:38c23fd9bdec4eb437b4c1e3595905a0a8edfccd63a790f818b28c78fe345639", size = 10879071 }, { url = "https://files.pythonhosted.org/packages/ab/62/aaa198614c6211677913ec480415c5e6509586d7b796356cec73a2f8a3e6/ruff-0.11.0-py3-none-macosx_11_0_arm64.whl", hash = 
"sha256:7c8661b0be91a38bd56db593e9331beaf9064a79028adee2d5f392674bbc5e88", size = 10247944 }, { url = "https://files.pythonhosted.org/packages/9f/52/59e0a9f2cf1ce5e6cbe336b6dd0144725c8ea3b97cac60688f4e7880bf13/ruff-0.11.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b6c0e8d3d2db7e9f6efd884f44b8dc542d5b6b590fc4bb334fdbc624d93a29a2", size = 10421725 }, { url = "https://files.pythonhosted.org/packages/a6/c3/dcd71acc6dff72ce66d13f4be5bca1dbed4db678dff2f0f6f307b04e5c02/ruff-0.11.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3c3156d3f4b42e57247275a0a7e15a851c165a4fc89c5e8fa30ea6da4f7407b8", size = 9954435 }, { url = "https://files.pythonhosted.org/packages/a6/9a/342d336c7c52dbd136dee97d4c7797e66c3f92df804f8f3b30da59b92e9c/ruff-0.11.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:490b1e147c1260545f6d041c4092483e3f6d8eba81dc2875eaebcf9140b53905", size = 11492664 }, { url = "https://files.pythonhosted.org/packages/84/35/6e7defd2d7ca95cc385ac1bd9f7f2e4a61b9cc35d60a263aebc8e590c462/ruff-0.11.0-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:1bc09a7419e09662983b1312f6fa5dab829d6ab5d11f18c3760be7ca521c9329", size = 12207856 }, { url = "https://files.pythonhosted.org/packages/22/78/da669c8731bacf40001c880ada6d31bcfb81f89cc996230c3b80d319993e/ruff-0.11.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bcfa478daf61ac8002214eb2ca5f3e9365048506a9d52b11bea3ecea822bb844", size = 11645156 }, { url = "https://files.pythonhosted.org/packages/ee/47/e27d17d83530a208f4a9ab2e94f758574a04c51e492aa58f91a3ed7cbbcb/ruff-0.11.0-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6fbb2aed66fe742a6a3a0075ed467a459b7cedc5ae01008340075909d819df1e", size = 13884167 }, { url = "https://files.pythonhosted.org/packages/9f/5e/42ffbb0a5d4b07bbc642b7d58357b4e19a0f4774275ca6ca7d1f7b5452cd/ruff-0.11.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = "sha256:92c0c1ff014351c0b0cdfdb1e35fa83b780f1e065667167bb9502d47ca41e6db", size = 11348311 }, { url = "https://files.pythonhosted.org/packages/c8/51/dc3ce0c5ce1a586727a3444a32f98b83ba99599bb1ebca29d9302886e87f/ruff-0.11.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:e4fd5ff5de5f83e0458a138e8a869c7c5e907541aec32b707f57cf9a5e124445", size = 10305039 }, { url = "https://files.pythonhosted.org/packages/60/e0/475f0c2f26280f46f2d6d1df1ba96b3399e0234cf368cc4c88e6ad10dcd9/ruff-0.11.0-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:96bc89a5c5fd21a04939773f9e0e276308be0935de06845110f43fd5c2e4ead7", size = 9937939 }, { url = "https://files.pythonhosted.org/packages/e2/d3/3e61b7fd3e9cdd1e5b8c7ac188bec12975c824e51c5cd3d64caf81b0331e/ruff-0.11.0-py3-none-musllinux_1_2_i686.whl", hash = "sha256:a9352b9d767889ec5df1483f94870564e8102d4d7e99da52ebf564b882cdc2c7", size = 10923259 }, { url = "https://files.pythonhosted.org/packages/30/32/cd74149ebb40b62ddd14bd2d1842149aeb7f74191fb0f49bd45c76909ff2/ruff-0.11.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:049a191969a10897fe052ef9cc7491b3ef6de79acd7790af7d7897b7a9bfbcb6", size = 11406212 }, { url = "https://files.pythonhosted.org/packages/00/ef/033022a6b104be32e899b00de704d7c6d1723a54d4c9e09d147368f14b62/ruff-0.11.0-py3-none-win32.whl", hash = "sha256:3191e9116b6b5bbe187447656f0c8526f0d36b6fd89ad78ccaad6bdc2fad7df2", size = 10310905 }, { url = "https://files.pythonhosted.org/packages/ed/8a/163f2e78c37757d035bd56cd60c8d96312904ca4a6deeab8442d7b3cbf89/ruff-0.11.0-py3-none-win_amd64.whl", hash = "sha256:c58bfa00e740ca0a6c43d41fb004cd22d165302f360aaa56f7126d544db31a21", size = 11411730 }, { url = "https://files.pythonhosted.org/packages/4e/f7/096f6efabe69b49d7ca61052fc70289c05d8d35735c137ef5ba5ef423662/ruff-0.11.0-py3-none-win_arm64.whl", hash = "sha256:868364fc23f5aa122b00c6f794211e85f7e78f5dffdf7c590ab90b8c4e69b657", size = 10538956 }, ] [[package]] name = "sanic" version = "24.12.0" source = { registry = 
"https://pypi.org/simple" } dependencies = [ { name = "aiofiles" }, { name = "html5tagger" }, { name = "httptools" }, { name = "multidict" }, { name = "sanic-routing" }, { name = "setuptools" }, { name = "tracerite" }, { name = "typing-extensions" }, { name = "ujson", marker = "implementation_name == 'cpython' and sys_platform != 'win32'" }, { name = "uvloop", marker = "implementation_name == 'cpython' and sys_platform != 'win32'" }, { name = "websockets" }, ] sdist = { url = "https://files.pythonhosted.org/packages/5c/d6/8761c9df89392c4330c123f1ecc85cfb3884522f3a58e86a7b1809b2d608/sanic-24.12.0.tar.gz", hash = "sha256:09c23aa917616c1e60e44c66dfd7582cb9fd6503f78298c309945909f5839836", size = 351866 } wheels = [ { url = "https://files.pythonhosted.org/packages/f0/f3/82f38246e8af7976f59f7913a00c8e84325ddb58e05312ab530ce381fdbf/sanic-24.12.0-py3-none-any.whl", hash = "sha256:3c2a01ec0b6c5926e3efe34eac1b497d31ed989038fe213eb25ad0c98687d388", size = 245162 }, ] [package.optional-dependencies] ext = [ { name = "sanic-ext" }, ] [[package]] name = "sanic-ext" version = "24.12.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pyyaml" }, ] sdist = { url = "https://files.pythonhosted.org/packages/43/c6/f5f87268e72825e3cd39c5b833996a2ac47f98b888f4253c5830afebd057/sanic_ext-24.12.0.tar.gz", hash = "sha256:8f912f4c29f242bc638346d09b79f0c8896ff64e79bd0e7fa09eac4b6c0e23c8", size = 66209 } wheels = [ { url = "https://files.pythonhosted.org/packages/f4/3f/4c23be085bce45defd3863cbc707227fc82f49e7d9a5e1bb2656e2e1a2ed/sanic_ext-24.12.0-py3-none-any.whl", hash = "sha256:861f809f071770cf28acd5f13e97ed59985e07361b13b4b4540da1333730c83e", size = 96445 }, ] [[package]] name = "sanic-routing" version = "23.12.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/d1/5c/2a7edd14fbccca3719a8d680951d4b25f986752c781c61ccf156a6d1ebff/sanic-routing-23.12.0.tar.gz", hash = 
"sha256:1dcadc62c443e48c852392dba03603f9862b6197fc4cba5bbefeb1ace0848b04", size = 29473 } wheels = [ { url = "https://files.pythonhosted.org/packages/cf/e3/3425c9a8773807ac2c01d6a56c8521733f09b627e5827e733c5cd36b9ac5/sanic_routing-23.12.0-py3-none-any.whl", hash = "sha256:1558a72afcb9046ed3134a5edae02fc1552cff08f0fff2e8d5de0877ea43ed73", size = 25522 }, ] [[package]] name = "sanic-testing" version = "24.6.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "httpx" }, ] sdist = { url = "https://files.pythonhosted.org/packages/b2/56/8d31d8a7e0b61633d6358694edfae976e69739b5bd640ceac7989b62e749/sanic_testing-24.6.0.tar.gz", hash = "sha256:7591ce537e2a651efb6dc01b458e7e4ea5347f6d91438676774c6f505a124731", size = 10871 } wheels = [ { url = "https://files.pythonhosted.org/packages/0c/93/1d588f1cb9b710b9f22fa78b53d699a8062edc94204d50dd0d78c5f5b495/sanic_testing-24.6.0-py3-none-any.whl", hash = "sha256:b1027184735e88230891aa0461fff84093abfa3bff0f4d29c0f78f42e59efada", size = 10326 }, ] [[package]] name = "setuptools" version = "77.0.1" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/ea/df/9f719dc48f64284be8bd99e2e0bb0dd6e9f8e2c2c3c7bf7a685bc5adf2c7/setuptools-77.0.1.tar.gz", hash = "sha256:a1246a1b4178c66d7cf50c9fc6d530fac3f89bc284cf803c7fa878c41b1a03b2", size = 1366225 } wheels = [ { url = "https://files.pythonhosted.org/packages/40/50/bc3d02829a3babd70b7f1414c93cf6acd198976f0469a07d0e7b813c5002/setuptools-77.0.1-py3-none-any.whl", hash = "sha256:81a234dff81a82bb52e522c8aef145d0dd4de1fd6de4d3b196d0f77dc2fded26", size = 1254282 }, ] [[package]] name = "shellingham" version = "1.5.4" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/58/15/8b3609fd3830ef7b27b655beb4b4e9c62313a4e8da8c676e142cc210d58e/shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de", size = 10310 } wheels = [ { 
url = "https://files.pythonhosted.org/packages/e0/f9/0595336914c5619e5f28a1fb793285925a8cd4b432c9da0a987836c7f822/shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686", size = 9755 }, ] [[package]] name = "shibuya" version = "2025.2.28" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "sphinx", version = "7.4.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, { name = "sphinx", version = "8.2.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/f3/8f/3c03c525fe8209ba777dc3f03115f4a8b2940f3f040e7e6f889acfc41003/shibuya-2025.2.28.tar.gz", hash = "sha256:ed76641d030cc70e4079c002cf0feb190e868b211ba0ebbd37f07ba394a62c3b", size = 80558 } wheels = [ { url = "https://files.pythonhosted.org/packages/3e/b1/9f9d4ca3ac7a43440ad9fe65127f8958e7add90a962b6838bdff7198dd5b/shibuya-2025.2.28-py3-none-any.whl", hash = "sha256:7bd78164db93d793865d04d58c278e36caf36fdb97a72b4ef4086bdeaf0c7dd7", size = 96191 }, ] [[package]] name = "six" version = "1.17.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031 } wheels = [ { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050 }, ] [[package]] name = "slotscheck" version = "0.19.1" source = { registry = "https://pypi.org/simple" } dependencies = [ 
{ name = "click" }, { name = "tomli", marker = "python_full_version < '3.11'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/4b/57/6fcb8df11e7c76eb87b23bfa931408e47f051c6161749c531b4060a45516/slotscheck-0.19.1.tar.gz", hash = "sha256:6146b7747f8db335a00a66b782f86011b74b995f61746dc5b36a9e77d5326013", size = 16050 } wheels = [ { url = "https://files.pythonhosted.org/packages/da/32/bd569256267f80b76b87d21a09795741a175778b954bee1d7b1a89852b6f/slotscheck-0.19.1-py3-none-any.whl", hash = "sha256:bff9926f8d6408ea21b6c6bbaa4389cea1682962e73ee4f30084b6d2b89260ee", size = 16995 }, ] [[package]] name = "sniffio" version = "1.3.1" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372 } wheels = [ { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235 }, ] [[package]] name = "snowballstemmer" version = "2.2.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/44/7b/af302bebf22c749c56c9c3e8ae13190b5b5db37a33d9068652e8f73b7089/snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1", size = 86699 } wheels = [ { url = "https://files.pythonhosted.org/packages/ed/dc/c02e01294f7265e63a7315fe086dd1df7dacb9f840a804da846b96d01b96/snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a", size = 93002 }, ] [[package]] name = "soupsieve" version = "2.6" source = { registry = "https://pypi.org/simple" } sdist = { url = 
"https://files.pythonhosted.org/packages/d7/ce/fbaeed4f9fb8b2daa961f90591662df6a86c1abf25c548329a86920aedfb/soupsieve-2.6.tar.gz", hash = "sha256:e2e68417777af359ec65daac1057404a3c8a5455bb8abc36f1a9866ab1a51abb", size = 101569 } wheels = [ { url = "https://files.pythonhosted.org/packages/d1/c2/fe97d779f3ef3b15f05c94a2f1e3d21732574ed441687474db9d342a7315/soupsieve-2.6-py3-none-any.whl", hash = "sha256:e72c4ff06e4fb6e4b5a9f0f55fe6e81514581fca1515028625d0f299c602ccc9", size = 36186 }, ] [[package]] name = "sphinx" version = "7.4.7" source = { registry = "https://pypi.org/simple" } resolution-markers = [ "python_full_version < '3.10'", ] dependencies = [ { name = "alabaster", version = "0.7.16", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, { name = "babel", marker = "python_full_version < '3.10'" }, { name = "colorama", marker = "python_full_version < '3.10' and sys_platform == 'win32'" }, { name = "docutils", marker = "python_full_version < '3.10'" }, { name = "imagesize", marker = "python_full_version < '3.10'" }, { name = "importlib-metadata", marker = "python_full_version < '3.10'" }, { name = "jinja2", marker = "python_full_version < '3.10'" }, { name = "packaging", marker = "python_full_version < '3.10'" }, { name = "pygments", marker = "python_full_version < '3.10'" }, { name = "requests", marker = "python_full_version < '3.10'" }, { name = "snowballstemmer", marker = "python_full_version < '3.10'" }, { name = "sphinxcontrib-applehelp", marker = "python_full_version < '3.10'" }, { name = "sphinxcontrib-devhelp", marker = "python_full_version < '3.10'" }, { name = "sphinxcontrib-htmlhelp", marker = "python_full_version < '3.10'" }, { name = "sphinxcontrib-jsmath", marker = "python_full_version < '3.10'" }, { name = "sphinxcontrib-qthelp", marker = "python_full_version < '3.10'" }, { name = "sphinxcontrib-serializinghtml", marker = "python_full_version < '3.10'" }, { name = "tomli", marker = "python_full_version < 
'3.10'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/5b/be/50e50cb4f2eff47df05673d361095cafd95521d2a22521b920c67a372dcb/sphinx-7.4.7.tar.gz", hash = "sha256:242f92a7ea7e6c5b406fdc2615413890ba9f699114a9c09192d7dfead2ee9cfe", size = 8067911 } wheels = [ { url = "https://files.pythonhosted.org/packages/0d/ef/153f6803c5d5f8917dbb7f7fcf6d34a871ede3296fa89c2c703f5f8a6c8e/sphinx-7.4.7-py3-none-any.whl", hash = "sha256:c2419e2135d11f1951cd994d6eb18a1835bd8fdd8429f9ca375dc1f3281bd239", size = 3401624 }, ] [[package]] name = "sphinx" version = "8.1.3" source = { registry = "https://pypi.org/simple" } resolution-markers = [ "python_full_version == '3.10.*'", ] dependencies = [ { name = "alabaster", version = "1.0.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, { name = "babel", marker = "python_full_version == '3.10.*'" }, { name = "colorama", marker = "python_full_version == '3.10.*' and sys_platform == 'win32'" }, { name = "docutils", marker = "python_full_version == '3.10.*'" }, { name = "imagesize", marker = "python_full_version == '3.10.*'" }, { name = "jinja2", marker = "python_full_version == '3.10.*'" }, { name = "packaging", marker = "python_full_version == '3.10.*'" }, { name = "pygments", marker = "python_full_version == '3.10.*'" }, { name = "requests", marker = "python_full_version == '3.10.*'" }, { name = "snowballstemmer", marker = "python_full_version == '3.10.*'" }, { name = "sphinxcontrib-applehelp", marker = "python_full_version == '3.10.*'" }, { name = "sphinxcontrib-devhelp", marker = "python_full_version == '3.10.*'" }, { name = "sphinxcontrib-htmlhelp", marker = "python_full_version == '3.10.*'" }, { name = "sphinxcontrib-jsmath", marker = "python_full_version == '3.10.*'" }, { name = "sphinxcontrib-qthelp", marker = "python_full_version == '3.10.*'" }, { name = "sphinxcontrib-serializinghtml", marker = "python_full_version == '3.10.*'" }, { name = "tomli", marker = 
"python_full_version == '3.10.*'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/be0b61178fe2cdcb67e2a92fc9ebb488e3c51c4f74a36a7824c0adf23425/sphinx-8.1.3.tar.gz", hash = "sha256:43c1911eecb0d3e161ad78611bc905d1ad0e523e4ddc202a58a821773dc4c927", size = 8184611 } wheels = [ { url = "https://files.pythonhosted.org/packages/26/60/1ddff83a56d33aaf6f10ec8ce84b4c007d9368b21008876fceda7e7381ef/sphinx-8.1.3-py3-none-any.whl", hash = "sha256:09719015511837b76bf6e03e42eb7595ac8c2e41eeb9c29c5b755c6b677992a2", size = 3487125 }, ] [[package]] name = "sphinx" version = "8.2.3" source = { registry = "https://pypi.org/simple" } resolution-markers = [ "python_full_version >= '3.13'", "python_full_version >= '3.11' and python_full_version < '3.13'", ] dependencies = [ { name = "alabaster", version = "1.0.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, { name = "babel", marker = "python_full_version >= '3.11'" }, { name = "colorama", marker = "python_full_version >= '3.11' and sys_platform == 'win32'" }, { name = "docutils", marker = "python_full_version >= '3.11'" }, { name = "imagesize", marker = "python_full_version >= '3.11'" }, { name = "jinja2", marker = "python_full_version >= '3.11'" }, { name = "packaging", marker = "python_full_version >= '3.11'" }, { name = "pygments", marker = "python_full_version >= '3.11'" }, { name = "requests", marker = "python_full_version >= '3.11'" }, { name = "roman-numerals-py", marker = "python_full_version >= '3.11'" }, { name = "snowballstemmer", marker = "python_full_version >= '3.11'" }, { name = "sphinxcontrib-applehelp", marker = "python_full_version >= '3.11'" }, { name = "sphinxcontrib-devhelp", marker = "python_full_version >= '3.11'" }, { name = "sphinxcontrib-htmlhelp", marker = "python_full_version >= '3.11'" }, { name = "sphinxcontrib-jsmath", marker = "python_full_version >= '3.11'" }, { name = "sphinxcontrib-qthelp", marker = "python_full_version >= '3.11'" }, 
{ name = "sphinxcontrib-serializinghtml", marker = "python_full_version >= '3.11'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/38/ad/4360e50ed56cb483667b8e6dadf2d3fda62359593faabbe749a27c4eaca6/sphinx-8.2.3.tar.gz", hash = "sha256:398ad29dee7f63a75888314e9424d40f52ce5a6a87ae88e7071e80af296ec348", size = 8321876 } wheels = [ { url = "https://files.pythonhosted.org/packages/31/53/136e9eca6e0b9dc0e1962e2c908fbea2e5ac000c2a2fbd9a35797958c48b/sphinx-8.2.3-py3-none-any.whl", hash = "sha256:4405915165f13521d875a8c29c8970800a0141c14cc5416a38feca4ea5d9b9c3", size = 3589741 }, ] [[package]] name = "sphinx-autobuild" version = "2024.10.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "colorama" }, { name = "sphinx", version = "7.4.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, { name = "sphinx", version = "8.2.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, { name = "starlette" }, { name = "uvicorn" }, { name = "watchfiles" }, { name = "websockets" }, ] sdist = { url = "https://files.pythonhosted.org/packages/a5/2c/155e1de2c1ba96a72e5dba152c509a8b41e047ee5c2def9e9f0d812f8be7/sphinx_autobuild-2024.10.3.tar.gz", hash = "sha256:248150f8f333e825107b6d4b86113ab28fa51750e5f9ae63b59dc339be951fb1", size = 14023 } wheels = [ { url = "https://files.pythonhosted.org/packages/18/c0/eba125db38c84d3c74717008fd3cb5000b68cd7e2cbafd1349c6a38c3d3b/sphinx_autobuild-2024.10.3-py3-none-any.whl", hash = "sha256:158e16c36f9d633e613c9aaf81c19b0fc458ca78b112533b20dafcda430d60fa", size = 11908 }, ] [[package]] name = "sphinx-autodoc-typehints" version = "2.3.0" source = { registry = "https://pypi.org/simple" } resolution-markers = [ "python_full_version < '3.10'", ] dependencies = [ { name = "sphinx", version = "7.4.7", 
source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/74/cd/03e7b917230dc057922130a79ba0240df1693bfd76727ea33fae84b39138/sphinx_autodoc_typehints-2.3.0.tar.gz", hash = "sha256:535c78ed2d6a1bad393ba9f3dfa2602cf424e2631ee207263e07874c38fde084", size = 40709 } wheels = [ { url = "https://files.pythonhosted.org/packages/a0/f3/e0a4ce49da4b6f4e4ce84b3c39a0677831884cb9d8a87ccbf1e9e56e53ac/sphinx_autodoc_typehints-2.3.0-py3-none-any.whl", hash = "sha256:3098e2c6d0ba99eacd013eb06861acc9b51c6e595be86ab05c08ee5506ac0c67", size = 19836 }, ] [[package]] name = "sphinx-autodoc-typehints" version = "3.0.1" source = { registry = "https://pypi.org/simple" } resolution-markers = [ "python_full_version == '3.10.*'", ] dependencies = [ { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/26/f0/43c6a5ff3e7b08a8c3b32f81b859f1b518ccc31e45f22e2b41ced38be7b9/sphinx_autodoc_typehints-3.0.1.tar.gz", hash = "sha256:b9b40dd15dee54f6f810c924f863f9cf1c54f9f3265c495140ea01be7f44fa55", size = 36282 } wheels = [ { url = "https://files.pythonhosted.org/packages/3c/dc/dc46c5c7c566b7ec5e8f860f9c89533bf03c0e6aadc96fb9b337867e4460/sphinx_autodoc_typehints-3.0.1-py3-none-any.whl", hash = "sha256:4b64b676a14b5b79cefb6628a6dc8070e320d4963e8ff640a2f3e9390ae9045a", size = 20245 }, ] [[package]] name = "sphinx-autodoc-typehints" version = "3.1.0" source = { registry = "https://pypi.org/simple" } resolution-markers = [ "python_full_version >= '3.13'", "python_full_version >= '3.11' and python_full_version < '3.13'", ] dependencies = [ { name = "sphinx", version = "8.2.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, ] sdist = { url = 
"https://files.pythonhosted.org/packages/cb/cc/d38e7260b1bd3af0c84ad8285dfd78236584b74544510584e07963e000ec/sphinx_autodoc_typehints-3.1.0.tar.gz", hash = "sha256:a6b7b0b6df0a380783ce5b29150c2d30352746f027a3e294d37183995d3f23ed", size = 36528 } wheels = [ { url = "https://files.pythonhosted.org/packages/14/2f/bc5bed0677ae00b9ca7919968ea675e2f696b6b20f1648262f26a7a6c6b4/sphinx_autodoc_typehints-3.1.0-py3-none-any.whl", hash = "sha256:67bdee7e27ba943976ce92ebc5647a976a7a08f9f689a826c54617b96a423913", size = 20404 }, ] [[package]] name = "sphinx-click" version = "6.0.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "click" }, { name = "docutils" }, { name = "sphinx", version = "7.4.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, { name = "sphinx", version = "8.2.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/db/0a/5b1e8d0579dbb4ca8114e456ca4a68020bfe8e15c7001f3856be4929ab83/sphinx_click-6.0.0.tar.gz", hash = "sha256:f5d664321dc0c6622ff019f1e1c84e58ce0cecfddeb510e004cf60c2a3ab465b", size = 29574 } wheels = [ { url = "https://files.pythonhosted.org/packages/d0/d7/8621c4726ad3f788a1db4c0c409044b16edc563f5c9542807b3724037555/sphinx_click-6.0.0-py3-none-any.whl", hash = "sha256:1e0a3c83bcb7c55497751b19d07ebe56b5d7b85eb76dd399cf9061b497adc317", size = 9922 }, ] [[package]] name = "sphinx-copybutton" version = "0.5.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "sphinx", version = "7.4.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, { name = 
"sphinx", version = "8.2.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/fc/2b/a964715e7f5295f77509e59309959f4125122d648f86b4fe7d70ca1d882c/sphinx-copybutton-0.5.2.tar.gz", hash = "sha256:4cf17c82fb9646d1bc9ca92ac280813a3b605d8c421225fd9913154103ee1fbd", size = 23039 } wheels = [ { url = "https://files.pythonhosted.org/packages/9e/48/1ea60e74949eecb12cdd6ac43987f9fd331156388dcc2319b45e2ebb81bf/sphinx_copybutton-0.5.2-py3-none-any.whl", hash = "sha256:fb543fd386d917746c9a2c50360c7905b605726b9355cd26e9974857afeae06e", size = 13343 }, ] [[package]] name = "sphinx-design" version = "0.6.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "sphinx", version = "7.4.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, { name = "sphinx", version = "8.2.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/2b/69/b34e0cb5336f09c6866d53b4a19d76c227cdec1bbc7ac4de63ca7d58c9c7/sphinx_design-0.6.1.tar.gz", hash = "sha256:b44eea3719386d04d765c1a8257caca2b3e6f8421d7b3a5e742c0fd45f84e632", size = 2193689 } wheels = [ { url = "https://files.pythonhosted.org/packages/c6/43/65c0acbd8cc6f50195a3a1fc195c404988b15c67090e73c7a41a9f57d6bd/sphinx_design-0.6.1-py3-none-any.whl", hash = "sha256:b11f37db1a802a183d61b159d9a202314d4d2fe29c163437001324fe2f19549c", size = 2215338 }, ] [[package]] name = "sphinx-jinja2-compat" version = "0.3.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "jinja2" }, { name = "markupsafe" }, { name = "standard-imghdr", marker = "python_full_version >= '3.13'" }, ] sdist = { url = 
"https://files.pythonhosted.org/packages/26/df/27282da6f8c549f765beca9de1a5fc56f9651ed87711a5cac1e914137753/sphinx_jinja2_compat-0.3.0.tar.gz", hash = "sha256:f3c1590b275f42e7a654e081db5e3e5fb97f515608422bde94015ddf795dfe7c", size = 4998 } wheels = [ { url = "https://files.pythonhosted.org/packages/6f/42/2fd09d672eaaa937d6893d8b747d07943f97a6e5e30653aee6ebd339b704/sphinx_jinja2_compat-0.3.0-py3-none-any.whl", hash = "sha256:b1e4006d8e1ea31013fa9946d1b075b0c8d2a42c6e3425e63542c1e9f8be9084", size = 7883 }, ] [[package]] name = "sphinx-paramlinks" version = "0.6.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "docutils" }, { name = "sphinx", version = "7.4.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, { name = "sphinx", version = "8.2.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/ae/21/62d3a58ff7bd02bbb9245a63d1f0d2e0455522a11a78951d16088569fca8/sphinx-paramlinks-0.6.0.tar.gz", hash = "sha256:746a0816860aa3fff5d8d746efcbec4deead421f152687411db1d613d29f915e", size = 12363 } [[package]] name = "sphinx-prompt" version = "1.8.0" source = { registry = "https://pypi.org/simple" } resolution-markers = [ "python_full_version < '3.10'", ] dependencies = [ { name = "docutils", marker = "python_full_version < '3.10'" }, { name = "pygments", marker = "python_full_version < '3.10'" }, { name = "sphinx", version = "7.4.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/e7/fb/7a07b8df1ca2418147a6b13e3f6b445071f2565198b45efa631d0d6ef0cd/sphinx_prompt-1.8.0.tar.gz", hash = "sha256:47482f86fcec29662fdfd23e7c04ef03582714195d01f5d565403320084372ed", size = 
5121 } wheels = [ { url = "https://files.pythonhosted.org/packages/39/49/f890a2668b7cbf375f5528b549c8d36dd2e801b0fbb7b2b5ef65663ecb6c/sphinx_prompt-1.8.0-py3-none-any.whl", hash = "sha256:369ecc633f0711886f9b3a078c83264245be1adf46abeeb9b88b5519e4b51007", size = 7298 }, ] [[package]] name = "sphinx-prompt" version = "1.9.0" source = { registry = "https://pypi.org/simple" } resolution-markers = [ "python_full_version >= '3.13'", "python_full_version >= '3.11' and python_full_version < '3.13'", "python_full_version == '3.10.*'", ] dependencies = [ { name = "certifi", marker = "python_full_version >= '3.10'" }, { name = "docutils", marker = "python_full_version >= '3.10'" }, { name = "idna", marker = "python_full_version >= '3.10'" }, { name = "pygments", marker = "python_full_version >= '3.10'" }, { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, { name = "sphinx", version = "8.2.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, { name = "urllib3", marker = "python_full_version >= '3.10'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/34/fe/ac4e24f35b5148b31ac717ae7dcc7a2f7ec56eb729e22c7252ed8ad2d9a5/sphinx_prompt-1.9.0.tar.gz", hash = "sha256:471b3c6d466dce780a9b167d9541865fd4e9a80ed46e31b06a52a0529ae995a1", size = 5340 } wheels = [ { url = "https://files.pythonhosted.org/packages/76/98/e90ca466e0ede452d3e5a8d92b8fb68db6de269856e019ed9cab69440522/sphinx_prompt-1.9.0-py3-none-any.whl", hash = "sha256:fd731446c03f043d1ff6df9f22414495b23067c67011cc21658ea8d36b3575fc", size = 7311 }, ] [[package]] name = "sphinx-tabs" version = "3.4.5" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "docutils" }, { name = "pygments" }, { name = "sphinx", version = "7.4.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, { name = "sphinx", version = "8.1.3", source = { 
registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, { name = "sphinx", version = "8.2.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/27/32/ab475e252dc2b704e82a91141fa404cdd8901a5cf34958fd22afacebfccd/sphinx-tabs-3.4.5.tar.gz", hash = "sha256:ba9d0c1e3e37aaadd4b5678449eb08176770e0fc227e769b6ce747df3ceea531", size = 16070 } wheels = [ { url = "https://files.pythonhosted.org/packages/20/9f/4ac7dbb9f23a2ff5a10903a4f9e9f43e0ff051f63a313e989c962526e305/sphinx_tabs-3.4.5-py3-none-any.whl", hash = "sha256:92cc9473e2ecf1828ca3f6617d0efc0aa8acb06b08c56ba29d1413f2f0f6cf09", size = 9904 }, ] [[package]] name = "sphinx-togglebutton" version = "0.3.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "docutils" }, { name = "setuptools" }, { name = "sphinx", version = "7.4.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, { name = "sphinx", version = "8.2.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, { name = "wheel" }, ] sdist = { url = "https://files.pythonhosted.org/packages/f0/df/d151dfbbe588116e450ca7e898750cb218dca6b2e557ced8de6f9bd7242b/sphinx-togglebutton-0.3.2.tar.gz", hash = "sha256:ab0c8b366427b01e4c89802d5d078472c427fa6e9d12d521c34fa0442559dc7a", size = 8324 } wheels = [ { url = "https://files.pythonhosted.org/packages/e9/18/267ce39f29d26cdc7177231428ba823fe5ca94db8c56d1bed69033b364c8/sphinx_togglebutton-0.3.2-py3-none-any.whl", hash = "sha256:9647ba7874b7d1e2d43413d8497153a85edc6ac95a3fea9a75ef9c1e08aaae2b", size = 8249 }, ] [[package]] name = "sphinx-toolbox" version = "3.9.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "apeye" }, { 
name = "autodocsumm" }, { name = "beautifulsoup4" }, { name = "cachecontrol", extra = ["filecache"] }, { name = "dict2css" }, { name = "docutils" }, { name = "domdf-python-tools" }, { name = "filelock" }, { name = "html5lib" }, { name = "ruamel-yaml" }, { name = "sphinx", version = "7.4.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, { name = "sphinx", version = "8.2.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, { name = "sphinx-autodoc-typehints", version = "2.3.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, { name = "sphinx-autodoc-typehints", version = "3.0.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, { name = "sphinx-autodoc-typehints", version = "3.1.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, { name = "sphinx-jinja2-compat" }, { name = "sphinx-prompt", version = "1.8.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, { name = "sphinx-prompt", version = "1.9.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, { name = "sphinx-tabs" }, { name = "tabulate" }, { name = "typing-extensions" }, ] sdist = { url = "https://files.pythonhosted.org/packages/df/32/e10c272614a1f4d84b680007bd45f9b77db3262ee6c3c61a0e27932a55b7/sphinx_toolbox-3.9.0.tar.gz", hash = "sha256:9ee0603b090762d6eed4d0ec9fa91445e3ef95d40a584af125308541c1bf7b8d", size = 114497 } wheels = [ { url = "https://files.pythonhosted.org/packages/5d/7e/9811c8cf0df10c2b6c9c72667837d731dd4f0dc0d0e68980938c8eb6f7f8/sphinx_toolbox-3.9.0-py3-none-any.whl", hash = 
"sha256:49024961c7791ad6e9dd39c611f89b5162550afa26ccad087be38388c3dd3c1e", size = 195429 }, ] [[package]] name = "sphinxcontrib-applehelp" version = "2.0.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/ba/6e/b837e84a1a704953c62ef8776d45c3e8d759876b4a84fe14eba2859106fe/sphinxcontrib_applehelp-2.0.0.tar.gz", hash = "sha256:2f29ef331735ce958efa4734873f084941970894c6090408b079c61b2e1c06d1", size = 20053 } wheels = [ { url = "https://files.pythonhosted.org/packages/5d/85/9ebeae2f76e9e77b952f4b274c27238156eae7979c5421fba91a28f4970d/sphinxcontrib_applehelp-2.0.0-py3-none-any.whl", hash = "sha256:4cd3f0ec4ac5dd9c17ec65e9ab272c9b867ea77425228e68ecf08d6b28ddbdb5", size = 119300 }, ] [[package]] name = "sphinxcontrib-devhelp" version = "2.0.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/f6/d2/5beee64d3e4e747f316bae86b55943f51e82bb86ecd325883ef65741e7da/sphinxcontrib_devhelp-2.0.0.tar.gz", hash = "sha256:411f5d96d445d1d73bb5d52133377b4248ec79db5c793ce7dbe59e074b4dd1ad", size = 12967 } wheels = [ { url = "https://files.pythonhosted.org/packages/35/7a/987e583882f985fe4d7323774889ec58049171828b58c2217e7f79cdf44e/sphinxcontrib_devhelp-2.0.0-py3-none-any.whl", hash = "sha256:aefb8b83854e4b0998877524d1029fd3e6879210422ee3780459e28a1f03a8a2", size = 82530 }, ] [[package]] name = "sphinxcontrib-htmlhelp" version = "2.1.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/43/93/983afd9aa001e5201eab16b5a444ed5b9b0a7a010541e0ddfbbfd0b2470c/sphinxcontrib_htmlhelp-2.1.0.tar.gz", hash = "sha256:c9e2916ace8aad64cc13a0d233ee22317f2b9025b9cf3295249fa985cc7082e9", size = 22617 } wheels = [ { url = "https://files.pythonhosted.org/packages/0a/7b/18a8c0bcec9182c05a0b3ec2a776bba4ead82750a55ff798e8d406dae604/sphinxcontrib_htmlhelp-2.1.0-py3-none-any.whl", hash = 
"sha256:166759820b47002d22914d64a075ce08f4c46818e17cfc9470a9786b759b19f8", size = 98705 }, ] [[package]] name = "sphinxcontrib-jsmath" version = "1.0.1" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/b2/e8/9ed3830aeed71f17c026a07a5097edcf44b692850ef215b161b8ad875729/sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8", size = 5787 } wheels = [ { url = "https://files.pythonhosted.org/packages/c2/42/4c8646762ee83602e3fb3fbe774c2fac12f317deb0b5dbeeedd2d3ba4b77/sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178", size = 5071 }, ] [[package]] name = "sphinxcontrib-mermaid" version = "1.0.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pyyaml" }, { name = "sphinx", version = "7.4.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, { name = "sphinx", version = "8.2.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/97/69/bf039237ad260073e8c02f820b3e00dc34f3a2de20aff7861e6b19d2f8c5/sphinxcontrib_mermaid-1.0.0.tar.gz", hash = "sha256:2e8ab67d3e1e2816663f9347d026a8dee4a858acdd4ad32dd1c808893db88146", size = 15153 } wheels = [ { url = "https://files.pythonhosted.org/packages/cd/c8/784b9ac6ea08aa594c1a4becbd0dbe77186785362e31fd633b8c6ae0197a/sphinxcontrib_mermaid-1.0.0-py3-none-any.whl", hash = "sha256:60b72710ea02087f212028feb09711225fbc2e343a10d34822fe787510e1caa3", size = 9597 }, ] [[package]] name = "sphinxcontrib-qthelp" version = "2.0.0" source = { registry = "https://pypi.org/simple" } sdist = { url = 
"https://files.pythonhosted.org/packages/68/bc/9104308fc285eb3e0b31b67688235db556cd5b0ef31d96f30e45f2e51cae/sphinxcontrib_qthelp-2.0.0.tar.gz", hash = "sha256:4fe7d0ac8fc171045be623aba3e2a8f613f8682731f9153bb2e40ece16b9bbab", size = 17165 } wheels = [ { url = "https://files.pythonhosted.org/packages/27/83/859ecdd180cacc13b1f7e857abf8582a64552ea7a061057a6c716e790fce/sphinxcontrib_qthelp-2.0.0-py3-none-any.whl", hash = "sha256:b18a828cdba941ccd6ee8445dbe72ffa3ef8cbe7505d8cd1fa0d42d3f2d5f3eb", size = 88743 }, ] [[package]] name = "sphinxcontrib-serializinghtml" version = "2.0.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/3b/44/6716b257b0aa6bfd51a1b31665d1c205fb12cb5ad56de752dfa15657de2f/sphinxcontrib_serializinghtml-2.0.0.tar.gz", hash = "sha256:e9d912827f872c029017a53f0ef2180b327c3f7fd23c87229f7a8e8b70031d4d", size = 16080 } wheels = [ { url = "https://files.pythonhosted.org/packages/52/a7/d2782e4e3f77c8450f727ba74a8f12756d5ba823d81b941f1b04da9d033a/sphinxcontrib_serializinghtml-2.0.0-py3-none-any.whl", hash = "sha256:6e2cb0eef194e10c27ec0023bfeb25badbbb5868244cf5bc5bdc04e4464bf331", size = 92072 }, ] [[package]] name = "sqlalchemy" version = "2.0.39" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "greenlet", marker = "(python_full_version < '3.14' and platform_machine == 'AMD64') or (python_full_version < '3.14' and platform_machine == 'WIN32') or (python_full_version < '3.14' and platform_machine == 'aarch64') or (python_full_version < '3.14' and platform_machine == 'amd64') or (python_full_version < '3.14' and platform_machine == 'ppc64le') or (python_full_version < '3.14' and platform_machine == 'win32') or (python_full_version < '3.14' and platform_machine == 'x86_64')" }, { name = "typing-extensions" }, ] sdist = { url = "https://files.pythonhosted.org/packages/00/8e/e77fcaa67f8b9f504b4764570191e291524575ddbfe78a90fc656d671fdc/sqlalchemy-2.0.39.tar.gz", hash = 
"sha256:5d2d1fe548def3267b4c70a8568f108d1fed7cbbeccb9cc166e05af2abc25c22", size = 9644602 } wheels = [ { url = "https://files.pythonhosted.org/packages/8d/1d/9f6a1ba617d6dcf27bad9f227b0d73210c706bfe20828f7f10c3bce09ba8/sqlalchemy-2.0.39-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6827f8c1b2f13f1420545bd6d5b3f9e0b85fe750388425be53d23c760dcf176b", size = 2107430 }, { url = "https://files.pythonhosted.org/packages/28/76/d597b6041b21cea951e8ce011d3ff84544f3986e94df679ffc93d5dc1974/sqlalchemy-2.0.39-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d9f119e7736967c0ea03aff91ac7d04555ee038caf89bb855d93bbd04ae85b41", size = 2098622 }, { url = "https://files.pythonhosted.org/packages/61/9e/a44899b1654fcb6a6559a5fd143cb49423178a2264fda3a9234cf7106e93/sqlalchemy-2.0.39-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4600c7a659d381146e1160235918826c50c80994e07c5b26946a3e7ec6c99249", size = 3080965 }, { url = "https://files.pythonhosted.org/packages/01/30/862aea79563c12aab01270a53f488df83f92d368c2ef09cd84b462444c65/sqlalchemy-2.0.39-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a06e6c8e31c98ddc770734c63903e39f1947c9e3e5e4bef515c5491b7737dde", size = 3089221 }, { url = "https://files.pythonhosted.org/packages/96/08/de07932454f00fe67b461f28d8d64acfef9e8ac0c28e708f428ae293d0a7/sqlalchemy-2.0.39-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c4c433f78c2908ae352848f56589c02b982d0e741b7905228fad628999799de4", size = 3045067 }, { url = "https://files.pythonhosted.org/packages/e2/a5/4dec5af9bf40d55674c76979842b7ff2172daaa9c4794d698d360ae83302/sqlalchemy-2.0.39-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7bd5c5ee1448b6408734eaa29c0d820d061ae18cb17232ce37848376dcfa3e92", size = 3070581 }, { url = "https://files.pythonhosted.org/packages/48/ea/036811123ff844b6c5d76b121d15dba127d9eecdb66f5f87ad03fa339947/sqlalchemy-2.0.39-cp310-cp310-win32.whl", hash = 
"sha256:87a1ce1f5e5dc4b6f4e0aac34e7bb535cb23bd4f5d9c799ed1633b65c2bcad8c", size = 2079744 }, { url = "https://files.pythonhosted.org/packages/d2/5b/c3f901fd962d16aef730632cf16db5e12110d4834bc7b2315fc57013ac49/sqlalchemy-2.0.39-cp310-cp310-win_amd64.whl", hash = "sha256:871f55e478b5a648c08dd24af44345406d0e636ffe021d64c9b57a4a11518304", size = 2104030 }, { url = "https://files.pythonhosted.org/packages/59/23/d5f6a78cdec8b4c780b58bea2e243b80d324fb4733df5f31df695fe1fb2d/sqlalchemy-2.0.39-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a28f9c238f1e143ff42ab3ba27990dfb964e5d413c0eb001b88794c5c4a528a9", size = 2108611 }, { url = "https://files.pythonhosted.org/packages/c6/25/6bec21ca66c36e3dcebe0141b86787eb17e3a918f4f583741831958775c2/sqlalchemy-2.0.39-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:08cf721bbd4391a0e765fe0fe8816e81d9f43cece54fdb5ac465c56efafecb3d", size = 2099011 }, { url = "https://files.pythonhosted.org/packages/d8/75/e713f1827132fbf6e1ababa39235e486b5c96f911f55430986c17d46e546/sqlalchemy-2.0.39-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7a8517b6d4005facdbd7eb4e8cf54797dbca100a7df459fdaff4c5123265c1cd", size = 3217287 }, { url = "https://files.pythonhosted.org/packages/ff/0a/46f3171f564a19a1daf6e7e0e6c8afc6ecd792f947c6de435519d4d16af3/sqlalchemy-2.0.39-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b2de1523d46e7016afc7e42db239bd41f2163316935de7c84d0e19af7e69538", size = 3217255 }, { url = "https://files.pythonhosted.org/packages/80/57/12f5444c593536c1b61b34144d4ef4d4bac6e1919ce7b0626faecf1a959e/sqlalchemy-2.0.39-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:412c6c126369ddae171c13987b38df5122cb92015cba6f9ee1193b867f3f1530", size = 3154102 }, { url = "https://files.pythonhosted.org/packages/3a/ad/1c681a46e0f10ecb5ca52d753416bffa8e80d62b3545e25f401a9b4082a5/sqlalchemy-2.0.39-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:6b35e07f1d57b79b86a7de8ecdcefb78485dab9851b9638c2c793c50203b2ae8", size = 3174432 }, { url = "https://files.pythonhosted.org/packages/7c/c2/948569a87bfc5c356fa9b66f03f6da6bd6d6c6d68a89d7c556ed87442c06/sqlalchemy-2.0.39-cp311-cp311-win32.whl", hash = "sha256:3eb14ba1a9d07c88669b7faf8f589be67871d6409305e73e036321d89f1d904e", size = 2079289 }, { url = "https://files.pythonhosted.org/packages/cd/57/f0369e53e3031afabf5a7f7b78134037245dfed8b884d4d0e1a9c87b7c0f/sqlalchemy-2.0.39-cp311-cp311-win_amd64.whl", hash = "sha256:78f1b79132a69fe8bd6b5d91ef433c8eb40688ba782b26f8c9f3d2d9ca23626f", size = 2104704 }, { url = "https://files.pythonhosted.org/packages/98/86/b2cb432aeb00a1eda7ed33ce86d943c2452dc1642f3ec51bfe9eaae9604b/sqlalchemy-2.0.39-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c457a38351fb6234781d054260c60e531047e4d07beca1889b558ff73dc2014b", size = 2107210 }, { url = "https://files.pythonhosted.org/packages/bf/b0/b2479edb3419ca763ba1b587161c292d181351a33642985506a530f9162b/sqlalchemy-2.0.39-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:018ee97c558b499b58935c5a152aeabf6d36b3d55d91656abeb6d93d663c0c4c", size = 2097599 }, { url = "https://files.pythonhosted.org/packages/58/5e/c5b792a4abcc71e68d44cb531c4845ac539d558975cc61db1afbc8a73c96/sqlalchemy-2.0.39-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5493a8120d6fc185f60e7254fc056a6742f1db68c0f849cfc9ab46163c21df47", size = 3247012 }, { url = "https://files.pythonhosted.org/packages/e0/a8/055fa8a7c5f85e6123b7e40ec2e9e87d63c566011d599b4a5ab75e033017/sqlalchemy-2.0.39-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2cf5b5ddb69142511d5559c427ff00ec8c0919a1e6c09486e9c32636ea2b9dd", size = 3257851 }, { url = "https://files.pythonhosted.org/packages/f6/40/aec16681e91a22ddf03dbaeb3c659bce96107c5f47d2a7c665eb7f24a014/sqlalchemy-2.0.39-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:9f03143f8f851dd8de6b0c10784363712058f38209e926723c80654c1b40327a", size = 3193155 }, { url = "https://files.pythonhosted.org/packages/21/9d/cef697b137b9eb0b66ab8e9cf193a7c7c048da3b4bb667e5fcea4d90c7a2/sqlalchemy-2.0.39-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:06205eb98cb3dd52133ca6818bf5542397f1dd1b69f7ea28aa84413897380b06", size = 3219770 }, { url = "https://files.pythonhosted.org/packages/57/05/e109ca7dde837d8f2f1b235357e4e607f8af81ad8bc29c230fed8245687d/sqlalchemy-2.0.39-cp312-cp312-win32.whl", hash = "sha256:7f5243357e6da9a90c56282f64b50d29cba2ee1f745381174caacc50d501b109", size = 2077567 }, { url = "https://files.pythonhosted.org/packages/97/c6/25ca068e38c29ed6be0fde2521888f19da923dbd58f5ff16af1b73ec9b58/sqlalchemy-2.0.39-cp312-cp312-win_amd64.whl", hash = "sha256:2ed107331d188a286611cea9022de0afc437dd2d3c168e368169f27aa0f61338", size = 2103136 }, { url = "https://files.pythonhosted.org/packages/32/47/55778362642344324a900b6b2b1b26f7f02225b374eb93adc4a363a2d8ae/sqlalchemy-2.0.39-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:fe193d3ae297c423e0e567e240b4324d6b6c280a048e64c77a3ea6886cc2aa87", size = 2102484 }, { url = "https://files.pythonhosted.org/packages/1b/e1/f5f26f67d095f408138f0fb2c37f827f3d458f2ae51881546045e7e55566/sqlalchemy-2.0.39-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:79f4f502125a41b1b3b34449e747a6abfd52a709d539ea7769101696bdca6716", size = 2092955 }, { url = "https://files.pythonhosted.org/packages/c5/c2/0db0022fc729a54fc7aef90a3457bf20144a681baef82f7357832b44c566/sqlalchemy-2.0.39-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a10ca7f8a1ea0fd5630f02feb055b0f5cdfcd07bb3715fc1b6f8cb72bf114e4", size = 3179367 }, { url = "https://files.pythonhosted.org/packages/33/b7/f33743d87d0b4e7a1f12e1631a4b9a29a8d0d7c0ff9b8c896d0bf897fb60/sqlalchemy-2.0.39-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:e6b0a1c7ed54a5361aaebb910c1fa864bae34273662bb4ff788a527eafd6e14d", size = 3192705 }, { url = "https://files.pythonhosted.org/packages/c9/74/6814f31719109c973ddccc87bdfc2c2a9bc013bec64a375599dc5269a310/sqlalchemy-2.0.39-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:52607d0ebea43cf214e2ee84a6a76bc774176f97c5a774ce33277514875a718e", size = 3125927 }, { url = "https://files.pythonhosted.org/packages/e8/6b/18f476f4baaa9a0e2fbc6808d8f958a5268b637c8eccff497bf96908d528/sqlalchemy-2.0.39-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c08a972cbac2a14810463aec3a47ff218bb00c1a607e6689b531a7c589c50723", size = 3154055 }, { url = "https://files.pythonhosted.org/packages/b4/60/76714cecb528da46bc53a0dd36d1ccef2f74ef25448b630a0a760ad07bdb/sqlalchemy-2.0.39-cp313-cp313-win32.whl", hash = "sha256:23c5aa33c01bd898f879db158537d7e7568b503b15aad60ea0c8da8109adf3e7", size = 2075315 }, { url = "https://files.pythonhosted.org/packages/5b/7c/76828886d913700548bac5851eefa5b2c0251ebc37921fe476b93ce81b50/sqlalchemy-2.0.39-cp313-cp313-win_amd64.whl", hash = "sha256:4dabd775fd66cf17f31f8625fc0e4cfc5765f7982f94dc09b9e5868182cb71c0", size = 2099175 }, { url = "https://files.pythonhosted.org/packages/61/ea/e62e25e9a6dc0bea282bb603ec16a9da0b48602e3088bdfbafbcf2c55e69/sqlalchemy-2.0.39-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2600a50d590c22d99c424c394236899ba72f849a02b10e65b4c70149606408b5", size = 2110305 }, { url = "https://files.pythonhosted.org/packages/0e/90/400c5907d96765ca1180363c198b20a760751727147ea975b9a75d626b67/sqlalchemy-2.0.39-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4eff9c270afd23e2746e921e80182872058a7a592017b2713f33f96cc5f82e32", size = 2101528 }, { url = "https://files.pythonhosted.org/packages/03/ba/871fe6ed43a2b8bb24d8e27d587d22b13cb140f8936d611bb1ac39f00aca/sqlalchemy-2.0.39-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d7332868ce891eda48896131991f7f2be572d65b41a4050957242f8e935d5d7", size = 3095435 }, { url = 
"https://files.pythonhosted.org/packages/d3/e6/8c4fb632b8c679ca071e510082c20a71d97d0b96c65c765aba5120160fb0/sqlalchemy-2.0.39-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:125a7763b263218a80759ad9ae2f3610aaf2c2fbbd78fff088d584edf81f3782", size = 3103219 }, { url = "https://files.pythonhosted.org/packages/7a/37/29f9e2e6c272bd8d34a4f925650007cefceeafce9a314ea64dfb0cbab73c/sqlalchemy-2.0.39-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:04545042969833cb92e13b0a3019549d284fd2423f318b6ba10e7aa687690a3c", size = 3062252 }, { url = "https://files.pythonhosted.org/packages/7b/3f/990750848a2267eb5c556fbf14877f4cb5b76c7ab321038e42f52b14d203/sqlalchemy-2.0.39-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:805cb481474e111ee3687c9047c5f3286e62496f09c0e82e8853338aaaa348f8", size = 3088768 }, { url = "https://files.pythonhosted.org/packages/1f/72/e90913d8ac5a9cb6ef4caf1ea9a06784c9d8e7f0a41e7b3a02bb5530c65f/sqlalchemy-2.0.39-cp39-cp39-win32.whl", hash = "sha256:34d5c49f18778a3665d707e6286545a30339ad545950773d43977e504815fa70", size = 2082136 }, { url = "https://files.pythonhosted.org/packages/93/06/569fee89ed45d7eeaad8f2ea01c824250f6e50cef3fbecb541b770636a71/sqlalchemy-2.0.39-cp39-cp39-win_amd64.whl", hash = "sha256:35e72518615aa5384ef4fae828e3af1b43102458b74a8c481f69af8abf7e802a", size = 2106512 }, { url = "https://files.pythonhosted.org/packages/7b/0f/d69904cb7d17e65c65713303a244ec91fd3c96677baf1d6331457fd47e16/sqlalchemy-2.0.39-py3-none-any.whl", hash = "sha256:a1c6b0a5e3e326a466d809b651c63f278b1256146a377a528b6938a279da334f", size = 1898621 }, ] [[package]] name = "sqlalchemy-cockroachdb" version = "2.0.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "sqlalchemy" }, ] sdist = { url = "https://files.pythonhosted.org/packages/f2/48/300113edbe1e2d2ad74efbc9b2865acb404019f37cc79e9878a47c984a08/sqlalchemy-cockroachdb-2.0.2.tar.gz", hash = "sha256:119756eb905855d6a11345b99cfe853031a3fe598a9c4bf35a8ddac9f89fe8cc", 
size = 27781 } wheels = [ { url = "https://files.pythonhosted.org/packages/db/31/26d9d5f59183f28ea79aaf713a6984bedbb96d1f4b7388ec68b98dbe27ea/sqlalchemy_cockroachdb-2.0.2-py3-none-any.whl", hash = "sha256:0d5d50e805b024cb2ccd85423a5c1a367d1a56a5cd0ea47765233fd47665070d", size = 21432 }, ] [[package]] name = "sqlalchemy-spanner" version = "1.10.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "alembic" }, { name = "google-cloud-spanner" }, { name = "sqlalchemy" }, ] sdist = { url = "https://files.pythonhosted.org/packages/b3/20/4a07595979f17babe4271f3966b80525cd073b3c4aae697014551e625403/sqlalchemy_spanner-1.10.0.tar.gz", hash = "sha256:7aad22f2df33385bd6a1951239cfa05624f3ea71bc8853e1047705e911d86c2a", size = 80561 } wheels = [ { url = "https://files.pythonhosted.org/packages/c7/7c/7a36ef4475d93ec713b225f44a0741c2b3b3b50936278d850aa52e369462/sqlalchemy_spanner-1.10.0-py3-none-any.whl", hash = "sha256:fa3c6271b5ebc62881c3467008c676c2f59895c55d47a3f1f40befee5d011caf", size = 29799 }, ] [[package]] name = "sqlparse" version = "0.5.3" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/e5/40/edede8dd6977b0d3da179a342c198ed100dd2aba4be081861ee5911e4da4/sqlparse-0.5.3.tar.gz", hash = "sha256:09f67787f56a0b16ecdbde1bfc7f5d9c3371ca683cfeaa8e6ff60b4807ec9272", size = 84999 } wheels = [ { url = "https://files.pythonhosted.org/packages/a9/5c/bfd6bd0bf979426d405cc6e71eceb8701b148b16c21d2dc3c261efc61c7b/sqlparse-0.5.3-py3-none-any.whl", hash = "sha256:cf2196ed3418f3ba5de6af7e82c694a9fbdbfecccdfc72e281548517081f16ca", size = 44415 }, ] [[package]] name = "standard-imghdr" version = "3.10.14" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/09/d2/2eb5521072c9598886035c65c023f39f7384bcb73eed70794f469e34efac/standard_imghdr-3.10.14.tar.gz", hash = "sha256:2598fe2e7c540dbda34b233295e10957ab8dc8ac6f3bd9eaa8d38be167232e52", size = 5474 } wheels 
= [ { url = "https://files.pythonhosted.org/packages/fb/d0/9852f70eb01f814843530c053542b72d30e9fbf74da7abb0107e71938389/standard_imghdr-3.10.14-py3-none-any.whl", hash = "sha256:cdf6883163349624dee9a81d2853a20260337c4cd41c04e99c082e01833a08e2", size = 5598 }, ] [[package]] name = "starlette" version = "0.46.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, { name = "typing-extensions", marker = "python_full_version < '3.10'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/04/1b/52b27f2e13ceedc79a908e29eac426a63465a1a01248e5f24aa36a62aeb3/starlette-0.46.1.tar.gz", hash = "sha256:3c88d58ee4bd1bb807c0d1acb381838afc7752f9ddaec81bbe4383611d833230", size = 2580102 } wheels = [ { url = "https://files.pythonhosted.org/packages/a0/4b/528ccf7a982216885a1ff4908e886b8fb5f19862d1962f56a3fce2435a70/starlette-0.46.1-py3-none-any.whl", hash = "sha256:77c74ed9d2720138b25875133f3a2dae6d854af2ec37dceb56aef370c1d8a227", size = 71995 }, ] [[package]] name = "tabulate" version = "0.9.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/ec/fe/802052aecb21e3797b8f7902564ab6ea0d60ff8ca23952079064155d1ae1/tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c", size = 81090 } wheels = [ { url = "https://files.pythonhosted.org/packages/40/44/4a5f08c96eb108af5cb50b41f76142f0afa346dfa99d5296fe7202a11854/tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f", size = 35252 }, ] [[package]] name = "termcolor" version = "2.5.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/37/72/88311445fd44c455c7d553e61f95412cf89054308a1aa2434ab835075fc5/termcolor-2.5.0.tar.gz", hash = "sha256:998d8d27da6d48442e8e1f016119076b690d962507531df4890fcd2db2ef8a6f", size = 13057 } wheels = [ { url = 
"https://files.pythonhosted.org/packages/7f/be/df630c387a0a054815d60be6a97eb4e8f17385d5d6fe660e1c02750062b4/termcolor-2.5.0-py3-none-any.whl", hash = "sha256:37b17b5fc1e604945c2642c872a3764b5d547a48009871aea3edd3afa180afb8", size = 7755 }, ] [[package]] name = "time-machine" version = "2.16.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "python-dateutil" }, ] sdist = { url = "https://files.pythonhosted.org/packages/fb/dd/5022939b9cadefe3af04f4012186c29b8afbe858b1ec2cfa38baeec94dab/time_machine-2.16.0.tar.gz", hash = "sha256:4a99acc273d2f98add23a89b94d4dd9e14969c01214c8514bfa78e4e9364c7e2", size = 24626 } wheels = [ { url = "https://files.pythonhosted.org/packages/79/47/32fdb8e70122edbc8be9db1f032d22b38e3d9ef0bf52c64470d0815cdb62/time_machine-2.16.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:09531af59fdfb39bfd24d28bd1e837eff5a5d98318509a31b6cfd57d27801e52", size = 20493 }, { url = "https://files.pythonhosted.org/packages/b1/e6/f3bc391d5642e69299f2d1f0a46e7f98d1669e82b1e16c8cf3c6e4615059/time_machine-2.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:92d0b0f3c49f34dd76eb462f0afdc61ed1cb318c06c46d03e99b44ebb489bdad", size = 16757 }, { url = "https://files.pythonhosted.org/packages/d4/7f/3a78d50fec64edd9964bf42b66a2e659a9846669ac8f705acc363ee79d3a/time_machine-2.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7c29616e18e2349a8766d5b6817920fc74e39c00fa375d202231e9d525a1b882", size = 34527 }, { url = "https://files.pythonhosted.org/packages/61/00/7cf1324d8f8db8f5dab71c44ed1e9c11c4f1cecca9d4363abf44154aa13b/time_machine-2.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c1ceb6035a64cb00650e3ab203cf3faffac18576a3f3125c24df468b784077c7", size = 32537 }, { url = 
"https://files.pythonhosted.org/packages/8e/c2/edf5ccb2fa529251eb7f1cfb34098c0ef236dbb88f0a6564d06f6f8378f5/time_machine-2.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64c205ea37b8c4ba232645335fc3b75bc2d03ce30f0a34649e36cae85652ee96", size = 34353 }, { url = "https://files.pythonhosted.org/packages/a9/1e/178b9e3d0054300a4dd0485747c89359e5f719f090ae5165c88618793700/time_machine-2.16.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:dfe92412bd11104c4f0fb2da68653e6c45b41f7217319a83a8b66ed4f20148b3", size = 34045 }, { url = "https://files.pythonhosted.org/packages/e5/4d/068ad9660f00f88a54f3ff7e9d423ed5c08a5f8147518f6c66fd0393dde7/time_machine-2.16.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:d5fe7a6284e3dce87ae13a25029c53542dd27a28d151f3ef362ec4dd9c3e45fd", size = 32356 }, { url = "https://files.pythonhosted.org/packages/a5/25/c0f26294808946ec5b665f17a0072049a3f9e2468abc18aa8fe22580b4cf/time_machine-2.16.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c0fca3025266d88d1b48be162a43b7c2d91c81cc5b3bee9f01194678ffb9969a", size = 33737 }, { url = "https://files.pythonhosted.org/packages/8b/d4/ae909a269828eaa7672e1201403976e794ea679ae7ba04fe0c0c0c65c2b6/time_machine-2.16.0-cp310-cp310-win32.whl", hash = "sha256:4149e17018af07a5756a1df84aea71e6e178598c358c860c6bfec42170fa7970", size = 19133 }, { url = "https://files.pythonhosted.org/packages/7e/e7/5946d62d49e79b97c6772fe2918eccbd069d74effa8d50bdca4056502aeb/time_machine-2.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:01bc257e9418980a4922de94775be42a966e1a082fb01a1635917f9afc7b84ca", size = 19995 }, { url = "https://files.pythonhosted.org/packages/54/cb/6507c6594f086bc955ff200cc4fd415d2ab229371ca3ba8fc3d27429a9cc/time_machine-2.16.0-cp310-cp310-win_arm64.whl", hash = "sha256:6895e3e84119594ab12847c928f619d40ae9cedd0755515dc154a5b5dc6edd9f", size = 18109 }, { url = 
"https://files.pythonhosted.org/packages/38/7b/34aad93f75f86503dd1fa53bc120d8129fe4de83aef58ffa78c62b044ef9/time_machine-2.16.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8f936566ef9f09136a3d5db305961ef6d897b76b240c9ff4199144aed6dd4fe5", size = 20169 }, { url = "https://files.pythonhosted.org/packages/68/cb/7d020d5c05d0460a4a96232b0777882ef989c1e6144d11ba984c4b0b4d1a/time_machine-2.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5886e23ede3478ca2a3e0a641f5d09dd784dfa9e48c96e8e5e31fc4fe77b6dc0", size = 16614 }, { url = "https://files.pythonhosted.org/packages/0d/24/ce1ff76c9a4f3be88c2b947f2411a5a8019390734597d3106a151f8a9416/time_machine-2.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c76caf539fa4941e1817b7c482c87c65c52a1903fea761e84525955c6106fafb", size = 32507 }, { url = "https://files.pythonhosted.org/packages/08/d7/ba1135587bd2ed105e59ed7e05969c913277d110fecc0ed871006ea3f763/time_machine-2.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:298aa423e07c8b21b991782f01d7749c871c792319c2af3e9755f9ab49033212", size = 30627 }, { url = "https://files.pythonhosted.org/packages/da/c6/f490aaddc80c54238f4b8fe97870bbfe0d2c70fe4a57269badc94f5f38a6/time_machine-2.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3391ae9c484736850bb44ef125cbad52fe2d1b69e42c95dc88c43af8ead2cc7", size = 32362 }, { url = "https://files.pythonhosted.org/packages/b1/f7/2522ae1c1995a39d6d8b7ee7efed47ec8bd7ff3240fdb2662a8b7e11b84a/time_machine-2.16.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:503e7ff507c2089699d91885fc5b9c8ff16774a7b6aff48b4dcee0c0a0685b61", size = 32188 }, { url = "https://files.pythonhosted.org/packages/e9/53/b1ccb55f39e7e62660f852d7aedef438d2872ea9c73f64be46d0d3b3f3d7/time_machine-2.16.0-cp311-cp311-musllinux_1_2_i686.whl", hash = 
"sha256:eee7b0fc4fbab2c6585ea17606c6548be83919c70deea0865409fe9fc2d8cdce", size = 30600 }, { url = "https://files.pythonhosted.org/packages/19/1f/37a5a9333a2da35b0fc43e8ac693b82dd5492892131bc3cc0c8f5835af94/time_machine-2.16.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9db5e5b3ccdadaafa5730c2f9db44c38b013234c9ad01f87738907e19bdba268", size = 31896 }, { url = "https://files.pythonhosted.org/packages/fc/97/e1a8bd64e5432adf47859cb63847b4472efc644b508602141c60ccf52112/time_machine-2.16.0-cp311-cp311-win32.whl", hash = "sha256:2552f0767bc10c9d668f108fef9b487809cdeb772439ce932e74136365c69baf", size = 19030 }, { url = "https://files.pythonhosted.org/packages/34/c9/f4764e447aa9da4031c89da60fa69f4f73fd45571415788c298cbd4620e9/time_machine-2.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:12474fcdbc475aa6fe5275fe7224e685c5b9777f5939647f35980e9614ae7558", size = 19924 }, { url = "https://files.pythonhosted.org/packages/8a/c0/788500d33656a044e3289b814106c2277209ac73316c00b9668012ce6027/time_machine-2.16.0-cp311-cp311-win_arm64.whl", hash = "sha256:ac2df0fa564356384515ed62cb6679f33f1f529435b16b0ec0f88414635dbe39", size = 17993 }, { url = "https://files.pythonhosted.org/packages/4a/f4/603a84e7ae6427a53953db9f61b689dc6adf233e03c5f5ca907a901452fd/time_machine-2.16.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:84788f4d62a8b1bf5e499bb9b0e23ceceea21c415ad6030be6267ce3d639842f", size = 20155 }, { url = "https://files.pythonhosted.org/packages/d8/94/dbe69aecb4b84be52d34814e63176c5ca61f38ee9e6ecda11104653405b5/time_machine-2.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:15ec236b6571730236a193d9d6c11d472432fc6ab54e85eac1c16d98ddcd71bf", size = 16640 }, { url = "https://files.pythonhosted.org/packages/da/13/27f11be25d7bd298e033b9da93217e5b68309bf724b6e494cdadb471d00d/time_machine-2.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cedc989717c8b44a3881ac3d68ab5a95820448796c550de6a2149ed1525157f0", size = 33721 }, { url = 
"https://files.pythonhosted.org/packages/e6/9d/70e4640fed1fd8122204ae825c688d0ef8c04f515ec6bf3c5f3086d6510e/time_machine-2.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9d26d79de1c63a8c6586c75967e09b0ff306aa7e944a1eaddb74595c9b1839ca", size = 31646 }, { url = "https://files.pythonhosted.org/packages/a1/cb/93bc0e51bea4e171a85151dbba3c3b3f612b50b953cd3076f5b4f0db9e14/time_machine-2.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:317b68b56a9c3731e0cf8886e0f94230727159e375988b36c60edce0ddbcb44a", size = 33403 }, { url = "https://files.pythonhosted.org/packages/89/71/2c6a63ad4fbce3d62d46bbd9ac4433f30bade7f25978ce00815b905bcfcf/time_machine-2.16.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:43e1e18279759897be3293a255d53e6b1cb0364b69d9591d0b80c51e461c94b0", size = 33327 }, { url = "https://files.pythonhosted.org/packages/68/4e/205c2b26763b8817cd6b8868242843800a1fbf275f2af35f5ba35ff2b01a/time_machine-2.16.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e43adb22def972a29d2b147999b56897116085777a0fea182fd93ee45730611e", size = 31454 }, { url = "https://files.pythonhosted.org/packages/d7/95/44c1aa3994919f93534244c40cfd2fb9416d7686dc0c8b9b262c751b5118/time_machine-2.16.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0c766bea27a0600e36806d628ebc4b47178b12fcdfb6c24dc0a566a9c06bfe7f", size = 32972 }, { url = "https://files.pythonhosted.org/packages/d4/ee/75243df9c7cf30f108758e887141a58e6544baaa46e2e647b9ccc56db819/time_machine-2.16.0-cp312-cp312-win32.whl", hash = "sha256:6dae82ab647d107817e013db82223e20a9853fa88543fec853ae326382d03c2e", size = 19078 }, { url = "https://files.pythonhosted.org/packages/d4/7c/d4e67cc031f9653c92167ccf87d241e3208653d191c96ac79281c273ab92/time_machine-2.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:265462c77dc9576267c3c7f20707780a171a9fdbac93ac22e608c309efd68c33", size = 19923 }, { url = 
"https://files.pythonhosted.org/packages/aa/b6/7047226fcb9afefe47fc80f605530535bf71ad99b6797f057abbfa4cd9a5/time_machine-2.16.0-cp312-cp312-win_arm64.whl", hash = "sha256:ef768e14768eebe3bb1196c0dece8e14c1c6991605721214a0c3c68cf77eb216", size = 18003 }, { url = "https://files.pythonhosted.org/packages/a6/18/3087d0eb185cedbc82385f46bf16032ec7102a0e070205a2c88c4ecf9952/time_machine-2.16.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:7751bf745d54e9e8b358c0afa332815da9b8a6194b26d0fd62876ab6c4d5c9c0", size = 20209 }, { url = "https://files.pythonhosted.org/packages/03/a3/fcc3eaf69390402ecf491d718e533b6d0e06d944d77fc8d87be3a2839102/time_machine-2.16.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:1784edf173ca840ba154de6eed000b5727f65ab92972c2f88cec5c4d6349c5f2", size = 16681 }, { url = "https://files.pythonhosted.org/packages/a2/96/8b76d264014bf9dc21873218de50d67223c71736f87fe6c65e582f7c29ac/time_machine-2.16.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f5876a5682ce1f517e55d7ace2383432627889f6f7e338b961f99d684fd9e8d", size = 33768 }, { url = "https://files.pythonhosted.org/packages/5c/13/59ae8259be02b6c657ef6e3b6952bf274b43849f6f35cc61a576c68ce301/time_machine-2.16.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:806672529a2e255cd901f244c9033767dc1fa53466d0d3e3e49565a1572a64fe", size = 31685 }, { url = "https://files.pythonhosted.org/packages/3e/c1/9f142beb4d373a2a01ebb58d5117289315baa5131d880ec804db49e94bf7/time_machine-2.16.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:667b150fedb54acdca2a4bea5bf6da837b43e6dd12857301b48191f8803ba93f", size = 33447 }, { url = "https://files.pythonhosted.org/packages/95/f7/ed9ecd93c2d38dca77d0a28e070020f3ce0fb23e0d4a6edb14bcfffa5526/time_machine-2.16.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:da3ae1028af240c0c46c79adf9c1acffecc6ed1701f2863b8132f5ceae6ae4b5", size = 33408 }, { url = "https://files.pythonhosted.org/packages/91/40/d0d274d70fa2c4cad531745deb8c81346365beb0a2736be05a3acde8b94a/time_machine-2.16.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:520a814ea1b2706c89ab260a54023033d3015abef25c77873b83e3d7c1fafbb2", size = 31526 }, { url = "https://files.pythonhosted.org/packages/1d/ba/a27cdbb324d9a6d779cde0d514d47b696b5a6a653705d4b511fd65ef1514/time_machine-2.16.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8243664438bb468408b29c6865958662d75e51f79c91842d2794fa22629eb697", size = 33042 }, { url = "https://files.pythonhosted.org/packages/72/63/64e9156c9e38c18720d0cc41378168635241de44013ffe3dd5b099447eb0/time_machine-2.16.0-cp313-cp313-win32.whl", hash = "sha256:32d445ce20d25c60ab92153c073942b0bac9815bfbfd152ce3dcc225d15ce988", size = 19108 }, { url = "https://files.pythonhosted.org/packages/3d/40/27f5738fbd50b78dcc0682c14417eac5a49ccf430525dd0c5a058be125a2/time_machine-2.16.0-cp313-cp313-win_amd64.whl", hash = "sha256:f6927dda86425f97ffda36131f297b1a601c64a6ee6838bfa0e6d3149c2f0d9f", size = 19935 }, { url = "https://files.pythonhosted.org/packages/35/75/c4d8b2f0fe7dac22854d88a9c509d428e78ac4bf284bc54cfe83f75cc13b/time_machine-2.16.0-cp313-cp313-win_arm64.whl", hash = "sha256:4d3843143c46dddca6491a954bbd0abfd435681512ac343169560e9bab504129", size = 18047 }, { url = "https://files.pythonhosted.org/packages/df/aa/6d4925b22f3f5f53e2bcb12923f2463cac8c7c2360ac55196d51546787a5/time_machine-2.16.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:23c5283c01b4f80b7dfbc88f3d8088c06c301b94b7c35366be498c2d7b308549", size = 20490 }, { url = "https://files.pythonhosted.org/packages/b9/58/2bd28329c3c47de58c9234d177e809bed29d9e54729da79b5d0d8bc47e5e/time_machine-2.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ac95ae4529d7d85b251f9cf0f961a8a408ba285875811268f469d824a3b0b15a", size = 16753 }, { url = 
"https://files.pythonhosted.org/packages/c3/47/c8d388d6e061be146cf357bce727221f1d1d60dff2a36b880cb26e1a3199/time_machine-2.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfb76674db946a74f0ca6e3b81caa8265e35dafe9b7005c7d2b8dd5bbd3825cf", size = 34228 }, { url = "https://files.pythonhosted.org/packages/d2/be/b0fb8693f2e9dfb5b50c5a89bb1d6ff8d4705075722b7987c0f1e18c6694/time_machine-2.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d0b6ff3ccde9b16bbc694a2b5facf2d8890554f3135ff626ed1429e270e3cc4f", size = 32250 }, { url = "https://files.pythonhosted.org/packages/6a/bc/e827239b0020195f4e2fa4e7fdf248838bb49230be2bf374181fac892a92/time_machine-2.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1906ec6e26e6b803cd6aab28d420c87285b9c209ff2a69f82d12f82278f78bb", size = 34066 }, { url = "https://files.pythonhosted.org/packages/39/a9/c962c702b94ca4c7fd8264bc9baed431bd92d4ee2aa698dd92ff6e864164/time_machine-2.16.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:e46bd09c944ec7a20868abd2b83d7d7abdaf427775e9df3089b9226a122b340f", size = 33799 }, { url = "https://files.pythonhosted.org/packages/34/5f/91df8e8465a2d5a168c25eebf5a62d813f30e01909c32749dbbd442b66db/time_machine-2.16.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:cac3e2b4101db296b150cb665e5461c03621e6ede6117fc9d5048c0ec96d6e7c", size = 32076 }, { url = "https://files.pythonhosted.org/packages/04/45/bcc3304b545a15f614ecb12b277ec8d93fe0f67fa74e9e4b856e4ecba4c6/time_machine-2.16.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1e0dcc97cfec12ae306e3036746e7631cc7ef65c31889f7264c25217d4938367", size = 33460 }, { url = "https://files.pythonhosted.org/packages/96/2c/9f14cd6fb912995e9984e67b8160071e8950cd7b0a787796d58b45324269/time_machine-2.16.0-cp39-cp39-win32.whl", hash = "sha256:c761d32d0c5d1fe5b71ac502e1bd5edec4598a7fc6f607b9b906b98e911148ce", size 
= 19133 }, { url = "https://files.pythonhosted.org/packages/63/0b/95bfa4a2b3a893d91de8304d98edbeb4e29b864977ef36929aa6eda1357f/time_machine-2.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:ddfab1c622342f2945942c5c2d6be327656980e8f2d2b2ce0c022d0aa3711361", size = 19989 }, { url = "https://files.pythonhosted.org/packages/30/36/470c7d77d3a5c7e6a5e29ac40495b8dd3b66f3058ab8bdc823706fec1353/time_machine-2.16.0-cp39-cp39-win_arm64.whl", hash = "sha256:2e08a4015d5d1aab2cb46c780e85b33efcd5cbe880bb363b282a6972e617b8bb", size = 18106 }, ] [[package]] name = "tomli" version = "2.2.1" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/18/87/302344fed471e44a87289cf4967697d07e532f2421fdaf868a303cbae4ff/tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff", size = 17175 } wheels = [ { url = "https://files.pythonhosted.org/packages/43/ca/75707e6efa2b37c77dadb324ae7d9571cb424e61ea73fad7c56c2d14527f/tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249", size = 131077 }, { url = "https://files.pythonhosted.org/packages/c7/16/51ae563a8615d472fdbffc43a3f3d46588c264ac4f024f63f01283becfbb/tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6", size = 123429 }, { url = "https://files.pythonhosted.org/packages/f1/dd/4f6cd1e7b160041db83c694abc78e100473c15d54620083dbd5aae7b990e/tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a", size = 226067 }, { url = "https://files.pythonhosted.org/packages/a9/6b/c54ede5dc70d648cc6361eaf429304b02f2871a345bbdd51e993d6cdf550/tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee", size = 236030 }, { url = 
"https://files.pythonhosted.org/packages/1f/47/999514fa49cfaf7a92c805a86c3c43f4215621855d151b61c602abb38091/tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e", size = 240898 }, { url = "https://files.pythonhosted.org/packages/73/41/0a01279a7ae09ee1573b423318e7934674ce06eb33f50936655071d81a24/tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4", size = 229894 }, { url = "https://files.pythonhosted.org/packages/55/18/5d8bc5b0a0362311ce4d18830a5d28943667599a60d20118074ea1b01bb7/tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106", size = 245319 }, { url = "https://files.pythonhosted.org/packages/92/a3/7ade0576d17f3cdf5ff44d61390d4b3febb8a9fc2b480c75c47ea048c646/tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8", size = 238273 }, { url = "https://files.pythonhosted.org/packages/72/6f/fa64ef058ac1446a1e51110c375339b3ec6be245af9d14c87c4a6412dd32/tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff", size = 98310 }, { url = "https://files.pythonhosted.org/packages/6a/1c/4a2dcde4a51b81be3530565e92eda625d94dafb46dbeb15069df4caffc34/tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b", size = 108309 }, { url = "https://files.pythonhosted.org/packages/52/e1/f8af4c2fcde17500422858155aeb0d7e93477a0d59a98e56cbfe75070fd0/tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea", size = 132762 }, { url = 
"https://files.pythonhosted.org/packages/03/b8/152c68bb84fc00396b83e7bbddd5ec0bd3dd409db4195e2a9b3e398ad2e3/tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8", size = 123453 }, { url = "https://files.pythonhosted.org/packages/c8/d6/fc9267af9166f79ac528ff7e8c55c8181ded34eb4b0e93daa767b8841573/tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192", size = 233486 }, { url = "https://files.pythonhosted.org/packages/5c/51/51c3f2884d7bab89af25f678447ea7d297b53b5a3b5730a7cb2ef6069f07/tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222", size = 242349 }, { url = "https://files.pythonhosted.org/packages/ab/df/bfa89627d13a5cc22402e441e8a931ef2108403db390ff3345c05253935e/tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77", size = 252159 }, { url = "https://files.pythonhosted.org/packages/9e/6e/fa2b916dced65763a5168c6ccb91066f7639bdc88b48adda990db10c8c0b/tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6", size = 237243 }, { url = "https://files.pythonhosted.org/packages/b4/04/885d3b1f650e1153cbb93a6a9782c58a972b94ea4483ae4ac5cedd5e4a09/tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd", size = 259645 }, { url = "https://files.pythonhosted.org/packages/9c/de/6b432d66e986e501586da298e28ebeefd3edc2c780f3ad73d22566034239/tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e", size = 244584 }, { url = 
"https://files.pythonhosted.org/packages/1c/9a/47c0449b98e6e7d1be6cbac02f93dd79003234ddc4aaab6ba07a9a7482e2/tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98", size = 98875 }, { url = "https://files.pythonhosted.org/packages/ef/60/9b9638f081c6f1261e2688bd487625cd1e660d0a85bd469e91d8db969734/tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4", size = 109418 }, { url = "https://files.pythonhosted.org/packages/04/90/2ee5f2e0362cb8a0b6499dc44f4d7d48f8fff06d28ba46e6f1eaa61a1388/tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7", size = 132708 }, { url = "https://files.pythonhosted.org/packages/c0/ec/46b4108816de6b385141f082ba99e315501ccd0a2ea23db4a100dd3990ea/tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c", size = 123582 }, { url = "https://files.pythonhosted.org/packages/a0/bd/b470466d0137b37b68d24556c38a0cc819e8febe392d5b199dcd7f578365/tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13", size = 232543 }, { url = "https://files.pythonhosted.org/packages/d9/e5/82e80ff3b751373f7cead2815bcbe2d51c895b3c990686741a8e56ec42ab/tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281", size = 241691 }, { url = "https://files.pythonhosted.org/packages/05/7e/2a110bc2713557d6a1bfb06af23dd01e7dde52b6ee7dadc589868f9abfac/tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272", size = 251170 }, { url = 
"https://files.pythonhosted.org/packages/64/7b/22d713946efe00e0adbcdfd6d1aa119ae03fd0b60ebed51ebb3fa9f5a2e5/tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140", size = 236530 }, { url = "https://files.pythonhosted.org/packages/38/31/3a76f67da4b0cf37b742ca76beaf819dca0ebef26d78fc794a576e08accf/tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2", size = 258666 }, { url = "https://files.pythonhosted.org/packages/07/10/5af1293da642aded87e8a988753945d0cf7e00a9452d3911dd3bb354c9e2/tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744", size = 243954 }, { url = "https://files.pythonhosted.org/packages/5b/b9/1ed31d167be802da0fc95020d04cd27b7d7065cc6fbefdd2f9186f60d7bd/tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec", size = 98724 }, { url = "https://files.pythonhosted.org/packages/c7/32/b0963458706accd9afcfeb867c0f9175a741bf7b19cd424230714d722198/tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69", size = 109383 }, { url = "https://files.pythonhosted.org/packages/6e/c2/61d3e0f47e2b74ef40a68b9e6ad5984f6241a942f7cd3bbfbdbd03861ea9/tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc", size = 14257 }, ] [[package]] name = "tomlkit" version = "0.13.2" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/b1/09/a439bec5888f00a54b8b9f05fa94d7f901d6735ef4e55dcec9bc37b5d8fa/tomlkit-0.13.2.tar.gz", hash = "sha256:fff5fe59a87295b278abd31bec92c15d9bc4a06885ab12bcea52c71119392e79", size = 192885 } wheels = [ { url = 
"https://files.pythonhosted.org/packages/f9/b6/a447b5e4ec71e13871be01ba81f5dfc9d0af7e473da256ff46bc0e24026f/tomlkit-0.13.2-py3-none-any.whl", hash = "sha256:7a974427f6e119197f670fbbbeae7bef749a6c14e793db934baefc1b5f03efde", size = 37955 }, ] [[package]] name = "tracerite" version = "1.1.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "html5tagger" }, ] sdist = { url = "https://files.pythonhosted.org/packages/b3/82/9372199dd72b02e8f3cf3143096ed453f010668a8e2cbe9cd59b116da3de/tracerite-1.1.1.tar.gz", hash = "sha256:6400a35a187747189e4bb8d4a8e471bd86d14dbdcc94bcad23f4eda023f41356", size = 269462 } wheels = [ { url = "https://files.pythonhosted.org/packages/4e/71/127927fdd41dd577fd946c319cf9c012366f3ff9f048d0b0689dc72819ef/tracerite-1.1.1-py3-none-any.whl", hash = "sha256:3a787a9ecb1a136ea9ce17e6328e414ec414a4f644130af4e1e330bec2dece29", size = 12301 }, ] [[package]] name = "typer" version = "0.15.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "click" }, { name = "rich" }, { name = "shellingham" }, { name = "typing-extensions" }, ] sdist = { url = "https://files.pythonhosted.org/packages/8b/6f/3991f0f1c7fcb2df31aef28e0594d8d54b05393a0e4e34c65e475c2a5d41/typer-0.15.2.tar.gz", hash = "sha256:ab2fab47533a813c49fe1f16b1a370fd5819099c00b119e0633df65f22144ba5", size = 100711 } wheels = [ { url = "https://files.pythonhosted.org/packages/7f/fc/5b29fea8cee020515ca82cc68e3b8e1e34bb19a3535ad854cac9257b414c/typer-0.15.2-py3-none-any.whl", hash = "sha256:46a499c6107d645a9c13f7ee46c5d5096cae6f5fc57dd11eccbbb9ae3e44ddfc", size = 45061 }, ] [[package]] name = "types-aiofiles" version = "24.1.0.20241221" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/ab/5e/f984b9ddc7eecdf31e683e692d933f3672276ed95aad6adb9aea9ecbdc29/types_aiofiles-24.1.0.20241221.tar.gz", hash = "sha256:c40f6c290b0af9e902f7f3fa91213cf5bb67f37086fb21dc0ff458253586ad55", size = 14081 } wheels = [ { 
url = "https://files.pythonhosted.org/packages/ff/da/77902220df98ce920444cf3611fa0b1cf0dc2cfa5a137c55e93829aa458e/types_aiofiles-24.1.0.20241221-py3-none-any.whl", hash = "sha256:11d4e102af0627c02e8c1d17736caa3c39de1058bea37e2f4de6ef11a5b652ab", size = 14162 }, ] [[package]] name = "types-colorama" version = "0.4.15.20240311" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/59/73/0fb0b9fe4964b45b2a06ed41b60c352752626db46aa0fb70a49a9e283a75/types-colorama-0.4.15.20240311.tar.gz", hash = "sha256:a28e7f98d17d2b14fb9565d32388e419f4108f557a7d939a66319969b2b99c7a", size = 5608 } wheels = [ { url = "https://files.pythonhosted.org/packages/b7/83/6944b4fa01efb2e63ac62b791a8ddf0fee358f93be9f64b8f152648ad9d3/types_colorama-0.4.15.20240311-py3-none-any.whl", hash = "sha256:6391de60ddc0db3f147e31ecb230006a6823e81e380862ffca1e4695c13a0b8e", size = 5840 }, ] [[package]] name = "types-docutils" version = "0.21.0.20241128" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/dd/df/64e7ab01a4fc5ce46895dc94e31cffc8b8087c8d91ee54c45ac2d8d82445/types_docutils-0.21.0.20241128.tar.gz", hash = "sha256:4dd059805b83ac6ec5a223699195c4e9eeb0446a4f7f2aeff1759a4a7cc17473", size = 26739 } wheels = [ { url = "https://files.pythonhosted.org/packages/59/b6/10ba95739f2cbb9c5bd2f6568148d62b468afe01a94c633e8892a2936d8a/types_docutils-0.21.0.20241128-py3-none-any.whl", hash = "sha256:e0409204009639e9b0bf4521eeabe58b5e574ce9c0db08421c2ac26c32be0039", size = 34677 }, ] [[package]] name = "types-pillow" version = "10.2.0.20240822" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/18/4a/4495264dddaa600d65d68bcedb64dcccf9d9da61adff51f7d2ffd8e4c9ce/types-Pillow-10.2.0.20240822.tar.gz", hash = "sha256:559fb52a2ef991c326e4a0d20accb3bb63a7ba8d40eb493e0ecb0310ba52f0d3", size = 35389 } wheels = [ { url = 
"https://files.pythonhosted.org/packages/66/23/e81a5354859831fcf54d488d33b80ba6133ea84f874a9c0ec40a4881e133/types_Pillow-10.2.0.20240822-py3-none-any.whl", hash = "sha256:d9dab025aba07aeb12fd50a6799d4eac52a9603488eca09d7662543983f16c5d", size = 54354 }, ] [[package]] name = "types-psycopg2" version = "2.9.21.20250318" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/09/29/9e86192ffa0a7ffc48d222f510026ec92aa93c7321ee24128480553661ec/types_psycopg2-2.9.21.20250318.tar.gz", hash = "sha256:eb6eac5bfb16adfd5f16b818918b9e26a40ede147e0f2bbffdf53a6ef7025a87", size = 26614 } wheels = [ { url = "https://files.pythonhosted.org/packages/a7/9c/34da1d5c2fe53c91f3382f45e18c58141cebef38e7204f676a93d1af6a1c/types_psycopg2-2.9.21.20250318-py3-none-any.whl", hash = "sha256:7296d111ad950bbd2fc979a1ab0572acae69047f922280e77db657c00d2c79c0", size = 24939 }, ] [[package]] name = "types-pygments" version = "2.19.0.20250305" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "types-docutils" }, ] sdist = { url = "https://files.pythonhosted.org/packages/e6/be/88f777c75022b111f9e9fe4cdb430bf92892fe90188b0fd037601ded2ea1/types_pygments-2.19.0.20250305.tar.gz", hash = "sha256:044c50e80ecd4128c00a7268f20355e16f5c55466d3d49dfda09be920af40b4b", size = 18521 } wheels = [ { url = "https://files.pythonhosted.org/packages/6f/c6/b6d3ad345b76425e46d25a2da1758603d80c3a59405bdcbbbaa86d8c8070/types_pygments-2.19.0.20250305-py3-none-any.whl", hash = "sha256:ca88aae5ec426f9b107c0f7adc36dc096d2882d930a49f679eaf4b8b643db35d", size = 25638 }, ] [[package]] name = "types-pymysql" version = "1.1.0.20241103" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/b3/ac/5a23decbcf53893df11636b7d61cc000a97b0ed45e09cee94d6c75f159ec/types-PyMySQL-1.1.0.20241103.tar.gz", hash = "sha256:a7628542919a0ba87625fb79eefb2a2de45fb4ad32afe6e561e8f2f27fb58b8c", size = 14987 } wheels = [ { url = 
"https://files.pythonhosted.org/packages/3e/04/d02323dd4dfd6e0af4ecbb88a00215c37aa79894a2d158390700c84c8597/types_PyMySQL-1.1.0.20241103-py3-none-any.whl", hash = "sha256:1a32efd8a74b5bf74c4de92a86c1cc6edaf3802dcfd5546635ab501eb5e3c096", size = 15610 }, ] [[package]] name = "types-python-dateutil" version = "2.9.0.20241206" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/a9/60/47d92293d9bc521cd2301e423a358abfac0ad409b3a1606d8fbae1321961/types_python_dateutil-2.9.0.20241206.tar.gz", hash = "sha256:18f493414c26ffba692a72369fea7a154c502646301ebfe3d56a04b3767284cb", size = 13802 } wheels = [ { url = "https://files.pythonhosted.org/packages/0f/b3/ca41df24db5eb99b00d97f89d7674a90cb6b3134c52fb8121b6d8d30f15c/types_python_dateutil-2.9.0.20241206-py3-none-any.whl", hash = "sha256:e248a4bc70a486d3e3ec84d0dc30eec3a5f979d6e7ee4123ae043eedbb987f53", size = 14384 }, ] [[package]] name = "types-pytz" version = "2025.1.0.20250318" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/d2/ab/3910b323908a848e7852c3d0b2bdf0f027cc293dc1650c8911b16cd32227/types_pytz-2025.1.0.20250318.tar.gz", hash = "sha256:97e0e35184c6fe14e3a5014512057f2c57bb0c6582d63c1cfcc4809f82180449", size = 10450 } wheels = [ { url = "https://files.pythonhosted.org/packages/c9/7c/c94c1c3c8f4d70ca205ac94be714cfbe723fa93fdb9a41ca608ddd21de39/types_pytz-2025.1.0.20250318-py3-none-any.whl", hash = "sha256:04dba4907c5415777083f9548693c6d9f80ec53adcaff55a38526a3f8ddcae04", size = 10063 }, ] [[package]] name = "types-pyyaml" version = "6.0.12.20241230" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/9a/f9/4d566925bcf9396136c0a2e5dc7e230ff08d86fa011a69888dd184469d80/types_pyyaml-6.0.12.20241230.tar.gz", hash = "sha256:7f07622dbd34bb9c8b264fe860a17e0efcad00d50b5f27e93984909d9363498c", size = 17078 } wheels = [ { url = 
"https://files.pythonhosted.org/packages/e8/c1/48474fbead512b70ccdb4f81ba5eb4a58f69d100ba19f17c92c0c4f50ae6/types_PyYAML-6.0.12.20241230-py3-none-any.whl", hash = "sha256:fa4d32565219b68e6dee5f67534c722e53c00d1cfc09c435ef04d7353e1e96e6", size = 20029 }, ] [[package]] name = "types-ujson" version = "5.10.0.20240515" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/12/49/abb4bcb9f2258f785edbf236b517c3e7ba8a503a8cbce6b5895930586cc0/types-ujson-5.10.0.20240515.tar.gz", hash = "sha256:ceae7127f0dafe4af5dd0ecf98ee13e9d75951ef963b5c5a9b7ea92e0d71f0d7", size = 3571 } wheels = [ { url = "https://files.pythonhosted.org/packages/3f/1f/9d018cee3d09ab44a5211f0b5ed9b0422ad9a8c226bf3967f5884498d8f0/types_ujson-5.10.0.20240515-py3-none-any.whl", hash = "sha256:02bafc36b3a93d2511757a64ff88bd505e0a57fba08183a9150fbcfcb2015310", size = 2757 }, ] [[package]] name = "typing-extensions" version = "4.12.2" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/df/db/f35a00659bc03fec321ba8bce9420de607a1d37f8342eee1863174c69557/typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8", size = 85321 } wheels = [ { url = "https://files.pythonhosted.org/packages/26/9f/ad63fc0248c5379346306f8668cda6e2e2e9c95e01216d2b8ffd9ff037d0/typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d", size = 37438 }, ] [[package]] name = "tzdata" version = "2025.1" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/43/0f/fa4723f22942480be4ca9527bbde8d43f6c3f2fe8412f00e7f5f6746bc8b/tzdata-2025.1.tar.gz", hash = "sha256:24894909e88cdb28bd1636c6887801df64cb485bd593f2fd83ef29075a81d694", size = 194950 } wheels = [ { url = 
"https://files.pythonhosted.org/packages/0f/dd/84f10e23edd882c6f968c21c2434fe67bd4a528967067515feca9e611e5e/tzdata-2025.1-py2.py3-none-any.whl", hash = "sha256:7e127113816800496f027041c570f50bcd464a020098a3b6b199517772303639", size = 346762 }, ] [[package]] name = "ujson" version = "5.10.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/f0/00/3110fd566786bfa542adb7932d62035e0c0ef662a8ff6544b6643b3d6fd7/ujson-5.10.0.tar.gz", hash = "sha256:b3cd8f3c5d8c7738257f1018880444f7b7d9b66232c64649f562d7ba86ad4bc1", size = 7154885 } wheels = [ { url = "https://files.pythonhosted.org/packages/7d/91/91678e49a9194f527e60115db84368c237ac7824992224fac47dcb23a5c6/ujson-5.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2601aa9ecdbee1118a1c2065323bda35e2c5a2cf0797ef4522d485f9d3ef65bd", size = 55354 }, { url = "https://files.pythonhosted.org/packages/de/2f/1ed8c9b782fa4f44c26c1c4ec686d728a4865479da5712955daeef0b2e7b/ujson-5.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:348898dd702fc1c4f1051bc3aacbf894caa0927fe2c53e68679c073375f732cf", size = 51808 }, { url = "https://files.pythonhosted.org/packages/51/bf/a3a38b2912288143e8e613c6c4c3f798b5e4e98c542deabf94c60237235f/ujson-5.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22cffecf73391e8abd65ef5f4e4dd523162a3399d5e84faa6aebbf9583df86d6", size = 51995 }, { url = "https://files.pythonhosted.org/packages/b4/6d/0df8f7a6f1944ba619d93025ce468c9252aa10799d7140e07014dfc1a16c/ujson-5.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26b0e2d2366543c1bb4fbd457446f00b0187a2bddf93148ac2da07a53fe51569", size = 53566 }, { url = "https://files.pythonhosted.org/packages/d5/ec/370741e5e30d5f7dc7f31a478d5bec7537ce6bfb7f85e72acefbe09aa2b2/ujson-5.10.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:caf270c6dba1be7a41125cd1e4fc7ba384bf564650beef0df2dd21a00b7f5770", size = 58499 }, { url = "https://files.pythonhosted.org/packages/fe/29/72b33a88f7fae3c398f9ba3e74dc2e5875989b25f1c1f75489c048a2cf4e/ujson-5.10.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a245d59f2ffe750446292b0094244df163c3dc96b3ce152a2c837a44e7cda9d1", size = 997881 }, { url = "https://files.pythonhosted.org/packages/70/5c/808fbf21470e7045d56a282cf5e85a0450eacdb347d871d4eb404270ee17/ujson-5.10.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:94a87f6e151c5f483d7d54ceef83b45d3a9cca7a9cb453dbdbb3f5a6f64033f5", size = 1140631 }, { url = "https://files.pythonhosted.org/packages/8f/6a/e1e8281408e6270d6ecf2375af14d9e2f41c402ab6b161ecfa87a9727777/ujson-5.10.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:29b443c4c0a113bcbb792c88bea67b675c7ca3ca80c3474784e08bba01c18d51", size = 1043511 }, { url = "https://files.pythonhosted.org/packages/cb/ca/e319acbe4863919ec62498bc1325309f5c14a3280318dca10fe1db3cb393/ujson-5.10.0-cp310-cp310-win32.whl", hash = "sha256:c18610b9ccd2874950faf474692deee4223a994251bc0a083c114671b64e6518", size = 38626 }, { url = "https://files.pythonhosted.org/packages/78/ec/dc96ca379de33f73b758d72e821ee4f129ccc32221f4eb3f089ff78d8370/ujson-5.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:924f7318c31874d6bb44d9ee1900167ca32aa9b69389b98ecbde34c1698a250f", size = 42076 }, { url = "https://files.pythonhosted.org/packages/23/ec/3c551ecfe048bcb3948725251fb0214b5844a12aa60bee08d78315bb1c39/ujson-5.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a5b366812c90e69d0f379a53648be10a5db38f9d4ad212b60af00bd4048d0f00", size = 55353 }, { url = "https://files.pythonhosted.org/packages/8d/9f/4731ef0671a0653e9f5ba18db7c4596d8ecbf80c7922dd5fe4150f1aea76/ujson-5.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:502bf475781e8167f0f9d0e41cd32879d120a524b22358e7f205294224c71126", size = 51813 }, { url = 
"https://files.pythonhosted.org/packages/1f/2b/44d6b9c1688330bf011f9abfdb08911a9dc74f76926dde74e718d87600da/ujson-5.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b91b5d0d9d283e085e821651184a647699430705b15bf274c7896f23fe9c9d8", size = 51988 }, { url = "https://files.pythonhosted.org/packages/29/45/f5f5667427c1ec3383478092a414063ddd0dfbebbcc533538fe37068a0a3/ujson-5.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:129e39af3a6d85b9c26d5577169c21d53821d8cf68e079060602e861c6e5da1b", size = 53561 }, { url = "https://files.pythonhosted.org/packages/26/21/a0c265cda4dd225ec1be595f844661732c13560ad06378760036fc622587/ujson-5.10.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f77b74475c462cb8b88680471193064d3e715c7c6074b1c8c412cb526466efe9", size = 58497 }, { url = "https://files.pythonhosted.org/packages/28/36/8fde862094fd2342ccc427a6a8584fed294055fdee341661c78660f7aef3/ujson-5.10.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7ec0ca8c415e81aa4123501fee7f761abf4b7f386aad348501a26940beb1860f", size = 997877 }, { url = "https://files.pythonhosted.org/packages/90/37/9208e40d53baa6da9b6a1c719e0670c3f474c8fc7cc2f1e939ec21c1bc93/ujson-5.10.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ab13a2a9e0b2865a6c6db9271f4b46af1c7476bfd51af1f64585e919b7c07fd4", size = 1140632 }, { url = "https://files.pythonhosted.org/packages/89/d5/2626c87c59802863d44d19e35ad16b7e658e4ac190b0dead17ff25460b4c/ujson-5.10.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:57aaf98b92d72fc70886b5a0e1a1ca52c2320377360341715dd3933a18e827b1", size = 1043513 }, { url = "https://files.pythonhosted.org/packages/2f/ee/03662ce9b3f16855770f0d70f10f0978ba6210805aa310c4eebe66d36476/ujson-5.10.0-cp311-cp311-win32.whl", hash = "sha256:2987713a490ceb27edff77fb184ed09acdc565db700ee852823c3dc3cffe455f", size = 38616 }, { url = 
"https://files.pythonhosted.org/packages/3e/20/952dbed5895835ea0b82e81a7be4ebb83f93b079d4d1ead93fcddb3075af/ujson-5.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:f00ea7e00447918ee0eff2422c4add4c5752b1b60e88fcb3c067d4a21049a720", size = 42071 }, { url = "https://files.pythonhosted.org/packages/e8/a6/fd3f8bbd80842267e2d06c3583279555e8354c5986c952385199d57a5b6c/ujson-5.10.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:98ba15d8cbc481ce55695beee9f063189dce91a4b08bc1d03e7f0152cd4bbdd5", size = 55642 }, { url = "https://files.pythonhosted.org/packages/a8/47/dd03fd2b5ae727e16d5d18919b383959c6d269c7b948a380fdd879518640/ujson-5.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a9d2edbf1556e4f56e50fab7d8ff993dbad7f54bac68eacdd27a8f55f433578e", size = 51807 }, { url = "https://files.pythonhosted.org/packages/25/23/079a4cc6fd7e2655a473ed9e776ddbb7144e27f04e8fc484a0fb45fe6f71/ujson-5.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6627029ae4f52d0e1a2451768c2c37c0c814ffc04f796eb36244cf16b8e57043", size = 51972 }, { url = "https://files.pythonhosted.org/packages/04/81/668707e5f2177791869b624be4c06fb2473bf97ee33296b18d1cf3092af7/ujson-5.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8ccb77b3e40b151e20519c6ae6d89bfe3f4c14e8e210d910287f778368bb3d1", size = 53686 }, { url = "https://files.pythonhosted.org/packages/bd/50/056d518a386d80aaf4505ccf3cee1c40d312a46901ed494d5711dd939bc3/ujson-5.10.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3caf9cd64abfeb11a3b661329085c5e167abbe15256b3b68cb5d914ba7396f3", size = 58591 }, { url = "https://files.pythonhosted.org/packages/fc/d6/aeaf3e2d6fb1f4cfb6bf25f454d60490ed8146ddc0600fae44bfe7eb5a72/ujson-5.10.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6e32abdce572e3a8c3d02c886c704a38a1b015a1fb858004e03d20ca7cecbb21", size = 997853 }, { url = 
"https://files.pythonhosted.org/packages/f8/d5/1f2a5d2699f447f7d990334ca96e90065ea7f99b142ce96e85f26d7e78e2/ujson-5.10.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a65b6af4d903103ee7b6f4f5b85f1bfd0c90ba4eeac6421aae436c9988aa64a2", size = 1140689 }, { url = "https://files.pythonhosted.org/packages/f2/2c/6990f4ccb41ed93744aaaa3786394bca0875503f97690622f3cafc0adfde/ujson-5.10.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:604a046d966457b6cdcacc5aa2ec5314f0e8c42bae52842c1e6fa02ea4bda42e", size = 1043576 }, { url = "https://files.pythonhosted.org/packages/14/f5/a2368463dbb09fbdbf6a696062d0c0f62e4ae6fa65f38f829611da2e8fdd/ujson-5.10.0-cp312-cp312-win32.whl", hash = "sha256:6dea1c8b4fc921bf78a8ff00bbd2bfe166345f5536c510671bccececb187c80e", size = 38764 }, { url = "https://files.pythonhosted.org/packages/59/2d/691f741ffd72b6c84438a93749ac57bf1a3f217ac4b0ea4fd0e96119e118/ujson-5.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:38665e7d8290188b1e0d57d584eb8110951a9591363316dd41cf8686ab1d0abc", size = 42211 }, { url = "https://files.pythonhosted.org/packages/0d/69/b3e3f924bb0e8820bb46671979770c5be6a7d51c77a66324cdb09f1acddb/ujson-5.10.0-cp313-cp313-macosx_10_9_x86_64.whl", hash = "sha256:618efd84dc1acbd6bff8eaa736bb6c074bfa8b8a98f55b61c38d4ca2c1f7f287", size = 55646 }, { url = "https://files.pythonhosted.org/packages/32/8a/9b748eb543c6cabc54ebeaa1f28035b1bd09c0800235b08e85990734c41e/ujson-5.10.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:38d5d36b4aedfe81dfe251f76c0467399d575d1395a1755de391e58985ab1c2e", size = 51806 }, { url = "https://files.pythonhosted.org/packages/39/50/4b53ea234413b710a18b305f465b328e306ba9592e13a791a6a6b378869b/ujson-5.10.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67079b1f9fb29ed9a2914acf4ef6c02844b3153913eb735d4bf287ee1db6e557", size = 51975 }, { url = 
"https://files.pythonhosted.org/packages/b4/9d/8061934f960cdb6dd55f0b3ceeff207fcc48c64f58b43403777ad5623d9e/ujson-5.10.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7d0e0ceeb8fe2468c70ec0c37b439dd554e2aa539a8a56365fd761edb418988", size = 53693 }, { url = "https://files.pythonhosted.org/packages/f5/be/7bfa84b28519ddbb67efc8410765ca7da55e6b93aba84d97764cd5794dbc/ujson-5.10.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:59e02cd37bc7c44d587a0ba45347cc815fb7a5fe48de16bf05caa5f7d0d2e816", size = 58594 }, { url = "https://files.pythonhosted.org/packages/48/eb/85d465abafb2c69d9699cfa5520e6e96561db787d36c677370e066c7e2e7/ujson-5.10.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:2a890b706b64e0065f02577bf6d8ca3b66c11a5e81fb75d757233a38c07a1f20", size = 997853 }, { url = "https://files.pythonhosted.org/packages/9f/76/2a63409fc05d34dd7d929357b7a45e3a2c96f22b4225cd74becd2ba6c4cb/ujson-5.10.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:621e34b4632c740ecb491efc7f1fcb4f74b48ddb55e65221995e74e2d00bbff0", size = 1140694 }, { url = "https://files.pythonhosted.org/packages/45/ed/582c4daba0f3e1688d923b5cb914ada1f9defa702df38a1916c899f7c4d1/ujson-5.10.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b9500e61fce0cfc86168b248104e954fead61f9be213087153d272e817ec7b4f", size = 1043580 }, { url = "https://files.pythonhosted.org/packages/d7/0c/9837fece153051e19c7bade9f88f9b409e026b9525927824cdf16293b43b/ujson-5.10.0-cp313-cp313-win32.whl", hash = "sha256:4c4fc16f11ac1612f05b6f5781b384716719547e142cfd67b65d035bd85af165", size = 38766 }, { url = "https://files.pythonhosted.org/packages/d7/72/6cb6728e2738c05bbe9bd522d6fc79f86b9a28402f38663e85a28fddd4a0/ujson-5.10.0-cp313-cp313-win_amd64.whl", hash = "sha256:4573fd1695932d4f619928fd09d5d03d917274381649ade4328091ceca175539", size = 42212 }, { url = 
"https://files.pythonhosted.org/packages/97/94/50ff2f1b61d668907f20216873640ab19e0eaa77b51e64ee893f6adfb266/ujson-5.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dfef2814c6b3291c3c5f10065f745a1307d86019dbd7ea50e83504950136ed5b", size = 55421 }, { url = "https://files.pythonhosted.org/packages/0c/b3/3d2ca621d8dbeaf6c5afd0725e1b4bbd465077acc69eff1e9302735d1432/ujson-5.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4734ee0745d5928d0ba3a213647f1c4a74a2a28edc6d27b2d6d5bd9fa4319e27", size = 51816 }, { url = "https://files.pythonhosted.org/packages/8d/af/5dc103cb4d08f051f82d162a738adb9da488d1e3fafb9fd9290ea3eabf8e/ujson-5.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d47ebb01bd865fdea43da56254a3930a413f0c5590372a1241514abae8aa7c76", size = 52023 }, { url = "https://files.pythonhosted.org/packages/5d/dd/b9a6027ba782b0072bf24a70929e15a58686668c32a37aebfcfaa9e00bdd/ujson-5.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dee5e97c2496874acbf1d3e37b521dd1f307349ed955e62d1d2f05382bc36dd5", size = 53622 }, { url = "https://files.pythonhosted.org/packages/1f/28/bcf6df25c1a9f1989dc2ddc4ac8a80e246857e089f91a9079fd8a0a01459/ujson-5.10.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7490655a2272a2d0b072ef16b0b58ee462f4973a8f6bbe64917ce5e0a256f9c0", size = 58563 }, { url = "https://files.pythonhosted.org/packages/9e/82/89404453a102d06d0937f6807c0a7ef2eec68b200b4ce4386127f3c28156/ujson-5.10.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:ba17799fcddaddf5c1f75a4ba3fd6441f6a4f1e9173f8a786b42450851bd74f1", size = 998050 }, { url = "https://files.pythonhosted.org/packages/63/eb/2a4ea07165cad217bc842bb684b053bafa8ffdb818c47911c621e97a33fc/ujson-5.10.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2aff2985cef314f21d0fecc56027505804bc78802c0121343874741650a4d3d1", size = 1140672 }, { url = 
"https://files.pythonhosted.org/packages/72/53/d7bdf6afabeba3ed899f89d993c7f202481fa291d8c5be031c98a181eda4/ujson-5.10.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:ad88ac75c432674d05b61184178635d44901eb749786c8eb08c102330e6e8996", size = 1043577 }, { url = "https://files.pythonhosted.org/packages/19/b1/75f5f0d18501fd34487e46829de3070724c7b350f1983ba7f07e0986720b/ujson-5.10.0-cp39-cp39-win32.whl", hash = "sha256:2544912a71da4ff8c4f7ab5606f947d7299971bdd25a45e008e467ca638d13c9", size = 38654 }, { url = "https://files.pythonhosted.org/packages/77/0d/50d2f9238f6d6683ead5ecd32d83d53f093a3c0047ae4c720b6d586cb80d/ujson-5.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:3ff201d62b1b177a46f113bb43ad300b424b7847f9c5d38b1b4ad8f75d4a282a", size = 42134 }, { url = "https://files.pythonhosted.org/packages/95/53/e5f5e733fc3525e65f36f533b0dbece5e5e2730b760e9beacf7e3d9d8b26/ujson-5.10.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5b6fee72fa77dc172a28f21693f64d93166534c263adb3f96c413ccc85ef6e64", size = 51846 }, { url = "https://files.pythonhosted.org/packages/59/1f/f7bc02a54ea7b47f3dc2d125a106408f18b0f47b14fc737f0913483ae82b/ujson-5.10.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:61d0af13a9af01d9f26d2331ce49bb5ac1fb9c814964018ac8df605b5422dcb3", size = 48103 }, { url = "https://files.pythonhosted.org/packages/1a/3a/d3921b6f29bc744d8d6c56db5f8bbcbe55115fd0f2b79c3c43ff292cc7c9/ujson-5.10.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ecb24f0bdd899d368b715c9e6664166cf694d1e57be73f17759573a6986dd95a", size = 47257 }, { url = "https://files.pythonhosted.org/packages/f1/04/f4e3883204b786717038064afd537389ba7d31a72b437c1372297cb651ea/ujson-5.10.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fbd8fd427f57a03cff3ad6574b5e299131585d9727c8c366da4624a9069ed746", size = 48468 }, { url = 
"https://files.pythonhosted.org/packages/17/cd/9c6547169eb01a22b04cbb638804ccaeb3c2ec2afc12303464e0f9b2ee5a/ujson-5.10.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:beeaf1c48e32f07d8820c705ff8e645f8afa690cca1544adba4ebfa067efdc88", size = 54266 }, { url = "https://files.pythonhosted.org/packages/70/bf/ecd14d3cf6127f8a990b01f0ad20e257f5619a555f47d707c57d39934894/ujson-5.10.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:baed37ea46d756aca2955e99525cc02d9181de67f25515c468856c38d52b5f3b", size = 42224 }, { url = "https://files.pythonhosted.org/packages/8d/96/a3a2356ca5a4b67fe32a0c31e49226114d5154ba2464bb1220a93eb383e8/ujson-5.10.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ba43cc34cce49cf2d4bc76401a754a81202d8aa926d0e2b79f0ee258cb15d3a4", size = 51855 }, { url = "https://files.pythonhosted.org/packages/73/3d/41e78e7500e75eb6b5a7ab06907a6df35603b92ac6f939b86f40e9fe2c06/ujson-5.10.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:ac56eb983edce27e7f51d05bc8dd820586c6e6be1c5216a6809b0c668bb312b8", size = 48059 }, { url = "https://files.pythonhosted.org/packages/be/14/e435cbe5b5189483adbba5fe328e88418ccd54b2b1f74baa4172384bb5cd/ujson-5.10.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f44bd4b23a0e723bf8b10628288c2c7c335161d6840013d4d5de20e48551773b", size = 47238 }, { url = "https://files.pythonhosted.org/packages/e8/d9/b6f4d1e6bec20a3b582b48f64eaa25209fd70dc2892b21656b273bc23434/ujson-5.10.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c10f4654e5326ec14a46bcdeb2b685d4ada6911050aa8baaf3501e57024b804", size = 48457 }, { url = "https://files.pythonhosted.org/packages/23/1c/cfefabb5996e21a1a4348852df7eb7cfc69299143739e86e5b1071c78735/ujson-5.10.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:0de4971a89a762398006e844ae394bd46991f7c385d7a6a3b93ba229e6dac17e", size = 54238 }, { url = "https://files.pythonhosted.org/packages/af/c4/fa70e77e1c27bbaf682d790bd09ef40e86807ada704c528ef3ea3418d439/ujson-5.10.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:e1402f0564a97d2a52310ae10a64d25bcef94f8dd643fcf5d310219d915484f7", size = 42230 }, ] [[package]] name = "urllib3" version = "2.3.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/aa/63/e53da845320b757bf29ef6a9062f5c669fe997973f966045cb019c3f4b66/urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d", size = 307268 } wheels = [ { url = "https://files.pythonhosted.org/packages/c8/19/4ec628951a74043532ca2cf5d97b7b14863931476d117c471e8e2b1eb39f/urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df", size = 128369 }, ] [[package]] name = "uuid-utils" version = "0.10.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/66/0a/cbdb2eb4845dafeb632d02a18f47b02f87f2ce4f25266f5e3c017976ce89/uuid_utils-0.10.0.tar.gz", hash = "sha256:5db0e1890e8f008657ffe6ded4d9459af724ab114cfe82af1557c87545301539", size = 18828 } wheels = [ { url = "https://files.pythonhosted.org/packages/44/54/9d22fa16b19e5d1676eba510f08a9c458d96e2a62ff2c8ebad64251afb18/uuid_utils-0.10.0-cp39-abi3-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:8d5a4508feefec62456cd6a41bcdde458d56827d908f226803b886d22a3d5e63", size = 573006 }, { url = "https://files.pythonhosted.org/packages/08/8e/f895c6e52aa603e521fbc13b8626ba5dd99b6e2f5a55aa96ba5b232f4c53/uuid_utils-0.10.0-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:dbefc2b9113f9dfe56bdae58301a2b3c53792221410d422826f3d1e3e6555fe7", size = 292543 }, { url = 
"https://files.pythonhosted.org/packages/b6/58/cc4834f377a5e97d6e184408ad96d13042308de56643b6e24afe1f6f34df/uuid_utils-0.10.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffc49c33edf87d1ec8112a9b43e4cf55326877716f929c165a2cc307d31c73d5", size = 323340 }, { url = "https://files.pythonhosted.org/packages/37/e3/6aeddf148f6a7dd7759621b000e8c85382ec83f52ae79b60842d1dc3ab6b/uuid_utils-0.10.0-cp39-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0636b6208f69d5a4e629707ad2a89a04dfa8d1023e1999181f6830646ca048a1", size = 329653 }, { url = "https://files.pythonhosted.org/packages/0c/00/dd6c2164ace70b7b1671d9129267df331481d7d1e5f9c5e6a564f07953f6/uuid_utils-0.10.0-cp39-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7bc06452856b724df9dedfc161c3582199547da54aeb81915ec2ed54f92d19b0", size = 365471 }, { url = "https://files.pythonhosted.org/packages/b4/e7/0ab8080fcae5462a7b5e555c1cef3d63457baffb97a59b9bc7b005a3ecb1/uuid_utils-0.10.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:263b2589111c61decdd74a762e8f850c9e4386fb78d2cf7cb4dfc537054cda1b", size = 325844 }, { url = "https://files.pythonhosted.org/packages/73/39/52d94e9ef75b03f44b39ffc6ac3167e93e74ef4d010a93d25589d9f48540/uuid_utils-0.10.0-cp39-abi3-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a558db48b7096de6b4d2d2210d82bba8586a6d55f99106b03bb7d01dc5c5bcd6", size = 344389 }, { url = "https://files.pythonhosted.org/packages/7c/29/4824566f62666238290d99c62a58e4ab2a8b9cf2eccf94cebd9b3359131e/uuid_utils-0.10.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:807465067f3c892514230326ac71a79b28a8dfe2c88ecd2d5675fc844f3c76b5", size = 510078 }, { url = "https://files.pythonhosted.org/packages/5e/8f/bbcc7130d652462c685f0d3bd26bb214b754215b476340885a4cb50fb89a/uuid_utils-0.10.0-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:57423d4a2b9d7b916de6dbd75ba85465a28f9578a89a97f7d3e098d9aa4e5d4a", size = 515937 }, { url = 
"https://files.pythonhosted.org/packages/23/f8/34e0c00f5f188604d336713e6a020fcf53b10998e8ab24735a39ab076740/uuid_utils-0.10.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:76d8d660f18ff6b767e319b1b5f927350cd92eafa4831d7ef5b57fdd1d91f974", size = 494111 }, { url = "https://files.pythonhosted.org/packages/1a/52/b7f0066cc90a7a9c28d54061ed195cd617fde822e5d6ac3ccc88509c3c44/uuid_utils-0.10.0-cp39-abi3-win32.whl", hash = "sha256:6c11a71489338837db0b902b75e1ba7618d5d29f05fde4f68b3f909177dbc226", size = 173520 }, { url = "https://files.pythonhosted.org/packages/8b/15/f04f58094674d333974243fb45d2c740cf4b79186fb707168e57943c84a3/uuid_utils-0.10.0-cp39-abi3-win_amd64.whl", hash = "sha256:11c55ae64f6c0a7a0c741deae8ca2a4eaa11e9c09dbb7bec2099635696034cf7", size = 182965 }, { url = "https://files.pythonhosted.org/packages/c9/1f/8f3288797487c82981134732dee13b1ad12082890905476f95994ce49e0f/uuid_utils-0.10.0-pp310-pypy310_pp73-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:acea543dfc7b87df749e3e814c54ac739a82ff5e3800d25bd25a3e00599e1554", size = 573053 }, { url = "https://files.pythonhosted.org/packages/91/28/0eb5190aa39547015d60ce5453cfd37c4d87a48d25026d72044c20cad4fc/uuid_utils-0.10.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:0767eefa7b1e96f06cfa9b95758d286240c01bbf19e9d8f1b6043cdbe76cc639", size = 292596 }, { url = "https://files.pythonhosted.org/packages/e4/27/a451725d5df0db8baaa84adde94bbac4a33c3816a5215740c3f1dbdc46d3/uuid_utils-0.10.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:973fe4bb5258fd2ccb144d8b40c2d3158f16cc856a20527f8b40d14b2ae1dee9", size = 323414 }, { url = "https://files.pythonhosted.org/packages/22/6b/0edc2ad855cbe07ffd891ec636c6ff57ae3a56cdf0e90467b2edbe5b7b43/uuid_utils-0.10.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:71b8505b67a0d77d0fbd765d8463094a8f447677125da7647bec7ea0b99406f0", size = 329720 }, { url = 
"https://files.pythonhosted.org/packages/4b/1d/f73af741d9a4d3168704235ef06fbda823bf2ecf551ac29caa8d7cf8ea2a/uuid_utils-0.10.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6bdcb1211bb61476cbef12a87101fa48243e20ed82b2bd324c816b1b5826bd5e", size = 365545 }, { url = "https://files.pythonhosted.org/packages/b1/06/92104c8ea66a6d645f00520222a52c4b91a444c2c30201ff0036dedfb8da/uuid_utils-0.10.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c5247f1df040aae71ea313819b563debe69bca7086a2cc6a3ac0eaddd3dadac", size = 325920 }, { url = "https://files.pythonhosted.org/packages/94/fe/0710e28b94f2311b40757dc43513290134cb4579f79981127c58640d736c/uuid_utils-0.10.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a50bd29ef89660b93aa07ffa95ac691a0e12832375030569a8bd5c9272f3b8e6", size = 344458 }, { url = "https://files.pythonhosted.org/packages/0b/8f/c12d449789d756f6c0ed81de060900c0e616f80d1e3944949859921ef1e5/uuid_utils-0.10.0-pp39-pypy39_pp73-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:a778cd9d8f995b94bba6e51f3ebee5b338fd834b0c4ecc8f932bd23e29db3e19", size = 573049 }, { url = "https://files.pythonhosted.org/packages/03/ac/85677abca4832417b28e40c7b634820158e29c57da780de2f3a131b6e24d/uuid_utils-0.10.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:d3d5b5c5ed66ff923961b9ebb902232cd67f6a7ec6b6f7a58e05e00ff44e3c7f", size = 292587 }, { url = "https://files.pythonhosted.org/packages/cf/35/7d909b7acfb7eec3af0f671b5fec8ae88fdf60857a4f6344a83d0286837a/uuid_utils-0.10.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:789ed6335225326c66f5d6162649bed978105a85f232be7811387c395c226801", size = 323412 }, { url = "https://files.pythonhosted.org/packages/e8/9c/daff11748511fef37d8959bc83fb3f5d50128292a9e10c50c4aa02390cd2/uuid_utils-0.10.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:05d1aa7b944b719eb1ee472435ae5444a3f8a00eb6350e3b1d1217d738477d33", size = 329716 }, { url = "https://files.pythonhosted.org/packages/45/2c/30c7ef5dfc07f41854a46a09858419483376091731973ae91ac50392fe17/uuid_utils-0.10.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aa8d8559c2d25d6ac87e0adeee601d2c91ec40b357ab780bcf79061cc23324e6", size = 365539 }, { url = "https://files.pythonhosted.org/packages/2c/28/196f2b4c2717d865b8a0eb2064f7e69d31533b58a2528edf64b1c18cd943/uuid_utils-0.10.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e0badcbfe3c72b5b30d59c2b12f120923127abd95a0d2aa64ddc1234e495abc2", size = 325916 }, { url = "https://files.pythonhosted.org/packages/87/a6/485bec297615b530aa967eef7b212a33d02a72441846642d721dedb307fb/uuid_utils-0.10.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0a7c1c494012335113748815156c5b6234c59b0fe0d3a8eede1b1a46f7e25a69", size = 344453 }, ] [[package]] name = "uvicorn" version = "0.34.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "click" }, { name = "h11" }, { name = "typing-extensions", marker = "python_full_version < '3.11'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/4b/4d/938bd85e5bf2edeec766267a5015ad969730bb91e31b44021dfe8b22df6c/uvicorn-0.34.0.tar.gz", hash = "sha256:404051050cd7e905de2c9a7e61790943440b3416f49cb409f965d9dcd0fa73e9", size = 76568 } wheels = [ { url = "https://files.pythonhosted.org/packages/61/14/33a3a1352cfa71812a3a21e8c9bfb83f60b0011f5e36f2b1399d51928209/uvicorn-0.34.0-py3-none-any.whl", hash = "sha256:023dc038422502fa28a09c7a30bf2b6991512da7dcdb8fd35fe57cfc154126f4", size = 62315 }, ] [package.optional-dependencies] standard = [ { name = "colorama", marker = "sys_platform == 'win32'" }, { name = "httptools" }, { name = "python-dotenv" }, { name = "pyyaml" }, { name = "uvloop", marker = "platform_python_implementation != 'PyPy' and sys_platform != 'cygwin' and 
sys_platform != 'win32'" }, { name = "watchfiles" }, { name = "websockets" }, ] [[package]] name = "uvloop" version = "0.21.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/af/c0/854216d09d33c543f12a44b393c402e89a920b1a0a7dc634c42de91b9cf6/uvloop-0.21.0.tar.gz", hash = "sha256:3bf12b0fda68447806a7ad847bfa591613177275d35b6724b1ee573faa3704e3", size = 2492741 } wheels = [ { url = "https://files.pythonhosted.org/packages/3d/76/44a55515e8c9505aa1420aebacf4dd82552e5e15691654894e90d0bd051a/uvloop-0.21.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ec7e6b09a6fdded42403182ab6b832b71f4edaf7f37a9a0e371a01db5f0cb45f", size = 1442019 }, { url = "https://files.pythonhosted.org/packages/35/5a/62d5800358a78cc25c8a6c72ef8b10851bdb8cca22e14d9c74167b7f86da/uvloop-0.21.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:196274f2adb9689a289ad7d65700d37df0c0930fd8e4e743fa4834e850d7719d", size = 801898 }, { url = "https://files.pythonhosted.org/packages/f3/96/63695e0ebd7da6c741ccd4489b5947394435e198a1382349c17b1146bb97/uvloop-0.21.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f38b2e090258d051d68a5b14d1da7203a3c3677321cf32a95a6f4db4dd8b6f26", size = 3827735 }, { url = "https://files.pythonhosted.org/packages/61/e0/f0f8ec84979068ffae132c58c79af1de9cceeb664076beea86d941af1a30/uvloop-0.21.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87c43e0f13022b998eb9b973b5e97200c8b90823454d4bc06ab33829e09fb9bb", size = 3825126 }, { url = "https://files.pythonhosted.org/packages/bf/fe/5e94a977d058a54a19df95f12f7161ab6e323ad49f4dabc28822eb2df7ea/uvloop-0.21.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:10d66943def5fcb6e7b37310eb6b5639fd2ccbc38df1177262b0640c3ca68c1f", size = 3705789 }, { url = "https://files.pythonhosted.org/packages/26/dd/c7179618e46092a77e036650c1f056041a028a35c4d76945089fcfc38af8/uvloop-0.21.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash 
= "sha256:67dd654b8ca23aed0a8e99010b4c34aca62f4b7fce88f39d452ed7622c94845c", size = 3800523 }, { url = "https://files.pythonhosted.org/packages/57/a7/4cf0334105c1160dd6819f3297f8700fda7fc30ab4f61fbf3e725acbc7cc/uvloop-0.21.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c0f3fa6200b3108919f8bdabb9a7f87f20e7097ea3c543754cabc7d717d95cf8", size = 1447410 }, { url = "https://files.pythonhosted.org/packages/8c/7c/1517b0bbc2dbe784b563d6ab54f2ef88c890fdad77232c98ed490aa07132/uvloop-0.21.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0878c2640cf341b269b7e128b1a5fed890adc4455513ca710d77d5e93aa6d6a0", size = 805476 }, { url = "https://files.pythonhosted.org/packages/ee/ea/0bfae1aceb82a503f358d8d2fa126ca9dbdb2ba9c7866974faec1cb5875c/uvloop-0.21.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9fb766bb57b7388745d8bcc53a359b116b8a04c83a2288069809d2b3466c37e", size = 3960855 }, { url = "https://files.pythonhosted.org/packages/8a/ca/0864176a649838b838f36d44bf31c451597ab363b60dc9e09c9630619d41/uvloop-0.21.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a375441696e2eda1c43c44ccb66e04d61ceeffcd76e4929e527b7fa401b90fb", size = 3973185 }, { url = "https://files.pythonhosted.org/packages/30/bf/08ad29979a936d63787ba47a540de2132169f140d54aa25bc8c3df3e67f4/uvloop-0.21.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:baa0e6291d91649c6ba4ed4b2f982f9fa165b5bbd50a9e203c416a2797bab3c6", size = 3820256 }, { url = "https://files.pythonhosted.org/packages/da/e2/5cf6ef37e3daf2f06e651aae5ea108ad30df3cb269102678b61ebf1fdf42/uvloop-0.21.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4509360fcc4c3bd2c70d87573ad472de40c13387f5fda8cb58350a1d7475e58d", size = 3937323 }, { url = "https://files.pythonhosted.org/packages/8c/4c/03f93178830dc7ce8b4cdee1d36770d2f5ebb6f3d37d354e061eefc73545/uvloop-0.21.0-cp312-cp312-macosx_10_13_universal2.whl", hash = 
"sha256:359ec2c888397b9e592a889c4d72ba3d6befba8b2bb01743f72fffbde663b59c", size = 1471284 }, { url = "https://files.pythonhosted.org/packages/43/3e/92c03f4d05e50f09251bd8b2b2b584a2a7f8fe600008bcc4523337abe676/uvloop-0.21.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f7089d2dc73179ce5ac255bdf37c236a9f914b264825fdaacaded6990a7fb4c2", size = 821349 }, { url = "https://files.pythonhosted.org/packages/a6/ef/a02ec5da49909dbbfb1fd205a9a1ac4e88ea92dcae885e7c961847cd51e2/uvloop-0.21.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:baa4dcdbd9ae0a372f2167a207cd98c9f9a1ea1188a8a526431eef2f8116cc8d", size = 4580089 }, { url = "https://files.pythonhosted.org/packages/06/a7/b4e6a19925c900be9f98bec0a75e6e8f79bb53bdeb891916609ab3958967/uvloop-0.21.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86975dca1c773a2c9864f4c52c5a55631038e387b47eaf56210f873887b6c8dc", size = 4693770 }, { url = "https://files.pythonhosted.org/packages/ce/0c/f07435a18a4b94ce6bd0677d8319cd3de61f3a9eeb1e5f8ab4e8b5edfcb3/uvloop-0.21.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:461d9ae6660fbbafedd07559c6a2e57cd553b34b0065b6550685f6653a98c1cb", size = 4451321 }, { url = "https://files.pythonhosted.org/packages/8f/eb/f7032be105877bcf924709c97b1bf3b90255b4ec251f9340cef912559f28/uvloop-0.21.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:183aef7c8730e54c9a3ee3227464daed66e37ba13040bb3f350bc2ddc040f22f", size = 4659022 }, { url = "https://files.pythonhosted.org/packages/3f/8d/2cbef610ca21539f0f36e2b34da49302029e7c9f09acef0b1c3b5839412b/uvloop-0.21.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:bfd55dfcc2a512316e65f16e503e9e450cab148ef11df4e4e679b5e8253a5281", size = 1468123 }, { url = "https://files.pythonhosted.org/packages/93/0d/b0038d5a469f94ed8f2b2fce2434a18396d8fbfb5da85a0a9781ebbdec14/uvloop-0.21.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = 
"sha256:787ae31ad8a2856fc4e7c095341cccc7209bd657d0e71ad0dc2ea83c4a6fa8af", size = 819325 }, { url = "https://files.pythonhosted.org/packages/50/94/0a687f39e78c4c1e02e3272c6b2ccdb4e0085fda3b8352fecd0410ccf915/uvloop-0.21.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ee4d4ef48036ff6e5cfffb09dd192c7a5027153948d85b8da7ff705065bacc6", size = 4582806 }, { url = "https://files.pythonhosted.org/packages/d2/19/f5b78616566ea68edd42aacaf645adbf71fbd83fc52281fba555dc27e3f1/uvloop-0.21.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3df876acd7ec037a3d005b3ab85a7e4110422e4d9c1571d4fc89b0fc41b6816", size = 4701068 }, { url = "https://files.pythonhosted.org/packages/47/57/66f061ee118f413cd22a656de622925097170b9380b30091b78ea0c6ea75/uvloop-0.21.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bd53ecc9a0f3d87ab847503c2e1552b690362e005ab54e8a48ba97da3924c0dc", size = 4454428 }, { url = "https://files.pythonhosted.org/packages/63/9a/0962b05b308494e3202d3f794a6e85abe471fe3cafdbcf95c2e8c713aabd/uvloop-0.21.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a5c39f217ab3c663dc699c04cbd50c13813e31d917642d459fdcec07555cc553", size = 4660018 }, { url = "https://files.pythonhosted.org/packages/3c/a4/646a9d0edff7cde25fc1734695d3dfcee0501140dd0e723e4df3f0a50acb/uvloop-0.21.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c097078b8031190c934ed0ebfee8cc5f9ba9642e6eb88322b9958b649750f72b", size = 1439646 }, { url = "https://files.pythonhosted.org/packages/01/2e/e128c66106af9728f86ebfeeb52af27ecd3cb09336f3e2f3e06053707a15/uvloop-0.21.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:46923b0b5ee7fc0020bef24afe7836cb068f5050ca04caf6b487c513dc1a20b2", size = 800931 }, { url = "https://files.pythonhosted.org/packages/2d/1a/9fbc2b1543d0df11f7aed1632f64bdf5ecc4053cf98cdc9edb91a65494f9/uvloop-0.21.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:53e420a3afe22cdcf2a0f4846e377d16e718bc70103d7088a4f7623567ba5fb0", size = 3829660 }, { url = "https://files.pythonhosted.org/packages/b8/c0/392e235e4100ae3b95b5c6dac77f82b529d2760942b1e7e0981e5d8e895d/uvloop-0.21.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88cb67cdbc0e483da00af0b2c3cdad4b7c61ceb1ee0f33fe00e09c81e3a6cb75", size = 3827185 }, { url = "https://files.pythonhosted.org/packages/e1/24/a5da6aba58f99aed5255eca87d58d1760853e8302d390820cc29058408e3/uvloop-0.21.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:221f4f2a1f46032b403bf3be628011caf75428ee3cc204a22addf96f586b19fd", size = 3705833 }, { url = "https://files.pythonhosted.org/packages/1a/5c/6ba221bb60f1e6474474102e17e38612ec7a06dc320e22b687ab563d877f/uvloop-0.21.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2d1f581393673ce119355d56da84fe1dd9d2bb8b3d13ce792524e1607139feff", size = 3804696 }, ] [[package]] name = "virtualenv" version = "20.29.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "distlib" }, { name = "filelock" }, { name = "platformdirs" }, ] sdist = { url = "https://files.pythonhosted.org/packages/c7/9c/57d19fa093bcf5ac61a48087dd44d00655f85421d1aa9722f8befbf3f40a/virtualenv-20.29.3.tar.gz", hash = "sha256:95e39403fcf3940ac45bc717597dba16110b74506131845d9b687d5e73d947ac", size = 4320280 } wheels = [ { url = "https://files.pythonhosted.org/packages/c2/eb/c6db6e3001d58c6a9e67c74bb7b4206767caa3ccc28c6b9eaf4c23fb4e34/virtualenv-20.29.3-py3-none-any.whl", hash = "sha256:3e3d00f5807e83b234dfb6122bf37cfadf4be216c53a49ac059d02414f819170", size = 4301458 }, ] [[package]] name = "watchfiles" version = "1.0.4" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, ] sdist = { url = "https://files.pythonhosted.org/packages/f5/26/c705fc77d0a9ecdb9b66f1e2976d95b81df3cae518967431e7dbf9b5e219/watchfiles-1.0.4.tar.gz", hash = 
"sha256:6ba473efd11062d73e4f00c2b730255f9c1bdd73cd5f9fe5b5da8dbd4a717205", size = 94625 } wheels = [ { url = "https://files.pythonhosted.org/packages/14/02/22fcaed0396730b0d362bc8d1ffb3be2658fd473eecbb2ba84243e157f11/watchfiles-1.0.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:ba5bb3073d9db37c64520681dd2650f8bd40902d991e7b4cfaeece3e32561d08", size = 395212 }, { url = "https://files.pythonhosted.org/packages/e9/3d/ec5a2369a46edf3ebe092c39d9ae48e8cb6dacbde51c4b4f98936c524269/watchfiles-1.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9f25d0ba0fe2b6d2c921cf587b2bf4c451860086534f40c384329fb96e2044d1", size = 384815 }, { url = "https://files.pythonhosted.org/packages/df/b4/898991cececbe171e67142c31905510203649569d9817848f47c4177ee42/watchfiles-1.0.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47eb32ef8c729dbc4f4273baece89398a4d4b5d21a1493efea77a17059f4df8a", size = 450680 }, { url = "https://files.pythonhosted.org/packages/58/f7/d4aa3000e812cfb5e5c2c6c0a3ec9d0a46a42489a8727edd160631c4e210/watchfiles-1.0.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:076f293100db3b0b634514aa0d294b941daa85fc777f9c698adb1009e5aca0b1", size = 455923 }, { url = "https://files.pythonhosted.org/packages/dd/95/7e2e4c6aba1b02fb5c76d2f6a450b85215921ec5f8f7ad5efd075369563f/watchfiles-1.0.4-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1eacd91daeb5158c598fe22d7ce66d60878b6294a86477a4715154990394c9b3", size = 482339 }, { url = "https://files.pythonhosted.org/packages/bb/67/4265b0fabcc2ef2c9e3e8802ba7908cf718a357ebfb49c72e53787156a48/watchfiles-1.0.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:13c2ce7b72026cfbca120d652f02c7750f33b4c9395d79c9790b27f014c8a5a2", size = 519908 }, { url = 
"https://files.pythonhosted.org/packages/0d/96/b57802d5f8164bdf070befb4fd3dec4edba5a364ec0670965a97eb8098ce/watchfiles-1.0.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:90192cdc15ab7254caa7765a98132a5a41471cf739513cc9bcf7d2ffcc0ec7b2", size = 501410 }, { url = "https://files.pythonhosted.org/packages/8b/18/6db0de4e8911ba14e31853201b40c0fa9fea5ecf3feb86b0ad58f006dfc3/watchfiles-1.0.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:278aaa395f405972e9f523bd786ed59dfb61e4b827856be46a42130605fd0899", size = 452876 }, { url = "https://files.pythonhosted.org/packages/df/df/092a961815edf723a38ba2638c49491365943919c3526cc9cf82c42786a6/watchfiles-1.0.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:a462490e75e466edbb9fc4cd679b62187153b3ba804868452ef0577ec958f5ff", size = 615353 }, { url = "https://files.pythonhosted.org/packages/f3/cf/b85fe645de4ff82f3f436c5e9032379fce37c303f6396a18f9726cc34519/watchfiles-1.0.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8d0d0630930f5cd5af929040e0778cf676a46775753e442a3f60511f2409f48f", size = 613187 }, { url = "https://files.pythonhosted.org/packages/f6/d4/a9fea27aef4dd69689bc3556718c1157a7accb72aa035ece87c1fa8483b5/watchfiles-1.0.4-cp310-cp310-win32.whl", hash = "sha256:cc27a65069bcabac4552f34fd2dce923ce3fcde0721a16e4fb1b466d63ec831f", size = 270799 }, { url = "https://files.pythonhosted.org/packages/df/02/dbe9d4439f15dd4ad0720b6e039bde9d66d1f830331f34c18eb70fa6608e/watchfiles-1.0.4-cp310-cp310-win_amd64.whl", hash = "sha256:8b1f135238e75d075359cf506b27bf3f4ca12029c47d3e769d8593a2024ce161", size = 284145 }, { url = "https://files.pythonhosted.org/packages/0f/bb/8461adc4b1fed009546fb797fc0d5698dcfe5e289cb37e1b8f16a93cdc30/watchfiles-1.0.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:2a9f93f8439639dc244c4d2902abe35b0279102bca7bbcf119af964f51d53c19", size = 394869 }, { url = 
"https://files.pythonhosted.org/packages/55/88/9ebf36b3547176d1709c320de78c1fa3263a46be31b5b1267571d9102686/watchfiles-1.0.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9eea33ad8c418847dd296e61eb683cae1c63329b6d854aefcd412e12d94ee235", size = 384905 }, { url = "https://files.pythonhosted.org/packages/03/8a/04335ce23ef78d8c69f0913e8b20cf7d9233e3986543aeef95ef2d6e43d2/watchfiles-1.0.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:31f1a379c9dcbb3f09cf6be1b7e83b67c0e9faabed0471556d9438a4a4e14202", size = 449944 }, { url = "https://files.pythonhosted.org/packages/17/4e/c8d5dcd14fe637f4633616dabea8a4af0a10142dccf3b43e0f081ba81ab4/watchfiles-1.0.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ab594e75644421ae0a2484554832ca5895f8cab5ab62de30a1a57db460ce06c6", size = 456020 }, { url = "https://files.pythonhosted.org/packages/5e/74/3e91e09e1861dd7fbb1190ce7bd786700dc0fbc2ccd33bb9fff5de039229/watchfiles-1.0.4-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fc2eb5d14a8e0d5df7b36288979176fbb39672d45184fc4b1c004d7c3ce29317", size = 482983 }, { url = "https://files.pythonhosted.org/packages/a1/3d/e64de2d1ce4eb6a574fd78ce3a28c279da263be9ef3cfcab6f708df192f2/watchfiles-1.0.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3f68d8e9d5a321163ddacebe97091000955a1b74cd43724e346056030b0bacee", size = 520320 }, { url = "https://files.pythonhosted.org/packages/2c/bd/52235f7063b57240c66a991696ed27e2a18bd6fcec8a1ea5a040b70d0611/watchfiles-1.0.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f9ce064e81fe79faa925ff03b9f4c1a98b0bbb4a1b8c1b015afa93030cb21a49", size = 500988 }, { url = "https://files.pythonhosted.org/packages/3a/b0/ff04194141a5fe650c150400dd9e42667916bc0f52426e2e174d779b8a74/watchfiles-1.0.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b77d5622ac5cc91d21ae9c2b284b5d5c51085a0bdb7b518dba263d0af006132c", 
size = 452573 }, { url = "https://files.pythonhosted.org/packages/3d/9d/966164332c5a178444ae6d165082d4f351bd56afd9c3ec828eecbf190e6a/watchfiles-1.0.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1941b4e39de9b38b868a69b911df5e89dc43767feeda667b40ae032522b9b5f1", size = 615114 }, { url = "https://files.pythonhosted.org/packages/94/df/f569ae4c1877f96ad4086c153a8eee5a19a3b519487bf5c9454a3438c341/watchfiles-1.0.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4f8c4998506241dedf59613082d1c18b836e26ef2a4caecad0ec41e2a15e4226", size = 613076 }, { url = "https://files.pythonhosted.org/packages/15/ae/8ce5f29e65d5fa5790e3c80c289819c55e12be2e1b9f5b6a0e55e169b97d/watchfiles-1.0.4-cp311-cp311-win32.whl", hash = "sha256:4ebbeca9360c830766b9f0df3640b791be569d988f4be6c06d6fae41f187f105", size = 271013 }, { url = "https://files.pythonhosted.org/packages/a4/c6/79dc4a7c598a978e5fafa135090aaf7bbb03b8dec7bada437dfbe578e7ed/watchfiles-1.0.4-cp311-cp311-win_amd64.whl", hash = "sha256:05d341c71f3d7098920f8551d4df47f7b57ac5b8dad56558064c3431bdfc0b74", size = 284229 }, { url = "https://files.pythonhosted.org/packages/37/3d/928633723211753f3500bfb138434f080363b87a1b08ca188b1ce54d1e05/watchfiles-1.0.4-cp311-cp311-win_arm64.whl", hash = "sha256:32b026a6ab64245b584acf4931fe21842374da82372d5c039cba6bf99ef722f3", size = 276824 }, { url = "https://files.pythonhosted.org/packages/5b/1a/8f4d9a1461709756ace48c98f07772bc6d4519b1e48b5fa24a4061216256/watchfiles-1.0.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:229e6ec880eca20e0ba2f7e2249c85bae1999d330161f45c78d160832e026ee2", size = 391345 }, { url = "https://files.pythonhosted.org/packages/bc/d2/6750b7b3527b1cdaa33731438432e7238a6c6c40a9924049e4cebfa40805/watchfiles-1.0.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5717021b199e8353782dce03bd8a8f64438832b84e2885c4a645f9723bf656d9", size = 381515 }, { url = 
"https://files.pythonhosted.org/packages/4e/17/80500e42363deef1e4b4818729ed939aaddc56f82f4e72b2508729dd3c6b/watchfiles-1.0.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0799ae68dfa95136dde7c472525700bd48777875a4abb2ee454e3ab18e9fc712", size = 449767 }, { url = "https://files.pythonhosted.org/packages/10/37/1427fa4cfa09adbe04b1e97bced19a29a3462cc64c78630787b613a23f18/watchfiles-1.0.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:43b168bba889886b62edb0397cab5b6490ffb656ee2fcb22dec8bfeb371a9e12", size = 455677 }, { url = "https://files.pythonhosted.org/packages/c5/7a/39e9397f3a19cb549a7d380412fd9e507d4854eddc0700bfad10ef6d4dba/watchfiles-1.0.4-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fb2c46e275fbb9f0c92e7654b231543c7bbfa1df07cdc4b99fa73bedfde5c844", size = 482219 }, { url = "https://files.pythonhosted.org/packages/45/2d/7113931a77e2ea4436cad0c1690c09a40a7f31d366f79c6f0a5bc7a4f6d5/watchfiles-1.0.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:857f5fc3aa027ff5e57047da93f96e908a35fe602d24f5e5d8ce64bf1f2fc733", size = 518830 }, { url = "https://files.pythonhosted.org/packages/f9/1b/50733b1980fa81ef3c70388a546481ae5fa4c2080040100cd7bf3bf7b321/watchfiles-1.0.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55ccfd27c497b228581e2838d4386301227fc0cb47f5a12923ec2fe4f97b95af", size = 497997 }, { url = "https://files.pythonhosted.org/packages/2b/b4/9396cc61b948ef18943e7c85ecfa64cf940c88977d882da57147f62b34b1/watchfiles-1.0.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c11ea22304d17d4385067588123658e9f23159225a27b983f343fcffc3e796a", size = 452249 }, { url = "https://files.pythonhosted.org/packages/fb/69/0c65a5a29e057ad0dc691c2fa6c23b2983c7dabaa190ba553b29ac84c3cc/watchfiles-1.0.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = 
"sha256:74cb3ca19a740be4caa18f238298b9d472c850f7b2ed89f396c00a4c97e2d9ff", size = 614412 }, { url = "https://files.pythonhosted.org/packages/7f/b9/319fcba6eba5fad34327d7ce16a6b163b39741016b1996f4a3c96b8dd0e1/watchfiles-1.0.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c7cce76c138a91e720d1df54014a047e680b652336e1b73b8e3ff3158e05061e", size = 611982 }, { url = "https://files.pythonhosted.org/packages/f1/47/143c92418e30cb9348a4387bfa149c8e0e404a7c5b0585d46d2f7031b4b9/watchfiles-1.0.4-cp312-cp312-win32.whl", hash = "sha256:b045c800d55bc7e2cadd47f45a97c7b29f70f08a7c2fa13241905010a5493f94", size = 271822 }, { url = "https://files.pythonhosted.org/packages/ea/94/b0165481bff99a64b29e46e07ac2e0df9f7a957ef13bec4ceab8515f44e3/watchfiles-1.0.4-cp312-cp312-win_amd64.whl", hash = "sha256:c2acfa49dd0ad0bf2a9c0bb9a985af02e89345a7189be1efc6baa085e0f72d7c", size = 285441 }, { url = "https://files.pythonhosted.org/packages/11/de/09fe56317d582742d7ca8c2ca7b52a85927ebb50678d9b0fa8194658f536/watchfiles-1.0.4-cp312-cp312-win_arm64.whl", hash = "sha256:22bb55a7c9e564e763ea06c7acea24fc5d2ee5dfc5dafc5cfbedfe58505e9f90", size = 277141 }, { url = "https://files.pythonhosted.org/packages/08/98/f03efabec64b5b1fa58c0daab25c68ef815b0f320e54adcacd0d6847c339/watchfiles-1.0.4-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:8012bd820c380c3d3db8435e8cf7592260257b378b649154a7948a663b5f84e9", size = 390954 }, { url = "https://files.pythonhosted.org/packages/16/09/4dd49ba0a32a45813debe5fb3897955541351ee8142f586303b271a02b40/watchfiles-1.0.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:aa216f87594f951c17511efe5912808dfcc4befa464ab17c98d387830ce07b60", size = 381133 }, { url = "https://files.pythonhosted.org/packages/76/59/5aa6fc93553cd8d8ee75c6247763d77c02631aed21551a97d94998bf1dae/watchfiles-1.0.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62c9953cf85529c05b24705639ffa390f78c26449e15ec34d5339e8108c7c407", size = 449516 }, { url = 
"https://files.pythonhosted.org/packages/4c/aa/df4b6fe14b6317290b91335b23c96b488d365d65549587434817e06895ea/watchfiles-1.0.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7cf684aa9bba4cd95ecb62c822a56de54e3ae0598c1a7f2065d51e24637a3c5d", size = 454820 }, { url = "https://files.pythonhosted.org/packages/5e/71/185f8672f1094ce48af33252c73e39b48be93b761273872d9312087245f6/watchfiles-1.0.4-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f44a39aee3cbb9b825285ff979ab887a25c5d336e5ec3574f1506a4671556a8d", size = 481550 }, { url = "https://files.pythonhosted.org/packages/85/d7/50ebba2c426ef1a5cb17f02158222911a2e005d401caf5d911bfca58f4c4/watchfiles-1.0.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a38320582736922be8c865d46520c043bff350956dfc9fbaee3b2df4e1740a4b", size = 518647 }, { url = "https://files.pythonhosted.org/packages/f0/7a/4c009342e393c545d68987e8010b937f72f47937731225b2b29b7231428f/watchfiles-1.0.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:39f4914548b818540ef21fd22447a63e7be6e24b43a70f7642d21f1e73371590", size = 497547 }, { url = "https://files.pythonhosted.org/packages/0f/7c/1cf50b35412d5c72d63b2bf9a4fffee2e1549a245924960dd087eb6a6de4/watchfiles-1.0.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f12969a3765909cf5dc1e50b2436eb2c0e676a3c75773ab8cc3aa6175c16e902", size = 452179 }, { url = "https://files.pythonhosted.org/packages/d6/a9/3db1410e1c1413735a9a472380e4f431ad9a9e81711cda2aaf02b7f62693/watchfiles-1.0.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:0986902677a1a5e6212d0c49b319aad9cc48da4bd967f86a11bde96ad9676ca1", size = 614125 }, { url = "https://files.pythonhosted.org/packages/f2/e1/0025d365cf6248c4d1ee4c3d2e3d373bdd3f6aff78ba4298f97b4fad2740/watchfiles-1.0.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:308ac265c56f936636e3b0e3f59e059a40003c655228c131e1ad439957592303", size = 611911 }, { 
url = "https://files.pythonhosted.org/packages/55/55/035838277d8c98fc8c917ac9beeb0cd6c59d675dc2421df5f9fcf44a0070/watchfiles-1.0.4-cp313-cp313-win32.whl", hash = "sha256:aee397456a29b492c20fda2d8961e1ffb266223625346ace14e4b6d861ba9c80", size = 271152 }, { url = "https://files.pythonhosted.org/packages/f0/e5/96b8e55271685ddbadc50ce8bc53aa2dff278fb7ac4c2e473df890def2dc/watchfiles-1.0.4-cp313-cp313-win_amd64.whl", hash = "sha256:d6097538b0ae5c1b88c3b55afa245a66793a8fec7ada6755322e465fb1a0e8cc", size = 285216 }, { url = "https://files.pythonhosted.org/packages/15/81/54484fc2fa715abe79694b975692af963f0878fb9d72b8251aa542bf3f10/watchfiles-1.0.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:d3452c1ec703aa1c61e15dfe9d482543e4145e7c45a6b8566978fbb044265a21", size = 394967 }, { url = "https://files.pythonhosted.org/packages/14/b3/557f0cd90add86586fe3deeebd11e8299db6bc3452b44a534f844c6ab831/watchfiles-1.0.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7b75fee5a16826cf5c46fe1c63116e4a156924d668c38b013e6276f2582230f0", size = 384707 }, { url = "https://files.pythonhosted.org/packages/03/a3/34638e1bffcb85a405e7b005e30bb211fd9be2ab2cb1847f2ceb81bef27b/watchfiles-1.0.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e997802d78cdb02623b5941830ab06f8860038faf344f0d288d325cc9c5d2ff", size = 450442 }, { url = "https://files.pythonhosted.org/packages/8f/9f/6a97460dd11a606003d634c7158d9fea8517e98daffc6f56d0f5fde2e86a/watchfiles-1.0.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e0611d244ce94d83f5b9aff441ad196c6e21b55f77f3c47608dcf651efe54c4a", size = 455959 }, { url = "https://files.pythonhosted.org/packages/9d/bb/e0648c6364e4d37ec692bc3f0c77507d17d8bb8f75689148819142010bbf/watchfiles-1.0.4-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9745a4210b59e218ce64c91deb599ae8775c8a9da4e95fb2ee6fe745fc87d01a", size = 483187 }, { url = 
"https://files.pythonhosted.org/packages/dd/ad/d9290586a25288a81dfa8ad6329cf1de32aa1a9798ace45259eb95dcfb37/watchfiles-1.0.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4810ea2ae622add560f4aa50c92fef975e475f7ac4900ce5ff5547b2434642d8", size = 519733 }, { url = "https://files.pythonhosted.org/packages/4e/a9/150c1666825cc9637093f8cae7fc6f53b3296311ab8bd65f1389acb717cb/watchfiles-1.0.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:740d103cd01458f22462dedeb5a3382b7f2c57d07ff033fbc9465919e5e1d0f3", size = 502275 }, { url = "https://files.pythonhosted.org/packages/44/dc/5bfd21e20a330aca1706ac44713bc322838061938edf4b53130f97a7b211/watchfiles-1.0.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdbd912a61543a36aef85e34f212e5d2486e7c53ebfdb70d1e0b060cc50dd0bf", size = 452907 }, { url = "https://files.pythonhosted.org/packages/50/fe/8f4fc488f1699f564687b697456eb5c0cb8e2b0b8538150511c234c62094/watchfiles-1.0.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0bc80d91ddaf95f70258cf78c471246846c1986bcc5fd33ccc4a1a67fcb40f9a", size = 615927 }, { url = "https://files.pythonhosted.org/packages/ad/19/2e45f6f6eec89dd97a4d281635e3d73c17e5f692e7432063bdfdf9562c89/watchfiles-1.0.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ab0311bb2ffcd9f74b6c9de2dda1612c13c84b996d032cd74799adb656af4e8b", size = 613435 }, { url = "https://files.pythonhosted.org/packages/91/17/dc5ac62ca377827c24321d68050efc2eaee2ebaf3f21d055bbce2206d309/watchfiles-1.0.4-cp39-cp39-win32.whl", hash = "sha256:02a526ee5b5a09e8168314c905fc545c9bc46509896ed282aeb5a8ba9bd6ca27", size = 270810 }, { url = "https://files.pythonhosted.org/packages/82/2b/dad851342492d538e7ffe72a8c756f747dd147988abb039ac9d6577d2235/watchfiles-1.0.4-cp39-cp39-win_amd64.whl", hash = "sha256:a5ae5706058b27c74bac987d615105da17724172d5aaacc6c362a40599b6de43", size = 284866 }, { url = 
"https://files.pythonhosted.org/packages/6f/06/175d5ac6b838fb319008c0cd981d7bf289317c510154d411d3584ca2b67b/watchfiles-1.0.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cdcc92daeae268de1acf5b7befcd6cfffd9a047098199056c72e4623f531de18", size = 396269 }, { url = "https://files.pythonhosted.org/packages/86/ee/5db93b0b57dc0587abdbac4149296ee73275f615d790a82cb5598af0557f/watchfiles-1.0.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d8d3d9203705b5797f0af7e7e5baa17c8588030aaadb7f6a86107b7247303817", size = 386010 }, { url = "https://files.pythonhosted.org/packages/75/61/fe0dc5fedf152bfc085a53711f740701f6bdb8ab6b5c950402b681d4858b/watchfiles-1.0.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bdef5a1be32d0b07dcea3318a0be95d42c98ece24177820226b56276e06b63b0", size = 450913 }, { url = "https://files.pythonhosted.org/packages/9f/dd/3c7731af3baf1a9957afc643d176f94480921a690ec3237c9f9d11301c08/watchfiles-1.0.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:342622287b5604ddf0ed2d085f3a589099c9ae8b7331df3ae9845571586c4f3d", size = 453474 }, { url = "https://files.pythonhosted.org/packages/6b/b4/c3998f54c91a35cee60ee6d3a855a069c5dff2bae6865147a46e9090dccd/watchfiles-1.0.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9fe37a2de80aa785d340f2980276b17ef697ab8db6019b07ee4fd28a8359d2f3", size = 395565 }, { url = "https://files.pythonhosted.org/packages/3f/05/ac1a4d235beb9ddfb8ac26ce93a00ba6bd1b1b43051ef12d7da957b4a9d1/watchfiles-1.0.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:9d1ef56b56ed7e8f312c934436dea93bfa3e7368adfcf3df4c0da6d4de959a1e", size = 385406 }, { url = "https://files.pythonhosted.org/packages/4c/ea/36532e7d86525f4e52a10efed182abf33efb106a93d49f5fbc994b256bcd/watchfiles-1.0.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95b42cac65beae3a362629950c444077d1b44f1790ea2772beaea95451c086bb", size = 450424 }, { url 
= "https://files.pythonhosted.org/packages/7a/e9/3cbcf4d70cd0b6d3f30631deae1bf37cc0be39887ca327a44462fe546bf5/watchfiles-1.0.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e0227b8ed9074c6172cf55d85b5670199c99ab11fd27d2c473aa30aec67ee42", size = 452488 }, ] [[package]] name = "wcmatch" version = "10.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "bracex" }, ] sdist = { url = "https://files.pythonhosted.org/packages/41/ab/b3a52228538ccb983653c446c1656eddf1d5303b9cb8b9aef6a91299f862/wcmatch-10.0.tar.gz", hash = "sha256:e72f0de09bba6a04e0de70937b0cf06e55f36f37b3deb422dfaf854b867b840a", size = 115578 } wheels = [ { url = "https://files.pythonhosted.org/packages/ab/df/4ee467ab39cc1de4b852c212c1ed3becfec2e486a51ac1ce0091f85f38d7/wcmatch-10.0-py3-none-any.whl", hash = "sha256:0dd927072d03c0a6527a20d2e6ad5ba8d0380e60870c383bc533b71744df7b7a", size = 39347 }, ] [[package]] name = "wcwidth" version = "0.2.13" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/6c/63/53559446a878410fc5a5974feb13d31d78d752eb18aeba59c7fef1af7598/wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5", size = 101301 } wheels = [ { url = "https://files.pythonhosted.org/packages/fd/84/fd2ba7aafacbad3c4201d395674fc6348826569da3c0937e75505ead3528/wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859", size = 34166 }, ] [[package]] name = "webencodings" version = "0.5.1" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/0b/02/ae6ceac1baeda530866a85075641cec12989bd8d31af6d5ab4a3e8c92f47/webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923", size = 9721 } wheels = [ { url = 
"https://files.pythonhosted.org/packages/f4/24/2a3e3df732393fed8b3ebf2ec078f05546de641fe1b667ee316ec1dcf3b7/webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78", size = 11774 }, ] [[package]] name = "websockets" version = "15.0.1" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/21/e6/26d09fab466b7ca9c7737474c52be4f76a40301b08362eb2dbc19dcc16c1/websockets-15.0.1.tar.gz", hash = "sha256:82544de02076bafba038ce055ee6412d68da13ab47f0c60cab827346de828dee", size = 177016 } wheels = [ { url = "https://files.pythonhosted.org/packages/1e/da/6462a9f510c0c49837bbc9345aca92d767a56c1fb2939e1579df1e1cdcf7/websockets-15.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d63efaa0cd96cf0c5fe4d581521d9fa87744540d4bc999ae6e08595a1014b45b", size = 175423 }, { url = "https://files.pythonhosted.org/packages/1c/9f/9d11c1a4eb046a9e106483b9ff69bce7ac880443f00e5ce64261b47b07e7/websockets-15.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ac60e3b188ec7574cb761b08d50fcedf9d77f1530352db4eef1707fe9dee7205", size = 173080 }, { url = "https://files.pythonhosted.org/packages/d5/4f/b462242432d93ea45f297b6179c7333dd0402b855a912a04e7fc61c0d71f/websockets-15.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5756779642579d902eed757b21b0164cd6fe338506a8083eb58af5c372e39d9a", size = 173329 }, { url = "https://files.pythonhosted.org/packages/6e/0c/6afa1f4644d7ed50284ac59cc70ef8abd44ccf7d45850d989ea7310538d0/websockets-15.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fdfe3e2a29e4db3659dbd5bbf04560cea53dd9610273917799f1cde46aa725e", size = 182312 }, { url = "https://files.pythonhosted.org/packages/dd/d4/ffc8bd1350b229ca7a4db2a3e1c482cf87cea1baccd0ef3e72bc720caeec/websockets-15.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:4c2529b320eb9e35af0fa3016c187dffb84a3ecc572bcee7c3ce302bfeba52bf", size = 181319 }, { url = "https://files.pythonhosted.org/packages/97/3a/5323a6bb94917af13bbb34009fac01e55c51dfde354f63692bf2533ffbc2/websockets-15.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac1e5c9054fe23226fb11e05a6e630837f074174c4c2f0fe442996112a6de4fb", size = 181631 }, { url = "https://files.pythonhosted.org/packages/a6/cc/1aeb0f7cee59ef065724041bb7ed667b6ab1eeffe5141696cccec2687b66/websockets-15.0.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5df592cd503496351d6dc14f7cdad49f268d8e618f80dce0cd5a36b93c3fc08d", size = 182016 }, { url = "https://files.pythonhosted.org/packages/79/f9/c86f8f7af208e4161a7f7e02774e9d0a81c632ae76db2ff22549e1718a51/websockets-15.0.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0a34631031a8f05657e8e90903e656959234f3a04552259458aac0b0f9ae6fd9", size = 181426 }, { url = "https://files.pythonhosted.org/packages/c7/b9/828b0bc6753db905b91df6ae477c0b14a141090df64fb17f8a9d7e3516cf/websockets-15.0.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3d00075aa65772e7ce9e990cab3ff1de702aa09be3940d1dc88d5abf1ab8a09c", size = 181360 }, { url = "https://files.pythonhosted.org/packages/89/fb/250f5533ec468ba6327055b7d98b9df056fb1ce623b8b6aaafb30b55d02e/websockets-15.0.1-cp310-cp310-win32.whl", hash = "sha256:1234d4ef35db82f5446dca8e35a7da7964d02c127b095e172e54397fb6a6c256", size = 176388 }, { url = "https://files.pythonhosted.org/packages/1c/46/aca7082012768bb98e5608f01658ff3ac8437e563eca41cf068bd5849a5e/websockets-15.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:39c1fec2c11dc8d89bba6b2bf1556af381611a173ac2b511cf7231622058af41", size = 176830 }, { url = "https://files.pythonhosted.org/packages/9f/32/18fcd5919c293a398db67443acd33fde142f283853076049824fc58e6f75/websockets-15.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = 
"sha256:823c248b690b2fd9303ba00c4f66cd5e2d8c3ba4aa968b2779be9532a4dad431", size = 175423 }, { url = "https://files.pythonhosted.org/packages/76/70/ba1ad96b07869275ef42e2ce21f07a5b0148936688c2baf7e4a1f60d5058/websockets-15.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678999709e68425ae2593acf2e3ebcbcf2e69885a5ee78f9eb80e6e371f1bf57", size = 173082 }, { url = "https://files.pythonhosted.org/packages/86/f2/10b55821dd40eb696ce4704a87d57774696f9451108cff0d2824c97e0f97/websockets-15.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d50fd1ee42388dcfb2b3676132c78116490976f1300da28eb629272d5d93e905", size = 173330 }, { url = "https://files.pythonhosted.org/packages/a5/90/1c37ae8b8a113d3daf1065222b6af61cc44102da95388ac0018fcb7d93d9/websockets-15.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d99e5546bf73dbad5bf3547174cd6cb8ba7273062a23808ffea025ecb1cf8562", size = 182878 }, { url = "https://files.pythonhosted.org/packages/8e/8d/96e8e288b2a41dffafb78e8904ea7367ee4f891dafc2ab8d87e2124cb3d3/websockets-15.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:66dd88c918e3287efc22409d426c8f729688d89a0c587c88971a0faa2c2f3792", size = 181883 }, { url = "https://files.pythonhosted.org/packages/93/1f/5d6dbf551766308f6f50f8baf8e9860be6182911e8106da7a7f73785f4c4/websockets-15.0.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8dd8327c795b3e3f219760fa603dcae1dcc148172290a8ab15158cf85a953413", size = 182252 }, { url = "https://files.pythonhosted.org/packages/d4/78/2d4fed9123e6620cbf1706c0de8a1632e1a28e7774d94346d7de1bba2ca3/websockets-15.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8fdc51055e6ff4adeb88d58a11042ec9a5eae317a0a53d12c062c8a8865909e8", size = 182521 }, { url = 
"https://files.pythonhosted.org/packages/e7/3b/66d4c1b444dd1a9823c4a81f50231b921bab54eee2f69e70319b4e21f1ca/websockets-15.0.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:693f0192126df6c2327cce3baa7c06f2a117575e32ab2308f7f8216c29d9e2e3", size = 181958 }, { url = "https://files.pythonhosted.org/packages/08/ff/e9eed2ee5fed6f76fdd6032ca5cd38c57ca9661430bb3d5fb2872dc8703c/websockets-15.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:54479983bd5fb469c38f2f5c7e3a24f9a4e70594cd68cd1fa6b9340dadaff7cf", size = 181918 }, { url = "https://files.pythonhosted.org/packages/d8/75/994634a49b7e12532be6a42103597b71098fd25900f7437d6055ed39930a/websockets-15.0.1-cp311-cp311-win32.whl", hash = "sha256:16b6c1b3e57799b9d38427dda63edcbe4926352c47cf88588c0be4ace18dac85", size = 176388 }, { url = "https://files.pythonhosted.org/packages/98/93/e36c73f78400a65f5e236cd376713c34182e6663f6889cd45a4a04d8f203/websockets-15.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:27ccee0071a0e75d22cb35849b1db43f2ecd3e161041ac1ee9d2352ddf72f065", size = 176828 }, { url = "https://files.pythonhosted.org/packages/51/6b/4545a0d843594f5d0771e86463606a3988b5a09ca5123136f8a76580dd63/websockets-15.0.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:3e90baa811a5d73f3ca0bcbf32064d663ed81318ab225ee4f427ad4e26e5aff3", size = 175437 }, { url = "https://files.pythonhosted.org/packages/f4/71/809a0f5f6a06522af902e0f2ea2757f71ead94610010cf570ab5c98e99ed/websockets-15.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:592f1a9fe869c778694f0aa806ba0374e97648ab57936f092fd9d87f8bc03665", size = 173096 }, { url = "https://files.pythonhosted.org/packages/3d/69/1a681dd6f02180916f116894181eab8b2e25b31e484c5d0eae637ec01f7c/websockets-15.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0701bc3cfcb9164d04a14b149fd74be7347a530ad3bbf15ab2c678a2cd3dd9a2", size = 173332 }, { url = 
"https://files.pythonhosted.org/packages/a6/02/0073b3952f5bce97eafbb35757f8d0d54812b6174ed8dd952aa08429bcc3/websockets-15.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8b56bdcdb4505c8078cb6c7157d9811a85790f2f2b3632c7d1462ab5783d215", size = 183152 }, { url = "https://files.pythonhosted.org/packages/74/45/c205c8480eafd114b428284840da0b1be9ffd0e4f87338dc95dc6ff961a1/websockets-15.0.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0af68c55afbd5f07986df82831c7bff04846928ea8d1fd7f30052638788bc9b5", size = 182096 }, { url = "https://files.pythonhosted.org/packages/14/8f/aa61f528fba38578ec553c145857a181384c72b98156f858ca5c8e82d9d3/websockets-15.0.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64dee438fed052b52e4f98f76c5790513235efaa1ef7f3f2192c392cd7c91b65", size = 182523 }, { url = "https://files.pythonhosted.org/packages/ec/6d/0267396610add5bc0d0d3e77f546d4cd287200804fe02323797de77dbce9/websockets-15.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d5f6b181bb38171a8ad1d6aa58a67a6aa9d4b38d0f8c5f496b9e42561dfc62fe", size = 182790 }, { url = "https://files.pythonhosted.org/packages/02/05/c68c5adbf679cf610ae2f74a9b871ae84564462955d991178f95a1ddb7dd/websockets-15.0.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:5d54b09eba2bada6011aea5375542a157637b91029687eb4fdb2dab11059c1b4", size = 182165 }, { url = "https://files.pythonhosted.org/packages/29/93/bb672df7b2f5faac89761cb5fa34f5cec45a4026c383a4b5761c6cea5c16/websockets-15.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3be571a8b5afed347da347bfcf27ba12b069d9d7f42cb8c7028b5e98bbb12597", size = 182160 }, { url = "https://files.pythonhosted.org/packages/ff/83/de1f7709376dc3ca9b7eeb4b9a07b4526b14876b6d372a4dc62312bebee0/websockets-15.0.1-cp312-cp312-win32.whl", hash = 
"sha256:c338ffa0520bdb12fbc527265235639fb76e7bc7faafbb93f6ba80d9c06578a9", size = 176395 }, { url = "https://files.pythonhosted.org/packages/7d/71/abf2ebc3bbfa40f391ce1428c7168fb20582d0ff57019b69ea20fa698043/websockets-15.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:fcd5cf9e305d7b8338754470cf69cf81f420459dbae8a3b40cee57417f4614a7", size = 176841 }, { url = "https://files.pythonhosted.org/packages/cb/9f/51f0cf64471a9d2b4d0fc6c534f323b664e7095640c34562f5182e5a7195/websockets-15.0.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ee443ef070bb3b6ed74514f5efaa37a252af57c90eb33b956d35c8e9c10a1931", size = 175440 }, { url = "https://files.pythonhosted.org/packages/8a/05/aa116ec9943c718905997412c5989f7ed671bc0188ee2ba89520e8765d7b/websockets-15.0.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5a939de6b7b4e18ca683218320fc67ea886038265fd1ed30173f5ce3f8e85675", size = 173098 }, { url = "https://files.pythonhosted.org/packages/ff/0b/33cef55ff24f2d92924923c99926dcce78e7bd922d649467f0eda8368923/websockets-15.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:746ee8dba912cd6fc889a8147168991d50ed70447bf18bcda7039f7d2e3d9151", size = 173329 }, { url = "https://files.pythonhosted.org/packages/31/1d/063b25dcc01faa8fada1469bdf769de3768b7044eac9d41f734fd7b6ad6d/websockets-15.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:595b6c3969023ecf9041b2936ac3827e4623bfa3ccf007575f04c5a6aa318c22", size = 183111 }, { url = "https://files.pythonhosted.org/packages/93/53/9a87ee494a51bf63e4ec9241c1ccc4f7c2f45fff85d5bde2ff74fcb68b9e/websockets-15.0.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c714d2fc58b5ca3e285461a4cc0c9a66bd0e24c5da9911e30158286c9b5be7f", size = 182054 }, { url = 
"https://files.pythonhosted.org/packages/ff/b2/83a6ddf56cdcbad4e3d841fcc55d6ba7d19aeb89c50f24dd7e859ec0805f/websockets-15.0.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f3c1e2ab208db911594ae5b4f79addeb3501604a165019dd221c0bdcabe4db8", size = 182496 }, { url = "https://files.pythonhosted.org/packages/98/41/e7038944ed0abf34c45aa4635ba28136f06052e08fc2168520bb8b25149f/websockets-15.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:229cf1d3ca6c1804400b0a9790dc66528e08a6a1feec0d5040e8b9eb14422375", size = 182829 }, { url = "https://files.pythonhosted.org/packages/e0/17/de15b6158680c7623c6ef0db361da965ab25d813ae54fcfeae2e5b9ef910/websockets-15.0.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:756c56e867a90fb00177d530dca4b097dd753cde348448a1012ed6c5131f8b7d", size = 182217 }, { url = "https://files.pythonhosted.org/packages/33/2b/1f168cb6041853eef0362fb9554c3824367c5560cbdaad89ac40f8c2edfc/websockets-15.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:558d023b3df0bffe50a04e710bc87742de35060580a293c2a984299ed83bc4e4", size = 182195 }, { url = "https://files.pythonhosted.org/packages/86/eb/20b6cdf273913d0ad05a6a14aed4b9a85591c18a987a3d47f20fa13dcc47/websockets-15.0.1-cp313-cp313-win32.whl", hash = "sha256:ba9e56e8ceeeedb2e080147ba85ffcd5cd0711b89576b83784d8605a7df455fa", size = 176393 }, { url = "https://files.pythonhosted.org/packages/1b/6c/c65773d6cab416a64d191d6ee8a8b1c68a09970ea6909d16965d26bfed1e/websockets-15.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:e09473f095a819042ecb2ab9465aee615bd9c2028e4ef7d933600a8401c79561", size = 176837 }, { url = "https://files.pythonhosted.org/packages/36/db/3fff0bcbe339a6fa6a3b9e3fbc2bfb321ec2f4cd233692272c5a8d6cf801/websockets-15.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5f4c04ead5aed67c8a1a20491d54cdfba5884507a48dd798ecaf13c74c4489f5", size = 175424 }, { url = 
"https://files.pythonhosted.org/packages/46/e6/519054c2f477def4165b0ec060ad664ed174e140b0d1cbb9fafa4a54f6db/websockets-15.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:abdc0c6c8c648b4805c5eacd131910d2a7f6455dfd3becab248ef108e89ab16a", size = 173077 }, { url = "https://files.pythonhosted.org/packages/1a/21/c0712e382df64c93a0d16449ecbf87b647163485ca1cc3f6cbadb36d2b03/websockets-15.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a625e06551975f4b7ea7102bc43895b90742746797e2e14b70ed61c43a90f09b", size = 173324 }, { url = "https://files.pythonhosted.org/packages/1c/cb/51ba82e59b3a664df54beed8ad95517c1b4dc1a913730e7a7db778f21291/websockets-15.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d591f8de75824cbb7acad4e05d2d710484f15f29d4a915092675ad3456f11770", size = 182094 }, { url = "https://files.pythonhosted.org/packages/fb/0f/bf3788c03fec679bcdaef787518dbe60d12fe5615a544a6d4cf82f045193/websockets-15.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:47819cea040f31d670cc8d324bb6435c6f133b8c7a19ec3d61634e62f8d8f9eb", size = 181094 }, { url = "https://files.pythonhosted.org/packages/5e/da/9fb8c21edbc719b66763a571afbaf206cb6d3736d28255a46fc2fe20f902/websockets-15.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac017dd64572e5c3bd01939121e4d16cf30e5d7e110a119399cf3133b63ad054", size = 181397 }, { url = "https://files.pythonhosted.org/packages/2e/65/65f379525a2719e91d9d90c38fe8b8bc62bd3c702ac651b7278609b696c4/websockets-15.0.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:4a9fac8e469d04ce6c25bb2610dc535235bd4aa14996b4e6dbebf5e007eba5ee", size = 181794 }, { url = "https://files.pythonhosted.org/packages/d9/26/31ac2d08f8e9304d81a1a7ed2851c0300f636019a57cbaa91342015c72cc/websockets-15.0.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:363c6f671b761efcb30608d24925a382497c12c506b51661883c3e22337265ed", 
size = 181194 }, { url = "https://files.pythonhosted.org/packages/98/72/1090de20d6c91994cd4b357c3f75a4f25ee231b63e03adea89671cc12a3f/websockets-15.0.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2034693ad3097d5355bfdacfffcbd3ef5694f9718ab7f29c29689a9eae841880", size = 181164 }, { url = "https://files.pythonhosted.org/packages/2d/37/098f2e1c103ae8ed79b0e77f08d83b0ec0b241cf4b7f2f10edd0126472e1/websockets-15.0.1-cp39-cp39-win32.whl", hash = "sha256:3b1ac0d3e594bf121308112697cf4b32be538fb1444468fb0a6ae4feebc83411", size = 176381 }, { url = "https://files.pythonhosted.org/packages/75/8b/a32978a3ab42cebb2ebdd5b05df0696a09f4d436ce69def11893afa301f0/websockets-15.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:b7643a03db5c95c799b89b31c036d5f27eeb4d259c798e878d6937d71832b1e4", size = 176841 }, { url = "https://files.pythonhosted.org/packages/02/9e/d40f779fa16f74d3468357197af8d6ad07e7c5a27ea1ca74ceb38986f77a/websockets-15.0.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:0c9e74d766f2818bb95f84c25be4dea09841ac0f734d1966f415e4edfc4ef1c3", size = 173109 }, { url = "https://files.pythonhosted.org/packages/bc/cd/5b887b8585a593073fd92f7c23ecd3985cd2c3175025a91b0d69b0551372/websockets-15.0.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1009ee0c7739c08a0cd59de430d6de452a55e42d6b522de7aa15e6f67db0b8e1", size = 173343 }, { url = "https://files.pythonhosted.org/packages/fe/ae/d34f7556890341e900a95acf4886833646306269f899d58ad62f588bf410/websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76d1f20b1c7a2fa82367e04982e708723ba0e7b8d43aa643d3dcd404d74f1475", size = 174599 }, { url = "https://files.pythonhosted.org/packages/71/e6/5fd43993a87db364ec60fc1d608273a1a465c0caba69176dd160e197ce42/websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f29d80eb9a9263b8d109135351caf568cc3f80b9928bccde535c235de55c22d9", size = 174207 }, { url = 
"https://files.pythonhosted.org/packages/2b/fb/c492d6daa5ec067c2988ac80c61359ace5c4c674c532985ac5a123436cec/websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b359ed09954d7c18bbc1680f380c7301f92c60bf924171629c5db97febb12f04", size = 174155 }, { url = "https://files.pythonhosted.org/packages/68/a1/dcb68430b1d00b698ae7a7e0194433bce4f07ded185f0ee5fb21e2a2e91e/websockets-15.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:cad21560da69f4ce7658ca2cb83138fb4cf695a2ba3e475e0559e05991aa8122", size = 176884 }, { url = "https://files.pythonhosted.org/packages/b7/48/4b67623bac4d79beb3a6bb27b803ba75c1bdedc06bd827e465803690a4b2/websockets-15.0.1-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7f493881579c90fc262d9cdbaa05a6b54b3811c2f300766748db79f098db9940", size = 173106 }, { url = "https://files.pythonhosted.org/packages/ed/f0/adb07514a49fe5728192764e04295be78859e4a537ab8fcc518a3dbb3281/websockets-15.0.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:47b099e1f4fbc95b701b6e85768e1fcdaf1630f3cbe4765fa216596f12310e2e", size = 173339 }, { url = "https://files.pythonhosted.org/packages/87/28/bd23c6344b18fb43df40d0700f6d3fffcd7cef14a6995b4f976978b52e62/websockets-15.0.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67f2b6de947f8c757db2db9c71527933ad0019737ec374a8a6be9a956786aaf9", size = 174597 }, { url = "https://files.pythonhosted.org/packages/6d/79/ca288495863d0f23a60f546f0905ae8f3ed467ad87f8b6aceb65f4c013e4/websockets-15.0.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d08eb4c2b7d6c41da6ca0600c077e93f5adcfd979cd777d747e9ee624556da4b", size = 174205 }, { url = 
"https://files.pythonhosted.org/packages/04/e4/120ff3180b0872b1fe6637f6f995bcb009fb5c87d597c1fc21456f50c848/websockets-15.0.1-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b826973a4a2ae47ba357e4e82fa44a463b8f168e1ca775ac64521442b19e87f", size = 174150 }, { url = "https://files.pythonhosted.org/packages/cb/c3/30e2f9c539b8da8b1d76f64012f3b19253271a63413b2d3adb94b143407f/websockets-15.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:21c1fa28a6a7e3cbdc171c694398b6df4744613ce9b36b1a498e816787e28123", size = 176877 }, { url = "https://files.pythonhosted.org/packages/fa/a8/5b41e0da817d64113292ab1f8247140aac61cbf6cfd085d6a0fa77f4984f/websockets-15.0.1-py3-none-any.whl", hash = "sha256:f7a866fbc1e97b5c617ee4116daaa09b722101d4a3c170c787450ba409f9736f", size = 169743 }, ] [[package]] name = "werkzeug" version = "3.1.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "markupsafe" }, ] sdist = { url = "https://files.pythonhosted.org/packages/9f/69/83029f1f6300c5fb2471d621ab06f6ec6b3324685a2ce0f9777fd4a8b71e/werkzeug-3.1.3.tar.gz", hash = "sha256:60723ce945c19328679790e3282cc758aa4a6040e4bb330f53d30fa546d44746", size = 806925 } wheels = [ { url = "https://files.pythonhosted.org/packages/52/24/ab44c871b0f07f491e5d2ad12c9bd7358e527510618cb1b803a88e986db1/werkzeug-3.1.3-py3-none-any.whl", hash = "sha256:54b78bf3716d19a65be4fceccc0d1d7b89e608834989dfae50ea87564639213e", size = 224498 }, ] [[package]] name = "wheel" version = "0.45.1" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/8a/98/2d9906746cdc6a6ef809ae6338005b3f21bb568bea3165cfc6a243fdc25c/wheel-0.45.1.tar.gz", hash = "sha256:661e1abd9198507b1409a20c02106d9670b2576e916d58f520316666abca6729", size = 107545 } wheels = [ { url = "https://files.pythonhosted.org/packages/0b/2c/87f3254fd8ffd29e4c02732eee68a83a1d3c346ae39bc6822dcbcb697f2b/wheel-0.45.1-py3-none-any.whl", 
hash = "sha256:708e7481cc80179af0e556bbf0cc00b8444c7321e2700b8d8580231d13017248", size = 72494 }, ] [[package]] name = "zipp" version = "3.21.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/3f/50/bad581df71744867e9468ebd0bcd6505de3b275e06f202c2cb016e3ff56f/zipp-3.21.0.tar.gz", hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4", size = 24545 } wheels = [ { url = "https://files.pythonhosted.org/packages/b7/1a/7e4798e9339adc931158c9d69ecc34f5e6791489d469f5e50ec15e35f458/zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931", size = 9630 }, ]