pax_global_header00006660000000000000000000000064147013111710014506gustar00rootroot0000000000000052 comment=8014b8acb863b9bace6df6098486babb5de2f61e django-pgschemas-1.0.1/000077500000000000000000000000001470131117100147175ustar00rootroot00000000000000django-pgschemas-1.0.1/.github/000077500000000000000000000000001470131117100162575ustar00rootroot00000000000000django-pgschemas-1.0.1/.github/dependabot.yml000066400000000000000000000001531470131117100211060ustar00rootroot00000000000000version: 2 updates: - package-ecosystem: "pip" directory: "/" schedule: interval: "weekly" django-pgschemas-1.0.1/.github/workflows/000077500000000000000000000000001470131117100203145ustar00rootroot00000000000000django-pgschemas-1.0.1/.github/workflows/code.yaml000066400000000000000000000037251470131117100221210ustar00rootroot00000000000000name: code on: pull_request: push: branches: - master paths: - pyproject.toml - poetry.lock - "**.py" jobs: django-tests: runs-on: ubuntu-latest strategy: max-parallel: 4 matrix: python-version: ["3.10", "3.11", "3.12"] django-version: ["5.0", "5.1"] psycopg-version: ["psycopg", "psycopg2"] services: postgres: image: postgres:17 env: POSTGRES_PASSWORD: postgres ports: - 5432:5432 options: --name postgres --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 steps: - uses: actions/checkout@v3 - name: Install poetry run: pipx install poetry - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} - name: Install Dependencies run: poetry install - name: Install Django specific version ${{ matrix.django-version }} run: poetry run pip install "Django~=${{ matrix.django-version }}" - name: Install psycopg specific version ${{ matrix.psycopg-version }} run: poetry run pip install ${{ matrix.psycopg-version }} - name: Run Tests run: | poetry run pytest --cov="django_pgschemas" sandbox/tests poetry run coverage lcov -o ./coverage/lcov.info - name: Upload coverage to Coveralls in parallel uses: coverallsapp/github-action@master with: github-token: ${{ secrets.GITHUB_TOKEN }} flag-name: run-py${{ matrix.python-version }}-Django${{ matrix.django-version }}-${{ matrix.psycopg-version }} parallel: true finish: needs: django-tests runs-on: ubuntu-latest steps: - name: Finish report to Coveralls uses: coverallsapp/github-action@master with: github-token: ${{ secrets.GITHUB_TOKEN }} parallel-finished: true django-pgschemas-1.0.1/.github/workflows/deploy.yaml000066400000000000000000000011521470131117100224730ustar00rootroot00000000000000name: deploy on: push: tags: - "v*" jobs: build-n-publish: runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 - name: Install poetry run: pipx install poetry - name: Set up Python uses: actions/setup-python@v4 with: python-version: "3.x" - name: Install Dependencies run: poetry install - name: Build Package run: poetry build - name: Publish distribution to PyPI uses: pypa/gh-action-pypi-publish@release/v1 with: user: __token__ password: ${{ secrets.PYPI_TOKEN }} django-pgschemas-1.0.1/.github/workflows/linters.yaml000066400000000000000000000010731470131117100226610ustar00rootroot00000000000000name: linters on: [push] jobs: check: runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 - name: Install poetry run: pipx install poetry - name: Set up Python uses: actions/setup-python@v4 with: python-version: "3.10" # lower python supported - name: Install Dependencies run: poetry install - name: Install psycopg run: poetry run pip install psycopg - 
name: Run pre-commit uses: pre-commit/action@v2.0.0 - name: Run mypy run: poetry run mypy . django-pgschemas-1.0.1/.github/workflows/postgres.yaml000066400000000000000000000022471470131117100230530ustar00rootroot00000000000000name: postgres on: pull_request: push: branches: - master paths: - pyproject.toml - poetry.lock - "**.py" jobs: postgres-version: runs-on: ubuntu-latest strategy: max-parallel: 4 matrix: postgres-version: [13, 14, 15, 16, 17] psycopg-version: ["psycopg", "psycopg2"] services: postgres: image: postgres:${{ matrix.postgres-version }} env: POSTGRES_USER: postgres POSTGRES_PASSWORD: postgres POSTGRES_DB: sandbox ports: - 5432:5432 options: --name postgres --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 steps: - uses: actions/checkout@v3 - name: Install poetry run: pipx install poetry - name: Set up Python uses: actions/setup-python@v4 with: python-version: "3.x" - name: Install Dependencies run: poetry install - name: Install psycopg specific version ${{ matrix.psycopg-version }} run: poetry run pip install ${{ matrix.psycopg-version }} - name: Run Tests run: poetry run pytest sandbox/tests django-pgschemas-1.0.1/.gitignore000066400000000000000000000024051470131117100167100ustar00rootroot00000000000000# MacOS .DS_Store # Byte-compiled / optimized / DLL files __pycache__/ *.py[cod] *$py.class # C extensions *.so # Distribution / packaging .Python build/ develop-eggs/ dist/ downloads/ eggs/ .eggs/ lib/ lib64/ parts/ sdist/ var/ wheels/ *.egg-info/ .installed.cfg *.egg MANIFEST # PyInstaller # Usually these files are written by a python script from a template # before PyInstaller builds the exe, so as to inject date/other infos into it. *.manifest *.spec # Installer logs pip-log.txt pip-delete-this-directory.txt # Unit test / coverage reports htmlcov/ .tox/ .coverage .coverage.* .cache nosetests.xml coverage.xml *.cover .hypothesis/ .pytest_cache/ # Translations *.mo *.pot # Django stuff: *.log local_settings.py db.sqlite3 # Flask stuff: instance/ .webassets-cache # Scrapy stuff: .scrapy # Sphinx documentation docs/_build/ # PyBuilder target/ # Jupyter Notebook .ipynb_checkpoints # pyenv .python-version # celery beat schedule file celerybeat-schedule # SageMath parsed files *.sage.py # Environments .env .venv env/ venv/ ENV/ env.bak/ venv.bak/ # Spyder project settings .spyderproject .spyproject # Rope project settings .ropeproject # mkdocs documentation /site # mypy .mypy_cache/ # vscode .vscode/ # local only by design setup.py .postgres/ django-pgschemas-1.0.1/.pre-commit-config.yaml000066400000000000000000000015331470131117100212020ustar00rootroot00000000000000repos: - repo: https://github.com/pre-commit/pre-commit-hooks rev: v4.6.0 hooks: - id: check-added-large-files - id: check-case-conflict - id: check-merge-conflict - id: check-symlinks - id: check-toml - id: check-yaml - id: end-of-file-fixer - id: mixed-line-ending args: ["--fix=lf"] - id: trailing-whitespace - repo: local hooks: - id: ruff name: ruff entry: bash -c 'poetry run ruff check --fix $0 $@' language: system types: [python] - id: ruff-format name: ruff-format entry: bash -c 'poetry run ruff format $0 $@' language: system types: [python] - repo: https://github.com/adamchainz/django-upgrade rev: "1.20.0" hooks: - id: django-upgrade args: [--target-version, "5.1"] django-pgschemas-1.0.1/.readthedocs.yaml000066400000000000000000000002441470131117100201460ustar00rootroot00000000000000version: 2 build: os: ubuntu-22.04 tools: python: "3.12" mkdocs: configuration: mkdocs.yml 
python: install: - requirements: docs/requirements.txt django-pgschemas-1.0.1/CODE_OF_CONDUCT.md000066400000000000000000000064261470131117100175260ustar00rootroot00000000000000# Contributor Covenant Code of Conduct ## Our Pledge In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, sex characteristics, gender identity and expression, level of experience, education, socio-economic status, nationality, personal appearance, race, religion, or sexual identity and orientation. ## Our Standards Examples of behavior that contributes to creating a positive environment include: * Using welcoming and inclusive language * Being respectful of differing viewpoints and experiences * Gracefully accepting constructive criticism * Focusing on what is best for the community * Showing empathy towards other community members Examples of unacceptable behavior by participants include: * The use of sexualized language or imagery and unwelcome sexual attention or advances * Trolling, insulting/derogatory comments, and personal or political attacks * Public or private harassment * Publishing others' private information, such as a physical or electronic address, without explicit permission * Other conduct which could reasonably be considered inappropriate in a professional setting ## Our Responsibilities Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior. Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful. ## Scope This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers. ## Enforcement Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at lorinkoz@gmail.com. All complaints will be reviewed and investigated and will result in a response that is deemed necessary and appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately. Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership. 
## Attribution This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html [homepage]: https://www.contributor-covenant.org For answers to common questions about this code of conduct, see https://www.contributor-covenant.org/faq django-pgschemas-1.0.1/LICENSE000066400000000000000000000020511470131117100157220ustar00rootroot00000000000000MIT License Copyright (c) Lorenzo Peña Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. django-pgschemas-1.0.1/Makefile000066400000000000000000000012251470131117100163570ustar00rootroot00000000000000# Makefile CLONESCHEMA_FILE := https://raw.githubusercontent.com/denishpatel/pg-clone-schema/master/clone_schema.sql .PHONY: test test: poetry run pytest sandbox/tests --reuse-db .PHONY: coverage coverage: poetry run pytest --cov="django_pgschemas" sandbox/tests --reuse-db poetry run coverage html .PHONY: types types: poetry run mypy . .PHONY: down down: docker compose down .PHONY: up up: docker compose up --wait poetry run sandbox/manage.py migrate .PHONY: docs docs: poetry run mkdocs serve -a localhost:9005 .PHONY: update-clone-schema update-clone-schema: curl ${CLONESCHEMA_FILE} | python -m gzip - > django_pgschemas/clone_schema.gz django-pgschemas-1.0.1/README.md000066400000000000000000000045051470131117100162020ustar00rootroot00000000000000# django-pgschemas [![Packaging: poetry](https://img.shields.io/badge/packaging-poetry-purple.svg)](https://python-poetry.org/) [![Build status](https://github.com/lorinkoz/django-pgschemas/workflows/code/badge.svg)](https://github.com/lorinkoz/django-pgschemas/actions) [![Documentation status](https://readthedocs.org/projects/django-pgschemas/badge/?version=latest)](https://django-pgschemas.readthedocs.io/) [![Code coverage](https://coveralls.io/repos/github/lorinkoz/django-pgschemas/badge.svg?branch=master)](https://coveralls.io/github/lorinkoz/django-pgschemas?branch=master) [![PyPi version](https://badge.fury.io/py/django-pgschemas.svg)](http://badge.fury.io/py/django-pgschemas) [![Downloads](https://pepy.tech/badge/django-pgschemas/month)](https://pepy.tech/project/django-pgschemas/) This package uses Postgres schemas to support data multi-tenancy in a single Django project. It is a fork of [django-tenants](https://github.com/django-tenants/django-tenants) with some conceptual changes: - There are static tenants and dynamic tenants. Static tenants can have their own apps and urlconf. 
- Tenants can be routed via: - URL using subdomain or subfolder on shared subdomain - Session - Headers - Public schema should not be used for storing the main site data, but the true shared data across all tenants. Table "overriding" via search path is not encouraged. - Management commands can be run on multiple schemas via wildcards, either sequentially or in parallel using multithreading. ## Documentation https://django-pgschemas.readthedocs.io/ Version 1.0 has several breaking changes from the 0.\* series. Please refer to [this discussion](https://github.com/lorinkoz/django-pgschemas/discussions/277) for details and bug reports. ## Contributing - Join the discussion at https://github.com/lorinkoz/django-pgschemas/discussions. - PRs are welcome! If you have questions or comments, please use the discussions link above. - To run the test suite run `make` or `make coverage`. The tests for this project live inside a small django project called `sandbox`. ## Credits - Tom Turner for [django-tenants](https://github.com/django-tenants/django-tenants). - Bernardo Pires for [django-tenant-schemas](https://github.com/bernardopires/django-tenant-schemas). - Denish Patel for [pg-clone-schema](https://github.com/denishpatel/pg-clone-schema)
django-pgschemas-1.0.1/SECURITY.md
# Security Policy ## Supported Versions | Version | Supported | | ------- | ------------------ | | < 1.0 | :x: | | >= 1.0 | :white_check_mark: | ## Reporting a Vulnerability In order to report a vulnerability, please DO NOT create an issue on this repository. Instead, write an email to lorinkoz@gmail.com with full details of the finding. Alternatively, use GitHub's integrated vulnerability report. Expect a response in 24 to 48 hours. If your report is accepted, we will work to publish a patch as soon as possible. We will also provide an advisory covering the details of the vulnerability, as well as the affected versions.
django-pgschemas-1.0.1/compose.yaml
services: postgres: image: postgres:17-alpine environment: POSTGRES_USER: postgres POSTGRES_PASSWORD: postgres POSTGRES_DB: sandbox ports: - 5432:5432 volumes: - postgres:/var/lib/postgresql/data healthcheck: test: ["CMD", "pg_isready", "-U", "postgres"] start_period: 1s interval: 1s retries: 5 volumes: postgres:
django-pgschemas-1.0.1/django_pgschemas/
django-pgschemas-1.0.1/django_pgschemas/__init__.py
from .schema import ( Schema, activate, activate_public, deactivate, get_current_schema, ) __all__ = [ "Schema", "activate", "activate_public", "deactivate", "get_current_schema", ]
django-pgschemas-1.0.1/django_pgschemas/apps.py
from django.apps import AppConfig as BaseAppConfig class AppConfig(BaseAppConfig): name = "django_pgschemas" def ready(self) -> None: from .
import checks # noqa from .checks import ( ensure_tenant_dict, ensure_public_schema, ensure_default_schemas, ensure_overall_schemas, ensure_extra_search_paths, ) ensure_tenant_dict() ensure_public_schema() ensure_default_schemas() ensure_overall_schemas() ensure_extra_search_paths() django-pgschemas-1.0.1/django_pgschemas/checks.py000066400000000000000000000227221470131117100220320ustar00rootroot00000000000000from typing import Any from django.conf import settings from django.contrib.auth import get_user_model from django.contrib.sessions.base_session import AbstractBaseSession from django.core import checks from django.core.exceptions import ImproperlyConfigured from django.db import connection from django.db.utils import ProgrammingError from django.utils.module_loading import import_module from django_pgschemas.settings import get_extra_search_paths from django_pgschemas.utils import ( get_clone_reference, get_domain_model, get_tenant_model, is_valid_schema_name, ) def get_tenant_app() -> str | None: model = get_tenant_model() if model is None: return None return model._meta.app_config.name def get_domain_app() -> str | None: model = get_domain_model() if model is None: return None return model._meta.app_config.name def get_user_app() -> str | None: try: return get_user_model()._meta.app_config.name except (AttributeError, ImproperlyConfigured): return None def get_session_app() -> str | None: engine = import_module(settings.SESSION_ENGINE) store = engine.SessionStore if hasattr(store, "get_model_class"): session_model = store.get_model_class() if issubclass(session_model, AbstractBaseSession): return session_model._meta.app_config.name return None def ensure_tenant_dict() -> None: if not isinstance(getattr(settings, "TENANTS", None), dict): raise ImproperlyConfigured("TENANTS dict setting not set.") def ensure_public_schema() -> None: if not isinstance(settings.TENANTS.get("public"), dict): raise ImproperlyConfigured("TENANTS must contain a 'public' dict.") tenants_public = settings.TENANTS["public"] if "URLCONF" in tenants_public: raise ImproperlyConfigured("TENANTS['public'] cannot contain a 'URLCONF' key.") if "WS_URLCONF" in tenants_public: raise ImproperlyConfigured("TENANTS['public'] cannot contain a 'WS_URLCONF' key.") if "DOMAINS" in tenants_public: raise ImproperlyConfigured("TENANTS['public'] cannot contain a 'DOMAINS' key.") if "SESSION_KEY" in tenants_public: raise ImproperlyConfigured("TENANTS['public'] cannot contain a 'SESSION_KEY' key.") if "HEADER" in tenants_public: raise ImproperlyConfigured("TENANTS['public'] cannot contain a 'HEADER' key.") if "FALLBACK_DOMAINS" in tenants_public: raise ImproperlyConfigured("TENANTS['public'] cannot contain a 'FALLBACK_DOMAINS' key.") def ensure_default_schemas() -> None: if "default" not in settings.TENANTS: return # Escape hatch for static only configs if not isinstance(settings.TENANTS["default"], dict): raise ImproperlyConfigured("TENANTS must contain a 'default' dict.") tenants_default = settings.TENANTS["default"] if "TENANT_MODEL" not in tenants_default: raise ImproperlyConfigured("TENANTS['default'] must contain a 'TENANT_MODEL' key.") if "URLCONF" not in tenants_default: raise ImproperlyConfigured("TENANTS['default'] must contain a 'URLCONF' key.") if "DOMAINS" in tenants_default: raise ImproperlyConfigured("TENANTS['default'] cannot contain a 'DOMAINS' key.") if "SESSION_KEY" in tenants_default: raise ImproperlyConfigured("TENANTS['default'] cannot contain a 'SESSION_KEY' key.") if "HEADER" in tenants_default: raise 
ImproperlyConfigured("TENANTS['default'] cannot contain a 'HEADER' key.") if "FALLBACK_DOMAINS" in tenants_default: raise ImproperlyConfigured("TENANTS['default'] cannot contain a 'FALLBACK_DOMAINS' key.") if tenants_default.get("CLONE_REFERENCE") in settings.TENANTS: raise ImproperlyConfigured( "TENANTS['default']['CLONE_REFERENCE'] must be a unique schema name." ) def ensure_overall_schemas() -> None: for schema in settings.TENANTS: if schema not in ["public", "default"]: if not is_valid_schema_name(schema): raise ImproperlyConfigured(f"'{schema}' is not a valid schema name.") def ensure_extra_search_paths() -> None: if not (extra_search_paths := get_extra_search_paths()): return TenantModel = get_tenant_model() dynamic_tenants = [] if "default" in settings.TENANTS and "CLONE_REFERENCE" in settings.TENANTS["default"]: dynamic_tenants.append(settings.TENANTS["default"]["CLONE_REFERENCE"]) if TenantModel is not None: with connection.cursor() as cursor: cursor.execute( "SELECT 1 FROM information_schema.tables WHERE table_name = %s;", [TenantModel._meta.db_table], ) if cursor.fetchone(): dynamic_tenants += list(TenantModel.objects.values_list("schema_name", flat=True)) invalid_schemas = set(extra_search_paths) & ( set(settings.TENANTS.keys()) | set(dynamic_tenants) ) if invalid_schemas: invalid = ", ".join(invalid_schemas) raise ImproperlyConfigured(f"Do not include '{invalid}' on PGSCHEMAS_EXTRA_SEARCH_PATHS.") @checks.register() def check_principal_apps(app_configs: Any, **kwargs: Any) -> list: errors = [] tenant_app = get_tenant_app() domain_app = get_domain_app() tenants_public = settings.TENANTS["public"] if tenant_app is not None and tenant_app not in tenants_public.get("APPS", []): errors.append( checks.Error( f"Your tenant app '{tenant_app}' must be on the 'public' schema.", id="pgschemas.W001", ) ) if domain_app is not None and domain_app not in tenants_public.get("APPS", []): errors.append( checks.Error( f"Your domain app '{domain_app}' must be on the 'public' schema.", id="pgschemas.W001", ) ) for schema in settings.TENANTS: schema_apps = settings.TENANTS[schema].get("APPS", []) if schema == "public": continue if tenant_app is not None and tenant_app in schema_apps: errors.append( checks.Error( f"Your tenant app '{tenant_app}' in TENANTS['{schema}']['APPS'] " "must be on the 'public' schema only.", id="pgschemas.W001", ) ) if domain_app is not None and domain_app in schema_apps: errors.append( checks.Error( f"Your domain app '{domain_app}' in TENANTS['{schema}']['APPS'] " "must be on the 'public' schema only.", id="pgschemas.W001", ) ) return errors @checks.register() def check_other_apps(app_configs: Any, **kwargs: Any) -> list: errors = [] user_app = get_user_app() session_app = get_session_app() if "django.contrib.contenttypes" in settings.TENANTS.get("default", {}).get("APPS", []): errors.append( checks.Warning( "'django.contrib.contenttypes' in TENANTS['default']['APPS'] " "must be on 'public' schema only.", id="pgschemas.W002", ) ) for schema in settings.TENANTS: schema_apps = settings.TENANTS[schema].get("APPS", []) if schema not in ["public", "default"]: if "django.contrib.contenttypes" in schema_apps: errors.append( checks.Warning( f"'django.contrib.contenttypes' in TENANTS['{schema}']['APPS'] " "must be on 'public' schema only.", id="pgschemas.W002", ) ) if user_app and session_app: if session_app in schema_apps and user_app not in schema_apps: errors.append( checks.Warning( f"'{user_app}' must be together with '{session_app}' in " f"TENANTS['{schema}']['APPS'].", 
id="pgschemas.W003", ) ) elif ( user_app in schema_apps and session_app not in schema_apps and session_app in settings.INSTALLED_APPS ): errors.append( checks.Warning( f"'{session_app}' must be together with '{user_app}' in " f"TENANTS['{schema}']['APPS'].", id="pgschemas.W003", ) ) return errors @checks.register(checks.Tags.database) def check_schema_names(app_configs: Any, **kwargs: Any) -> list: errors = [] static_names = set(settings.TENANTS.keys()) clone_reference = get_clone_reference() TenantModel = get_tenant_model() if TenantModel is None: return [] if clone_reference: static_names.add(clone_reference) try: dynamic_names = set(TenantModel.objects.values_list("schema_name", flat=True)) except ProgrammingError: # This happens on the first run of migrate, with empty database. # It can also happen when the tenant model contains unapplied migrations that break. dynamic_names = set() intersection = static_names & dynamic_names if intersection: errors.append( checks.Critical( f"Name clash found between static and dynamic tenants: {intersection}", id="pgschemas.W004", ) ) return errors django-pgschemas-1.0.1/django_pgschemas/clone_schema.gz000066400000000000000000001075101470131117100232010ustar00rootroot00000000000000|g}ZI)rNh>㍁ lflL$!͌g .bC׸`+ pB"1.&m5Ux|J=!!SXSZhT_ "} Q4@(Y G4O"YrC{$&.F ʢr3T{vqƏ&C)o]̢I<@:D0ڰHf>i&5$1;$X@@ᘜF{q?,KpD,E4!Z(ft@1 Š\q0j; 9fpqH=hZ\o3oWP*Łg6`aR^ߴ5">o 7:N>.$ހ|d!vcPm~{ ) iK,KmLlZk,5'zxJdQ| QHv8f]\Aʇ1e2`@#D@w;֐SARv *Ke{`B=nUX~2R:{EL'5ӗ3Ͳ-Pg@*r8`v%`?b%p2y>kĉ8NuRDK- f!/X9QmS$=xedRBjYl]Cez0\mq7Rc# c:Si)lpс 8+u'A9:?|rh6@FZ)p.VE&1@D2ޛ;x.l.(1o>i9 e* (M_K=$if'XiFq*jC˅4]S nw/&cJ 0A{P kӍ`oNz;݃<@._*ws\wjRKC2\P_*!,BфteE&EuAM;b'g<QsN$ᄩa6TR#~ `"8trфu@ 5 ,^6Dw8h(\J)55x4WMDJ=;7&xφ |aFzQhAף8 $N8s93aI0 e9w^H<=>!OZ(ak˜_o A)).iR;S BuzZ ƚ3jۈ ʬ@^T=Ai5Z6RiW9Z{G<͂#pqw"bFHaZS>>kR51 f%,o}t23tGHo~RXLPԁe NAgʘ$NƓ%+ٔn35D ϫ}eKJd4USՑi݁)(Rܿ-yv\\#E70gkHxʘ UԬ!*Gz[H Zk9̲@5]G*p@RuGxIv b.(jtv ۀC0aEt9;Cz@R1{&8Hn5^u.GpHT2˦i YvcYeuGj bs%f "w?;n)+%)TqJ-uDH$QTS:ptvƃ=-sd%w[Їsx;#/S@KwK0![Yg̜jp!QsiK:!C *w@ۀ`%fŦ6Λxo$i<|DfSzSL#U-ds0ǿ)rEN* 7Xݓrss powĸ|V2,~m ,tlnm斵wpLJmLLh@G*IÖ]77ʯ@[~qkewZ=nkGyW2XpXq$:,’sERMA:t{ k+NFO?vvG5$ك Zpf^wv{hP8?+sAN{ ^P]JbɾVvJ]TzHfo= dA:O7]8~԰S+jֺo^͓5?5C'|V/򴎝^{c΂sڄ1_%4`~×Cm4Րt(V[;^h4@1^`}=.=# PؾghTzfغhiS)2jpzNTl@BRu.M+ D%sǀA/NQ?P 2t|Ƴ),JOQgg݁\|2l N0(%s04?O'_ZFs#R)K% $րTܭK&؅hp68e zPh<`_w* dag)#]Y^Bږ"w9ua_3Ꮬ[PR|(UkE;,`Z7pQ?4(ӭa~ncyXKz jC_mֱb{K$&'8s/tEcmjߵZ4CC7qb0689f@i,{!]PJg2rdHZ6u~قzrpރeФ 24NuC0V"lAd6iI;G뻝7^gכz'zi[gp7٣ sņUH@xTqsEEi+K%,iGn}C3Z@-`~'Y.A?yXǂNhl  I_4.܂R Y;ʦ^,;Rz6KBMTi# WrY`DR mm)3AZyJpKғZr۬HXXR{h.U;<̼-Yݙ@Wkа2!#ITV`ƈr-S^&hs|-Ț-mwVj"y W݉$FCkw ˛>k;}w7{{G}؈k;ૃ{&/:vvڡMu2KC Fx/v6BighE SF^XY2I<蠵y(/[Ճ@z3IȾIFީ b *"er:9Q l7HAMKm qI=]U\Ǧ<%tpҝ kk@Stޣ!`]{oφx GHݴ=:QGcagރ u{; w=o"{cqxt{ G>>=89o7ǿ:t273vNТZ:M =w>L w!qW5vxw_wPA';Hv 47Bgp`0Gm3x}BX C촻ׅ=zs~hpv^'i;'{#qxr#4A {@IYvO<tvP}J%Hqebm;voS(qڝ;,UӚ!}S B⪂A]5~EX?(G8'z[(0d~N"؊4:_σm9LWdՃfh7z2^uX*DFZ[LO:dI4YՉt6E#FaH?ƒ~Sҟ#1AiF{g٨uj,cAiDn1O=|+^6Q(؉>pD΅WKHoZ㹳.m,oJz$8c3Ms=4‡6Y!wiCFxǞ:]˹$guci)[v"ē!yyT9 /봇zjtEzDFamMz0fcu^HN?[^?[hGeFnҼj~qWZUR {T]eY5L2} 16A<݁)ţh(ߓnMZ1 # d1):/o {-䆭t2EQæD52sZ9(S_#Iԛ)"JlqHM]ɰEeQKW9Kk:m:l`%40XFk:{^%Ec'rFLw L,ٶgT*l+]S>W3 ߠ3؉Ȉ[#v#ƩѰ9cd5ϜN@|$tjGෟMq|-.(ضB^5"yv z_Y&{Zy8`iQs\dZoBq6aKh[*Oe21f<v;_\,\N"tcIn0;'l9O&; h  q πQ PN h)WG'}߼W`[x|f IEK0pyGyZaWV\&B/SӈnsY;4؝mrThJ#WS|ْp8Ckylr7scZm({,&L/B8dg1ڣ(fn ԗaLѓg5f_NwH|ИE%:DwAYg_lxB$051R-nj2F[D`dR80 RF|sO^S .sz|}sg0ƊOyլrRmyXnQ(DC|Of}?)g/5\ A-Ou{1R\Wr`tfaTjh3h,(dJ#p2Hpƚ6=gZ['jKH;V"tlDՠd|0> @>ZtDu@{KRo3SIcNo$MHŠ# G pM M*T2Ć >/&SXp6^됖6,)PѽvL׭V^Bnf`/9? 
"_?Bm"ћ$)[U6 ǀR(/E0lC <,C U?d;.T,ԭ@"mv ]TKLOٳtj5SHWDyP np Jp bp}jmjcT=ߥfb|h7wn^֢2ߵB܄w c6yѦ"tBŤ ~geWru͚l`1uCrwXntZYy^8R8M~Pƹ5:In{wc[$`P)4DrCAHC{k}BBӟu8,TPH lwPG˫J6;//A ;N`/-LWo/zWq˅:&~/H.Fĝ t"$^,K̒*EweoQp,/,/*/zR^Ynk]s,nhA( _N>8YU9^ THDԳx-;Lyz+[zUR͚7Jy7HAue|Ur1$3,Kc'5twid) >42$~U@/,;~n}O}wH'(yeZ]PFz"7A4-At]\q4h-[4[jP2㩄ôSǯ2w~xl/ca'Ԛd=j_G=P@[j X;+J2PmL1 <*k J$ҥ$qX>iR4 Ą I,=,fq9((яvXۚpREUneȥ"kN.*dڐJcḾJ.H,-u,*ըmOJPk "yOU@vx\eK) CipvtlKM)EY_l|*Ul Dy0dEVj֜M&aUrJ㲚uCfyWbȮJpsnbU9N.tˣh#3U,檶dr.g VEe7;QMW%CAyWӆ2BI]BvIn)aQ sk5dWT1^q "b8og\X}'!Ka;dF](oF92 pʣ88Z9i4> `Wv'cw7lLA9Tkr-h%Hd`u ,׃yCbQ3$MjUh qp+1<$V;#҇= T>Qs ;W][7HAg=v2/9ȼ%-:Q8BX{Z$yM95G0,摔駚] PH~΋7?uqx̨ ?"Sg-|hj O2dbXkjf`EW.zrtQ+ubcRQ|YY[ }/3꾜4^ҮJLw2)})LAƌ Jf(B<>}ٵCgwGꚕETg!$S2 'G9#r}&CTvZh IcW/~ç$O[8& [LxW-ܫ|f<U|/Ed4r?WMuRe3拐a)%=}/pX_xFpD#Yք/o?ա  ?یSks/- Mn=.\-Zb;OI+v$Nb*/ĤUwy\lJY>k$WJKy˦ha0^C C_\%^Ż-._d5P|IhRh#_ŬI#;͗bڠ*_`DBм<,I,W1(Zny7sе>>*=/7y*rIWeE]ʺ:ׅ5e])wvSea[eA:|YmTwm$Y<_ݶ}nWhߒN' ;K@ƾåb<^hb㳐b  }Ղ/嘲peyA"KB4$o;QqOdEŧB3%ytQ$mu;jH1lS4ouI1̧kM G]w1dx-Š@ܧ#jom G2,&_N1ot3F~*BN2jVў*ѓgZr 7$`,muڡ+>Rm('`92ٗF= TKش&KѩGVGыX_nMMm9:Pĺr1<-^=6hh6h7PoM5@ܿk;0NmKOۇ-*ګGE.>J%@y˵P&d*-e+Zjf@T]0ŶO^Ԕ+'=[ ٥^m݁5|x4߁tŁkֽ$ @>)gUB/"\2LXΧTf Zj󋓪sTY4*!O.ǰX5KyeIZ@,2VĖMmᐆM 7oo[6> S82!S®G)<|:>]~.ޟ|!lP[P{G?к!/[o"hfY #So ^4Z&e1y`Fphu45BZWۚ'co)q'۽s]"{v;Ss)_ P0*ƛQGh@l.w+N &A$5(Blcmz5Q72}:vKM gg?w{'_5 TRFϼ8<.@\GY)2Ğ<"ʣn{]+UFj0-+0)ʯVJ.\ú8pwi%+M]E<Ep Fac$L_RD@~:QS-ֹSY!+kF|i 0$:a<:lLl9AfVLD[zhM 'r}Y&-B| &P96G/nA)S-Z9 E~<cFHdfG&VQN)Iel0& xB5LA6k9eyqfy+-Z,Ru]oo!nKp6 D^-/H<࿭0+?9'c6Ka[j&*ĨF #2Yj8͍;jJ\KeX\c8^'ȼ$A i<B\N n@L}}OML~Ul<:R1W]Pؔ קq;0̦-1%|ũc$(˸vOIcqyrZbmmTfvLu5Izl5yy)[A3HZA2%hz]!ts-U-=SOMoY=ۓ`F!qsn˳L%Q#OL&L fіC8*d1)g$:piتJ"1!P˟nRC 7syB1]İ!(c8a?f)Q1`9B.Un*VypDPJKL=h՝P?4l(}9}c ;e7-:hU1Ү6{tp(ޜvwhU{˖nZ[k[T0 q_ H+ȎRwk,jP!g@f9Y6Ww;iSWi#P__~VWF%d۩ߥ b 7Qx_L9 fH4ļl`OSFrh{kYHlPUҀ}bGvJśZ^w} (q٨9)D2({3W@p/+,+XaXMqI?K7~Kn(j* dB^S:߇6* p'RY*. ѡ0Xx%B† NWT]:.ZR-, `j0J"Bo!"Exb X 8=tZh+*[<[E݉ha|4 $2qDW}΢ S4hEY_OL̗aBi)P'q"hjcQghh$sJ&)xy朲9eOҧ F:,å'ύ -M;> AAXX_RP᫤IYӲg``@AN' C)2mѷ:bSծ)kò1YB)#KEZV0eX(廡fsD}[,͍ɩg@ml@ZtN[LF7+)³Ţzq{goqg=*,Gm+`( 'r~\\=x!%gԸW.Y%gP/{TR5R9o570 ٛhS="bdD4!Q[2VOR&t~Km{kʒv}tLjuyM@-"#FhЁJUֱopyk0>z\GK+⎥A! j e8>lSJ2 RU-9OgRc?XQ`ضyp$3#[VX?26oٷ9KvCPCN۞k(MkzDސqTu} }ȍN2E>eы:GWm#?kz暊J7=sSD?5p!ߗm[ivQo;2c=e'puq6]C_!>ʽpr;(<=hFJ*Dde픊a 0&v9s|f_\f,dE^oW+49ٶ|T5 f(X>Jgp_b̼X c֕u7rv^4}*lI/RoU+>SUi(VIxᥕ\޽Z?CUSgD\579l:8 4`bT͒=SVxxh4( 厱<nԸեY `at[z2 Ɂw(QWN`wX9lXKԒf[=9֡YěҨ98 ':OZ^ͺ0wr*o[)@$7äj2}@0 拃f咵MTYE"nP@5DoB U~9Ig_-ZˍoV $.jRK+Y._^>絍aPp 8kxו?dOaF'Cr؛!^ "i@׽& >(ק&JUE XM*ƌLߕtYUF@C^5eI J*UT.5P%l\\4KUpҞC)^?fsSG5!f UdΪjmq޹%6-)n$jNZ8ԝ ] |QN%UYӝw6|AvþIןCЃv{~99](/G0 ~vycs҈;hS4^{8>wDҧkv/ 'g7Ѱ @[RRf<#NjD@*S$PgAF̗(p*T@nn\j(+7_ ҃ [D(lMOuMvms[K@]=k~9iaНRNt*;2 BsupbI38dz |lf36zESW_sN>TB;Gi9Khm$ČWJa Q;ǒ6Zv*(ͦ -òN<2Zrx?PCNA݈"L'x,7zSg +F1f e4|ٽSzj3`Roҡ2C(3j)L.ߣ$eow[>1LZc$2ur/Qh. W{X鞵+n#[NO].ݮlM{)RVIDrΣ޼r'cW>WV48SKMmG,s4,( ^ k W^oX XwW/²/yH2evY}WJ+6*㹖;jءʮ66 ڤ_&UYmQlQ}'tM7T9.҈r0Ľw>co]ӑ\ ۻU uZ+8t{<h>.<z܃/b*>pOo?{z}u`m{^?e/\BfPsut }֚a 8F*Z`QNZx1:*:!ww| ]6oL t˄yr 64PFC6)Wl!n&:fՂ8@yI#wr`]͢t\e&S:~p,LY '!L{nc {e7PLFQTҢϿU#c"ehZ(Qͺ@ -7RCP*1 xlswLO*+ O/ p|` *_8L(M wˢDњB$/y#rTAH$|/Ɨ {T)oRA$ǗUKYyTZpt`~S/Ɨepfqf(&E@ D/B}G#qDT ~2E# 2Ҿ+CD7d9FS/4N2Vj5`U?ykN$ʭ/~. Z9fօ!$ϊd 4X .g27RFYlj̛c0*:#{{tprf]lœLA:GHZݎ9 ẃi3ٞdw} sLS  :*Bx?ȉcmjzcK'V>+bo< e:]{2G)ҘbVN^R?E||Ot DEtd ـ_g7\,/Km`3} f%u9 %)>_r|.vA't2a;~Xqܰ;{\w? 
ƭfXȹGz|&Zѧ??=(&9kȮ*o쾙alF4Li)C.Ew;WCXt)/#dEGMd{3M MK% Hiji])FmW]F?m7bWADVz/ )9ge6d&aƳtS?IP>LzG!BY=}B%$*ӯ[ 7 W|g XEsGƗy!d0NjI9Cނ3s4Qn4&ՙ0̓Bl y[ hNy~@-+.P9<(do<"0`JbWMhB=.k_# K ?Y8 ~Q9098P} >QJ9QE CPG u:ȸ15;2}וT~J>WݓtAFoIr{JWos2>Cʹv8ɾܙ}Wb€})@F,^*^+Ť?dy=Y܂C,kY]mͣehOi]LӼrR;W*O/w@uZNwt0 h;e]>ߑPFT^wS[Pd~_knaޣZ#oeHG[nѪfk{: Q|)}\ûv#=k$S Y^HfI&TE@VzQw~FG*oR wGa?pߩANL8xr/}yౕY#^Y#t򀪧9\ D 09Qݵi(<0`;p'<cHr@4pVa<( ie Jd 0ԧ1L ]OSW >!gm dEh ua;^n055^,ӭ[w 6JsVY ͅ+N4Tƃd;މR^;E/-xA-lxʓ1RJ./etGAOY,l)x/USi3[bT`Uj;$=j(BY2heh—$uze9jCdEF2 IE-_hAQd=_D:#r<4#/O$0UZ¼ =&Xd (d9:/W]2VS,7Ң}`xw[M ᄇnԑ_Eu| hF 1w4PT'qSV+= ݑ[9knWpb&#* 4:kw4ϼx <ʨTĄiݳ>g-IVs/9o兹ɟ2oU@T\qAG<ЭdmQ'MTխmuzUƷ⭮3UY͍ 筗H[x Y4,0&CŊڴsDd}9P4BEA2 4}FK>GקHMYK}MY^Rf̩VJ2PMHuDz&b⳧rQxirY^Jb*6 T`25-uZv}_Ѿ *67L`d-d&o(ܚu4 lnYT!,'nMUl;#On Ǩ斡%H%okܰv2N{ID*[jkr3x{)n뇄6Ę:ޙhsɛEäO t:ZsȢgpiuz95X&I,]g`,oQ1n[n־.D-}/ꡰh uuI-Gc6%Oy(Čqk$=U}ڪvF6_mP!OC~=Gn(Aʚc] qc`Q;p=hAv]a`)W^!)4瓽p6UrD. :™aǒMiubY ZJwb.*e?="УөUۿpVh4v3QY;s9tV㖓MǶ &q2#6IcEOs@hq;xJ!,P1ɯBouMWu[&[[9z0PDN@To K·iMT|4ψCB>TK٘#&%kI!b[Fl 0 3tkL@#S1Kf5{dJ4Fcy742GVf{x:j",At#pгִ@o/9 h$xRɐ7i H_<W7aKM{|ly[$ I_$**v (v-2WhHjLF(~l&F)5eau`hB428#]r8-EzC`]qHv."3s>0\6p4G1xqdG^9c blr|CG˷rfЗI\&-:IN| ,`&jQOUʤ²#v|JuHʝ' 7hd^v3?M*)4ƧrHis( }Mo:~*}U&bRݒV픔D! au|lG`Zǀ0c b 6WQ%JLhX&bL2bl˱@)[n+ z͘S)6Y Sm[*u{PXy|JG`xۓ1w}t5C?i޴-Y+*0,6cP> c>$|Ռƣ]8.Qp8Сך%UUq49*p@nScR:-9 ?q:.,NB͓{A khcr5t:A¯T0#O1* >K:yVUX$X\gwdMؤS|[8!U؝Sb4NElSs6$5=$:zx,,6WbTTUp`NIuֽ̹hzmKu-jFno{-ܩ:#Z^eu~d_P8z +vJlLй f:':udPq(W ]lZlvjF_ 2O>=S97iy$V(!) A+.j'<&nP>+{sN%P.Τ)<"\s8e~𱒭lռ-jjM]h& I4{NEbk?Ξ{9(RW'id@ q2L97 s[cMWYh|<67_}u {xtst%? $\Zq@<o;pnwv;myXU~I3ر9yBڑO|gp>sXA#Rcl+5;>'(Y¸ ;PD1I|-QLj&>r -;GRKMN4C]ٸQ~Ud^>8j mx 7!'Ab4BwHE $ԱzAב}J (3XqLLm˽ )1?M#W<<5i!*)!Lo &22yq !>31Fkgllf 4,n{x#}jmD$lhj[u$WIMg*eH+;juUURMC^Gm\0Acj入k!>irwF'jTeVbhFLq6>VSVᘕ~5W^4z*' &)+gsE (ŒX *USx&; Ʋ_@AH(c80%ƔN￀sK@WNPJ!廌vMDQh :<EvJ<1"Itw{S_LTl_p@9r=L;A+吝ߔlVвU%hAy-0Jtm+bfu4'匰Hpfl)vK0cJ e/ÖY;]D/o$s9: 4FA&u~*[ӵ!1fa~8ʏ K "~khtw%eMyd #B>m}9犜 `*aƉX֜d)/<)t=(b!ڣ2]\8. 
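The checks in checks.py above constrain the layout of the TENANTS setting: a mandatory "public" entry that may not carry routing keys (URLCONF, DOMAINS, SESSION_KEY, HEADER, FALLBACK_DOMAINS), an optional "default" entry that must define TENANT_MODEL and URLCONF and may define CLONE_REFERENCE, and additional static schemas with their own APPS, DOMAINS and URLCONF. Below is a minimal sketch of a settings fragment intended to satisfy those checks; the app labels, urlconf modules and tenant model path are illustrative assumptions, not values taken from this repository.

```python
# settings.py -- illustrative sketch only; "shared", "marketing", "customers" and
# the model/urlconf paths are assumed names, not part of django-pgschemas itself.
TENANTS = {
    "public": {
        # Apps that live only in the shared "public" schema. The apps declaring the
        # tenant and domain models must be listed here (check pgschemas.W001).
        "APPS": [
            "django.contrib.contenttypes",
            "shared",  # hypothetical app holding the tenant/domain models
        ],
    },
    "www": {
        # A static tenant routed by domain, with its own apps and urlconf.
        "APPS": ["django.contrib.auth", "django.contrib.sessions", "marketing"],
        "DOMAINS": ["mydomain.com"],
        "URLCONF": "marketing.urls",
    },
    "default": {
        # Applies to every dynamic tenant. TENANT_MODEL and URLCONF are required;
        # DOMAINS, SESSION_KEY, HEADER and FALLBACK_DOMAINS are rejected here.
        "TENANT_MODEL": "shared.Tenant",
        "APPS": ["django.contrib.auth", "django.contrib.sessions", "customers"],
        "URLCONF": "customers.urls",
        "CLONE_REFERENCE": "sample",  # optional reference schema used for fast cloning
    },
}
```

With a layout along these lines, "www" would be served as a static tenant on its listed domain, while each row of the assumed shared.Tenant model becomes a dynamic tenant routed by whatever domains, session key or header are attached to it.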
django-pgschemas-1.0.1/django_pgschemas/contrib/
django-pgschemas-1.0.1/django_pgschemas/contrib/__init__.py
django-pgschemas-1.0.1/django_pgschemas/contrib/cache.py
from django_pgschemas.schema import get_current_schema def make_key(key, key_prefix, version): """ Tenant aware function to generate a cache key. """ current_schema = get_current_schema() return "%s:%s:%s:%s" % (current_schema.schema_name, key_prefix, version, key) def reverse_key(key): """ Tenant aware function to reverse a cache key. Required for django-redis REVERSE_KEY_FUNCTION setting.
""" return key.split(":", 3)[3] django-pgschemas-1.0.1/django_pgschemas/contrib/channels.py000066400000000000000000000121371470131117100240240ustar00rootroot00000000000000from typing import cast from channels.db import database_sync_to_async from channels.middleware import BaseMiddleware from channels.routing import URLRouter from django.conf import settings from django.db.models import Q from django.urls import URLResolver, path from django.utils.encoding import force_bytes, force_str from django.utils.module_loading import import_string from django_pgschemas.models import TenantModel as TenantModelBase from django_pgschemas.routing.info import DomainInfo, HeadersInfo from django_pgschemas.routing.urlresolvers import get_ws_urlconf_from_schema from django_pgschemas.schema import Schema from django_pgschemas.settings import get_tenant_header from django_pgschemas.utils import get_domain_model, get_tenant_model, remove_www def TenantURLRouter(): async def router(scope, receive, send): schema: Schema | None = scope.get("tenant") routes = [] if schema is not None: ws_urlconf = get_ws_urlconf_from_schema(schema) if schema else None if ws_urlconf: routes = import_string(ws_urlconf + ".urlpatterns") match (routes, schema.routing): case ([URLResolver()], DomainInfo(_, folder)) if folder: routes = [path(f"{folder}/", URLRouter(routes[0].url_patterns))] case _: pass _router = URLRouter(routes) return await _router(scope, receive, send) return router class BaseRoutingMiddleware(BaseMiddleware): @database_sync_to_async def get_scope_tenant(self, scope): raise NotImplementedError async def __call__(self, scope, receive, send): scope = dict(scope) scope["tenant"] = await self.get_scope_tenant(scope) return await super().__call__(scope, receive, send) class DomainRoutingMiddleware(BaseRoutingMiddleware): @database_sync_to_async def get_scope_tenant(self, scope) -> Schema | None: hostname = force_str(dict(scope["headers"]).get(b"host", b"")) hostname = remove_www(hostname.split(":")[0]) tenant: Schema | None = None # Checking for static tenants for schema, data in settings.TENANTS.items(): if schema in ["public", "default"]: continue if hostname in data.get("DOMAINS", []): tenant = Schema.create( schema_name=schema, routing=DomainInfo(domain=hostname), ) break # Checking for dynamic tenants else: DomainModel = get_domain_model() prefix = scope["path"].split("/")[1] domain = None if DomainModel is not None: try: domain = DomainModel.objects.select_related("tenant").get( domain=hostname, folder=prefix ) except DomainModel.DoesNotExist: try: domain = DomainModel.objects.select_related("tenant").get( domain=hostname, folder="" ) except DomainModel.DoesNotExist: pass if domain is not None: tenant = cast(TenantModelBase, domain.tenant) tenant.routing = DomainInfo(domain=hostname) if prefix and domain.folder == prefix: tenant.routing = DomainInfo(domain=hostname, folder=prefix) # Checking fallback domains if not tenant: for schema, data in settings.TENANTS.items(): if schema in ["public", "default"]: continue if hostname in data.get("FALLBACK_DOMAINS", []): tenant = Schema.create( schema_name=schema, routing=DomainInfo(domain=hostname), ) break return tenant class HeadersRoutingMiddleware(BaseRoutingMiddleware): @database_sync_to_async def get_scope_tenant(self, scope) -> Schema | None: tenant_header = get_tenant_header() tenant_ref = force_str(dict(scope["headers"]).get(force_bytes(tenant_header), b"")) if not tenant_ref: return None tenant: Schema | None = None # Checking for static tenants for schema, data 
in settings.TENANTS.items(): if schema in ["public", "default"]: continue if tenant_ref == schema or tenant_ref == data.get("HEADER"): tenant = Schema.create( schema_name=schema, routing=HeadersInfo(reference=tenant_ref) ) break # Checking for dynamic tenants else: if (TenantModel := get_tenant_model()) is not None: tenant = TenantModel._default_manager.filter( Q(pk__iexact=tenant_ref) | Q(schema_name=tenant_ref) ).first() if tenant is not None: tenant.routing = HeadersInfo(reference=tenant_ref) return tenant django-pgschemas-1.0.1/django_pgschemas/contrib/storage.py000066400000000000000000000042711470131117100236750ustar00rootroot00000000000000import os from django.core.files.storage import FileSystemStorage from django_pgschemas.routing.info import DomainInfo from django_pgschemas.schema import get_current_schema from django_pgschemas.settings import get_pathname_function class TenantFileSystemStorage(FileSystemStorage): """ Tenant aware file system storage. Appends the tenant identifier to the base location and base URL. """ def get_schema_path_identifier(self): schema = get_current_schema() if schema is None: return "" path_identifier = schema.schema_name if hasattr(schema, "schema_pathname"): path_identifier = schema.schema_pathname() elif pathname_function := get_pathname_function(): path_identifier = pathname_function(schema) return path_identifier @property # To avoid caching of tenant def base_location(self): """ Appends base location with the schema path identifier. """ file_folder = self.get_schema_path_identifier() location = os.path.join(super().base_location, file_folder) if not location.endswith("/"): location += "/" return location @property # To avoid caching of tenant def location(self): return super().location @property # To avoid caching of tenant def base_url(self): """ Optionally appends base URL with the schema path identifier. If the current schema is already using a folder, no path identifier is appended. """ schema = get_current_schema() url_folder = self.get_schema_path_identifier() # Specific case of domain+folder routing if ( url_folder and schema and isinstance(schema.routing, DomainInfo) and schema.routing.folder ): # Since we're already prepending all URLs with schema, there is no # need to make the differentiation here url_folder = "" parent_base_url = super().base_url.strip("/") url = "/".join(["", parent_base_url, url_folder]) if not url.endswith("/"): url += "/" return url django-pgschemas-1.0.1/django_pgschemas/log.py000066400000000000000000000014711470131117100213510ustar00rootroot00000000000000import logging from typing import Any from django_pgschemas.routing.info import DomainInfo, HeadersInfo, SessionInfo from django_pgschemas.schema import get_current_schema class SchemaContextFilter(logging.Filter): """ Add the current routing info to log records. 
""" def filter(self, record: Any) -> bool: current_schema = get_current_schema() record.schema_name = current_schema.schema_name match current_schema.routing: case DomainInfo(domain, folder): record.domain = domain record.folder = folder case SessionInfo(reference): record.reference = reference case HeadersInfo(reference): record.reference = reference case _: pass return True django-pgschemas-1.0.1/django_pgschemas/management/000077500000000000000000000000001470131117100223275ustar00rootroot00000000000000django-pgschemas-1.0.1/django_pgschemas/management/__init__.py000066400000000000000000000000001470131117100244260ustar00rootroot00000000000000django-pgschemas-1.0.1/django_pgschemas/management/commands/000077500000000000000000000000001470131117100241305ustar00rootroot00000000000000django-pgschemas-1.0.1/django_pgschemas/management/commands/__init__.py000066400000000000000000000265511470131117100262520ustar00rootroot00000000000000import enum from django.conf import settings from django.core.management.base import BaseCommand, CommandError from django.db.models import Case, CharField, Q, Value, When from django.db.models.functions import Concat from django.db.utils import ProgrammingError from django_pgschemas.management.commands._executors import parallel, sequential from django_pgschemas.schema import Schema, get_current_schema from django_pgschemas.utils import ( create_schema, dynamic_models_exist, get_clone_reference, get_domain_model, get_tenant_model, ) class CommandScope(enum.Enum): ALL = "all" DYNAMIC = "dynamic" STATIC = "static" @classmethod def allow_static(cls): return [cls.ALL, cls.STATIC] @classmethod def allow_dynamic(cls): return [cls.ALL, cls.DYNAMIC] EXECUTORS = { "sequential": sequential, "parallel": parallel, } class WrappedSchemaOption: scope = CommandScope.ALL specific_schemas = None allow_interactive = True allow_wildcards = True def add_arguments(self, parser): if self.allow_interactive: parser.add_argument( "--noinput", "--no-input", action="store_false", dest="interactive", help="Tells Django to NOT prompt the user for input of any kind.", ) parser.add_argument( "-s", "--schema", nargs="+", dest="schemas", help="Schema(s) to execute the current command", ) parser.add_argument( "-x", "--exclude-schema", nargs="+", dest="excluded_schemas", help="Schema(s) to exclude when executing the current command", ) if self.allow_wildcards: parser.add_argument( "-as", "--include-all-schemas", action="store_true", dest="all_schemas", help="Include all schemas when executing the current command", ) parser.add_argument( "-ss", "--include-static-schemas", action="store_true", dest="static_schemas", help="Include all static schemas when executing the current command", ) parser.add_argument( "-ds", "--include-dynamic-schemas", action="store_true", dest="dynamic_schemas", help="Include all dynamic schemas when executing the current command", ) parser.add_argument( "-ts", "--include-tenant-schemas", action="store_true", dest="tenant_schemas", help="Include all tenant-like schemas when executing the current command", ) parser.add_argument( "--parallel", dest="parallel", action="store_true", help="Run command in parallel mode", ) parser.add_argument( "--no-create-schemas", dest="skip_schema_creation", action="store_true", help="Skip automatic creation of non-existing schemas", ) def get_schemas_from_options(self, **options): skip_schema_creation = options.get("skip_schema_creation", False) try: schemas = self._get_schemas_from_options(**options) except ProgrammingError: # This happens 
with unmigrated database. # It can also happen when the tenant model contains unapplied migrations that break. raise CommandError( "Error while attempting to retrieve dynamic schemas. " "Perhaps you need to migrate the 'public' schema first?" ) if self.specific_schemas is not None: schemas = [x for x in schemas if x in self.specific_schemas] if not schemas: raise CommandError("This command can only run in %s" % self.specific_schemas) if not skip_schema_creation: for schema in schemas: create_schema(schema, check_if_exists=True, sync_schema=False, verbosity=0) return schemas def get_executor_from_options(self, **options): return EXECUTORS["parallel"] if options.get("parallel") else EXECUTORS["sequential"] def get_scope_display(self): return "|".join(self.specific_schemas or []) or self.scope.value def _get_schemas_from_options(self, **options): schemas = options.get("schemas") or [] excluded_schemas = options.get("excluded_schemas") or [] include_all_schemas = options.get("all_schemas") or False include_static_schemas = options.get("static_schemas") or False include_dynamic_schemas = options.get("dynamic_schemas") or False include_tenant_schemas = options.get("tenant_schemas") or False dynamic_ready = dynamic_models_exist() allow_static = self.scope in CommandScope.allow_static() allow_dynamic = self.scope in CommandScope.allow_dynamic() clone_reference = get_clone_reference() if ( not schemas and not include_all_schemas and not include_static_schemas and not include_dynamic_schemas and not include_tenant_schemas ): if not self.allow_interactive: include_all_schemas = True elif options.get("interactive", True): schema = input( "Enter schema to run command (leave blank for running on '%s' schemas): " % self.get_scope_display() ).strip() if schema: schemas.append(schema) else: include_all_schemas = True else: raise CommandError("No schema provided") TenantModel = get_tenant_model() has_domains = get_domain_model() is not None static_schemas = ( [x for x in settings.TENANTS.keys() if x != "default"] if allow_static else [] ) dynamic_schemas = ( TenantModel.objects.values_list("schema_name", flat=True) if TenantModel is not None and dynamic_ready and allow_dynamic else [] ) if clone_reference and allow_static: static_schemas.append(clone_reference) schemas_to_return = set() if include_all_schemas: if not allow_static and not allow_dynamic: raise CommandError("Including all schemas is NOT allowed") schemas_to_return = schemas_to_return.union(static_schemas + list(dynamic_schemas)) if include_static_schemas: if not allow_static: raise CommandError("Including static schemas is NOT allowed") schemas_to_return = schemas_to_return.union(static_schemas) if include_dynamic_schemas: if not allow_dynamic: raise CommandError("Including dynamic schemas is NOT allowed") schemas_to_return = schemas_to_return.union(dynamic_schemas) if include_tenant_schemas: if not allow_dynamic: raise CommandError("Including tenant-like schemas is NOT allowed") schemas_to_return = schemas_to_return.union(dynamic_schemas) if clone_reference: schemas_to_return.add(clone_reference) def find_schema_by_reference(reference, as_excluded=False): if reference in settings.TENANTS and reference != "default" and allow_static: return reference elif reference == clone_reference: return reference elif ( TenantModel is not None and dynamic_ready and TenantModel.objects.filter(schema_name=reference).exists() and allow_dynamic ): return reference else: local = [] if allow_static: local += [ schema_name for schema_name, data in 
settings.TENANTS.items() if schema_name not in ["public", "default"] and any(x for x in data.get("DOMAINS", []) if x.startswith(reference)) ] if TenantModel is not None and dynamic_ready and allow_dynamic: local += ( TenantModel.objects.annotate( route=Case( When( domains__folder="", then="domains__domain", ), default=Concat( "domains__domain", Value("/"), "domains__folder", output_field=CharField(), ), output_field=CharField(), ) if has_domains else Value("") ) .filter(Q(schema_name=reference) | Q(route__startswith=reference)) .distinct() .values_list("schema_name", flat=True) ) if not local: message = ( "No schema found for '%s' (excluded)" if as_excluded else "No schema found for '%s'" ) raise CommandError(message % reference) if len(local) > 1: message = ( "More than one tenant found for schema '%s' by domain (excluded), " "please, narrow down the filter" if as_excluded else "More than one tenant found for schema '%s' by domain, please, narrow down the filter" ) raise CommandError(message % reference) return local[0] for schema in schemas: included = find_schema_by_reference(schema, as_excluded=False) schemas_to_return.add(included) for schema in excluded_schemas: excluded = find_schema_by_reference(schema, as_excluded=True) schemas_to_return -= {excluded} return ( list(schemas_to_return) if "public" not in schemas_to_return else ["public"] + list(schemas_to_return - {"public"}) ) class SchemaCommand(WrappedSchemaOption, BaseCommand): def handle(self, *args, **options): schemas = self.get_schemas_from_options(**options) executor = self.get_executor_from_options(**options) executor(schemas, self, "_raw_handle_schema", args, options, pass_schema_in_kwargs=True) def _raw_handle_schema(self, *args, **kwargs): kwargs.pop("schema_name") self.handle_schema(get_current_schema(), *args, **kwargs) def handle_schema(self, schema: Schema, *args, **options): raise NotImplementedError class StaticSchemaCommand(SchemaCommand): scope = CommandScope.STATIC class DynamicSchemaCommand(SchemaCommand): scope = CommandScope.DYNAMIC django-pgschemas-1.0.1/django_pgschemas/management/commands/_executors.py000066400000000000000000000107161470131117100266670ustar00rootroot00000000000000import functools from concurrent.futures import ThreadPoolExecutor, as_completed from django.conf import settings from django.core.management import call_command from django.core.management.base import BaseCommand, CommandError, OutputWrapper from django.db.utils import ProgrammingError from django_pgschemas.routing.info import DomainInfo from django_pgschemas.routing.models import get_primary_domain_for_tenant from django_pgschemas.schema import Schema, activate from django_pgschemas.utils import get_clone_reference, get_tenant_model def run_on_schema( schema_name, executor_codename, command, function_name=None, args=None, kwargs=None, pass_schema_in_kwargs=False, ): if args is None: args = [] if kwargs is None: kwargs = {} if not isinstance(command, BaseCommand): # Parallel executor needs to pass command 'type' instead of 'instance' # Therefore, no customizations for the command can be done, nor using custom stdout, stderr command = command() command.stdout = kwargs.pop("stdout", command.stdout) if not isinstance(command.stdout, OutputWrapper): command.stdout = OutputWrapper(command.stdout) command.stderr = kwargs.pop("stderr", command.stderr) if not isinstance(command.stderr, OutputWrapper): command.stderr = OutputWrapper(command.stderr) # Since we are prepending every output with the schema_name and executor, we need to 
determine # whether we need to do so based on the last ending used to write. If the last write didn't end # in '\n' then we don't do the prefixing in order to keep the output looking good. class StyleFunc: last_message = None def __call__(self, message): last_message = self.last_message self.last_message = message if last_message is None or last_message.endswith("\n"): return "[%s:%s] %s" % ( command.style.NOTICE(executor_codename), command.style.NOTICE(schema_name), message, ) return message command.stdout.style_func = StyleFunc() command.stderr.style_func = StyleFunc() if schema_name in settings.TENANTS: domains = settings.TENANTS[schema_name].get("DOMAINS", []) schema = Schema.create( schema_name=schema_name, routing=DomainInfo(domain=domains[0]) if domains else None, ) elif schema_name == get_clone_reference(): schema = Schema.create(schema_name=schema_name) elif (TenantModel := get_tenant_model()) is not None: try: schema = TenantModel.objects.get(schema_name=schema_name) if (domain := get_primary_domain_for_tenant(schema)) is not None: schema.routing = DomainInfo(domain=domain.domain, folder=domain.folder) except ProgrammingError: schema = Schema.create(schema_name=schema_name) else: raise CommandError(f"Unable to find schema {schema_name}!") if pass_schema_in_kwargs: kwargs.update({"schema_name": schema_name}) activate(schema) if function_name == "special:call_command": call_command(command, *args, **kwargs) elif function_name == "special:run_from_argv": command.run_from_argv(args) else: getattr(command, function_name)(*args, **kwargs) return schema_name def sequential( schemas, command, function_name, args=None, kwargs=None, pass_schema_in_kwargs=False ): runner = functools.partial( run_on_schema, executor_codename="sequential", command=command, function_name=function_name, args=args, kwargs=kwargs, pass_schema_in_kwargs=pass_schema_in_kwargs, ) for schema in schemas: runner(schema) return schemas def parallel(schemas, command, function_name, args=None, kwargs=None, pass_schema_in_kwargs=False): processes = getattr(settings, "PGSCHEMAS_PARALLEL_MAX_PROCESSES", None) runner = functools.partial( run_on_schema, executor_codename="parallel", command=type(command), # Can't pass streams to children processes function_name=function_name, args=args, kwargs=kwargs, pass_schema_in_kwargs=pass_schema_in_kwargs, ) with ThreadPoolExecutor(max_workers=processes) as executor: results = {executor.submit(runner, schema) for schema in schemas} as_completed(results) return schemas django-pgschemas-1.0.1/django_pgschemas/management/commands/cloneschema.py000066400000000000000000000123671470131117100267740ustar00rootroot00000000000000from distutils.util import strtobool from django.core.checks import Tags, run_checks from django.core.management.base import BaseCommand, CommandError from django_pgschemas.utils import clone_schema, get_domain_model, get_tenant_model class Command(BaseCommand): help = "Clones a schema" def _run_checks(self, **kwargs): # pragma: no cover issues = run_checks(tags=[Tags.database]) issues.extend(super()._run_checks(**kwargs)) return issues def add_arguments(self, parser): super().add_arguments(parser) parser.add_argument( "source", help="The name of the schema you want to clone", ) parser.add_argument( "destination", help="The name of the schema you want to create as clone", ) parser.add_argument( "--noinput", "--no-input", action="store_false", dest="interactive", help="Tells Django to NOT prompt the user for input of any kind.", ) parser.add_argument( "--dry-run", 
dest="dry_run", action="store_true", help="Just show what clone would do; without actually cloning.", ) def _ask(self, question): answer = None while answer is None: try: raw_answer = input(f"{question.strip()} [Y/n] ").strip() or "y" answer = strtobool(raw_answer) except ValueError: self.stderr.write(f"{raw_answer} is not a valid answer.") pass return answer def _check_required_field(self, field, exclude=None): if exclude is None: exclude = [] return ( field.editable and not field.primary_key and not field.is_relation and not ( field.null or field.has_default() or (field.blank and field.empty_strings_allowed) or getattr(field, "auto_now", False) or getattr(field, "auto_now_add", False) ) and field.name not in exclude ) def _get_constructed_instance(self, model_class, data): fields = [ field for field in model_class._meta.fields if self._check_required_field(field, data.keys()) ] instance = model_class(**data) if fields: self.stdout.write( self.style.WARNING(f"We need some data for model '{model_class._meta.model_name}':") ) for field in fields: while field.name not in data: raw_value = input(f"Value for field '{field.name}': ") try: data[field.name] = field.clean(raw_value, None) instance = model_class(**data) instance.clean() except Exception as e: if hasattr(e, "message"): self.stderr.write(e.message) # noqa elif hasattr(e, "messages"): self.stderr.write(" ".join(e.messages)) # noqa else: self.stderr.write(e) data.pop(field.name, None) return instance def get_dynamic_tenant(self, **options): tenant = None domain = None if self._ask( "You are cloning a schema for a dynamic tenant. Would you like to create a database entry for it?" ): tenant = self._get_constructed_instance( get_tenant_model(), {"schema_name": options["destination"]} ) domain = self._get_constructed_instance(get_domain_model(), {"is_primary": True}) if options["verbosity"] >= 1: self.stdout.write(self.style.WARNING("Looks good! 
Let's get to it!")) return tenant, domain def handle(self, *args, **options): tenant = None domain = None dry_run = options.get("dry_run") if options.get("interactive", True): TenantModel = get_tenant_model() if ( TenantModel is not None and TenantModel.objects.filter(schema_name=options["source"]).exists() ): tenant, domain = self.get_dynamic_tenant(**options) try: clone_schema(options["source"], options["destination"], dry_run) if tenant and domain: if options["verbosity"] >= 1: self.stdout.write("Schema cloned.") if not dry_run: tenant.save() domain.tenant = tenant if not dry_run: domain.save() if options["verbosity"] >= 1: self.stdout.write("Tenant and domain successfully saved.") if options["verbosity"] >= 1: self.stdout.write("All done!") except Exception as e: if hasattr(e, "message"): raise CommandError(e.message) # noqa elif hasattr(e, "messages"): raise CommandError(" ".join(e.messages)) # noqa else: raise CommandError(e) django-pgschemas-1.0.1/django_pgschemas/management/commands/createrefschema.py000066400000000000000000000033451470131117100276300ustar00rootroot00000000000000from django.core.checks import Tags, run_checks from django.core.management.base import BaseCommand, CommandError from django_pgschemas.utils import create_schema, drop_schema, get_clone_reference class Command(BaseCommand): help = "Creates the reference schema for faster dynamic tenant creation" def _run_checks(self, **kwargs): # pragma: no cover issues = run_checks(tags=[Tags.database]) issues.extend(super()._run_checks(**kwargs)) return issues def add_arguments(self, parser): super().add_arguments(parser) parser.add_argument( "--recreate", action="store_true", dest="recreate", help="Recreate reference schema.", ) def handle(self, *args, **options): clone_reference = get_clone_reference() if not clone_reference: raise CommandError("There is no reference schema configured.") if options.get("recreate", False): drop_schema(clone_reference, check_if_exists=True, verbosity=options["verbosity"]) if options["verbosity"] >= 1: self.stdout.write("Destroyed existing reference schema.") created = create_schema( clone_reference, check_if_exists=True, verbosity=options["verbosity"] ) if options["verbosity"] >= 1: if created: self.stdout.write("Reference schema successfully created!") else: self.stdout.write("Reference schema already exists.") self.stdout.write( self.style.WARNING( "Run this command again with --recreate if you want to recreate the reference schema." ) ) django-pgschemas-1.0.1/django_pgschemas/management/commands/migrate.py000066400000000000000000000001201470131117100261230ustar00rootroot00000000000000from .migrateschema import MigrateSchemaCommand Command = MigrateSchemaCommand django-pgschemas-1.0.1/django_pgschemas/management/commands/migrateschema.py000066400000000000000000000020141470131117100273100ustar00rootroot00000000000000from django.core.checks import Tags, run_checks from django.core.management.base import BaseCommand from django.core.management.commands.migrate import Command as MigrateCommand from . 
import WrappedSchemaOption from .runschema import Command as RunSchemaCommand class NonInteractiveRunSchemaCommand(RunSchemaCommand): allow_interactive = False class MigrateSchemaCommand(WrappedSchemaOption, BaseCommand): allow_interactive = False requires_system_checks = [] def _run_checks(self, **kwargs): # pragma: no cover issues = run_checks(tags=[Tags.database]) issues.extend(super()._run_checks(**kwargs)) return issues def add_arguments(self, parser): super().add_arguments(parser) MigrateCommand.add_arguments(self, parser) def handle(self, *args, **options): runschema = NonInteractiveRunSchemaCommand() options.pop("run_syncdb", False) runschema.execute(command_name="django.core.migrate", *args, **options) Command = MigrateSchemaCommand django-pgschemas-1.0.1/django_pgschemas/management/commands/runschema.py000066400000000000000000000064701470131117100264760ustar00rootroot00000000000000import argparse import sys from django.core.management import get_commands, load_command_class from django.core.management.base import BaseCommand, CommandError, SystemCheckError from . import WrappedSchemaOption class Command(WrappedSchemaOption, BaseCommand): help = "Wrapper around Django commands for use with an individual schema" def add_arguments(self, parser): super().add_arguments(parser) parser.add_argument("command_name", help="The command name you want to run") def get_command_from_arg(self, arg): *chunks, command = arg.split(".") path = ".".join(chunks) if not path: path = get_commands().get(command) try: cmd = load_command_class(path, command) except Exception: raise CommandError("Unknown command: %s" % arg) if isinstance(cmd, WrappedSchemaOption): raise CommandError("Command '%s' cannot be used in runschema" % arg) return cmd def run_from_argv(self, argv): # pragma: no cover """ Changes the option_list to use the options from the wrapped command. Adds schema parameter to specify which schema will be used when executing the wrapped command. """ try: # load the command object. if len(argv) <= 2: raise CommandError("No command to run") target_class = self.get_command_from_arg(argv[2]) # Ugly, but works. Delete command_name from the argv, parse the schemas manually # and forward the rest of the arguments to the actual command being wrapped. del argv[1] schema_parser = argparse.ArgumentParser() super().add_arguments(schema_parser) schema_ns, args = schema_parser.parse_known_args(argv) schemas = self.get_schemas_from_options( schemas=schema_ns.schemas, all_schemas=schema_ns.all_schemas, static_schemas=schema_ns.static_schemas, dynamic_schemas=schema_ns.dynamic_schemas, tenant_schemas=schema_ns.tenant_schemas, ) executor = self.get_executor_from_options(parallel=schema_ns.parallel) except Exception as e: if not isinstance(e, CommandError): raise # SystemCheckError takes care of its own formatting. 
if isinstance(e, SystemCheckError): self.stderr.write(str(e), lambda x: x) else: self.stderr.write("%s: %s" % (e.__class__.__name__, e)) sys.exit(1) executor(schemas, target_class, "special:run_from_argv", args) def handle(self, *args, **options): target = self.get_command_from_arg(options.pop("command_name")) schemas = self.get_schemas_from_options(**options) executor = self.get_executor_from_options(**options) options.pop("schemas") options.pop("excluded_schemas") options.pop("all_schemas") options.pop("static_schemas") options.pop("dynamic_schemas") options.pop("tenant_schemas") options.pop("parallel") options.pop("skip_schema_creation") if self.allow_interactive: options.pop("interactive") executor(schemas, target, "special:call_command", args, options) django-pgschemas-1.0.1/django_pgschemas/management/commands/whowill.py000066400000000000000000000006021470131117100261650ustar00rootroot00000000000000from django_pgschemas.schema import Schema from . import SchemaCommand class Command(SchemaCommand): help = "Displays which schemas would be used based on the passed schema selectors" def handle_schema(self, schema: Schema, *args, **options): if options["verbosity"] >= 1: self.stdout.write(str(schema.routing) if schema.routing else schema.schema_name) django-pgschemas-1.0.1/django_pgschemas/models.py000066400000000000000000000071351470131117100220560ustar00rootroot00000000000000from django.db import models from django_pgschemas.postgresql.base import check_schema_name from django_pgschemas.schema import Schema from django_pgschemas.signals import ( dynamic_tenant_needs_sync, dynamic_tenant_post_sync, dynamic_tenant_pre_drop, ) from django_pgschemas.utils import ( create_or_clone_schema, drop_schema, schema_exists, ) class TenantModel(Schema, models.Model): """ All tenant models must inherit this class. """ auto_create_schema = True """ Set this flag to `False` on a parent class if you don't want the schema to be automatically created upon save. """ auto_drop_schema = False """ *USE THIS WITH CAUTION!* Set this flag to `True` on a parent class if you want the schema to be automatically deleted if the tenant row gets deleted. """ is_dynamic = True """ Leave this as `True`. Denotes it's a database controlled tenant. 
""" schema_name = models.CharField(max_length=63, unique=True, validators=[check_schema_name]) class Meta: abstract = True def save( self, force_insert: bool = False, force_update: bool = False, using: str | None = None, update_fields: list[str] | None = None, verbosity: int = 1, ) -> None: is_new = self.pk is None super().save(force_insert, force_update, using, update_fields) if is_new and self.auto_create_schema: try: self.create_schema(verbosity=verbosity) dynamic_tenant_post_sync.send(sender=TenantModel, tenant=self.serializable_fields()) except Exception: # We failed creating the tenant, delete what we created and re-raise the exception self.delete(force_drop=True) raise elif is_new: # Although we are not using the schema functions directly, the signal might be registered by a listener dynamic_tenant_needs_sync.send(sender=TenantModel, tenant=self.serializable_fields()) elif not is_new and self.auto_create_schema and not schema_exists(self.schema_name): # Create schemas for existing models, deleting only the schema on failure try: self.create_schema(verbosity=verbosity) dynamic_tenant_post_sync.send(sender=TenantModel, tenant=self.serializable_fields()) except Exception: # We failed creating the schema, delete what we created and re-raise the exception self.drop_schema() raise def delete( self, using: str | None = None, keep_parents: bool = False, force_drop: bool = False ) -> None: """ Deletes this row. Drops the tenant's schema if the attribute `auto_drop_schema` is `True`. """ if force_drop or self.auto_drop_schema: dynamic_tenant_pre_drop.send(sender=TenantModel, tenant=self.serializable_fields()) self.drop_schema() super().delete(using, keep_parents) def serializable_fields(self) -> "TenantModel": """ In certain cases the model isn't serializable so you may want to only send the id. """ return self def create_schema(self, sync_schema: bool = True, verbosity: int = 1) -> bool: """ Creates or clones the schema `schema_name` for this tenant. """ return create_or_clone_schema(self.schema_name, sync_schema, verbosity) def drop_schema(self) -> bool: """ Drops the schema. """ return drop_schema(self.schema_name) django-pgschemas-1.0.1/django_pgschemas/postgresql/000077500000000000000000000000001470131117100224165ustar00rootroot00000000000000django-pgschemas-1.0.1/django_pgschemas/postgresql/__init__.py000066400000000000000000000000001470131117100245150ustar00rootroot00000000000000django-pgschemas-1.0.1/django_pgschemas/postgresql/_constraints.py000066400000000000000000000105661470131117100255060ustar00rootroot00000000000000from django.db.models.indexes import Index from django_pgschemas.schema import get_current_schema def get_constraints(self, cursor, table_name): """ Retrieve any constraints or keys (unique, pk, fk, check, index) across one or more columns. Also retrieve the definition of expression-based indexes. """ constraints = {} # Loop over the key table, collecting things as constraints. The column # array must return column names in the same order in which they were # created. # The subquery containing generate_series can be replaced with # "WITH ORDINALITY" when support for Postgres 9.3 is dropped. cursor.execute( """ SELECT c.conname, array( SELECT attname FROM ( SELECT unnest(c.conkey) AS colid, generate_series(1, array_length(c.conkey, 1)) AS arridx ) AS cols JOIN pg_attribute AS ca ON cols.colid = ca.attnum WHERE ca.attrelid = c.conrelid ORDER BY cols.arridx ), c.contype, (SELECT fkc.relname || '.' 
|| fka.attname FROM pg_attribute AS fka JOIN pg_class AS fkc ON fka.attrelid = fkc.oid WHERE fka.attrelid = c.confrelid AND fka.attnum = c.confkey[1]), cl.reloptions FROM pg_constraint AS c JOIN pg_class AS cl ON c.conrelid = cl.oid JOIN pg_namespace AS ns ON cl.relnamespace = ns.oid WHERE ns.nspname = %s AND cl.relname = %s """, [get_current_schema().schema_name, table_name], ) for constraint, columns, kind, used_cols, options in cursor.fetchall(): constraints[constraint] = { "columns": columns, "primary_key": kind == "p", "unique": kind in ["p", "u"], "foreign_key": tuple(used_cols.split(".", 1)) if kind == "f" else None, "check": kind == "c", "index": False, "definition": None, "options": options, } # Now get indexes # The row_number() function for ordering the index fields can be # replaced by WITH ORDINALITY in the unnest() functions when support # for Postgres 9.3 is dropped. cursor.execute( """ SELECT indexname, array_agg(attname ORDER BY rnum), indisunique, indisprimary, array_agg(ordering ORDER BY rnum), amname, exprdef, s2.attoptions FROM ( SELECT row_number() OVER () as rnum, c2.relname as indexname, idx.*, attr.attname, am.amname, CASE WHEN idx.indexprs IS NOT NULL THEN pg_get_indexdef(idx.indexrelid) END AS exprdef, CASE am.amname WHEN 'btree' THEN CASE (option & 1) WHEN 1 THEN 'DESC' ELSE 'ASC' END END as ordering, c2.reloptions as attoptions FROM ( SELECT *, unnest(i.indkey) as key, unnest(i.indoption) as option FROM pg_index i ) idx LEFT JOIN pg_class c ON idx.indrelid = c.oid LEFT JOIN pg_namespace n ON n.oid = c.relnamespace LEFT JOIN pg_class c2 ON idx.indexrelid = c2.oid LEFT JOIN pg_am am ON c2.relam = am.oid LEFT JOIN pg_attribute attr ON attr.attrelid = c.oid AND attr.attnum = idx.key WHERE c.relname = %s and n.nspname = %s ) s2 GROUP BY indexname, indisunique, indisprimary, amname, exprdef, attoptions; """, [table_name, get_current_schema().schema_name], ) for index, columns, unique, primary, orders, type_, definition, options in cursor.fetchall(): if index not in constraints: constraints[index] = { "columns": columns if columns != [None] else [], "orders": orders if orders != [None] else [], "primary_key": primary, "unique": unique, "foreign_key": None, "check": False, "index": True, "type": Index.suffix if type_ == "btree" else type_, "definition": definition, "options": options, } return constraints django-pgschemas-1.0.1/django_pgschemas/postgresql/base.py000066400000000000000000000057261470131117100237140ustar00rootroot00000000000000from django.core.exceptions import ImproperlyConfigured from django.db.utils import DatabaseError from django.utils.asyncio import async_unsafe from django_pgschemas.schema import get_current_schema, get_default_schema from django_pgschemas.settings import ( get_base_backend_module, get_extra_search_paths, get_original_backend_module, ) from django_pgschemas.utils import check_schema_name, get_limit_set_calls from .introspection import DatabaseSchemaIntrospection try: try: import psycopg as _psycopg except ImportError: import psycopg2 as _psycopg except ImportError: raise ImproperlyConfigured("Error loading psycopg2 or psycopg module") try: module = get_original_backend_module("base") except AttributeError: module = get_base_backend_module("base") def get_search_path(schema=None): if schema is None: schema = get_default_schema() search_path = ["public"] if schema.schema_name == "public" else [schema.schema_name, "public"] search_path.extend(get_extra_search_paths()) for part in search_path: check_schema_name(part) return ", 
".join(search_path) class DatabaseWrapper(module.DatabaseWrapper): def __init__(self, *args, **kwargs): self._search_path = None self._setting_search_path = False super().__init__(*args, **kwargs) # Patched version of DatabaseIntrospection that only returns the table list for the currently selected schema self.introspection = DatabaseSchemaIntrospection(self) @async_unsafe def close(self) -> None: self._search_path = None self._setting_search_path = False super().close() @async_unsafe def rollback(self) -> None: self._search_path = None self._setting_search_path = False super().rollback() def _handle_search_path(self, cursor=None): search_path_for_current_schema = get_search_path(get_current_schema()) skip = self._setting_search_path or ( self._search_path == search_path_for_current_schema and get_limit_set_calls() ) if not skip: self._setting_search_path = True cursor_for_search_path = self.connection.cursor() if cursor is None else cursor try: cursor_for_search_path.execute( f"SET search_path = {search_path_for_current_schema}" ) except (DatabaseError, _psycopg.InternalError): self._search_path = None else: self._search_path = search_path_for_current_schema finally: self._setting_search_path = False if cursor is None: cursor_for_search_path.close() def _cursor(self, name=None): cursor = super()._cursor(name=name) cursor_for_search_path = cursor if name is None else None # Named cursors cannot be reused self._handle_search_path(cursor_for_search_path) return cursor django-pgschemas-1.0.1/django_pgschemas/postgresql/introspection.py000066400000000000000000000136361470131117100257010ustar00rootroot00000000000000from django.db.backends.base.introspection import FieldInfo, TableInfo from django.utils.encoding import force_str from django_pgschemas.schema import get_current_schema from django_pgschemas.settings import get_base_backend_module, get_original_backend_module from . import _constraints try: module = get_original_backend_module("introspection") except AttributeError: module = get_base_backend_module("introspection") class DatabaseSchemaIntrospection(module.DatabaseIntrospection): # pragma: no cover """ database schema introspection class """ _get_indexes_query = """ SELECT attr.attname, idx.indkey, idx.indisunique, idx.indisprimary FROM pg_catalog.pg_class c, INNER JOIN pg_catalog.pg_index idx ON c.oid = idx.indrelid INNER JOIN pg_catalog.pg_class c2 ON idx.indexrelid = c2.oid INNER JOIN pg_catalog.pg_attribute attr ON attr.attrelid = c.oid and attr.attnum = idx.indkey[0] INNER JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace WHERE c.relname = %s AND n.nspname = %s """ def get_table_list(self, cursor): """ Returns a list of table names in the current database and schema. """ cursor.execute( """ SELECT c.relname, c.relkind FROM pg_catalog.pg_class c LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace WHERE c.relkind IN ('r', 'v', '') AND n.nspname = '%s' AND pg_catalog.pg_table_is_visible(c.oid)""" % get_current_schema().schema_name ) return [ TableInfo(row[0], {"r": "t", "v": "v"}.get(row[1])) for row in cursor.fetchall() if row[0] not in self.ignored_tables ] def get_table_description(self, cursor, table_name): "Returns a description of the table, with the DB-API cursor.description interface." 
# As cursor.description does not return reliably the nullable property, # we have to query the information_schema (#7783) cursor.execute( """ SELECT column_name, is_nullable, column_default FROM information_schema.columns WHERE table_schema = %s and table_name = %s""", [get_current_schema().schema_name, table_name], ) field_map = {line[0]: line[1:] for line in cursor.fetchall()} cursor.execute("SELECT * FROM %s LIMIT 1" % self.connection.ops.quote_name(table_name)) return [ FieldInfo( *( (force_str(line[0]),) + line[1:6] + (field_map[force_str(line[0])][0] == "YES", field_map[force_str(line[0])][1]) ) ) for line in cursor.description ] def get_indexes(self, cursor, table_name): # This query retrieves each index on the given table, including the # first associated field name cursor.execute(self._get_indexes_query, [table_name, get_current_schema().schema_name]) indexes = {} for row in cursor.fetchall(): # row[1] (idx.indkey) is stored in the DB as an array. It comes out as # a string of space-separated integers. This designates the field # indexes (1-based) of the fields that have indexes on the table. # Here, we skip any indexes across multiple fields. if " " in row[1]: continue if row[0] not in indexes: indexes[row[0]] = {"primary_key": False, "unique": False} # It's possible to have the unique and PK constraints in separate indexes. if row[3]: indexes[row[0]]["primary_key"] = True if row[2]: indexes[row[0]]["unique"] = True return indexes def get_relations(self, cursor, table_name): """ Returns a dictionary of {field_name: (field_name_other_table, other_table)} representing all relationships to the given table. """ cursor.execute( """ SELECT c2.relname, a1.attname, a2.attname FROM pg_constraint con LEFT JOIN pg_class c1 ON con.conrelid = c1.oid LEFT JOIN pg_namespace n ON n.oid = c1.relnamespace LEFT JOIN pg_class c2 ON con.confrelid = c2.oid LEFT JOIN pg_attribute a1 ON c1.oid = a1.attrelid AND a1.attnum = con.conkey[1] LEFT JOIN pg_attribute a2 ON c2.oid = a2.attrelid AND a2.attnum = con.confkey[1] WHERE c1.relname = %s and n.nspname = %s AND con.contype = 'f'""", [table_name, get_current_schema().schema_name], ) relations = {} for row in cursor.fetchall(): relations[row[1]] = (row[2], row[0]) return relations get_constraints = _constraints.get_constraints def get_key_columns(self, cursor, table_name): key_columns = [] cursor.execute( """ SELECT kcu.column_name, ccu.table_name AS referenced_table, ccu.column_name AS referenced_column FROM information_schema.constraint_column_usage ccu LEFT JOIN information_schema.key_column_usage kcu ON ccu.constraint_catalog = kcu.constraint_catalog AND ccu.constraint_schema = kcu.constraint_schema AND ccu.constraint_name = kcu.constraint_name LEFT JOIN information_schema.table_constraints tc ON ccu.constraint_catalog = tc.constraint_catalog AND ccu.constraint_schema = tc.constraint_schema AND ccu.constraint_name = tc.constraint_name WHERE kcu.table_name = %s AND tc.constraint_type = 'FOREIGN KEY' AND tc.table_schema = %s """, [table_name, get_current_schema().schema_name], ) key_columns.extend(cursor.fetchall()) return key_columns django-pgschemas-1.0.1/django_pgschemas/py.typed000066400000000000000000000000001470131117100217000ustar00rootroot00000000000000django-pgschemas-1.0.1/django_pgschemas/routers.py000066400000000000000000000025171470131117100222750ustar00rootroot00000000000000from typing import Iterable from django.apps import apps from django.conf import settings from django_pgschemas.schema import get_current_schema from 
django_pgschemas.utils import get_tenant_database_alias class TenantAppsRouter: """ A router to control which applications will be actually migrated depending on the schema. """ def app_in_list(self, app_label: str, app_list: Iterable) -> bool: app_config = apps.get_app_config(app_label) app_config_full_name = f"{app_config.__module__}.{app_config.__class__.__name__}" return (app_config.name in app_list) or (app_config_full_name in app_list) def allow_migrate( self, db: str, app_label: str, model_name: str | None = None, **hints: object ) -> bool | None: current_schema = get_current_schema() if db != get_tenant_database_alias() or current_schema is None: return False app_list = [] if current_schema.schema_name == "public": app_list = settings.TENANTS["public"]["APPS"] elif current_schema.schema_name in settings.TENANTS: app_list = settings.TENANTS[current_schema.schema_name]["APPS"] else: app_list = settings.TENANTS["default"]["APPS"] if not app_list: return None return self.app_in_list(app_label, app_list) django-pgschemas-1.0.1/django_pgschemas/routing/000077500000000000000000000000001470131117100217025ustar00rootroot00000000000000django-pgschemas-1.0.1/django_pgschemas/routing/__init__.py000066400000000000000000000000001470131117100240010ustar00rootroot00000000000000django-pgschemas-1.0.1/django_pgschemas/routing/info.py000066400000000000000000000011441470131117100232070ustar00rootroot00000000000000from dataclasses import dataclass from typing import TypeAlias @dataclass(frozen=True) class DomainInfo: domain: str folder: str | None = None def __str__(self) -> str: return f"{self.domain}/{self.folder}" if self.folder else self.domain @dataclass(frozen=True) class SessionInfo: reference: str def __str__(self) -> str: return f"Session: {self.reference}" @dataclass(frozen=True) class HeadersInfo: reference: str def __str__(self) -> str: return f"Header: {self.reference}" RoutingInfo: TypeAlias = DomainInfo | SessionInfo | HeadersInfo | None django-pgschemas-1.0.1/django_pgschemas/routing/middleware.py000066400000000000000000000154431470131117100244000ustar00rootroot00000000000000import re from typing import Callable, TypeAlias, cast from asgiref.sync import iscoroutinefunction, sync_to_async from django.conf import settings from django.db.models import Q from django.http import Http404, HttpRequest, HttpResponse from django.shortcuts import redirect from django.urls import clear_url_caches, set_urlconf from django.utils.decorators import sync_and_async_middleware from django_pgschemas.models import TenantModel as TenantModelBase from django_pgschemas.routing.info import DomainInfo, HeadersInfo, SessionInfo from django_pgschemas.routing.models import get_primary_domain_for_tenant from django_pgschemas.routing.urlresolvers import get_urlconf_from_schema from django_pgschemas.schema import Schema, activate, activate_public from django_pgschemas.settings import get_tenant_header, get_tenant_session_key from django_pgschemas.utils import get_domain_model, get_tenant_model, remove_www def strip_tenant_from_path_factory(prefix: str) -> Callable[[str], str]: def strip_tenant_from_path(path: str) -> str: return re.sub(r"^/{}/".format(prefix), "/", path) return strip_tenant_from_path ResponseHandler: TypeAlias = Callable[[HttpRequest], HttpResponse] def route_domain(request: HttpRequest) -> HttpResponse | None: hostname = remove_www(request.get_host().split(":")[0]) activate_public() tenant: Schema | None = None # Checking for static tenants for schema, data in settings.TENANTS.items(): if schema in 
["public", "default"]: continue if hostname in data.get("DOMAINS", []): tenant = Schema.create( schema_name=schema, routing=DomainInfo(domain=hostname), ) break # Checking for dynamic tenants else: DomainModel = get_domain_model() prefix = request.path.split("/")[1] domain = None if DomainModel is not None: try: domain = DomainModel.objects.select_related("tenant").get( domain=hostname, folder=prefix ) except DomainModel.DoesNotExist: try: domain = DomainModel.objects.select_related("tenant").get( domain=hostname, folder="" ) except DomainModel.DoesNotExist: pass if domain is not None: tenant = cast(TenantModelBase, domain.tenant) tenant.routing = DomainInfo(domain=hostname) request.strip_tenant_from_path = lambda x: x if prefix and domain.folder == prefix: tenant.routing = DomainInfo(domain=hostname, folder=prefix) request.strip_tenant_from_path = strip_tenant_from_path_factory(prefix) clear_url_caches() # Required to remove previous tenant prefix from cache (#8) if domain.redirect_to_primary: primary_domain = get_primary_domain_for_tenant(tenant) if primary_domain: path = request.strip_tenant_from_path(request.path) return redirect(primary_domain.absolute_url(path), permanent=True) # Checking fallback domains if not tenant: for schema, data in settings.TENANTS.items(): if schema in ["public", "default"]: continue if hostname in data.get("FALLBACK_DOMAINS", []): tenant = Schema.create( schema_name=schema, routing=DomainInfo(domain=hostname), ) break # No tenant found from domain / folder if not tenant: raise Http404("No tenant for hostname '%s'" % hostname) urlconf = get_urlconf_from_schema(tenant) request.tenant = tenant request.urlconf = urlconf set_urlconf(urlconf) activate(tenant) return None def route_session(request: HttpRequest) -> HttpResponse | None: tenant_session_key = get_tenant_session_key() if not hasattr(request, "session") or not ( tenant_ref := request.session.get(tenant_session_key) ): return None tenant: Schema | None = None # Checking for static tenants for schema, data in settings.TENANTS.items(): if schema in ["public", "default"]: continue if tenant_ref == schema or tenant_ref == data.get("SESSION_KEY"): tenant = Schema.create(schema_name=schema) break # Checking for dynamic tenants else: if (TenantModel := get_tenant_model()) is not None: tenant = TenantModel._default_manager.filter( Q(pk__iexact=tenant_ref) | Q(schema_name=tenant_ref) ).first() if tenant is not None: tenant.routing = SessionInfo(reference=tenant_ref) request.tenant = tenant activate(tenant) return None def route_headers(request: HttpRequest) -> HttpResponse | None: tenant_header = get_tenant_header() if not (tenant_ref := request.headers.get(tenant_header)): return None tenant: Schema | None = None # Checking for static tenants for schema, data in settings.TENANTS.items(): if schema in ["public", "default"]: continue if tenant_ref == schema or tenant_ref == data.get("HEADER"): tenant = Schema.create(schema_name=schema) break # Checking for dynamic tenants else: if (TenantModel := get_tenant_model()) is not None: tenant = TenantModel._default_manager.filter( Q(pk__iexact=tenant_ref) | Q(schema_name=tenant_ref) ).first() if tenant is not None: tenant.routing = HeadersInfo(reference=tenant_ref) request.tenant = tenant activate(tenant) return None def middleware_factory( handler: Callable[[HttpRequest], HttpResponse | None], ) -> Callable[[ResponseHandler], ResponseHandler]: @sync_and_async_middleware def middleware(get_response: ResponseHandler) -> ResponseHandler: if 
iscoroutinefunction(get_response): async_base_middleware = sync_to_async(handler) async def sync_middleware(request: HttpRequest) -> HttpResponse | None: if response := await async_base_middleware(request): return response return await get_response(request) return sync_middleware else: def async_middleware(request: HttpRequest) -> HttpResponse | None: if response := handler(request): return response return get_response(request) return async_middleware return middleware DomainRoutingMiddleware = middleware_factory(route_domain) SessionRoutingMiddleware = middleware_factory(route_session) HeadersRoutingMiddleware = middleware_factory(route_headers) django-pgschemas-1.0.1/django_pgschemas/routing/models.py000066400000000000000000000043651470131117100235470ustar00rootroot00000000000000from django.conf import settings from django.db import models, transaction from django_pgschemas.models import TenantModel from django_pgschemas.utils import get_domain_model class DomainModel(models.Model): """ All models that store the domains must inherit this class. """ tenant = ( models.ForeignKey( settings.TENANTS["default"]["TENANT_MODEL"], db_index=True, related_name="domains", on_delete=models.CASCADE, ) if getattr(settings, "TENANTS", {}).get("default") else None ) domain = models.CharField(max_length=253, db_index=True) folder = models.SlugField(max_length=253, blank=True, db_index=True) is_primary = models.BooleanField(default=True) redirect_to_primary = models.BooleanField(default=False) class Meta: abstract = True unique_together = (("domain", "folder"),) def __str__(self) -> str: return "/".join([self.domain, self.folder]) if self.folder else self.domain @transaction.atomic def save(self, *args: object, **kwargs: object) -> None: using = kwargs.get("using") domain_list = self.__class__.objects if using: domain_list = domain_list.using(using) domain_list = domain_list.filter(tenant=self.tenant, is_primary=True).exclude(pk=self.pk) self.is_primary = self.is_primary or (not domain_list.exists()) if self.is_primary: domain_list.update(is_primary=False) if self.redirect_to_primary: self.redirect_to_primary = False super().save(*args, **kwargs) def absolute_url(self, path: str) -> str: """ Constructs an absolute url for this domain / folder and a given path """ parts = [self.domain] if self.folder: parts.append(self.folder) parts.append(path) final_path = "/".join(parts).replace("//", "/") return f"//{final_path}" def get_primary_domain_for_tenant(tenant: TenantModel) -> DomainModel | None: DomainModel = get_domain_model() if DomainModel is None: return None try: return tenant.domains.get(is_primary=True) except DomainModel.DoesNotExist: return None django-pgschemas-1.0.1/django_pgschemas/routing/urlresolvers.py000066400000000000000000000102301470131117100250170ustar00rootroot00000000000000import re import sys from types import ModuleType from typing import Any, Literal from django.conf import settings from django.urls import URLResolver from django_pgschemas.routing.info import DomainInfo, HeadersInfo, SessionInfo from django_pgschemas.schema import Schema, get_current_schema DYNAMIC_URLCONF_SUFFIX = "_dynamically_tenant_prefixed" class TenantPrefixPattern: converters: dict = {} @property def tenant_prefix(self) -> str: current_schema = get_current_schema() return ( f"{current_schema.routing.folder}/" if isinstance(current_schema.routing, DomainInfo) and current_schema.routing.folder else "" ) @property def regex(self) -> re.Pattern: # This is only used by reverse() and cached in _reverse_dict. 
return re.compile(self.tenant_prefix) def match(self, path: str) -> tuple | None: tenant_prefix = self.tenant_prefix if path.startswith(tenant_prefix): return path[len(tenant_prefix) :], (), {} return None def check(self) -> list: return [] def describe(self) -> str: return f"'{self}'" def __str__(self) -> str: return self.tenant_prefix def get_dynamic_tenant_prefixed_urlconf(urlconf: str, dynamic_path: str) -> ModuleType: """ Generates a new urlconf module with all patterns prefixed with tenant. """ class LazyURLConfModule(ModuleType): def __getattr__(self, attr: str) -> Any: if attr == "urlpatterns": return [URLResolver(TenantPrefixPattern(), urlconf)] return self.__getattribute__(attr) return LazyURLConfModule(dynamic_path) def _get_urlconf_from_schema( schema: Schema, config_key: Literal["URLCONF", "WS_URLCONF"] ) -> str | None: match schema.routing: case DomainInfo(domain, _): # Checking for static tenants if not schema.is_dynamic: for schema_name, data in settings.TENANTS.items(): if schema_name in ["public", "default"]: continue if domain in data.get("DOMAINS", []): return data.get(config_key) if domain in data.get("FALLBACK_DOMAINS", []): return data.get(config_key) return None # Checking for dynamic tenants urlconf = settings.TENANTS.get("default", {}).get(config_key) if urlconf is not None and schema.routing.folder: dynamic_path = urlconf + DYNAMIC_URLCONF_SUFFIX if not sys.modules.get(dynamic_path): sys.modules[dynamic_path] = get_dynamic_tenant_prefixed_urlconf( urlconf, dynamic_path ) urlconf = dynamic_path return urlconf case SessionInfo(reference): # Checking for static tenants if not schema.is_dynamic: for schema_name, data in settings.TENANTS.items(): if schema_name in ["public", "default"]: continue if reference == data.get("SESSION_KEY"): return data.get(config_key) return None # Checking for dynamic tenants return settings.TENANTS.get("default", {}).get(config_key) case HeadersInfo(reference): # Checking for static tenants if not schema.is_dynamic: for schema_name, data in settings.TENANTS.items(): if schema_name in ["public", "default"]: continue if reference == data.get("HEADER"): return data.get(config_key) return None # Checking for dynamic tenants return settings.TENANTS.get("default", {}).get(config_key) case _: return None def get_urlconf_from_schema(schema: Schema) -> str | None: return _get_urlconf_from_schema(schema, "URLCONF") def get_ws_urlconf_from_schema(schema: Schema) -> str | None: return _get_urlconf_from_schema(schema, "WS_URLCONF") django-pgschemas-1.0.1/django_pgschemas/schema.py000066400000000000000000000034341470131117100220310ustar00rootroot00000000000000from contextlib import contextmanager from contextvars import ContextVar, Token from functools import lru_cache from typing import Iterator from django_pgschemas.routing.info import RoutingInfo from django_pgschemas.signals import schema_activate class Schema: schema_name: str routing: RoutingInfo = None is_dynamic = False _context_tokens: list[Token["Schema"] | None] = [] @staticmethod def create(schema_name: str, routing: RoutingInfo | None = None) -> "Schema": schema = Schema() schema.schema_name = schema_name schema.routing = routing return schema def __enter__(self) -> None: self._context_tokens.append(push(self)) def __exit__(self, *args: object) -> None: if self._context_tokens: token = self._context_tokens.pop() if token is not None: active.reset(token) def shallow_equal(schema1: Schema, schema2: Schema) -> bool: return schema1.schema_name == schema2.schema_name and schema1.routing == 
schema2.routing @lru_cache def get_default_schema() -> Schema: return Schema.create("public") active: ContextVar["Schema"] = ContextVar("active", default=get_default_schema()) def get_current_schema() -> Schema: return active.get() def push(schema: Schema) -> Token[Schema] | None: if shallow_equal(get_current_schema(), schema): return None token = active.set(schema) schema_activate.send(sender=Schema, schema=schema) return token def activate(schema: Schema) -> None: push(schema) def deactivate() -> None: push(get_default_schema()) activate_public = deactivate @contextmanager def override(schema: Schema) -> Iterator[None]: token = push(schema) yield if token is not None: active.reset(token) django-pgschemas-1.0.1/django_pgschemas/settings.py000066400000000000000000000025311470131117100224260ustar00rootroot00000000000000from importlib import import_module from types import ModuleType from typing import Callable from django.conf import settings from django.db import DEFAULT_DB_ALIAS DEFAULT_BACKEND = "django.db.backends.postgresql" def get_tenant_db_alias() -> str: return getattr(settings, "PGSCHEMAS_TENANT_DB_ALIAS", DEFAULT_DB_ALIAS) def get_limit_set_calls() -> bool: return getattr(settings, "PGSCHEMAS_LIMIT_SET_CALLS", False) def get_original_backend() -> str: return getattr(settings, "PGSCHEMAS_ORIGINAL_BACKEND", DEFAULT_BACKEND) def get_extra_search_paths() -> list[str]: return getattr(settings, "PGSCHEMAS_EXTRA_SEARCH_PATHS", []) def get_tenant_session_key() -> str: return getattr(settings, "PGSCHEMAS_TENANT_SESSION_KEY", "tenant") def get_tenant_header() -> str: return getattr(settings, "PGSCHEMAS_TENANT_HEADER", "tenant") def get_pathname_function() -> Callable | None: return getattr(settings, "PGSCHEMAS_PATHNAME_FUNCTION", None) def get_base_backend_module(submodule: str | None = None) -> ModuleType: module = DEFAULT_BACKEND if submodule: module += f".{submodule}" return import_module(module) def get_original_backend_module(submodule: str | None = None) -> ModuleType: module = get_original_backend() if submodule: module += f".{submodule}" return import_module(module) django-pgschemas-1.0.1/django_pgschemas/signals.py000066400000000000000000000022421470131117100222250ustar00rootroot00000000000000from typing import Any from django.db.models.signals import pre_delete from django.dispatch import Signal, receiver from django_pgschemas.utils import get_tenant_model, schema_exists schema_activate = Signal() schema_activate.__doc__ = "Sent after a schema has been activated" dynamic_tenant_needs_sync = Signal() dynamic_tenant_needs_sync.__doc__ = ( "Sent when a schema from a dynamic tenant needs to have migrations applied" ) dynamic_tenant_post_sync = Signal() dynamic_tenant_post_sync.__doc__ = ( "Sent after a tenant has been saved, its schema created and all migrations applied" ) dynamic_tenant_pre_drop = Signal() dynamic_tenant_pre_drop.__doc__ = "Sent when a schema from a dynamic tenant is about to be dropped" @receiver(pre_delete) def tenant_delete_callback(sender: Any, instance: Any, **kwargs: object) -> None: TenantModel = get_tenant_model() if TenantModel is None: return if not isinstance(instance, TenantModel): return if instance.auto_drop_schema and schema_exists(instance.schema_name): dynamic_tenant_pre_drop.send(sender=TenantModel, tenant=instance.serializable_fields()) instance.drop_schema() django-pgschemas-1.0.1/django_pgschemas/utils.py000066400000000000000000000166701470131117100217370ustar00rootroot00000000000000import gzip import os import re from typing import Any, 
Callable from django.apps import apps from django.conf import settings from django.core.exceptions import ValidationError from django.core.management import call_command from django.db import DEFAULT_DB_ALIAS, ProgrammingError, connection, transaction from django.db.models import Model from django.utils.encoding import force_str def get_tenant_model(require_ready: bool = True) -> Model | None: "Returns the tenant model." if "default" not in settings.TENANTS: return None return apps.get_model(settings.TENANTS["default"]["TENANT_MODEL"], require_ready=require_ready) def get_domain_model(require_ready: bool = True) -> Model | None: "Returns the domain model." if "default" not in settings.TENANTS or "DOMAIN_MODEL" not in settings.TENANTS["default"]: return None return apps.get_model(settings.TENANTS["default"]["DOMAIN_MODEL"], require_ready=require_ready) def get_tenant_database_alias() -> str: return getattr(settings, "PGSCHEMAS_TENANT_DB_ALIAS", DEFAULT_DB_ALIAS) def get_limit_set_calls() -> bool: return getattr(settings, "PGSCHEMAS_LIMIT_SET_CALLS", False) def get_clone_reference() -> str | None: if "default" not in settings.TENANTS: return None return settings.TENANTS["default"].get("CLONE_REFERENCE", None) def is_valid_identifier(identifier: str) -> bool: "Checks the validity of identifier." SQL_IDENTIFIER_RE = re.compile(r"^[_a-zA-Z][_a-zA-Z0-9]{,62}$") return bool(SQL_IDENTIFIER_RE.match(identifier)) def is_valid_schema_name(name: str) -> bool: "Checks the validity of a schema name." SQL_SCHEMA_NAME_RESERVED_RE = re.compile(r"^pg_", re.IGNORECASE) return is_valid_identifier(name) and not SQL_SCHEMA_NAME_RESERVED_RE.match(name) def check_schema_name(name: str) -> None: """ Checks schema name and raises `ValidationError` if `name` is not a valid identifier. """ if not is_valid_schema_name(name): raise ValidationError("Invalid string used for the schema name.") def remove_www(path: str) -> str: if path.startswith("www."): return path[4:] return path def django_is_in_test_mode() -> bool: """ I know this is very ugly! I'm looking for more elegant solutions. See: http://stackoverflow.com/questions/6957016/detect-django-testing-mode """ from django.core import mail return hasattr(mail, "outbox") def run_in_public_schema(func: Callable) -> Callable: "Decorator that makes decorated function to be run in the public schema." def wrapper(*args: object, **kwargs: object) -> Any: from django_pgschemas.schema import Schema with Schema.create(schema_name="public"): return func(*args, **kwargs) return wrapper def schema_exists(schema_name: str) -> bool: "Checks if a schema exists in database." sql = """ SELECT EXISTS( SELECT 1 FROM pg_catalog.pg_namespace WHERE LOWER(nspname) = LOWER(%s) ) """ with connection.cursor() as cursor: cursor.execute(sql, (schema_name,)) row = cursor.fetchone() if row: exists = row[0] else: # pragma: no cover exists = False return exists @run_in_public_schema def dynamic_models_exist() -> bool: "Checks if tenant model and domain model are ready to be used in the database." 
sql = """ SELECT count(*) FROM information_schema.tables WHERE table_schema = 'public' AND table_name in (%s); """ TenantModel = get_tenant_model() DomainModel = get_domain_model() models_to_check = [] if TenantModel is not None: models_to_check.append(TenantModel) if DomainModel is not None: models_to_check.append(DomainModel) if not models_to_check: return False template = ", ".join(f"'{model._meta.db_table}'" for model in models_to_check) with connection.cursor() as cursor: cursor.execute(sql % template) value = cursor.fetchone() == (len(models_to_check),) return value @run_in_public_schema def create_schema( schema_name: str, check_if_exists: bool = False, sync_schema: bool = True, verbosity: int = 1, ) -> bool: """ Creates the schema `schema_name`. Optionally checks if the schema already exists before creating it. Returns `True` if the schema was created, `False` otherwise. """ check_schema_name(schema_name) if check_if_exists and schema_exists(schema_name): return False with connection.cursor() as cursor: cursor.execute("CREATE SCHEMA %s" % schema_name) if sync_schema: call_command("migrateschema", schemas=[schema_name], verbosity=verbosity) return True @run_in_public_schema def drop_schema(schema_name: str, check_if_exists: bool = True, verbosity: int = 1) -> bool: """ Drops the schema. Optionally checks if the schema already exists before dropping it. """ if check_if_exists and not schema_exists(schema_name): return False with connection.cursor() as cursor: cursor.execute("DROP SCHEMA %s CASCADE" % schema_name) return True class DryRunException(Exception): pass def _create_clone_schema_function() -> None: """ Creates a postgres function `clone_schema` that copies a schema and its contents. Will replace any existing `clone_schema` functions owned by the `postgres` superuser. """ with gzip.open( os.path.join(os.path.dirname(os.path.abspath(__file__)), "clone_schema.gz") ) as gzip_file: CLONE_SCHEMA_FUNCTION = ( force_str(gzip_file.read()) .replace("RAISE NOTICE ' source schema", "RAISE EXCEPTION ' source schema") .replace("RAISE NOTICE ' dest schema", "RAISE EXCEPTION ' dest schema") ) with connection.cursor() as cursor: cursor.execute(CLONE_SCHEMA_FUNCTION) @run_in_public_schema def clone_schema(base_schema_name: str, new_schema_name: str, dry_run: bool = False) -> None: """ Creates a new schema `new_schema_name` as a clone of an existing schema `base_schema_name`. """ check_schema_name(new_schema_name) cursor = connection.cursor() # check if the clone_schema function already exists in the db try: cursor.execute( "SELECT 'public.clone_schema(text, text, public.cloneparms[])'::regprocedure" ) except ProgrammingError: # pragma: no cover _create_clone_schema_function() transaction.commit() try: with transaction.atomic(): cursor.callproc("clone_schema", [base_schema_name, new_schema_name, "DATA"]) cursor.close() if dry_run: raise DryRunException except DryRunException: cursor.close() def create_or_clone_schema(schema_name: str, sync_schema: bool = True, verbosity: int = 1) -> bool: """ Creates the schema `schema_name`. Optionally checks if the schema already exists before creating it. Returns `True` if the schema was created, `False` otherwise. 
""" check_schema_name(schema_name) if schema_exists(schema_name): return False clone_reference = get_clone_reference() if ( clone_reference and schema_exists(clone_reference) and not django_is_in_test_mode() ): # pragma: no cover clone_schema(clone_reference, schema_name) return True return create_schema(schema_name, sync_schema=sync_schema, verbosity=verbosity) django-pgschemas-1.0.1/docs/000077500000000000000000000000001470131117100156475ustar00rootroot00000000000000django-pgschemas-1.0.1/docs/advanced.md000066400000000000000000000221651470131117100177440ustar00rootroot00000000000000## Fast dynamic tenant creation Every time a instance of the tenant model is created, by default, the corresponding schema is created and migrations are applied automatically. Depending on the number of migrations you already have in place, or the amount of time these could take, or whether you need to pre-populate the newly created schema with fixtures, this process could take a considerable amount of time. If you need a faster creation of dynamic schemas, you can do so by provisioning a "reference" schema that can cloned into new schemas. ```python title="settings.py" hl_lines="10" TENANTS |= { "default": { "TENANT_MODEL": "tenants.Tenant", "APPS": [ "django.contrib.auth", "django.contrib.sessions", "customers", ], "URLCONF": "customers.urls", "CLONE_REFERENCE": "sample", } } ``` Once you have this in your settings, you need to prepare your reference schema with everything a newly created dynamic schema will need. The first step consists of creating and applying migrations to the reference schema. After that, you can run any command on it or even edit its tables via `shell`. ```bash python manage.py createrefschema python runschema loaddata customers.products -s sample python runschema shell -s sample ``` The `runschema` command is explained in [running management commands](#running-management-commands). You don't need any extra step. As soon as a reference schema is configured, the next time you create an instance of the tenant model, it will clone the reference schema instead of actually creating the schema and applying all migrations. !!! Note The reference schema looks like a dynamic tenant, but it is actually static. It is also non-routable by design. !!! Warning This package relies on [denishpatel/pg-clone-schema](https://github.com/denishpatel/pg-clone-schema/) for the schema cloning functionality. ## Fallback domains If there is only one domain available, and no possibility to use subdomain routing, the URLs for accessing your different tenants might look like this: | URL | Tenant | | ---------------------- | --------- | | `mydomain.com` | Main site | | `mydomain.com/tenant1` | Tenant 1 | | `mydomain.com/tenant2` | Tenant 2 | In this case, due to the order in which domains are tested, it is not possible to put `mydomain.com` as domain for the main tenant without blocking all dynamic schemas from getting routed. When `django_pgschemas.routing.middleware.DomainRoutingMiddleware` is checking which tenant to route from the incoming domain, it checks for static tenants first, then for dynamic tenants. If `mydomain.com` is used for the main tenant (which is static), then URLs like `mydomain.com/tenant1/some/url/` will match the main tenant always. For a case like this, we provide a setting called `FALLBACK_DOMAINS`. If no tenant is found for an incoming combination of domain and subfolder, then, static tenants are checked again for the fallback domains. 
Something like this would be the proper configuration for the present case: ```python title="settings.py" hl_lines="16 17" TENANTS = { "public": { "APPS": [ "django.contrib.contenttypes", "django.contrib.staticfiles", "django_pgschemas", "tenants", ], }, "main": { "APPS": [ "django.contrib.auth", "django.contrib.sessions", "main", ], "DOMAINS": [], "FALLBACK_DOMAINS": ["mydomain.com"], "URLCONF": "main.urls", }, "default": { "TENANT_MODEL": "tenants.Tenant", "DOMAIN_MODEL": "tenants.Domain", "APPS": [ "django.contrib.auth", "django.contrib.sessions", "customers", ], "URLCONF": "customers.urls", } } ``` This example assumes that dynamic tenants will get their domains set to `mydomain.com` with a tenant specific subfolder, like `tenant1` or `tenant2`. Here, an incoming request for `mydomain.com/tenant1/some/url/` will fail for the main tenant, then match against an existing dynamic tenant. On the other hand, an incoming request for `mydomain.com/some/url/` will fail for all static tenants, then fail for all dynamic tenants, and will finally match against the fallback domains of the main tenant. ## Static tenants only It's also possible to have only static tenants and no dynamic tenants at all. For this, the default key must be omitted altogether: ```python title="settings.py" TENANTS = { "public": { "APPS": [ "django.contrib.contenttypes", "django.contrib.staticfiles", "django_pgschemas", "tenants", ], }, "www": { "APPS": [ "django.contrib.auth", "django.contrib.sessions", "main", ], "DOMAINS": ["mydomain.com"], "URLCONF": "main.urls", }, "blog": { "APPS": [ "django.contrib.auth", "django.contrib.sessions", "blog", ], "DOMAINS": ["blog.mydomain.com", "help.mydomain.com"], "URLCONF": "blog.urls", } } ``` In this case, no model is expected to inherit from `TenantModel` and `DomainModel`, and no clone reference schema can be created. ## Running management commands Since all management commands occur outside the request/response cycle, all commands from Django and any other third party apps are executed by default on the public schema. In order to work around this, we provide a `runschema` command that accepts any other command to be run on one or multiple schemas. A minimal synopsis of the `runschema` command is as follows: ```bash usage: manage.py runschema [-s SCHEMAS [SCHEMAS ...]] [-x EXCLUDED_SCHEMAS [EXCLUDED_SCHEMAS ...]] [-as] [-ss] [-ds] [-ts] [--parallel] [--no-create-schemas] [--noinput] command_name ``` The `-s --schema` argument accepts multiple inputs of different kinds: - The key of a static tenant or the `schema_name` of a dynamic tenant. - The prefix of any domain, as long as only one tenant is found. - The prefix of any `domain/folder` of a tenant, like `tenants.mydomain.com/tenant1`, as long as only one tenant is found. The arguments `-as`, `-ss`, `-ds` and `-ts` act as wildcard for selecting a class of schemas as follows: | Wildcard | Selected schemas | | -------- | ------------------------------------------------------------------------------- | | `-as` | All schemas | | `-ss` | Static schemas | | `-ds` | Dynamic schemas | | `-ts` | Tenant-like schemas: all dynamic schemas plus the reference schema if it exists | It's possible to exclude schemas via the `-x` argument. This argument accepts the same inputs as `--schema`. Excluded schemas will take precedence over included ones. At least one schema is mandatory. 
If it's not provided with the command, either explicitly or via wildcard params, it will be asked interactively, except when the option `--noinput` is passed, in which case the command will fail.

If `--parallel` is passed, the command will be run asynchronously, spawning multiple threads controlled by the setting `PGSCHEMAS_PARALLEL_MAX_PROCESSES`. This setting defaults to `None`, in which case the number of CPUs will be used.

By default, schemas that do not exist will be created (although migrations won't be applied). This can be bypassed by passing `--no-create-schemas`.

!!! Tip

    When in doubt of which schemas will be selected from a combination of arguments, we provide the management command `whowill` that can be used to just display the selected schemas.

### Inheritable commands

We also provide some base commands you can inherit, in order to mimic the behavior of `runschema`. By inheriting these you will get the arguments we discussed in the previous section. The base commands provide a `handle_schema` method you must override in order to execute the actions you need on any given tenant.

The base commands are:

```python title="django_pgschemas/management/commands/__init__.py"
class SchemaCommand(WrappedSchemaOption, BaseCommand):
    def handle_schema(self, schema, *args, **options):
        """
        Extensible method to perform some action in a schema.
        """
        ...


class StaticSchemaCommand(SchemaCommand):
    """
    Management command that can only be run in static schemas.
    """

    ...


class DynamicSchemaCommand(SchemaCommand):
    """
    Management command that can only be run in dynamic schemas.
    """

    ...
```

!!! Warning

    Since these commands can work with the schemas of static and dynamic tenants, the parameter `schema` will be an instance of `django_pgschemas.schema.Schema`. Make sure to do the appropriate type checking before accessing the tenant members, as you will not always get an instance of the tenant model.

django-pgschemas-1.0.1/docs/base.md000066400000000000000000000037361470131117100171120ustar00rootroot00000000000000

## Schema

Base representation of a Postgres schema.

```python
class Schema:
    schema_name: str
    routing: RoutingInfo = None
```

Routing contains information on the routing method used (e.g. domain, session, header). It's filled automatically via middleware but may be missing in other contexts (e.g. management commands).

### Routing info

Information on the routing method.

```python
class DomainInfo:
    domain: str
    folder: str | None = None


class SessionInfo:
    reference: str


class HeadersInfo:
    reference: str


RoutingInfo: TypeAlias = DomainInfo | SessionInfo | HeadersInfo | None
```

## Tenant model

Abstract base class for the tenant model.

```python
class TenantModel(Schema, models.Model):
    auto_create_schema = True
    auto_drop_schema = False

    schema_name = models.CharField(max_length=63, unique=True)

    class Meta:
        abstract = True
```

`auto_create_schema` controls whether a schema is automatically created when an instance of the tenant model is created. `auto_drop_schema` controls whether the schema is automatically deleted when the instance is deleted.

## Domain model

Abstract base class for the domain model. Optional when domain routing is not used.
```python
class DomainModel(models.Model):
    tenant = models.ForeignKey(
        settings.TENANTS["default"]["TENANT_MODEL"],
        db_index=True,
        related_name="domains",
        on_delete=models.CASCADE,
    )
    domain = models.CharField(max_length=253, db_index=True)
    folder = models.SlugField(max_length=253, blank=True, db_index=True)
    is_primary = models.BooleanField(default=True)
    redirect_to_primary = models.BooleanField(default=False)

    class Meta:
        abstract = True
        unique_together = ("domain", "folder")
```

There should only be one instance per tenant with `is_primary` set to `True`. If `redirect_to_primary` is `True`, the routing middleware will perform a permanent redirect to whatever domain and folder is marked as primary.

django-pgschemas-1.0.1/docs/basic.md000066400000000000000000000114121470131117100172510ustar00rootroot00000000000000

This package requires:

- Python (3.10+)
- Django (5.0+)
- Postgres (13+)
- Any version of psycopg.

## Installation

You can install `django-pgschemas` via `pip` or any other installer.

```bash
pip install django-pgschemas
```

## Database configuration

Use `django_pgschemas.postgresql` as your database engine. This enables the API for setting the Postgres search path:

```python title="settings.py"
DATABASES = {
    "default": {
        "ENGINE": "django_pgschemas.postgresql",
        # more database configurations here
    }
}
```

Add `django_pgschemas.routers.TenantAppsRouter` to your `DATABASE_ROUTERS`, so that the proper migrations can be applied, depending on the target schema.

```python title="settings.py"
DATABASE_ROUTERS = (
    "django_pgschemas.routers.TenantAppsRouter",
    # additional routers here if needed
)
```

Define your tenant model.

```python title="tenants/models.py"
from django.db import models
from django_pgschemas.models import TenantModel

class Tenant(TenantModel):
    name = models.CharField(max_length=100)
    paid_until = models.DateField(blank=True, null=True)
    on_trial = models.BooleanField(default=True)
    created_on = models.DateField(auto_now_add=True)
```

Add the minimal tenant configuration.

```python title="settings.py"
TENANTS = {
    "public": {
        "APPS": [
            "django.contrib.contenttypes",
            "django.contrib.staticfiles",
            "django_pgschemas",
            "tenants",
        ],
    },
    "default": {
        "TENANT_MODEL": "tenants.Tenant",
        "APPS": [
            "django.contrib.auth",
            "django.contrib.sessions",
            "customers",
        ],
        "URLCONF": "customers.urls",
    }
}
```

Each entry in the `TENANTS` dictionary represents a static tenant, except for `default`, which controls the settings for all dynamic tenants. Notice how each tenant has the relevant `APPS` whose migrations will be applied in the corresponding schema.

For Django to function properly, `INSTALLED_APPS` and `ROOT_URLCONF` settings must be defined. Just make them get their information from the `TENANTS` dictionary, for the sake of consistency.

```python title="settings.py"
INSTALLED_APPS = []

for schema in TENANTS:
    INSTALLED_APPS += [
        app for app in TENANTS[schema]["APPS"] if app not in INSTALLED_APPS
    ]

ROOT_URLCONF = TENANTS["default"]["URLCONF"]
```

## Creating tenants

More static tenants can be added to the `TENANTS` dict.

```python title="settings.py"
TENANTS |= {
    "www": {
        "APPS": [
            "django.contrib.auth",
            "django.contrib.sessions",
            "main",
        ],
        "URLCONF": "main.urls",
    },
    "blog": {
        "APPS": [
            "django.contrib.auth",
            "django.contrib.sessions",
            "blog",
        ],
        "URLCONF": "blog.urls",
    },
}
```

And dynamic tenants can be added as well, programmatically. But first, you must always run migrations in the public schema in order to get the tenant model created.
You can then migrate the rest of the schemas.

```bash
python manage.py migrate -s public
python manage.py migrate
```

Now you are ready to create your first dynamic tenant. In the example, the tenant is created through a `python manage.py shell` session.

```bash
>>> from tenants.models import Tenant
>>> Tenant.objects.create(schema_name="tenant_1")
```

This will automatically create a schema for the new dynamic tenant and apply migrations.

## Working with tenants

Because static and dynamic tenants can have their own Django apps configured, only the models within those apps will be migrated into their respective schemas. Without activating any tenant, the `public` schema will be the only schema in the search path, and therefore only models from the apps in `TENANTS["public"]["APPS"]` will be accessible.

For instance, after starting a new Django shell, querying the `Tenant` model will work, but querying models from other apps will raise a `ProgrammingError`:

```bash hl_lines="5 6"
>>> from tenants.models import Tenant
>>> from blog.models import BlogEntry
>>> from customers.models import Product
>>> Tenant.objects.all()
>>> BlogEntry.objects.all()  # ProgrammingError
>>> Product.objects.all()  # ProgrammingError
```

Before being able to operate in a tenant's schema, that tenant/schema must be activated first:

```bash hl_lines="1 5 8"
>>> from django_pgschemas.schema import Schema
>>> from tenants.models import Tenant
>>> from blog.models import BlogEntry
>>> from customers.models import Product
>>> with Schema.create("blog"):
...     BlogEntry.objects.all()
>>> tenant1 = Tenant.objects.first()
>>> with tenant1:
...     Product.objects.all()
```

Tenant activation happens automatically during the request/response cycle through [tenant routing](routing.md).

django-pgschemas-1.0.1/docs/contrib.md000066400000000000000000000100641470131117100176320ustar00rootroot00000000000000

All contributions and third party integrations live inside `django_pgschemas.contrib`.

If you want to implement an integration with other Django packages, please submit a pull request containing:

- The code for your integration.
- The tests for your integration.
- The docs for your integration in this section of the documentation.

We're striving to maintain/increase our code coverage, but please make sure your integration is properly tested. Proper tests will always beat meaningless 100% coverage.

## Caching

In order to generate tenant aware cache keys, we provide `django_pgschemas.contrib.cache.make_key` which can be used as `KEY_FUNCTION`:

```python title="settings.py"
CACHES = {
    "default": {
        "KEY_FUNCTION": "django_pgschemas.contrib.cache.make_key",
    }
}
```

## Tenant aware file system storage

We provide a tenant aware file system storage at `django_pgschemas.contrib.storage.TenantFileSystemStorage`. It subclasses `django.core.files.storage.FileSystemStorage` and behaves like it in every aspect, except that it prepends a tenant identifier to the path and URL of all files.

By default, the tenant identifier is the schema name of the current tenant. In order to override this behavior, it is possible to provide a different identifier. The storage will consider these options when looking for an identifier:

- A method called `schema_pathname` in the current tenant. This method must accept no arguments and return an identifier.
- A function specified in a setting called `PGSCHEMAS_PATHNAME_FUNCTION`. This function must accept a `Schema` and return an identifier.
- Finally, the identifier will default to the schema name of the current tenant. In the case of the URL returned from the storage, if the storage detects that the current schema has been routed via subfolder, it won't prepend the schema identifier, because it considers that the path is properly disambiguated as is. This means that instead of something like: /tenant1/static/tenant1/path/to/file.txt It will generate: /tenant1/static/path/to/file.txt This storage class is a convenient way of storing media files in a folder structure organized at the top by tenants, as well as providing a tenant centric organization in the URLs that are generated. However, this storage class does NOT provide any form of security, such as controlling that from one tenant, files from another tenant are not accessible. Such security requirements have other implications that fall out of the scope of this package. !!! Tip In a project that requires airtight security you might want to use and customize [django-private-storage](https://github.com/edoburu/django-private-storage). ## Channels (websockets) We provide some tenant middleware and a tenant URL router for using with `channels`. You can use it as follows: ```python title="routing.py" hl_lines="12 14" from channels.routing import ProtocolTypeRouter from channels.auth import AuthMiddlewareStack from channels.security.websocket import AllowedHostsOriginValidator from django_pgschemas.contrib.channels import ( DomainRoutingMiddleware, TenantURLRouter ) application = ProtocolTypeRouter( { "websocket": AllowedHostsOriginValidator( DomainRoutingMiddleware( AuthMiddlewareStack( TenantURLRouter() ) ) ), } ) ``` ```python title="settings.py" ASGI_APPLICATION = "routing.application" ``` There is also the `HeadersRoutingMiddleware` for headers-based routing. The `TenantURLRouter` requires a urlconf for websockets: ```python title="settings.py" hl_lines="10" TENANTS |= { "default": { "TENANT_MODEL": "tenants.Tenant", "APPS": [ "django.contrib.auth", "django.contrib.sessions", "customers", ], "URLCONF": "customers.urls", "WS_URLCONF": "customers.ws_urls", } } ``` You still need to name your channel groups appropriately, taking the current tenant into account, if you want to keep your groups tenant-specific. The current tenant will be passed in `scope["tenant"]`. django-pgschemas-1.0.1/docs/credits.md000066400000000000000000000005331470131117100176270ustar00rootroot00000000000000# Credits This project stands on the shoulders of giants. Special thanks to: - Tom Turner for [django-tenants](https://github.com/tomturner/django-tenants). - Bernardo Pires for [django-tenant-schemas](https://github.com/bernardopires/django-tenant-schemas). 
- Denish Patel for [pg-clone-schema](https://github.com/denishpatel/pg-clone-schema) django-pgschemas-1.0.1/docs/index.md000066400000000000000000000043101470131117100172760ustar00rootroot00000000000000# django-pgschemas [![Packaging: poetry](https://img.shields.io/badge/packaging-poetry-purple.svg)](https://python-poetry.org/) [![Build status](https://github.com/lorinkoz/django-pgschemas/workflows/code/badge.svg)](https://github.com/lorinkoz/django-pgschemas/actions) [![Documentation status](https://readthedocs.org/projects/django-pgschemas/badge/?version=latest)](https://django-pgschemas.readthedocs.io/) [![Code coverage](https://coveralls.io/repos/github/lorinkoz/django-pgschemas/badge.svg?branch=master)](https://coveralls.io/github/lorinkoz/django-pgschemas?branch=master) [![PyPi version](https://badge.fury.io/py/django-pgschemas.svg)](http://badge.fury.io/py/django-pgschemas) [![Downloads](https://pepy.tech/badge/django-pgschemas/month)](https://pepy.tech/project/django-pgschemas/) --- This package uses Postgres schemas to support data multi-tenancy in a single Django project. Schemas are a layer of separation between databases and tables, so that one database can have multiple schemas, which in turn can have multiple (and possibly identical) tables. For an accurate description on schemas, see [the official documentation on Postgres schemas](http://www.postgresql.org/docs/9.1/static/ddl-schemas.html). Postgres uses a "search path" to denote in which schemas it should look for the appropriate tables. If there are three schemas: `tenant1`, `common` and `public` and the search path is set to `["tenant1", "public"]`, Postgres will look for tables first on schema `tenant1`, and then, if not found, will look on schema `public`. The tables on schema `common` would never be searched. Also, if there is a table with the same name on both `tenant1` and `public` schemas (i.e. `django_migrations`), only the table in `tenant1` will be found by that search path. Table creation always takes place on the first schema in the search path. `django-pgschemas`, as well as its predecessors `django-tenants` and `django-tenant-schemas`, takes advantage of Postgres schemas to emulate multi-tenancy, by mapping an incoming HTTP request to a specific schema, and setting the search path accordingly. It also provides an API to change the search path outside the request/response cycle, in order to perform schema-specific tasks. django-pgschemas-1.0.1/docs/overview.md000066400000000000000000000071071470131117100200440ustar00rootroot00000000000000There are typically three solutions for solving the multi-tenancy problem. 1. Isolated approach: Separate databases. Each tenant has its own database. 2. Semi-isolated approach: Shared database, separate schemas. One database for all tenants, but one schema per tenant. 3. Shared approach: Shared database, shared schema. All tenants share the same database and schema. There is a main tenant-table, where all other tables have a foreign key pointing to. Each solution has its up and down sides, for a more in-depth discussion, see Microsoft's excellent article on [Multi-Tenant Data Architecture](https://docs.microsoft.com/en-us/azure/sql-database/saas-tenancy-app-design-patterns). This package implements the second approach, which in our opinion, represents a good compromise between simplicity and performance. !!! Tip If you are looking for an implementation of the third approach, you might be interested in [django-multitenant](https://github.com/citusdata/django-multitenant). 
For other solutions of the multi-tenancy problem, you could also look [here](https://djangopackages.org/grids/g/multi-tenancy/).

The semi-isolated approach through Postgres schemas has some advantages and disadvantages:

- Simplicity: barely make any changes to your current code to support multi-tenancy. Plus, you only manage one database.
- Performance: make use of shared connections, buffers and memory.

vs.

- Scalability: for a large number of tenants (thousands) the schema approach might not be feasible, and as of now, there is no clear way for implementing tenant sharding.

## Schemas vs. Tenants

The terms _schema_ and _tenant_ are used interchangeably all over the documentation. However, it is important to note some subtle differences between the two. We consider a _tenant_ to be a subset of isolated data, and we use database _schemas_ for that purpose. Still, there can be schemas that cannot be considered tenants according to our definition. One good example is the `public` schema, which contains data shared across all tenants. Therefore every tenant is a schema, but not every schema is a tenant.

## Static vs. Dynamic

In a typical software-as-a-service (SaaS), there can be a group of static sites that are related to enterprise level operations. For instance, a site where customers can enter payment information and sign up for a tenant, or an enterprise content management system. These sites are generally well defined at the time of developing the web application.

On the other hand, there are the sites of the customers that will sign up for the SaaS. The specific information for these sites is dynamic in nature, because it cannot be determined at the time of developing the application.

This package allows you to manage both static and dynamic tenants. Static tenants are defined through Django settings, whereas dynamic tenants are stored in specific tables in the database.

## Users and tenants

One of the most important architectural decisions that you must make before implementing a SaaS is to define the relationship between users and tenants. There are three possible approaches:

- Users exist outside of tenants and can be granted access to specific tenants: this means that the user registers once and can be assigned to different tenants with different permissions on those tenants.
- Users are confined within a tenant: this means that users must be created inside a tenant and cannot be part of more than one tenant. If the same person needs to be a member of multiple tenants, they need different users.
- Users are tenants: a tenant can only have one user. This is a special case of the previous approach.

django-pgschemas-1.0.1/docs/routing.md000066400000000000000000000152771470131117100176720ustar00rootroot00000000000000

Routing is the process of deciding to which tenant an incoming request belongs, and activating it for the rest of the request/response cycle. This is typically done via middleware, and this package provides three routing mechanisms: domain, header and session routing.

The goal of these middleware is to augment the `request` object with tenant information. When these middleware are used, the `request` will contain a `tenant` property with an instance of either the tenant model or the class `django_pgschemas.schema.Schema`.
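For illustration, here is a minimal sketch of a view reading the augmented request. The `whoami` view below is a hypothetical example, not part of this package:

```python title="views.py"
from django.http import HttpResponse


def whoami(request):
    # request.tenant is set by the routing middleware. It may be an instance
    # of the tenant model (dynamic tenants) or a plain Schema (static tenants),
    # so only rely on attributes that both provide, such as schema_name.
    return HttpResponse(f"Serving tenant: {request.tenant.schema_name}")
```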
## Domain routing Tenants will have one or many domains (or subdomains), but each domain will correspond to only one tenant. In this mechanism we use a database table to control domains per tenant. This domain model can be defined like this: ```python title="tenants/models.py" from django_pgschemas.models import DomainModel class Domain(DomainModel): pass ``` And added to the tenant settings like this: ```python title="settings.py" hl_lines="16 25 30" TENANTS = { "public": { "APPS": [ "django.contrib.contenttypes", "django.contrib.staticfiles", "django_pgschemas", "tenants", ], }, "www": { "APPS": [ "django.contrib.auth", "django.contrib.sessions", "main", ], "DOMAINS": ["mydomain.com"], "URLCONF": "main.urls", }, "blog": { "APPS": [ "django.contrib.auth", "django.contrib.sessions", "blog", ], "DOMAINS": ["blog.mydomain.com", "help.mydomain.com"], "URLCONF": "blog.urls", }, "default": { "TENANT_MODEL": "tenants.Tenant", "DOMAIN_MODEL": "tenants.Domain", "APPS": [ "django.contrib.auth", "django.contrib.sessions", "customers", ], "URLCONF": "customers.urls", } } ``` Then you can assign domains to tenants: ```bash >>> from tenants.models import Tenant, Domain >>> tenant = Tenant.objects.create(schema_name="tenant_1") >>> Domain.objects.create(tenant=tenant, domain="tenant1.mydomain.com") ``` !!! Note Notice that the `public` schema doesn't have `DOMAINS` configured. This is intentional. Attempting to add this key would result in an `ImproperlyConfigured` error. The public schema is non-routable by design. Finally add `DomainRoutingMiddleware` to the top of the middleware stack, so that all subsequent middleware can benefit from the added tenant. ```python title="settings.py" MIDDLEWARE = ( "django_pgschemas.routing.middleware.DomainRoutingMiddleware", # other middleware ) ``` ### Subfolder routing It is also possible to use subfolder routing, instead of using domains/subdomains. In this case all tenants would share the same domain, but with a different "folder" component at the beginning of the requested path. The domain model supports this by default and allows for multiple combinations: ```bash >>> from tenants.models import Tenant, Domain >>> tenant = Tenant.objects.create(schema_name="tenant_1") >>> Domain.objects.create( ... tenant=tenant, ... domain="tenant1.mydomain.com", ... is_primary=True, ... ) >>> Domain.objects.create( ... tenant=tenant, ... domain="tenants.mydomain.com", ... folder="tenant1", ... ) ``` !!! Warning Subfolder routing is currently not supported for static tenants. For a special case with subfolder routing please see [fallback domains](advanced.md#fallback-domains). ## Header routing In this mechanism a request header is defined to pass the tenant database ID or the schema name. ```python title="settings.py" PGSCHEMAS_TENANT_HEADER = "tenant" ``` Static tenants can be routed using the `HEADER` key in the `TENANTS` settings: ```python title="settings.py" hl_lines="16 25" TENANTS = { "public": { "APPS": [ "django.contrib.contenttypes", "django.contrib.staticfiles", "django_pgschemas", "tenants", ], }, "www": { "APPS": [ "django.contrib.auth", "django.contrib.sessions", "main", ], "HEADER": "main", "URLCONF": "main.urls", }, "blog": { "APPS": [ "django.contrib.auth", "django.contrib.sessions", "blog", ], "HEADER": "blog", "URLCONF": "blog.urls", }, "default": { "TENANT_MODEL": "tenants.Tenant", "APPS": [ "django.contrib.auth", "django.contrib.sessions", "customers", ], "URLCONF": "customers.urls", } } ``` !!! 
Note Notice that the `public` schema doesn't have `HEADER` configured. This is intentional. Attempting to add this key would result in an `ImproperlyConfigured` error. The public schema is non-routable by design.

Then add `HeadersRoutingMiddleware` to the top of the middleware stack.

```python title="settings.py"
MIDDLEWARE = (
    "django_pgschemas.routing.middleware.HeadersRoutingMiddleware",
    # other middleware
)
```

## Session routing

In this mechanism, a session key is defined to store the tenant database ID or the schema name.

```python title="settings.py"
PGSCHEMAS_TENANT_SESSION_KEY = "tenant"
```

Static tenants can be routed using the `SESSION_KEY` key in the `TENANTS` settings:

```python title="settings.py" hl_lines="16 25"
TENANTS = {
    "public": {
        "APPS": [
            "django.contrib.contenttypes",
            "django.contrib.staticfiles",
            "django_pgschemas",
            "tenants",
        ],
    },
    "www": {
        "APPS": [
            "django.contrib.auth",
            "django.contrib.sessions",
            "main",
        ],
        "SESSION_KEY": "main",
        "URLCONF": "main.urls",
    },
    "blog": {
        "APPS": [
            "django.contrib.auth",
            "django.contrib.sessions",
            "blog",
        ],
        "SESSION_KEY": "blog",
        "URLCONF": "blog.urls",
    },
    "default": {
        "TENANT_MODEL": "tenants.Tenant",
        "APPS": [
            "django.contrib.auth",
            "django.contrib.sessions",
            "customers",
        ],
        "URLCONF": "customers.urls",
    }
}
```

!!! Note

    Notice that the `public` schema doesn't have `SESSION_KEY` configured. This is intentional. Attempting to add this key would result in an `ImproperlyConfigured` error. The public schema is non-routable by design.

Then add `SessionRoutingMiddleware` to the top of the middleware stack, but after the session middleware.

```python title="settings.py"
MIDDLEWARE = (
    "django.contrib.sessions.middleware.SessionMiddleware",
    "django_pgschemas.routing.middleware.SessionRoutingMiddleware",
    # other middleware
)
```

django-pgschemas-1.0.1/docs/settings.md000066400000000000000000000044541470131117100200400ustar00rootroot00000000000000

## `TENANTS`

Default: `None`

The tenant configuration dictionary as explained in the [basic configuration](basic.md#database-configuration). A sample tenant configuration is:

```python
TENANTS = {
    "public": {
        "APPS": [
            "django.contrib.contenttypes",
            "django.contrib.staticfiles",
            "django_pgschemas",
            "tenants",
        ],
    },
    "www": {
        "APPS": [
            "django.contrib.auth",
            "django.contrib.sessions",
            "main",
        ],
        "URLCONF": "main.urls",
    },
    "blog": {
        "APPS": [
            "django.contrib.auth",
            "django.contrib.sessions",
            "blog",
        ],
        "URLCONF": "blog.urls",
    },
    "default": {
        "TENANT_MODEL": "tenants.Tenant",
        "APPS": [
            "django.contrib.auth",
            "django.contrib.sessions",
            "customers",
        ],
        "URLCONF": "customers.urls",
        "CLONE_REFERENCE": "sample",
    }
}
```

## `PGSCHEMAS_EXTRA_SEARCH_PATHS`

Default: `[]`

Other schemas to include in the Postgres search path. You cannot include the schema for any static or dynamic tenant. The public schema is included by default, so including it here will raise `ImproperlyConfigured`.

## `PGSCHEMAS_LIMIT_SET_CALLS`

Default: `False`

By default, the search path is set every time a database cursor is required. In some intense situations, this could slow down the queries. Set to `True` to limit the number of calls for setting the search path.

## `PGSCHEMAS_ORIGINAL_BACKEND`

Default: `"django.db.backends.postgresql"`

The base backend to inherit from. If you have a customized Postgres backend, you can specify it here.

## `PGSCHEMAS_PARALLEL_MAX_PROCESSES`

Default: `None`

When `--parallel` is passed in any tenant command, this setting will control the max number of processes the parallel executor can spawn.
By default, `None` means that the number of CPUs will be used.

## `PGSCHEMAS_TENANT_DB_ALIAS`

Default: `"default"`

The database alias where the tenant configuration is going to take place.

## `PGSCHEMAS_PATHNAME_FUNCTION`

Default: `None`

Function that takes a schema descriptor and returns a string identifier for the schema. This identifier will be used in the `TenantFileSystemStorage` as the name of the tenant folder.

django-pgschemas-1.0.1/docs/testing.md000066400000000000000000000032141470131117100176460ustar00rootroot00000000000000

# Testing

This is how you set up dynamic tenants for testing.

## Pytest

You can define some tenants in the test configuration.

```python title="conftest.py"
import pytest

from tenants.models import Tenant, Domain


@pytest.fixture(scope="session", autouse=True)
def setup(django_db_setup, django_db_blocker):
    with django_db_blocker.unblock():
        # get_or_create returns an (object, created) tuple
        tenant, _ = Tenant.objects.get_or_create(schema_name="tenant1")
        Domain.objects.get_or_create(
            tenant=tenant,
            domain="tenant1.mydomain.com",
            is_primary=True,
        )
```

And also provide them as fixtures:

```python title="conftest.py"
@pytest.fixture
def tenant(db):
    return Tenant.objects.get(schema_name="tenant1")
```

If you want the tenant to be activated automatically in your test cases, you can do so as follows. Using the tenant as a context manager is useful for activating it only within the scope of each test.

```python title="conftest.py"
@pytest.fixture
def tenant(db):
    with (tenant := Tenant.objects.get(schema_name="tenant1")):
        yield tenant
```

You can also define a fixture for a client, including the necessary headers:

```python title="conftest.py"
from django.test import Client


@pytest.fixture
def domain_client():
    return Client(headers={"host": "tenant1.mydomain.com"})


@pytest.fixture
def header_client():
    return Client(headers={"tenant": "tenant1"})
```

## Django test cases

This package does not provide base classes to be used in place of Django's `TestCase`. If you need support in this regard, please visit the [discussions section](https://github.com/lorinkoz/django-pgschemas/discussions) in the package's repository.

django-pgschemas-1.0.1/docs/troubleshooting.md000066400000000000000000000065161470131117100214300ustar00rootroot00000000000000

## Schema for tenant and domain models

The application(s) that contain the tenant model and the domain model should be in the public schema only. Making those models available in other schemas will cause unpredictable problems. This package will raise an error check if the tenant / domain application is found missing in `settings.TENANTS["public"]["APPS"]` or present in any other tenant configuration.

!!! Tip

    You can silence this check through the code `pgschemas.W001`.

## Content types

Installing `django.contrib.contenttypes` outside of the public schema can lead to problems when using other static or dynamic schemas. The recommended approach is to have this app in `settings.TENANTS["public"]["APPS"]`. This package will raise a warning check if the content types app is found somewhere else.

!!! Tip

    You can silence this check through the code `pgschemas.W002`.

## Session leaking

Configuring users in a multi-tenant application can be challenging, because the user model(s) can be installed on any schema. Depending on the scope of your desired authentication mechanism, you should decide whether the user app will live in the public schema or in each of the other static or dynamic schemas. If you do the latter, consider that the same user ID could be repeated in multiple schemas.
User ID is what makes authentication possible via the sessions app. In order to prevent session leaking, the recommended approach is to always put the user app and the session app together. This package will raise a warning check if the user app and the session app are found to not be together in the same schemas.

!!! Tip

    You can silence this check through the code `pgschemas.W003`.

## Moving apps between schemas

Regardless of which apps you have included in each schema, migrations will be tracked as being run on all of them. If you move an app between schemas, the tables will not be created in the destination schema, because migrations are considered to have been run there already. In order to overcome this, you must remove all migrations of said app via `manage.py migrate app zero --fake -s` with the appropriate schema, and then run migrations again. In order to remove the tables from the source schema, you will have to actually run the zero migration before removing the app from that schema's apps.

## Name clash between static and dynamic schemas

It is possible to define a static tenant whose name clashes with an existing dynamic tenant. This is especially true for the clone reference, which can be added as an afterthought in order to speed up dynamic tenant creation. It is also possible to create a dynamic tenant with a name already present in the static tenant configuration.

We do not provide an out-of-the-box validation mechanism that attempts to prevent name clashes with static tenants when dynamic tenants are created. However, we do provide a system check that fails with a critical error message if a name clash is found. Since this check must query the database in order to fetch the schema name for all dynamic tenants, it is tagged as a database check, which makes it run only in database related operations and management commands. This means that the check will not be run via `runserver`, but will be run in commands like `migrate`, `cloneschema` and `createrefschema`.

!!! Tip

    If absolutely needed, you can silence this check through the code `pgschemas.W004`.

django-pgschemas-1.0.1/mkdocs.yml000066400000000000000000000012101470131117100167160ustar00rootroot00000000000000site_name: django-pgschemas theme: name: material features: - content.code.copy markdown_extensions: - admonition - pymdownx.details - pymdownx.highlight: anchor_linenums: true line_spans: __span pygments_lang_class: true - pymdownx.inlinehilite - pymdownx.snippets - pymdownx.superfences - toc: permalink: true nav: - Intro: index.md - Multi-tenancy: overview.md - Usage: basic.md - Routing: routing.md - Advanced: advanced.md - Contrib: contrib.md - Troubleshooting: troubleshooting.md - Testing: testing.md - Settings: settings.md - Base classes: base.md - Credits: credits.md django-pgschemas-1.0.1/poetry.lock000066400000000000000000004053351470131117100171230ustar00rootroot00000000000000# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.
[[package]] name = "asgiref" version = "3.8.1" description = "ASGI specs, helper code, and adapters" optional = false python-versions = ">=3.8" files = [ {file = "asgiref-3.8.1-py3-none-any.whl", hash = "sha256:3e1e3ecc849832fe52ccf2cb6686b7a55f82bb1d6aee72a58826471390335e47"}, {file = "asgiref-3.8.1.tar.gz", hash = "sha256:c343bd80a0bec947a9860adb4c432ffa7db769836c64238fc34bdc3fec84d590"}, ] [package.dependencies] typing-extensions = {version = ">=4", markers = "python_version < \"3.11\""} [package.extras] tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"] [[package]] name = "attrs" version = "24.2.0" description = "Classes Without Boilerplate" optional = false python-versions = ">=3.7" files = [ {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"}, {file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"}, ] [package.extras] benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] [[package]] name = "autobahn" version = "24.4.2" description = "WebSocket client & server library, WAMP real-time framework" optional = false python-versions = ">=3.9" files = [ {file = "autobahn-24.4.2-py2.py3-none-any.whl", hash = "sha256:c56a2abe7ac78abbfb778c02892d673a4de58fd004d088cd7ab297db25918e81"}, {file = "autobahn-24.4.2.tar.gz", hash = "sha256:a2d71ef1b0cf780b6d11f8b205fd2c7749765e65795f2ea7d823796642ee92c9"}, ] [package.dependencies] cryptography = ">=3.4.6" hyperlink = ">=21.0.0" setuptools = "*" txaio = ">=21.2.1" [package.extras] all = ["PyGObject (>=3.40.0)", "argon2-cffi (>=20.1.0)", "attrs (>=20.3.0)", "base58 (>=2.1.0)", "bitarray (>=2.7.5)", "cbor2 (>=5.2.0)", "cffi (>=1.14.5)", "click (>=8.1.2)", "ecdsa (>=0.16.1)", "eth-abi (>=4.0.0)", "flatbuffers (>=22.12.6)", "hkdf (>=0.0.3)", "jinja2 (>=2.11.3)", "mnemonic (>=0.19)", "msgpack (>=1.0.2)", "passlib (>=1.7.4)", "py-ecc (>=5.1.0)", "py-eth-sig-utils (>=0.4.0)", "py-multihash (>=2.0.1)", "py-ubjson (>=0.16.1)", "pynacl (>=1.4.0)", "pyopenssl (>=20.0.1)", "python-snappy (>=0.6.0)", "pytrie (>=0.4.0)", "qrcode (>=7.3.1)", "rlp (>=2.0.1)", "service-identity (>=18.1.0)", "spake2 (>=0.8)", "twisted (>=20.3.0)", "twisted (>=24.3.0)", "u-msgpack-python (>=2.1)", "ujson (>=4.0.2)", "web3[ipfs] (>=6.0.0)", "xbr (>=21.2.1)", "yapf (==0.29.0)", "zlmdb (>=21.2.1)", "zope.interface (>=5.2.0)"] compress = ["python-snappy (>=0.6.0)"] dev = ["backports.tempfile (>=1.0)", "build (>=1.2.1)", "bumpversion (>=0.5.3)", "codecov (>=2.0.15)", "flake8 (<5)", "humanize (>=0.5.1)", "mypy (>=0.610)", "passlib", "pep8-naming (>=0.3.3)", "pip (>=9.0.1)", "pyenchant (>=1.6.6)", "pyflakes (>=1.0.0)", "pyinstaller (>=4.2)", "pylint (>=1.9.2)", "pytest (>=3.4.2)", "pytest-aiohttp", "pytest-asyncio (>=0.14.0)", "pytest-runner (>=2.11.1)", 
"pyyaml (>=4.2b4)", "qualname", "sphinx (>=1.7.1)", "sphinx-autoapi (>=1.7.0)", "sphinx-rtd-theme (>=0.1.9)", "sphinxcontrib-images (>=0.9.1)", "tox (>=4.2.8)", "tox-gh-actions (>=2.2.0)", "twine (>=3.3.0)", "twisted (>=22.10.0)", "txaio (>=20.4.1)", "watchdog (>=0.8.3)", "wheel (>=0.36.2)", "yapf (==0.29.0)"] encryption = ["pynacl (>=1.4.0)", "pyopenssl (>=20.0.1)", "pytrie (>=0.4.0)", "qrcode (>=7.3.1)", "service-identity (>=18.1.0)"] nvx = ["cffi (>=1.14.5)"] scram = ["argon2-cffi (>=20.1.0)", "cffi (>=1.14.5)", "passlib (>=1.7.4)"] serialization = ["cbor2 (>=5.2.0)", "flatbuffers (>=22.12.6)", "msgpack (>=1.0.2)", "py-ubjson (>=0.16.1)", "u-msgpack-python (>=2.1)", "ujson (>=4.0.2)"] twisted = ["attrs (>=20.3.0)", "twisted (>=24.3.0)", "zope.interface (>=5.2.0)"] ui = ["PyGObject (>=3.40.0)"] xbr = ["base58 (>=2.1.0)", "bitarray (>=2.7.5)", "cbor2 (>=5.2.0)", "click (>=8.1.2)", "ecdsa (>=0.16.1)", "eth-abi (>=4.0.0)", "hkdf (>=0.0.3)", "jinja2 (>=2.11.3)", "mnemonic (>=0.19)", "py-ecc (>=5.1.0)", "py-eth-sig-utils (>=0.4.0)", "py-multihash (>=2.0.1)", "rlp (>=2.0.1)", "spake2 (>=0.8)", "twisted (>=20.3.0)", "web3[ipfs] (>=6.0.0)", "xbr (>=21.2.1)", "yapf (==0.29.0)", "zlmdb (>=21.2.1)"] [[package]] name = "automat" version = "24.8.1" description = "Self-service finite-state machines for the programmer on the go." optional = false python-versions = ">=3.8" files = [ {file = "Automat-24.8.1-py3-none-any.whl", hash = "sha256:bf029a7bc3da1e2c24da2343e7598affaa9f10bf0ab63ff808566ce90551e02a"}, {file = "automat-24.8.1.tar.gz", hash = "sha256:b34227cf63f6325b8ad2399ede780675083e439b20c323d376373d8ee6306d88"}, ] [package.extras] visualize = ["Twisted (>=16.1.1)", "graphviz (>0.5.1)"] [[package]] name = "babel" version = "2.16.0" description = "Internationalization utilities" optional = false python-versions = ">=3.8" files = [ {file = "babel-2.16.0-py3-none-any.whl", hash = "sha256:368b5b98b37c06b7daf6696391c3240c938b37767d4584413e8438c5c435fa8b"}, {file = "babel-2.16.0.tar.gz", hash = "sha256:d1f3554ca26605fe173f3de0c65f750f5a42f924499bf134de6423582298e316"}, ] [package.extras] dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] [[package]] name = "certifi" version = "2024.8.30" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, ] [[package]] name = "cffi" version = "1.17.1" description = "Foreign Function Interface for Python calling C code." 
optional = false python-versions = ">=3.8" files = [ {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, {file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"}, {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"}, {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"}, {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"}, {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"}, {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"}, {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"}, {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"}, {file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"}, {file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"}, {file = "cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"}, {file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"}, {file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"}, {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"}, {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"}, {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"}, {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"}, {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"}, {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"}, {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"}, {file = "cffi-1.17.1-cp311-cp311-win32.whl", hash = 
"sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"}, {file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"}, {file = "cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"}, {file = "cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"}, {file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"}, {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"}, {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"}, {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"}, {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"}, {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"}, {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"}, {file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"}, {file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"}, {file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"}, {file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"}, {file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"}, {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"}, {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"}, {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"}, {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"}, {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"}, {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"}, {file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"}, {file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = 
"sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"}, {file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"}, {file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"}, {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"}, {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"}, {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"}, {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"}, {file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"}, {file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"}, {file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"}, {file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"}, {file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"}, {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"}, {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"}, {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"}, {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"}, {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"}, {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"}, {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"}, {file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"}, {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, ] [package.dependencies] pycparser = "*" [[package]] name = "channels" version = "4.1.0" description = "Brings async, event-driven capabilities to Django 3.2 and up." 
optional = false python-versions = ">=3.8" files = [ {file = "channels-4.1.0-py3-none-any.whl", hash = "sha256:a3c4419307f582c3f71d67bfb6eff748ae819c2f360b9b141694d84f242baa48"}, {file = "channels-4.1.0.tar.gz", hash = "sha256:e0ed375719f5c1851861f05ed4ce78b0166f9245ca0ecd836cb77d4bb531489d"}, ] [package.dependencies] asgiref = ">=3.6.0,<4" daphne = {version = ">=4.0.0", optional = true, markers = "extra == \"daphne\""} Django = ">=4.2" [package.extras] daphne = ["daphne (>=4.0.0)"] tests = ["async-timeout", "coverage (>=4.5,<5.0)", "pytest", "pytest-asyncio", "pytest-django"] [[package]] name = "charset-normalizer" version = "3.3.2" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." optional = false python-versions = ">=3.7.0" files = [ {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, 
{file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, {file = 
"charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, 
{file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, {file = 
"charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, ] [[package]] name = "click" version = "8.1.7" description = "Composable command line interface toolkit" optional = false python-versions = ">=3.7" files = [ {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, ] [package.dependencies] colorama = {version = "*", markers = "platform_system == \"Windows\""} [[package]] name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." 
optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] [[package]] name = "constantly" version = "23.10.4" description = "Symbolic constants in Python" optional = false python-versions = ">=3.8" files = [ {file = "constantly-23.10.4-py3-none-any.whl", hash = "sha256:3fd9b4d1c3dc1ec9757f3c52aef7e53ad9323dbe39f51dfd4c43853b68dfa3f9"}, {file = "constantly-23.10.4.tar.gz", hash = "sha256:aa92b70a33e2ac0bb33cd745eb61776594dc48764b06c35e0efd050b7f1c7cbd"}, ] [[package]] name = "coverage" version = "7.6.1" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.8" files = [ {file = "coverage-7.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b06079abebbc0e89e6163b8e8f0e16270124c154dc6e4a47b413dd538859af16"}, {file = "coverage-7.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cf4b19715bccd7ee27b6b120e7e9dd56037b9c0681dcc1adc9ba9db3d417fa36"}, {file = "coverage-7.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61c0abb4c85b095a784ef23fdd4aede7a2628478e7baba7c5e3deba61070a02"}, {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd21f6ae3f08b41004dfb433fa895d858f3f5979e7762d052b12aef444e29afc"}, {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f59d57baca39b32db42b83b2a7ba6f47ad9c394ec2076b084c3f029b7afca23"}, {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a1ac0ae2b8bd743b88ed0502544847c3053d7171a3cff9228af618a068ed9c34"}, {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e6a08c0be454c3b3beb105c0596ebdc2371fab6bb90c0c0297f4e58fd7e1012c"}, {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f5796e664fe802da4f57a168c85359a8fbf3eab5e55cd4e4569fbacecc903959"}, {file = "coverage-7.6.1-cp310-cp310-win32.whl", hash = "sha256:7bb65125fcbef8d989fa1dd0e8a060999497629ca5b0efbca209588a73356232"}, {file = "coverage-7.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:3115a95daa9bdba70aea750db7b96b37259a81a709223c8448fa97727d546fe0"}, {file = "coverage-7.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7dea0889685db8550f839fa202744652e87c60015029ce3f60e006f8c4462c93"}, {file = "coverage-7.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed37bd3c3b063412f7620464a9ac1314d33100329f39799255fb8d3027da50d3"}, {file = "coverage-7.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d85f5e9a5f8b73e2350097c3756ef7e785f55bd71205defa0bfdaf96c31616ff"}, {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bc572be474cafb617672c43fe989d6e48d3c83af02ce8de73fff1c6bb3c198d"}, {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c0420b573964c760df9e9e86d1a9a622d0d27f417e1a949a8a66dd7bcee7bc6"}, {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1f4aa8219db826ce6be7099d559f8ec311549bfc4046f7f9fe9b5cea5c581c56"}, {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_i686.whl", hash 
= "sha256:fc5a77d0c516700ebad189b587de289a20a78324bc54baee03dd486f0855d234"}, {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b48f312cca9621272ae49008c7f613337c53fadca647d6384cc129d2996d1133"}, {file = "coverage-7.6.1-cp311-cp311-win32.whl", hash = "sha256:1125ca0e5fd475cbbba3bb67ae20bd2c23a98fac4e32412883f9bcbaa81c314c"}, {file = "coverage-7.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:8ae539519c4c040c5ffd0632784e21b2f03fc1340752af711f33e5be83a9d6c6"}, {file = "coverage-7.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:95cae0efeb032af8458fc27d191f85d1717b1d4e49f7cb226cf526ff28179778"}, {file = "coverage-7.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5621a9175cf9d0b0c84c2ef2b12e9f5f5071357c4d2ea6ca1cf01814f45d2391"}, {file = "coverage-7.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:260933720fdcd75340e7dbe9060655aff3af1f0c5d20f46b57f262ab6c86a5e8"}, {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e2ca0ad381b91350c0ed49d52699b625aab2b44b65e1b4e02fa9df0e92ad2d"}, {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c44fee9975f04b33331cb8eb272827111efc8930cfd582e0320613263ca849ca"}, {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:877abb17e6339d96bf08e7a622d05095e72b71f8afd8a9fefc82cf30ed944163"}, {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3e0cadcf6733c09154b461f1ca72d5416635e5e4ec4e536192180d34ec160f8a"}, {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3c02d12f837d9683e5ab2f3d9844dc57655b92c74e286c262e0fc54213c216d"}, {file = "coverage-7.6.1-cp312-cp312-win32.whl", hash = "sha256:e05882b70b87a18d937ca6768ff33cc3f72847cbc4de4491c8e73880766718e5"}, {file = "coverage-7.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:b5d7b556859dd85f3a541db6a4e0167b86e7273e1cdc973e5b175166bb634fdb"}, {file = "coverage-7.6.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a4acd025ecc06185ba2b801f2de85546e0b8ac787cf9d3b06e7e2a69f925b106"}, {file = "coverage-7.6.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a6d3adcf24b624a7b778533480e32434a39ad8fa30c315208f6d3e5542aeb6e9"}, {file = "coverage-7.6.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0c212c49b6c10e6951362f7c6df3329f04c2b1c28499563d4035d964ab8e08c"}, {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e81d7a3e58882450ec4186ca59a3f20a5d4440f25b1cff6f0902ad890e6748a"}, {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78b260de9790fd81e69401c2dc8b17da47c8038176a79092a89cb2b7d945d060"}, {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a78d169acd38300060b28d600344a803628c3fd585c912cacc9ea8790fe96862"}, {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2c09f4ce52cb99dd7505cd0fc8e0e37c77b87f46bc9c1eb03fe3bc9991085388"}, {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6878ef48d4227aace338d88c48738a4258213cd7b74fd9a3d4d7582bb1d8a155"}, {file = "coverage-7.6.1-cp313-cp313-win32.whl", hash = "sha256:44df346d5215a8c0e360307d46ffaabe0f5d3502c8a1cefd700b34baf31d411a"}, {file = "coverage-7.6.1-cp313-cp313-win_amd64.whl", hash = 
"sha256:8284cf8c0dd272a247bc154eb6c95548722dce90d098c17a883ed36e67cdb129"}, {file = "coverage-7.6.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d3296782ca4eab572a1a4eca686d8bfb00226300dcefdf43faa25b5242ab8a3e"}, {file = "coverage-7.6.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:502753043567491d3ff6d08629270127e0c31d4184c4c8d98f92c26f65019962"}, {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a89ecca80709d4076b95f89f308544ec8f7b4727e8a547913a35f16717856cb"}, {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a318d68e92e80af8b00fa99609796fdbcdfef3629c77c6283566c6f02c6d6704"}, {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13b0a73a0896988f053e4fbb7de6d93388e6dd292b0d87ee51d106f2c11b465b"}, {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4421712dbfc5562150f7554f13dde997a2e932a6b5f352edcce948a815efee6f"}, {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:166811d20dfea725e2e4baa71fffd6c968a958577848d2131f39b60043400223"}, {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:225667980479a17db1048cb2bf8bfb39b8e5be8f164b8f6628b64f78a72cf9d3"}, {file = "coverage-7.6.1-cp313-cp313t-win32.whl", hash = "sha256:170d444ab405852903b7d04ea9ae9b98f98ab6d7e63e1115e82620807519797f"}, {file = "coverage-7.6.1-cp313-cp313t-win_amd64.whl", hash = "sha256:b9f222de8cded79c49bf184bdbc06630d4c58eec9459b939b4a690c82ed05657"}, {file = "coverage-7.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6db04803b6c7291985a761004e9060b2bca08da6d04f26a7f2294b8623a0c1a0"}, {file = "coverage-7.6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f1adfc8ac319e1a348af294106bc6a8458a0f1633cc62a1446aebc30c5fa186a"}, {file = "coverage-7.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a95324a9de9650a729239daea117df21f4b9868ce32e63f8b650ebe6cef5595b"}, {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b43c03669dc4618ec25270b06ecd3ee4fa94c7f9b3c14bae6571ca00ef98b0d3"}, {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8929543a7192c13d177b770008bc4e8119f2e1f881d563fc6b6305d2d0ebe9de"}, {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:a09ece4a69cf399510c8ab25e0950d9cf2b42f7b3cb0374f95d2e2ff594478a6"}, {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:9054a0754de38d9dbd01a46621636689124d666bad1936d76c0341f7d71bf569"}, {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0dbde0f4aa9a16fa4d754356a8f2e36296ff4d83994b2c9d8398aa32f222f989"}, {file = "coverage-7.6.1-cp38-cp38-win32.whl", hash = "sha256:da511e6ad4f7323ee5702e6633085fb76c2f893aaf8ce4c51a0ba4fc07580ea7"}, {file = "coverage-7.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:3f1156e3e8f2872197af3840d8ad307a9dd18e615dc64d9ee41696f287c57ad8"}, {file = "coverage-7.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:abd5fd0db5f4dc9289408aaf34908072f805ff7792632250dcb36dc591d24255"}, {file = "coverage-7.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:547f45fa1a93154bd82050a7f3cddbc1a7a4dd2a9bf5cb7d06f4ae29fe94eaf8"}, {file = 
"coverage-7.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:645786266c8f18a931b65bfcefdbf6952dd0dea98feee39bd188607a9d307ed2"}, {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9e0b2df163b8ed01d515807af24f63de04bebcecbd6c3bfeff88385789fdf75a"}, {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:609b06f178fe8e9f89ef676532760ec0b4deea15e9969bf754b37f7c40326dbc"}, {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:702855feff378050ae4f741045e19a32d57d19f3e0676d589df0575008ea5004"}, {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2bdb062ea438f22d99cba0d7829c2ef0af1d768d1e4a4f528087224c90b132cb"}, {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9c56863d44bd1c4fe2abb8a4d6f5371d197f1ac0ebdee542f07f35895fc07f36"}, {file = "coverage-7.6.1-cp39-cp39-win32.whl", hash = "sha256:6e2cd258d7d927d09493c8df1ce9174ad01b381d4729a9d8d4e38670ca24774c"}, {file = "coverage-7.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:06a737c882bd26d0d6ee7269b20b12f14a8704807a01056c80bb881a4b2ce6ca"}, {file = "coverage-7.6.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:e9a6e0eb86070e8ccaedfbd9d38fec54864f3125ab95419970575b42af7541df"}, {file = "coverage-7.6.1.tar.gz", hash = "sha256:953510dfb7b12ab69d20135a0662397f077c59b1e6379a768e97c59d852ee51d"}, ] [package.dependencies] tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} [package.extras] toml = ["tomli"] [[package]] name = "cryptography" version = "43.0.1" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
optional = false python-versions = ">=3.7" files = [ {file = "cryptography-43.0.1-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:8385d98f6a3bf8bb2d65a73e17ed87a3ba84f6991c155691c51112075f9ffc5d"}, {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27e613d7077ac613e399270253259d9d53872aaf657471473ebfc9a52935c062"}, {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68aaecc4178e90719e95298515979814bda0cbada1256a4485414860bd7ab962"}, {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:de41fd81a41e53267cb020bb3a7212861da53a7d39f863585d13ea11049cf277"}, {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f98bf604c82c416bc829e490c700ca1553eafdf2912a91e23a79d97d9801372a"}, {file = "cryptography-43.0.1-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:61ec41068b7b74268fa86e3e9e12b9f0c21fcf65434571dbb13d954bceb08042"}, {file = "cryptography-43.0.1-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:014f58110f53237ace6a408b5beb6c427b64e084eb451ef25a28308270086494"}, {file = "cryptography-43.0.1-cp37-abi3-win32.whl", hash = "sha256:2bd51274dcd59f09dd952afb696bf9c61a7a49dfc764c04dd33ef7a6b502a1e2"}, {file = "cryptography-43.0.1-cp37-abi3-win_amd64.whl", hash = "sha256:666ae11966643886c2987b3b721899d250855718d6d9ce41b521252a17985f4d"}, {file = "cryptography-43.0.1-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:ac119bb76b9faa00f48128b7f5679e1d8d437365c5d26f1c2c3f0da4ce1b553d"}, {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bbcce1a551e262dfbafb6e6252f1ae36a248e615ca44ba302df077a846a8806"}, {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58d4e9129985185a06d849aa6df265bdd5a74ca6e1b736a77959b498e0505b85"}, {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d03a475165f3134f773d1388aeb19c2d25ba88b6a9733c5c590b9ff7bbfa2e0c"}, {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:511f4273808ab590912a93ddb4e3914dfd8a388fed883361b02dea3791f292e1"}, {file = "cryptography-43.0.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:80eda8b3e173f0f247f711eef62be51b599b5d425c429b5d4ca6a05e9e856baa"}, {file = "cryptography-43.0.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:38926c50cff6f533f8a2dae3d7f19541432610d114a70808f0926d5aaa7121e4"}, {file = "cryptography-43.0.1-cp39-abi3-win32.whl", hash = "sha256:a575913fb06e05e6b4b814d7f7468c2c660e8bb16d8d5a1faf9b33ccc569dd47"}, {file = "cryptography-43.0.1-cp39-abi3-win_amd64.whl", hash = "sha256:d75601ad10b059ec832e78823b348bfa1a59f6b8d545db3a24fd44362a1564cb"}, {file = "cryptography-43.0.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ea25acb556320250756e53f9e20a4177515f012c9eaea17eb7587a8c4d8ae034"}, {file = "cryptography-43.0.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c1332724be35d23a854994ff0b66530119500b6053d0bd3363265f7e5e77288d"}, {file = "cryptography-43.0.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fba1007b3ef89946dbbb515aeeb41e30203b004f0b4b00e5e16078b518563289"}, {file = "cryptography-43.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5b43d1ea6b378b54a1dc99dd8a2b5be47658fe9a7ce0a58ff0b55f4b43ef2b84"}, {file = "cryptography-43.0.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = 
"sha256:88cce104c36870d70c49c7c8fd22885875d950d9ee6ab54df2745f83ba0dc365"}, {file = "cryptography-43.0.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:9d3cdb25fa98afdd3d0892d132b8d7139e2c087da1712041f6b762e4f807cc96"}, {file = "cryptography-43.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e710bf40870f4db63c3d7d929aa9e09e4e7ee219e703f949ec4073b4294f6172"}, {file = "cryptography-43.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7c05650fe8023c5ed0d46793d4b7d7e6cd9c04e68eabe5b0aeea836e37bdcec2"}, {file = "cryptography-43.0.1.tar.gz", hash = "sha256:203e92a75716d8cfb491dc47c79e17d0d9207ccffcbcb35f598fbe463ae3444d"}, ] [package.dependencies] cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} [package.extras] docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"] nox = ["nox"] pep8test = ["check-sdist", "click", "mypy", "ruff"] sdist = ["build"] ssh = ["bcrypt (>=3.1.5)"] test = ["certifi", "cryptography-vectors (==43.0.1)", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] test-randomorder = ["pytest-randomly"] [[package]] name = "daphne" version = "4.1.2" description = "Django ASGI (HTTP/WebSocket) server" optional = false python-versions = ">=3.8" files = [ {file = "daphne-4.1.2-py3-none-any.whl", hash = "sha256:618d1322bb4d875342b99dd2a10da2d9aae7ee3645f765965fdc1e658ea5290a"}, {file = "daphne-4.1.2.tar.gz", hash = "sha256:fcbcace38eb86624ae247c7ffdc8ac12f155d7d19eafac4247381896d6f33761"}, ] [package.dependencies] asgiref = ">=3.5.2,<4" autobahn = ">=22.4.2" twisted = {version = ">=22.4", extras = ["tls"]} [package.extras] tests = ["django", "hypothesis", "pytest", "pytest-asyncio"] [[package]] name = "django" version = "5.1.1" description = "A high-level Python web framework that encourages rapid development and clean, pragmatic design." optional = false python-versions = ">=3.10" files = [ {file = "Django-5.1.1-py3-none-any.whl", hash = "sha256:71603f27dac22a6533fb38d83072eea9ddb4017fead6f67f2562a40402d61c3f"}, {file = "Django-5.1.1.tar.gz", hash = "sha256:021ffb7fdab3d2d388bc8c7c2434eb9c1f6f4d09e6119010bbb1694dda286bc2"}, ] [package.dependencies] asgiref = ">=3.8.1,<4" sqlparse = ">=0.3.1" tzdata = {version = "*", markers = "sys_platform == \"win32\""} [package.extras] argon2 = ["argon2-cffi (>=19.1.0)"] bcrypt = ["bcrypt"] [[package]] name = "exceptiongroup" version = "1.2.2" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" files = [ {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, ] [package.extras] test = ["pytest (>=6)"] [[package]] name = "ghp-import" version = "2.1.0" description = "Copy your docs directly to the gh-pages branch." 
optional = false python-versions = "*" files = [ {file = "ghp-import-2.1.0.tar.gz", hash = "sha256:9c535c4c61193c2df8871222567d7fd7e5014d835f97dc7b7439069e2413d343"}, {file = "ghp_import-2.1.0-py3-none-any.whl", hash = "sha256:8337dd7b50877f163d4c0289bc1f1c7f127550241988d568c1db512c4324a619"}, ] [package.dependencies] python-dateutil = ">=2.8.1" [package.extras] dev = ["flake8", "markdown", "twine", "wheel"] [[package]] name = "hyperlink" version = "21.0.0" description = "A featureful, immutable, and correct URL for Python." optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ {file = "hyperlink-21.0.0-py2.py3-none-any.whl", hash = "sha256:e6b14c37ecb73e89c77d78cdb4c2cc8f3fb59a885c5b3f819ff4ed80f25af1b4"}, {file = "hyperlink-21.0.0.tar.gz", hash = "sha256:427af957daa58bc909471c6c40f74c5450fa123dd093fc53efd2e91d2705a56b"}, ] [package.dependencies] idna = ">=2.5" [[package]] name = "idna" version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.6" files = [ {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, ] [package.extras] all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] [[package]] name = "incremental" version = "24.7.2" description = "A small library that versions your Python projects." optional = false python-versions = ">=3.8" files = [ {file = "incremental-24.7.2-py3-none-any.whl", hash = "sha256:8cb2c3431530bec48ad70513931a760f446ad6c25e8333ca5d95e24b0ed7b8fe"}, {file = "incremental-24.7.2.tar.gz", hash = "sha256:fb4f1d47ee60efe87d4f6f0ebb5f70b9760db2b2574c59c8e8912be4ebd464c9"}, ] [package.dependencies] setuptools = ">=61.0" tomli = {version = "*", markers = "python_version < \"3.11\""} [package.extras] scripts = ["click (>=6.0)"] [[package]] name = "iniconfig" version = "2.0.0" description = "brain-dead simple config-ini parsing" optional = false python-versions = ">=3.7" files = [ {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, ] [[package]] name = "jinja2" version = "3.1.4" description = "A very fast and expressive template engine." optional = false python-versions = ">=3.7" files = [ {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, ] [package.dependencies] MarkupSafe = ">=2.0" [package.extras] i18n = ["Babel (>=2.7)"] [[package]] name = "markdown" version = "3.7" description = "Python implementation of John Gruber's Markdown." 
optional = false python-versions = ">=3.8" files = [ {file = "Markdown-3.7-py3-none-any.whl", hash = "sha256:7eb6df5690b81a1d7942992c97fad2938e956e79df20cbc6186e9c3a77b1c803"}, {file = "markdown-3.7.tar.gz", hash = "sha256:2ae2471477cfd02dbbf038d5d9bc226d40def84b4fe2986e49b59b6b472bbed2"}, ] [package.extras] docs = ["mdx-gh-links (>=0.2)", "mkdocs (>=1.5)", "mkdocs-gen-files", "mkdocs-literate-nav", "mkdocs-nature (>=0.6)", "mkdocs-section-index", "mkdocstrings[python]"] testing = ["coverage", "pyyaml"] [[package]] name = "markupsafe" version = "2.1.5" description = "Safely add untrusted strings to HTML/XML markup." optional = false python-versions = ">=3.7" files = [ {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, {file = 
"MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, {file = 
"MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, ] [[package]] name = "mergedeep" version = "1.3.4" description = "A deep merge function for 🐍." optional = false python-versions = ">=3.6" files = [ {file = "mergedeep-1.3.4-py3-none-any.whl", hash = "sha256:70775750742b25c0d8f36c55aed03d24c3384d17c951b3175d898bd778ef0307"}, {file = "mergedeep-1.3.4.tar.gz", hash = "sha256:0096d52e9dad9939c3d975a774666af186eda617e6ca84df4c94dec30004f2a8"}, ] [[package]] name = "mkdocs" version = "1.6.1" description = "Project documentation with Markdown." 
optional = false python-versions = ">=3.8" files = [ {file = "mkdocs-1.6.1-py3-none-any.whl", hash = "sha256:db91759624d1647f3f34aa0c3f327dd2601beae39a366d6e064c03468d35c20e"}, {file = "mkdocs-1.6.1.tar.gz", hash = "sha256:7b432f01d928c084353ab39c57282f29f92136665bdd6abf7c1ec8d822ef86f2"}, ] [package.dependencies] click = ">=7.0" colorama = {version = ">=0.4", markers = "platform_system == \"Windows\""} ghp-import = ">=1.0" jinja2 = ">=2.11.1" markdown = ">=3.3.6" markupsafe = ">=2.0.1" mergedeep = ">=1.3.4" mkdocs-get-deps = ">=0.2.0" packaging = ">=20.5" pathspec = ">=0.11.1" pyyaml = ">=5.1" pyyaml-env-tag = ">=0.1" watchdog = ">=2.0" [package.extras] i18n = ["babel (>=2.9.0)"] min-versions = ["babel (==2.9.0)", "click (==7.0)", "colorama (==0.4)", "ghp-import (==1.0)", "importlib-metadata (==4.4)", "jinja2 (==2.11.1)", "markdown (==3.3.6)", "markupsafe (==2.0.1)", "mergedeep (==1.3.4)", "mkdocs-get-deps (==0.2.0)", "packaging (==20.5)", "pathspec (==0.11.1)", "pyyaml (==5.1)", "pyyaml-env-tag (==0.1)", "watchdog (==2.0)"] [[package]] name = "mkdocs-get-deps" version = "0.2.0" description = "MkDocs extension that lists all dependencies according to a mkdocs.yml file" optional = false python-versions = ">=3.8" files = [ {file = "mkdocs_get_deps-0.2.0-py3-none-any.whl", hash = "sha256:2bf11d0b133e77a0dd036abeeb06dec8775e46efa526dc70667d8863eefc6134"}, {file = "mkdocs_get_deps-0.2.0.tar.gz", hash = "sha256:162b3d129c7fad9b19abfdcb9c1458a651628e4b1dea628ac68790fb3061c60c"}, ] [package.dependencies] mergedeep = ">=1.3.4" platformdirs = ">=2.2.0" pyyaml = ">=5.1" [[package]] name = "mkdocs-material" version = "9.5.39" description = "Documentation that simply works" optional = false python-versions = ">=3.8" files = [ {file = "mkdocs_material-9.5.39-py3-none-any.whl", hash = "sha256:0f2f68c8db89523cb4a59705cd01b4acd62b2f71218ccb67e1e004e560410d2b"}, {file = "mkdocs_material-9.5.39.tar.gz", hash = "sha256:25faa06142afa38549d2b781d475a86fb61de93189f532b88e69bf11e5e5c3be"}, ] [package.dependencies] babel = ">=2.10,<3.0" colorama = ">=0.4,<1.0" jinja2 = ">=3.0,<4.0" markdown = ">=3.2,<4.0" mkdocs = ">=1.6,<2.0" mkdocs-material-extensions = ">=1.3,<2.0" paginate = ">=0.5,<1.0" pygments = ">=2.16,<3.0" pymdown-extensions = ">=10.2,<11.0" regex = ">=2022.4" requests = ">=2.26,<3.0" [package.extras] git = ["mkdocs-git-committers-plugin-2 (>=1.1,<2.0)", "mkdocs-git-revision-date-localized-plugin (>=1.2.4,<2.0)"] imaging = ["cairosvg (>=2.6,<3.0)", "pillow (>=10.2,<11.0)"] recommended = ["mkdocs-minify-plugin (>=0.7,<1.0)", "mkdocs-redirects (>=1.2,<2.0)", "mkdocs-rss-plugin (>=1.6,<2.0)"] [[package]] name = "mkdocs-material-extensions" version = "1.3.1" description = "Extension pack for Python Markdown and MkDocs Material." 
optional = false python-versions = ">=3.8" files = [ {file = "mkdocs_material_extensions-1.3.1-py3-none-any.whl", hash = "sha256:adff8b62700b25cb77b53358dad940f3ef973dd6db797907c49e3c2ef3ab4e31"}, {file = "mkdocs_material_extensions-1.3.1.tar.gz", hash = "sha256:10c9511cea88f568257f960358a467d12b970e1f7b2c0e5fb2bb48cab1928443"}, ] [[package]] name = "mypy" version = "1.11.2" description = "Optional static typing for Python" optional = false python-versions = ">=3.8" files = [ {file = "mypy-1.11.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d42a6dd818ffce7be66cce644f1dff482f1d97c53ca70908dff0b9ddc120b77a"}, {file = "mypy-1.11.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:801780c56d1cdb896eacd5619a83e427ce436d86a3bdf9112527f24a66618fef"}, {file = "mypy-1.11.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:41ea707d036a5307ac674ea172875f40c9d55c5394f888b168033177fce47383"}, {file = "mypy-1.11.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6e658bd2d20565ea86da7d91331b0eed6d2eee22dc031579e6297f3e12c758c8"}, {file = "mypy-1.11.2-cp310-cp310-win_amd64.whl", hash = "sha256:478db5f5036817fe45adb7332d927daa62417159d49783041338921dcf646fc7"}, {file = "mypy-1.11.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:75746e06d5fa1e91bfd5432448d00d34593b52e7e91a187d981d08d1f33d4385"}, {file = "mypy-1.11.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a976775ab2256aadc6add633d44f100a2517d2388906ec4f13231fafbb0eccca"}, {file = "mypy-1.11.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cd953f221ac1379050a8a646585a29574488974f79d8082cedef62744f0a0104"}, {file = "mypy-1.11.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:57555a7715c0a34421013144a33d280e73c08df70f3a18a552938587ce9274f4"}, {file = "mypy-1.11.2-cp311-cp311-win_amd64.whl", hash = "sha256:36383a4fcbad95f2657642a07ba22ff797de26277158f1cc7bd234821468b1b6"}, {file = "mypy-1.11.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e8960dbbbf36906c5c0b7f4fbf2f0c7ffb20f4898e6a879fcf56a41a08b0d318"}, {file = "mypy-1.11.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:06d26c277962f3fb50e13044674aa10553981ae514288cb7d0a738f495550b36"}, {file = "mypy-1.11.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6e7184632d89d677973a14d00ae4d03214c8bc301ceefcdaf5c474866814c987"}, {file = "mypy-1.11.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3a66169b92452f72117e2da3a576087025449018afc2d8e9bfe5ffab865709ca"}, {file = "mypy-1.11.2-cp312-cp312-win_amd64.whl", hash = "sha256:969ea3ef09617aff826885a22ece0ddef69d95852cdad2f60c8bb06bf1f71f70"}, {file = "mypy-1.11.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:37c7fa6121c1cdfcaac97ce3d3b5588e847aa79b580c1e922bb5d5d2902df19b"}, {file = "mypy-1.11.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4a8a53bc3ffbd161b5b2a4fff2f0f1e23a33b0168f1c0778ec70e1a3d66deb86"}, {file = "mypy-1.11.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2ff93107f01968ed834f4256bc1fc4475e2fecf6c661260066a985b52741ddce"}, {file = "mypy-1.11.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:edb91dded4df17eae4537668b23f0ff6baf3707683734b6a818d5b9d0c0c31a1"}, {file = "mypy-1.11.2-cp38-cp38-win_amd64.whl", hash = "sha256:ee23de8530d99b6db0573c4ef4bd8f39a2a6f9b60655bf7a1357e585a3486f2b"}, {file = "mypy-1.11.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:801ca29f43d5acce85f8e999b1e431fb479cb02d0e11deb7d2abb56bdaf24fd6"}, {file = "mypy-1.11.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:af8d155170fcf87a2afb55b35dc1a0ac21df4431e7d96717621962e4b9192e70"}, {file = "mypy-1.11.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f7821776e5c4286b6a13138cc935e2e9b6fde05e081bdebf5cdb2bb97c9df81d"}, {file = "mypy-1.11.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:539c570477a96a4e6fb718b8d5c3e0c0eba1f485df13f86d2970c91f0673148d"}, {file = "mypy-1.11.2-cp39-cp39-win_amd64.whl", hash = "sha256:3f14cd3d386ac4d05c5a39a51b84387403dadbd936e17cb35882134d4f8f0d24"}, {file = "mypy-1.11.2-py3-none-any.whl", hash = "sha256:b499bc07dbdcd3de92b0a8b29fdf592c111276f6a12fe29c30f6c417dd546d12"}, {file = "mypy-1.11.2.tar.gz", hash = "sha256:7f9993ad3e0ffdc95c2a14b66dee63729f021968bff8ad911867579c65d13a79"}, ] [package.dependencies] mypy-extensions = ">=1.0.0" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} typing-extensions = ">=4.6.0" [package.extras] dmypy = ["psutil (>=4.0)"] install-types = ["pip"] mypyc = ["setuptools (>=50)"] reports = ["lxml"] [[package]] name = "mypy-extensions" version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." optional = false python-versions = ">=3.5" files = [ {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, ] [[package]] name = "packaging" version = "24.1" description = "Core utilities for Python packages" optional = false python-versions = ">=3.8" files = [ {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, ] [[package]] name = "paginate" version = "0.5.7" description = "Divides large result sets into pages for easier browsing" optional = false python-versions = "*" files = [ {file = "paginate-0.5.7-py2.py3-none-any.whl", hash = "sha256:b885e2af73abcf01d9559fd5216b57ef722f8c42affbb63942377668e35c7591"}, {file = "paginate-0.5.7.tar.gz", hash = "sha256:22bd083ab41e1a8b4f3690544afb2c60c25e5c9a63a30fa2f483f6c60c8e5945"}, ] [package.extras] dev = ["pytest", "tox"] lint = ["black"] [[package]] name = "pathspec" version = "0.12.1" description = "Utility library for gitignore style pattern matching of file paths." optional = false python-versions = ">=3.8" files = [ {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, ] [[package]] name = "platformdirs" version = "4.3.6" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
optional = false python-versions = ">=3.8" files = [ {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, ] [package.extras] docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"] type = ["mypy (>=1.11.2)"] [[package]] name = "pluggy" version = "1.5.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" files = [ {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, ] [package.extras] dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] [[package]] name = "pyasn1" version = "0.6.1" description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" optional = false python-versions = ">=3.8" files = [ {file = "pyasn1-0.6.1-py3-none-any.whl", hash = "sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629"}, {file = "pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034"}, ] [[package]] name = "pyasn1-modules" version = "0.4.1" description = "A collection of ASN.1-based protocols modules" optional = false python-versions = ">=3.8" files = [ {file = "pyasn1_modules-0.4.1-py3-none-any.whl", hash = "sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd"}, {file = "pyasn1_modules-0.4.1.tar.gz", hash = "sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c"}, ] [package.dependencies] pyasn1 = ">=0.4.6,<0.7.0" [[package]] name = "pycparser" version = "2.22" description = "C parser in Python" optional = false python-versions = ">=3.8" files = [ {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, ] [[package]] name = "pygments" version = "2.18.0" description = "Pygments is a syntax highlighting package written in Python." optional = false python-versions = ">=3.8" files = [ {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"}, {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"}, ] [package.extras] windows-terminal = ["colorama (>=0.4.6)"] [[package]] name = "pymdown-extensions" version = "10.11.2" description = "Extension pack for Python Markdown." 
optional = false python-versions = ">=3.8" files = [ {file = "pymdown_extensions-10.11.2-py3-none-any.whl", hash = "sha256:41cdde0a77290e480cf53892f5c5e50921a7ee3e5cd60ba91bf19837b33badcf"}, {file = "pymdown_extensions-10.11.2.tar.gz", hash = "sha256:bc8847ecc9e784a098efd35e20cba772bc5a1b529dfcef9dc1972db9021a1049"}, ] [package.dependencies] markdown = ">=3.6" pyyaml = "*" [package.extras] extra = ["pygments (>=2.12)"] [[package]] name = "pyopenssl" version = "24.2.1" description = "Python wrapper module around the OpenSSL library" optional = false python-versions = ">=3.7" files = [ {file = "pyOpenSSL-24.2.1-py3-none-any.whl", hash = "sha256:967d5719b12b243588573f39b0c677637145c7a1ffedcd495a487e58177fbb8d"}, {file = "pyopenssl-24.2.1.tar.gz", hash = "sha256:4247f0dbe3748d560dcbb2ff3ea01af0f9a1a001ef5f7c4c647956ed8cbf0e95"}, ] [package.dependencies] cryptography = ">=41.0.5,<44" [package.extras] docs = ["sphinx (!=5.2.0,!=5.2.0.post0,!=7.2.5)", "sphinx-rtd-theme"] test = ["pretend", "pytest (>=3.0.1)", "pytest-rerunfailures"] [[package]] name = "pytest" version = "8.3.3" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" files = [ {file = "pytest-8.3.3-py3-none-any.whl", hash = "sha256:a6853c7375b2663155079443d2e45de913a911a11d669df02a50814944db57b2"}, {file = "pytest-8.3.3.tar.gz", hash = "sha256:70b98107bd648308a7952b06e6ca9a50bc660be218d53c257cc1fc94fda10181"}, ] [package.dependencies] colorama = {version = "*", markers = "sys_platform == \"win32\""} exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} iniconfig = "*" packaging = "*" pluggy = ">=1.5,<2" tomli = {version = ">=1", markers = "python_version < \"3.11\""} [package.extras] dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] [[package]] name = "pytest-asyncio" version = "0.24.0" description = "Pytest support for asyncio" optional = false python-versions = ">=3.8" files = [ {file = "pytest_asyncio-0.24.0-py3-none-any.whl", hash = "sha256:a811296ed596b69bf0b6f3dc40f83bcaf341b155a269052d82efa2b25ac7037b"}, {file = "pytest_asyncio-0.24.0.tar.gz", hash = "sha256:d081d828e576d85f875399194281e92bf8a68d60d72d1a2faf2feddb6c46b276"}, ] [package.dependencies] pytest = ">=8.2,<9" [package.extras] docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] [[package]] name = "pytest-cov" version = "5.0.0" description = "Pytest plugin for measuring coverage." optional = false python-versions = ">=3.8" files = [ {file = "pytest-cov-5.0.0.tar.gz", hash = "sha256:5837b58e9f6ebd335b0f8060eecce69b662415b16dc503883a02f45dfeb14857"}, {file = "pytest_cov-5.0.0-py3-none-any.whl", hash = "sha256:4f0764a1219df53214206bf1feea4633c3b558a2925c8b59f144f682861ce652"}, ] [package.dependencies] coverage = {version = ">=5.2.1", extras = ["toml"]} pytest = ">=4.6" [package.extras] testing = ["fields", "hunter", "process-tests", "pytest-xdist", "virtualenv"] [[package]] name = "pytest-django" version = "4.9.0" description = "A Django plugin for pytest." 
optional = false python-versions = ">=3.8" files = [ {file = "pytest_django-4.9.0-py3-none-any.whl", hash = "sha256:1d83692cb39188682dbb419ff0393867e9904094a549a7d38a3154d5731b2b99"}, {file = "pytest_django-4.9.0.tar.gz", hash = "sha256:8bf7bc358c9ae6f6fc51b6cebb190fe20212196e6807121f11bd6a3b03428314"}, ] [package.dependencies] pytest = ">=7.0.0" [package.extras] docs = ["sphinx", "sphinx-rtd-theme"] testing = ["Django", "django-configurations (>=2.0)"] [[package]] name = "python-dateutil" version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [package.dependencies] six = ">=1.5" [[package]] name = "pyyaml" version = "6.0.2" description = "YAML parser and emitter for Python" optional = false python-versions = ">=3.8" files = [ {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, {file = 
"PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, 
{file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, ] [[package]] name = "pyyaml-env-tag" version = "0.1" description = "A custom YAML tag for referencing environment variables in YAML files. " optional = false python-versions = ">=3.6" files = [ {file = "pyyaml_env_tag-0.1-py3-none-any.whl", hash = "sha256:af31106dec8a4d68c60207c1886031cbf839b68aa7abccdb19868200532c2069"}, {file = "pyyaml_env_tag-0.1.tar.gz", hash = "sha256:70092675bda14fdec33b31ba77e7543de9ddc88f2e5b99160396572d11525bdb"}, ] [package.dependencies] pyyaml = "*" [[package]] name = "regex" version = "2024.9.11" description = "Alternative regular expression module, to replace re." 
optional = false python-versions = ">=3.8" files = [ {file = "regex-2024.9.11-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1494fa8725c285a81d01dc8c06b55287a1ee5e0e382d8413adc0a9197aac6408"}, {file = "regex-2024.9.11-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0e12c481ad92d129c78f13a2a3662317e46ee7ef96c94fd332e1c29131875b7d"}, {file = "regex-2024.9.11-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:16e13a7929791ac1216afde26f712802e3df7bf0360b32e4914dca3ab8baeea5"}, {file = "regex-2024.9.11-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:46989629904bad940bbec2106528140a218b4a36bb3042d8406980be1941429c"}, {file = "regex-2024.9.11-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a906ed5e47a0ce5f04b2c981af1c9acf9e8696066900bf03b9d7879a6f679fc8"}, {file = "regex-2024.9.11-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e9a091b0550b3b0207784a7d6d0f1a00d1d1c8a11699c1a4d93db3fbefc3ad35"}, {file = "regex-2024.9.11-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ddcd9a179c0a6fa8add279a4444015acddcd7f232a49071ae57fa6e278f1f71"}, {file = "regex-2024.9.11-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6b41e1adc61fa347662b09398e31ad446afadff932a24807d3ceb955ed865cc8"}, {file = "regex-2024.9.11-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ced479f601cd2f8ca1fd7b23925a7e0ad512a56d6e9476f79b8f381d9d37090a"}, {file = "regex-2024.9.11-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:635a1d96665f84b292e401c3d62775851aedc31d4f8784117b3c68c4fcd4118d"}, {file = "regex-2024.9.11-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:c0256beda696edcf7d97ef16b2a33a8e5a875affd6fa6567b54f7c577b30a137"}, {file = "regex-2024.9.11-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:3ce4f1185db3fbde8ed8aa223fc9620f276c58de8b0d4f8cc86fd1360829edb6"}, {file = "regex-2024.9.11-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:09d77559e80dcc9d24570da3745ab859a9cf91953062e4ab126ba9d5993688ca"}, {file = "regex-2024.9.11-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7a22ccefd4db3f12b526eccb129390942fe874a3a9fdbdd24cf55773a1faab1a"}, {file = "regex-2024.9.11-cp310-cp310-win32.whl", hash = "sha256:f745ec09bc1b0bd15cfc73df6fa4f726dcc26bb16c23a03f9e3367d357eeedd0"}, {file = "regex-2024.9.11-cp310-cp310-win_amd64.whl", hash = "sha256:01c2acb51f8a7d6494c8c5eafe3d8e06d76563d8a8a4643b37e9b2dd8a2ff623"}, {file = "regex-2024.9.11-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2cce2449e5927a0bf084d346da6cd5eb016b2beca10d0013ab50e3c226ffc0df"}, {file = "regex-2024.9.11-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3b37fa423beefa44919e009745ccbf353d8c981516e807995b2bd11c2c77d268"}, {file = "regex-2024.9.11-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:64ce2799bd75039b480cc0360907c4fb2f50022f030bf9e7a8705b636e408fad"}, {file = "regex-2024.9.11-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a4cc92bb6db56ab0c1cbd17294e14f5e9224f0cc6521167ef388332604e92679"}, {file = "regex-2024.9.11-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d05ac6fa06959c4172eccd99a222e1fbf17b5670c4d596cb1e5cde99600674c4"}, {file = "regex-2024.9.11-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:040562757795eeea356394a7fb13076ad4f99d3c62ab0f8bdfb21f99a1f85664"}, {file = 
"regex-2024.9.11-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6113c008a7780792efc80f9dfe10ba0cd043cbf8dc9a76ef757850f51b4edc50"}, {file = "regex-2024.9.11-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8e5fb5f77c8745a60105403a774fe2c1759b71d3e7b4ca237a5e67ad066c7199"}, {file = "regex-2024.9.11-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:54d9ff35d4515debf14bc27f1e3b38bfc453eff3220f5bce159642fa762fe5d4"}, {file = "regex-2024.9.11-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:df5cbb1fbc74a8305b6065d4ade43b993be03dbe0f8b30032cced0d7740994bd"}, {file = "regex-2024.9.11-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:7fb89ee5d106e4a7a51bce305ac4efb981536301895f7bdcf93ec92ae0d91c7f"}, {file = "regex-2024.9.11-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:a738b937d512b30bf75995c0159c0ddf9eec0775c9d72ac0202076c72f24aa96"}, {file = "regex-2024.9.11-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e28f9faeb14b6f23ac55bfbbfd3643f5c7c18ede093977f1df249f73fd22c7b1"}, {file = "regex-2024.9.11-cp311-cp311-win32.whl", hash = "sha256:18e707ce6c92d7282dfce370cd205098384b8ee21544e7cb29b8aab955b66fa9"}, {file = "regex-2024.9.11-cp311-cp311-win_amd64.whl", hash = "sha256:313ea15e5ff2a8cbbad96ccef6be638393041b0a7863183c2d31e0c6116688cf"}, {file = "regex-2024.9.11-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b0d0a6c64fcc4ef9c69bd5b3b3626cc3776520a1637d8abaa62b9edc147a58f7"}, {file = "regex-2024.9.11-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:49b0e06786ea663f933f3710a51e9385ce0cba0ea56b67107fd841a55d56a231"}, {file = "regex-2024.9.11-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5b513b6997a0b2f10e4fd3a1313568e373926e8c252bd76c960f96fd039cd28d"}, {file = "regex-2024.9.11-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee439691d8c23e76f9802c42a95cfeebf9d47cf4ffd06f18489122dbb0a7ad64"}, {file = "regex-2024.9.11-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a8f877c89719d759e52783f7fe6e1c67121076b87b40542966c02de5503ace42"}, {file = "regex-2024.9.11-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:23b30c62d0f16827f2ae9f2bb87619bc4fba2044911e2e6c2eb1af0161cdb766"}, {file = "regex-2024.9.11-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85ab7824093d8f10d44330fe1e6493f756f252d145323dd17ab6b48733ff6c0a"}, {file = "regex-2024.9.11-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8dee5b4810a89447151999428fe096977346cf2f29f4d5e29609d2e19e0199c9"}, {file = "regex-2024.9.11-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:98eeee2f2e63edae2181c886d7911ce502e1292794f4c5ee71e60e23e8d26b5d"}, {file = "regex-2024.9.11-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:57fdd2e0b2694ce6fc2e5ccf189789c3e2962916fb38779d3e3521ff8fe7a822"}, {file = "regex-2024.9.11-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:d552c78411f60b1fdaafd117a1fca2f02e562e309223b9d44b7de8be451ec5e0"}, {file = "regex-2024.9.11-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:a0b2b80321c2ed3fcf0385ec9e51a12253c50f146fddb2abbb10f033fe3d049a"}, {file = "regex-2024.9.11-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:18406efb2f5a0e57e3a5881cd9354c1512d3bb4f5c45d96d110a66114d84d23a"}, {file = "regex-2024.9.11-cp312-cp312-win32.whl", hash = "sha256:e464b467f1588e2c42d26814231edecbcfe77f5ac414d92cbf4e7b55b2c2a776"}, {file = 
"regex-2024.9.11-cp312-cp312-win_amd64.whl", hash = "sha256:9e8719792ca63c6b8340380352c24dcb8cd7ec49dae36e963742a275dfae6009"}, {file = "regex-2024.9.11-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:c157bb447303070f256e084668b702073db99bbb61d44f85d811025fcf38f784"}, {file = "regex-2024.9.11-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4db21ece84dfeefc5d8a3863f101995de646c6cb0536952c321a2650aa202c36"}, {file = "regex-2024.9.11-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:220e92a30b426daf23bb67a7962900ed4613589bab80382be09b48896d211e92"}, {file = "regex-2024.9.11-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eb1ae19e64c14c7ec1995f40bd932448713d3c73509e82d8cd7744dc00e29e86"}, {file = "regex-2024.9.11-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f47cd43a5bfa48f86925fe26fbdd0a488ff15b62468abb5d2a1e092a4fb10e85"}, {file = "regex-2024.9.11-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9d4a76b96f398697fe01117093613166e6aa8195d63f1b4ec3f21ab637632963"}, {file = "regex-2024.9.11-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ea51dcc0835eea2ea31d66456210a4e01a076d820e9039b04ae8d17ac11dee6"}, {file = "regex-2024.9.11-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7aaa315101c6567a9a45d2839322c51c8d6e81f67683d529512f5bcfb99c802"}, {file = "regex-2024.9.11-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c57d08ad67aba97af57a7263c2d9006d5c404d721c5f7542f077f109ec2a4a29"}, {file = "regex-2024.9.11-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:f8404bf61298bb6f8224bb9176c1424548ee1181130818fcd2cbffddc768bed8"}, {file = "regex-2024.9.11-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:dd4490a33eb909ef5078ab20f5f000087afa2a4daa27b4c072ccb3cb3050ad84"}, {file = "regex-2024.9.11-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:eee9130eaad130649fd73e5cd92f60e55708952260ede70da64de420cdcad554"}, {file = "regex-2024.9.11-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6a2644a93da36c784e546de579ec1806bfd2763ef47babc1b03d765fe560c9f8"}, {file = "regex-2024.9.11-cp313-cp313-win32.whl", hash = "sha256:e997fd30430c57138adc06bba4c7c2968fb13d101e57dd5bb9355bf8ce3fa7e8"}, {file = "regex-2024.9.11-cp313-cp313-win_amd64.whl", hash = "sha256:042c55879cfeb21a8adacc84ea347721d3d83a159da6acdf1116859e2427c43f"}, {file = "regex-2024.9.11-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:35f4a6f96aa6cb3f2f7247027b07b15a374f0d5b912c0001418d1d55024d5cb4"}, {file = "regex-2024.9.11-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:55b96e7ce3a69a8449a66984c268062fbaa0d8ae437b285428e12797baefce7e"}, {file = "regex-2024.9.11-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cb130fccd1a37ed894824b8c046321540263013da72745d755f2d35114b81a60"}, {file = "regex-2024.9.11-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:323c1f04be6b2968944d730e5c2091c8c89767903ecaa135203eec4565ed2b2b"}, {file = "regex-2024.9.11-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be1c8ed48c4c4065ecb19d882a0ce1afe0745dfad8ce48c49586b90a55f02366"}, {file = "regex-2024.9.11-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b5b029322e6e7b94fff16cd120ab35a253236a5f99a79fb04fda7ae71ca20ae8"}, {file = "regex-2024.9.11-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6fff13ef6b5f29221d6904aa816c34701462956aa72a77f1f151a8ec4f56aeb"}, 
{file = "regex-2024.9.11-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:587d4af3979376652010e400accc30404e6c16b7df574048ab1f581af82065e4"}, {file = "regex-2024.9.11-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:079400a8269544b955ffa9e31f186f01d96829110a3bf79dc338e9910f794fca"}, {file = "regex-2024.9.11-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:f9268774428ec173654985ce55fc6caf4c6d11ade0f6f914d48ef4719eb05ebb"}, {file = "regex-2024.9.11-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:23f9985c8784e544d53fc2930fc1ac1a7319f5d5332d228437acc9f418f2f168"}, {file = "regex-2024.9.11-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:ae2941333154baff9838e88aa71c1d84f4438189ecc6021a12c7573728b5838e"}, {file = "regex-2024.9.11-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:e93f1c331ca8e86fe877a48ad64e77882c0c4da0097f2212873a69bbfea95d0c"}, {file = "regex-2024.9.11-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:846bc79ee753acf93aef4184c040d709940c9d001029ceb7b7a52747b80ed2dd"}, {file = "regex-2024.9.11-cp38-cp38-win32.whl", hash = "sha256:c94bb0a9f1db10a1d16c00880bdebd5f9faf267273b8f5bd1878126e0fbde771"}, {file = "regex-2024.9.11-cp38-cp38-win_amd64.whl", hash = "sha256:2b08fce89fbd45664d3df6ad93e554b6c16933ffa9d55cb7e01182baaf971508"}, {file = "regex-2024.9.11-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:07f45f287469039ffc2c53caf6803cd506eb5f5f637f1d4acb37a738f71dd066"}, {file = "regex-2024.9.11-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4838e24ee015101d9f901988001038f7f0d90dc0c3b115541a1365fb439add62"}, {file = "regex-2024.9.11-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6edd623bae6a737f10ce853ea076f56f507fd7726bee96a41ee3d68d347e4d16"}, {file = "regex-2024.9.11-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c69ada171c2d0e97a4b5aa78fbb835e0ffbb6b13fc5da968c09811346564f0d3"}, {file = "regex-2024.9.11-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:02087ea0a03b4af1ed6ebab2c54d7118127fee8d71b26398e8e4b05b78963199"}, {file = "regex-2024.9.11-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:69dee6a020693d12a3cf892aba4808fe168d2a4cef368eb9bf74f5398bfd4ee8"}, {file = "regex-2024.9.11-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:297f54910247508e6e5cae669f2bc308985c60540a4edd1c77203ef19bfa63ca"}, {file = "regex-2024.9.11-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ecea58b43a67b1b79805f1a0255730edaf5191ecef84dbc4cc85eb30bc8b63b9"}, {file = "regex-2024.9.11-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:eab4bb380f15e189d1313195b062a6aa908f5bd687a0ceccd47c8211e9cf0d4a"}, {file = "regex-2024.9.11-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0cbff728659ce4bbf4c30b2a1be040faafaa9eca6ecde40aaff86f7889f4ab39"}, {file = "regex-2024.9.11-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:54c4a097b8bc5bb0dfc83ae498061d53ad7b5762e00f4adaa23bee22b012e6ba"}, {file = "regex-2024.9.11-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:73d6d2f64f4d894c96626a75578b0bf7d9e56dcda8c3d037a2118fdfe9b1c664"}, {file = "regex-2024.9.11-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:e53b5fbab5d675aec9f0c501274c467c0f9a5d23696cfc94247e1fb56501ed89"}, {file = "regex-2024.9.11-cp39-cp39-musllinux_1_2_x86_64.whl", hash = 
"sha256:0ffbcf9221e04502fc35e54d1ce9567541979c3fdfb93d2c554f0ca583a19b35"}, {file = "regex-2024.9.11-cp39-cp39-win32.whl", hash = "sha256:e4c22e1ac1f1ec1e09f72e6c44d8f2244173db7eb9629cc3a346a8d7ccc31142"}, {file = "regex-2024.9.11-cp39-cp39-win_amd64.whl", hash = "sha256:faa3c142464efec496967359ca99696c896c591c56c53506bac1ad465f66e919"}, {file = "regex-2024.9.11.tar.gz", hash = "sha256:6c188c307e8433bcb63dc1915022deb553b4203a70722fc542c363bf120a01fd"}, ] [[package]] name = "requests" version = "2.32.3" description = "Python HTTP for Humans." optional = false python-versions = ">=3.8" files = [ {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, ] [package.dependencies] certifi = ">=2017.4.17" charset-normalizer = ">=2,<4" idna = ">=2.5,<4" urllib3 = ">=1.21.1,<3" [package.extras] socks = ["PySocks (>=1.5.6,!=1.5.7)"] use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "ruff" version = "0.6.9" description = "An extremely fast Python linter and code formatter, written in Rust." optional = false python-versions = ">=3.7" files = [ {file = "ruff-0.6.9-py3-none-linux_armv6l.whl", hash = "sha256:064df58d84ccc0ac0fcd63bc3090b251d90e2a372558c0f057c3f75ed73e1ccd"}, {file = "ruff-0.6.9-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:140d4b5c9f5fc7a7b074908a78ab8d384dd7f6510402267bc76c37195c02a7ec"}, {file = "ruff-0.6.9-py3-none-macosx_11_0_arm64.whl", hash = "sha256:53fd8ca5e82bdee8da7f506d7b03a261f24cd43d090ea9db9a1dc59d9313914c"}, {file = "ruff-0.6.9-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:645d7d8761f915e48a00d4ecc3686969761df69fb561dd914a773c1a8266e14e"}, {file = "ruff-0.6.9-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eae02b700763e3847595b9d2891488989cac00214da7f845f4bcf2989007d577"}, {file = "ruff-0.6.9-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d5ccc9e58112441de8ad4b29dcb7a86dc25c5f770e3c06a9d57e0e5eba48829"}, {file = "ruff-0.6.9-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:417b81aa1c9b60b2f8edc463c58363075412866ae4e2b9ab0f690dc1e87ac1b5"}, {file = "ruff-0.6.9-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3c866b631f5fbce896a74a6e4383407ba7507b815ccc52bcedabb6810fdb3ef7"}, {file = "ruff-0.6.9-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7b118afbb3202f5911486ad52da86d1d52305b59e7ef2031cea3425142b97d6f"}, {file = "ruff-0.6.9-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a67267654edc23c97335586774790cde402fb6bbdb3c2314f1fc087dee320bfa"}, {file = "ruff-0.6.9-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:3ef0cc774b00fec123f635ce5c547dac263f6ee9fb9cc83437c5904183b55ceb"}, {file = "ruff-0.6.9-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:12edd2af0c60fa61ff31cefb90aef4288ac4d372b4962c2864aeea3a1a2460c0"}, {file = "ruff-0.6.9-py3-none-musllinux_1_2_i686.whl", hash = "sha256:55bb01caeaf3a60b2b2bba07308a02fca6ab56233302406ed5245180a05c5625"}, {file = "ruff-0.6.9-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:925d26471fa24b0ce5a6cdfab1bb526fb4159952385f386bdcc643813d472039"}, {file = "ruff-0.6.9-py3-none-win32.whl", hash = "sha256:eb61ec9bdb2506cffd492e05ac40e5bc6284873aceb605503d8494180d6fc84d"}, {file = "ruff-0.6.9-py3-none-win_amd64.whl", hash = 
"sha256:785d31851c1ae91f45b3d8fe23b8ae4b5170089021fbb42402d811135f0b7117"}, {file = "ruff-0.6.9-py3-none-win_arm64.whl", hash = "sha256:a9641e31476d601f83cd602608739a0840e348bda93fec9f1ee816f8b6798b93"}, {file = "ruff-0.6.9.tar.gz", hash = "sha256:b076ef717a8e5bc819514ee1d602bbdca5b4420ae13a9cf61a0c0a4f53a2baa2"}, ] [[package]] name = "service-identity" version = "24.1.0" description = "Service identity verification for pyOpenSSL & cryptography." optional = false python-versions = ">=3.8" files = [ {file = "service_identity-24.1.0-py3-none-any.whl", hash = "sha256:a28caf8130c8a5c1c7a6f5293faaf239bbfb7751e4862436920ee6f2616f568a"}, {file = "service_identity-24.1.0.tar.gz", hash = "sha256:6829c9d62fb832c2e1c435629b0a8c476e1929881f28bee4d20bc24161009221"}, ] [package.dependencies] attrs = ">=19.1.0" cryptography = "*" pyasn1 = "*" pyasn1-modules = "*" [package.extras] dev = ["pyopenssl", "service-identity[idna,mypy,tests]"] docs = ["furo", "myst-parser", "pyopenssl", "sphinx", "sphinx-notfound-page"] idna = ["idna"] mypy = ["idna", "mypy", "types-pyopenssl"] tests = ["coverage[toml] (>=5.0.2)", "pytest"] [[package]] name = "setuptools" version = "75.1.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ {file = "setuptools-75.1.0-py3-none-any.whl", hash = "sha256:35ab7fd3bcd95e6b7fd704e4a1539513edad446c097797f2985e0e4b960772f2"}, {file = "setuptools-75.1.0.tar.gz", hash = "sha256:d59a21b17a275fb872a9c3dae73963160ae079f1049ed956880cd7c09b120538"}, ] [package.extras] check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.5.2)"] core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.collections", "jaraco.functools", "jaraco.text (>=3.7)", "more-itertools", "more-itertools (>=8.8)", "packaging", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] enabler = ["pytest-enabler (>=2.2)"] test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] type = ["importlib-metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.11.*)", "pytest-mypy"] [[package]] name = "six" version = "1.16.0" description = "Python 2 and 3 compatibility utilities" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" files = [ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] [[package]] name = "sqlparse" version = "0.5.1" description = "A non-validating SQL parser." 
optional = false python-versions = ">=3.8" files = [ {file = "sqlparse-0.5.1-py3-none-any.whl", hash = "sha256:773dcbf9a5ab44a090f3441e2180efe2560220203dc2f8c0b0fa141e18b505e4"}, {file = "sqlparse-0.5.1.tar.gz", hash = "sha256:bb6b4df465655ef332548e24f08e205afc81b9ab86cb1c45657a7ff173a3a00e"}, ] [package.extras] dev = ["build", "hatch"] doc = ["sphinx"] [[package]] name = "tomli" version = "2.0.2" description = "A lil' TOML parser" optional = false python-versions = ">=3.8" files = [ {file = "tomli-2.0.2-py3-none-any.whl", hash = "sha256:2ebe24485c53d303f690b0ec092806a085f07af5a5aa1464f3931eec36caaa38"}, {file = "tomli-2.0.2.tar.gz", hash = "sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed"}, ] [[package]] name = "twisted" version = "24.7.0" description = "An asynchronous networking framework written in Python" optional = false python-versions = ">=3.8.0" files = [ {file = "twisted-24.7.0-py3-none-any.whl", hash = "sha256:734832ef98108136e222b5230075b1079dad8a3fc5637319615619a7725b0c81"}, {file = "twisted-24.7.0.tar.gz", hash = "sha256:5a60147f044187a127ec7da96d170d49bcce50c6fd36f594e60f4587eff4d394"}, ] [package.dependencies] attrs = ">=21.3.0" automat = ">=0.8.0" constantly = ">=15.1" hyperlink = ">=17.1.1" idna = {version = ">=2.4", optional = true, markers = "extra == \"tls\""} incremental = ">=24.7.0" pyopenssl = {version = ">=21.0.0", optional = true, markers = "extra == \"tls\""} service-identity = {version = ">=18.1.0", optional = true, markers = "extra == \"tls\""} typing-extensions = ">=4.2.0" zope-interface = ">=5" [package.extras] all-non-platform = ["appdirs (>=1.4.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.1.3)", "bcrypt (>=3.1.3)", "cryptography (>=3.3)", "cryptography (>=3.3)", "cython-test-exception-raiser (>=1.0.2,<2)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.56)", "hypothesis (>=6.56)", "idna (>=2.4)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "priority (>=1.1.0,<2.0)", "pyhamcrest (>=2)", "pyhamcrest (>=2)", "pyopenssl (>=21.0.0)", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "pywin32 (!=226)", "service-identity (>=18.1.0)", "service-identity (>=18.1.0)"] conch = ["appdirs (>=1.4.0)", "bcrypt (>=3.1.3)", "cryptography (>=3.3)"] dev = ["coverage (>=7.5,<8.0)", "cython-test-exception-raiser (>=1.0.2,<2)", "hypothesis (>=6.56)", "pydoctor (>=23.9.0,<23.10.0)", "pyflakes (>=2.2,<3.0)", "pyhamcrest (>=2)", "python-subunit (>=1.4,<2.0)", "sphinx (>=6,<7)", "sphinx-rtd-theme (>=1.3,<2.0)", "towncrier (>=23.6,<24.0)", "twistedchecker (>=0.7,<1.0)"] dev-release = ["pydoctor (>=23.9.0,<23.10.0)", "pydoctor (>=23.9.0,<23.10.0)", "sphinx (>=6,<7)", "sphinx (>=6,<7)", "sphinx-rtd-theme (>=1.3,<2.0)", "sphinx-rtd-theme (>=1.3,<2.0)", "towncrier (>=23.6,<24.0)", "towncrier (>=23.6,<24.0)"] gtk-platform = ["appdirs (>=1.4.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.1.3)", "bcrypt (>=3.1.3)", "cryptography (>=3.3)", "cryptography (>=3.3)", "cython-test-exception-raiser (>=1.0.2,<2)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.56)", "hypothesis (>=6.56)", "idna (>=2.4)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "priority (>=1.1.0,<2.0)", "pygobject", "pygobject", "pyhamcrest (>=2)", "pyhamcrest (>=2)", "pyopenssl (>=21.0.0)", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "pywin32 (!=226)", "service-identity (>=18.1.0)", "service-identity (>=18.1.0)"] http2 = ["h2 (>=3.0,<5.0)", 
"priority (>=1.1.0,<2.0)"] macos-platform = ["appdirs (>=1.4.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.1.3)", "bcrypt (>=3.1.3)", "cryptography (>=3.3)", "cryptography (>=3.3)", "cython-test-exception-raiser (>=1.0.2,<2)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.56)", "hypothesis (>=6.56)", "idna (>=2.4)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "priority (>=1.1.0,<2.0)", "pyhamcrest (>=2)", "pyhamcrest (>=2)", "pyobjc-core", "pyobjc-core", "pyobjc-framework-cfnetwork", "pyobjc-framework-cfnetwork", "pyobjc-framework-cocoa", "pyobjc-framework-cocoa", "pyopenssl (>=21.0.0)", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "pywin32 (!=226)", "service-identity (>=18.1.0)", "service-identity (>=18.1.0)"] mypy = ["appdirs (>=1.4.0)", "bcrypt (>=3.1.3)", "coverage (>=7.5,<8.0)", "cryptography (>=3.3)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.56)", "idna (>=2.4)", "mypy (>=1.8,<2.0)", "mypy-zope (>=1.0.3,<1.1.0)", "priority (>=1.1.0,<2.0)", "pydoctor (>=23.9.0,<23.10.0)", "pyflakes (>=2.2,<3.0)", "pyhamcrest (>=2)", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "python-subunit (>=1.4,<2.0)", "pywin32 (!=226)", "service-identity (>=18.1.0)", "sphinx (>=6,<7)", "sphinx-rtd-theme (>=1.3,<2.0)", "towncrier (>=23.6,<24.0)", "twistedchecker (>=0.7,<1.0)", "types-pyopenssl", "types-setuptools"] osx-platform = ["appdirs (>=1.4.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.1.3)", "bcrypt (>=3.1.3)", "cryptography (>=3.3)", "cryptography (>=3.3)", "cython-test-exception-raiser (>=1.0.2,<2)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.56)", "hypothesis (>=6.56)", "idna (>=2.4)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "priority (>=1.1.0,<2.0)", "pyhamcrest (>=2)", "pyhamcrest (>=2)", "pyobjc-core", "pyobjc-core", "pyobjc-framework-cfnetwork", "pyobjc-framework-cfnetwork", "pyobjc-framework-cocoa", "pyobjc-framework-cocoa", "pyopenssl (>=21.0.0)", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "pywin32 (!=226)", "service-identity (>=18.1.0)", "service-identity (>=18.1.0)"] serial = ["pyserial (>=3.0)", "pywin32 (!=226)"] test = ["cython-test-exception-raiser (>=1.0.2,<2)", "hypothesis (>=6.56)", "pyhamcrest (>=2)"] tls = ["idna (>=2.4)", "pyopenssl (>=21.0.0)", "service-identity (>=18.1.0)"] windows-platform = ["appdirs (>=1.4.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.1.3)", "bcrypt (>=3.1.3)", "cryptography (>=3.3)", "cryptography (>=3.3)", "cython-test-exception-raiser (>=1.0.2,<2)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.56)", "hypothesis (>=6.56)", "idna (>=2.4)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "priority (>=1.1.0,<2.0)", "pyhamcrest (>=2)", "pyhamcrest (>=2)", "pyopenssl (>=21.0.0)", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "pywin32 (!=226)", "pywin32 (!=226)", "pywin32 (!=226)", "service-identity (>=18.1.0)", "service-identity (>=18.1.0)", "twisted-iocpsupport (>=1.0.2)", "twisted-iocpsupport (>=1.0.2)"] [[package]] name = "txaio" version = "23.1.1" description = "Compatibility API between asyncio/Twisted/Trollius" optional = false python-versions = ">=3.7" files = [ {file = "txaio-23.1.1-py2.py3-none-any.whl", hash = "sha256:aaea42f8aad50e0ecfb976130ada140797e9dcb85fad2cf72b0f37f8cefcb490"}, {file = "txaio-23.1.1.tar.gz", hash = 
"sha256:f9a9216e976e5e3246dfd112ad7ad55ca915606b60b84a757ac769bd404ff704"}, ] [package.extras] all = ["twisted (>=20.3.0)", "zope.interface (>=5.2.0)"] dev = ["pep8 (>=1.6.2)", "pyenchant (>=1.6.6)", "pytest (>=2.6.4)", "pytest-cov (>=1.8.1)", "sphinx (>=1.2.3)", "sphinx-rtd-theme (>=0.1.9)", "sphinxcontrib-spelling (>=2.1.2)", "tox (>=2.1.1)", "tox-gh-actions (>=2.2.0)", "twine (>=1.6.5)", "wheel"] twisted = ["twisted (>=20.3.0)", "zope.interface (>=5.2.0)"] [[package]] name = "typing-extensions" version = "4.12.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, ] [[package]] name = "tzdata" version = "2024.2" description = "Provider of IANA time zone data" optional = false python-versions = ">=2" files = [ {file = "tzdata-2024.2-py2.py3-none-any.whl", hash = "sha256:a48093786cdcde33cad18c2555e8532f34422074448fbc874186f0abd79565cd"}, {file = "tzdata-2024.2.tar.gz", hash = "sha256:7d85cc416e9382e69095b7bdf4afd9e3880418a2413feec7069d533d6b4e31cc"}, ] [[package]] name = "urllib3" version = "2.2.3" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" files = [ {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"}, {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"}, ] [package.extras] brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] [[package]] name = "watchdog" version = "5.0.3" description = "Filesystem events monitoring" optional = false python-versions = ">=3.9" files = [ {file = "watchdog-5.0.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:85527b882f3facda0579bce9d743ff7f10c3e1e0db0a0d0e28170a7d0e5ce2ea"}, {file = "watchdog-5.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:53adf73dcdc0ef04f7735066b4a57a4cd3e49ef135daae41d77395f0b5b692cb"}, {file = "watchdog-5.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e25adddab85f674acac303cf1f5835951345a56c5f7f582987d266679979c75b"}, {file = "watchdog-5.0.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f01f4a3565a387080dc49bdd1fefe4ecc77f894991b88ef927edbfa45eb10818"}, {file = "watchdog-5.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:91b522adc25614cdeaf91f7897800b82c13b4b8ac68a42ca959f992f6990c490"}, {file = "watchdog-5.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d52db5beb5e476e6853da2e2d24dbbbed6797b449c8bf7ea118a4ee0d2c9040e"}, {file = "watchdog-5.0.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:94d11b07c64f63f49876e0ab8042ae034674c8653bfcdaa8c4b32e71cfff87e8"}, {file = "watchdog-5.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:349c9488e1d85d0a58e8cb14222d2c51cbc801ce11ac3936ab4c3af986536926"}, {file = "watchdog-5.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:53a3f10b62c2d569e260f96e8d966463dec1a50fa4f1b22aec69e3f91025060e"}, {file = "watchdog-5.0.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:950f531ec6e03696a2414b6308f5c6ff9dab7821a768c9d5788b1314e9a46ca7"}, {file = 
"watchdog-5.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ae6deb336cba5d71476caa029ceb6e88047fc1dc74b62b7c4012639c0b563906"}, {file = "watchdog-5.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1021223c08ba8d2d38d71ec1704496471ffd7be42cfb26b87cd5059323a389a1"}, {file = "watchdog-5.0.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:752fb40efc7cc8d88ebc332b8f4bcbe2b5cc7e881bccfeb8e25054c00c994ee3"}, {file = "watchdog-5.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a2e8f3f955d68471fa37b0e3add18500790d129cc7efe89971b8a4cc6fdeb0b2"}, {file = "watchdog-5.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b8ca4d854adcf480bdfd80f46fdd6fb49f91dd020ae11c89b3a79e19454ec627"}, {file = "watchdog-5.0.3-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:90a67d7857adb1d985aca232cc9905dd5bc4803ed85cfcdcfcf707e52049eda7"}, {file = "watchdog-5.0.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:720ef9d3a4f9ca575a780af283c8fd3a0674b307651c1976714745090da5a9e8"}, {file = "watchdog-5.0.3-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:223160bb359281bb8e31c8f1068bf71a6b16a8ad3d9524ca6f523ac666bb6a1e"}, {file = "watchdog-5.0.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:560135542c91eaa74247a2e8430cf83c4342b29e8ad4f520ae14f0c8a19cfb5b"}, {file = "watchdog-5.0.3-py3-none-manylinux2014_aarch64.whl", hash = "sha256:dd021efa85970bd4824acacbb922066159d0f9e546389a4743d56919b6758b91"}, {file = "watchdog-5.0.3-py3-none-manylinux2014_armv7l.whl", hash = "sha256:78864cc8f23dbee55be34cc1494632a7ba30263951b5b2e8fc8286b95845f82c"}, {file = "watchdog-5.0.3-py3-none-manylinux2014_i686.whl", hash = "sha256:1e9679245e3ea6498494b3028b90c7b25dbb2abe65c7d07423ecfc2d6218ff7c"}, {file = "watchdog-5.0.3-py3-none-manylinux2014_ppc64.whl", hash = "sha256:9413384f26b5d050b6978e6fcd0c1e7f0539be7a4f1a885061473c5deaa57221"}, {file = "watchdog-5.0.3-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:294b7a598974b8e2c6123d19ef15de9abcd282b0fbbdbc4d23dfa812959a9e05"}, {file = "watchdog-5.0.3-py3-none-manylinux2014_s390x.whl", hash = "sha256:26dd201857d702bdf9d78c273cafcab5871dd29343748524695cecffa44a8d97"}, {file = "watchdog-5.0.3-py3-none-manylinux2014_x86_64.whl", hash = "sha256:0f9332243355643d567697c3e3fa07330a1d1abf981611654a1f2bf2175612b7"}, {file = "watchdog-5.0.3-py3-none-win32.whl", hash = "sha256:c66f80ee5b602a9c7ab66e3c9f36026590a0902db3aea414d59a2f55188c1f49"}, {file = "watchdog-5.0.3-py3-none-win_amd64.whl", hash = "sha256:f00b4cf737f568be9665563347a910f8bdc76f88c2970121c86243c8cfdf90e9"}, {file = "watchdog-5.0.3-py3-none-win_ia64.whl", hash = "sha256:49f4d36cb315c25ea0d946e018c01bb028048023b9e103d3d3943f58e109dd45"}, {file = "watchdog-5.0.3.tar.gz", hash = "sha256:108f42a7f0345042a854d4d0ad0834b741d421330d5f575b81cb27b883500176"}, ] [package.extras] watchmedo = ["PyYAML (>=3.10)"] [[package]] name = "zope-interface" version = "7.0.3" description = "Interfaces for Python" optional = false python-versions = ">=3.8" files = [ {file = "zope.interface-7.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9b9369671a20b8d039b8e5a1a33abd12e089e319a3383b4cc0bf5c67bd05fe7b"}, {file = "zope.interface-7.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:db6237e8fa91ea4f34d7e2d16d74741187e9105a63bbb5686c61fea04cdbacca"}, {file = "zope.interface-7.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:53d678bb1c3b784edbfb0adeebfeea6bf479f54da082854406a8f295d36f8386"}, {file = 
"zope.interface-7.0.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3aa8fcbb0d3c2be1bfd013a0f0acd636f6ed570c287743ae2bbd467ee967154d"}, {file = "zope.interface-7.0.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6195c3c03fef9f87c0dbee0b3b6451df6e056322463cf35bca9a088e564a3c58"}, {file = "zope.interface-7.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:11fa1382c3efb34abf16becff8cb214b0b2e3144057c90611621f2d186b7e1b7"}, {file = "zope.interface-7.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:af94e429f9d57b36e71ef4e6865182090648aada0cb2d397ae2b3f7fc478493a"}, {file = "zope.interface-7.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dd647fcd765030638577fe6984284e0ebba1a1008244c8a38824be096e37fe3"}, {file = "zope.interface-7.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bee1b722077d08721005e8da493ef3adf0b7908e0cd85cc7dc836ac117d6f32"}, {file = "zope.interface-7.0.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2545d6d7aac425d528cd9bf0d9e55fcd47ab7fd15f41a64b1c4bf4c6b24946dc"}, {file = "zope.interface-7.0.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d04b11ea47c9c369d66340dbe51e9031df2a0de97d68f442305ed7625ad6493"}, {file = "zope.interface-7.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:064ade95cb54c840647205987c7b557f75d2b2f7d1a84bfab4cf81822ef6e7d1"}, {file = "zope.interface-7.0.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3fcdc76d0cde1c09c37b7c6b0f8beba2d857d8417b055d4f47df9c34ec518bdd"}, {file = "zope.interface-7.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3d4b91821305c8d8f6e6207639abcbdaf186db682e521af7855d0bea3047c8ca"}, {file = "zope.interface-7.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35062d93bc49bd9b191331c897a96155ffdad10744ab812485b6bad5b588d7e4"}, {file = "zope.interface-7.0.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c96b3e6b0d4f6ddfec4e947130ec30bd2c7b19db6aa633777e46c8eecf1d6afd"}, {file = "zope.interface-7.0.3-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e0c151a6c204f3830237c59ee4770cc346868a7a1af6925e5e38650141a7f05"}, {file = "zope.interface-7.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:3de1d553ce72868b77a7e9d598c9bff6d3816ad2b4cc81c04f9d8914603814f3"}, {file = "zope.interface-7.0.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ab985c566a99cc5f73bc2741d93f1ed24a2cc9da3890144d37b9582965aff996"}, {file = "zope.interface-7.0.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d976fa7b5faf5396eb18ce6c132c98e05504b52b60784e3401f4ef0b2e66709b"}, {file = "zope.interface-7.0.3-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21a207c6b2c58def5011768140861a73f5240f4f39800625072ba84e76c9da0b"}, {file = "zope.interface-7.0.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:382d31d1e68877061daaa6499468e9eb38eb7625d4369b1615ac08d3860fe896"}, {file = "zope.interface-7.0.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2c4316a30e216f51acbd9fb318aa5af2e362b716596d82cbb92f9101c8f8d2e7"}, {file = 
"zope.interface-7.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01e6e58078ad2799130c14a1d34ec89044ada0e1495329d72ee0407b9ae5100d"}, {file = "zope.interface-7.0.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:799ef7a444aebbad5a145c3b34bff012b54453cddbde3332d47ca07225792ea4"}, {file = "zope.interface-7.0.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3b7ce6d46fb0e60897d62d1ff370790ce50a57d40a651db91a3dde74f73b738"}, {file = "zope.interface-7.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:f418c88f09c3ba159b95a9d1cfcdbe58f208443abb1f3109f4b9b12fd60b187c"}, {file = "zope.interface-7.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:84f8794bd59ca7d09d8fce43ae1b571be22f52748169d01a13d3ece8394d8b5b"}, {file = "zope.interface-7.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7d92920416f31786bc1b2f34cc4fc4263a35a407425319572cbf96b51e835cd3"}, {file = "zope.interface-7.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95e5913ec718010dc0e7c215d79a9683b4990e7026828eedfda5268e74e73e11"}, {file = "zope.interface-7.0.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1eeeb92cb7d95c45e726e3c1afe7707919370addae7ed14f614e22217a536958"}, {file = "zope.interface-7.0.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ecd32f30f40bfd8511b17666895831a51b532e93fc106bfa97f366589d3e4e0e"}, {file = "zope.interface-7.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:5112c530fa8aa2108a3196b9c2f078f5738c1c37cfc716970edc0df0414acda8"}, {file = "zope.interface-7.0.3.tar.gz", hash = "sha256:cd2690d4b08ec9eaf47a85914fe513062b20da78d10d6d789a792c0b20307fb1"}, ] [package.dependencies] setuptools = "*" [package.extras] docs = ["Sphinx", "repoze.sphinx.autointerface", "sphinx-rtd-theme"] test = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] [metadata] lock-version = "2.0" python-versions = "^3.10" content-hash = "a5f4b61c26d6965314d1a993370f37024f2843232a3e32565f70a1f671f29645" django-pgschemas-1.0.1/py.typed000066400000000000000000000000001470131117100164040ustar00rootroot00000000000000django-pgschemas-1.0.1/pyproject.toml000066400000000000000000000033231470131117100176340ustar00rootroot00000000000000[tool.poetry] name = "django-pgschemas" version = "1.0.1" description = "Django multi-tenancy through Postgres schemas" license = "MIT" authors = ["Lorenzo Peña "] readme = "README.md" repository = "https://github.com/lorinkoz/django-pgschemas" documentation = "https://django-pgschemas.readthedocs.io/" keywords = ["django", "tenants", "schemas", "multi-tenancy", "postgres"] classifiers = [ "Development Status :: 4 - Beta", # "Development Status :: 5 - Production/Stable", "Framework :: Django", "Framework :: Django :: 5.0", "Framework :: Django :: 5.1", ] [tool.poetry.dependencies] python = "^3.10" django = "^5.0" [tool.poetry.group.dev.dependencies] mypy = "*" pytest = "*" pytest-cov = "*" pytest-django = "*" pytest-asyncio = "*" ruff = "*" channels = { extras = ["daphne"], version = "*" } mkdocs-material = "*" [tool.ruff] select = ["I", "E", "F"] line-length = 100 ignore = [ "E501", # line-too-long "W191", # tab-indentation ] [tool.ruff.isort] combine-as-imports = true known-first-party = ["django_pgschemas"] [tool.mypy] strict_optional = true 
disallow_untyped_defs = true check_untyped_defs = true warn_unused_ignores = true [[tool.mypy.overrides]] module = ["channels.*", "psycopg.*", "psycopg2.*", "django.*"] ignore_missing_imports = true [[tool.mypy.overrides]] module = [ "docs.*", "sandbox.*", "django_pgschemas.test.*", "django_pgschemas.contrib.*", "django_pgschemas.management.*", "django_pgschemas.postgresql.*", ] ignore_errors = true [tool.pytest.ini_options] DJANGO_SETTINGS_MODULE = "sandbox.settings" django_find_project = false markers = ["bug"] [build-system] requires = ["poetry-core"] build-backend = "poetry.core.masonry.api" django-pgschemas-1.0.1/sandbox/000077500000000000000000000000001470131117100163555ustar00rootroot00000000000000django-pgschemas-1.0.1/sandbox/__init__.py000066400000000000000000000000001470131117100204540ustar00rootroot00000000000000django-pgschemas-1.0.1/sandbox/app_blog/000077500000000000000000000000001470131117100201405ustar00rootroot00000000000000django-pgschemas-1.0.1/sandbox/app_blog/__init__.py000066400000000000000000000000001470131117100222370ustar00rootroot00000000000000django-pgschemas-1.0.1/sandbox/app_blog/migrations/000077500000000000000000000000001470131117100223145ustar00rootroot00000000000000django-pgschemas-1.0.1/sandbox/app_blog/migrations/0001_initial.py000066400000000000000000000017011470131117100247560ustar00rootroot00000000000000# Generated by Django 2.1.4 on 2019-01-13 04:11 import django.db.models.deletion from django.conf import settings from django.db import migrations, models class Migration(migrations.Migration): initial = True dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel( name="BlogEntry", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID" ), ), ( "user", models.ForeignKey( on_delete=django.db.models.deletion.CASCADE, related_name="blogs", to=settings.AUTH_USER_MODEL, ), ), ], ), ] django-pgschemas-1.0.1/sandbox/app_blog/migrations/__init__.py000066400000000000000000000000001470131117100244130ustar00rootroot00000000000000django-pgschemas-1.0.1/sandbox/app_blog/models.py000066400000000000000000000003141470131117100217730ustar00rootroot00000000000000from django.contrib.auth import get_user_model from django.db import models class BlogEntry(models.Model): user = models.ForeignKey(get_user_model(), on_delete=models.CASCADE, related_name="blogs") django-pgschemas-1.0.1/sandbox/app_blog/urls.py000066400000000000000000000003611470131117100214770ustar00rootroot00000000000000from django.contrib import admin from django.urls import path from sandbox.views import generic urlpatterns = [ path("", generic, name="blog-home"), path("entries/", generic, name="entries"), path("admin/", admin.site.urls), ] django-pgschemas-1.0.1/sandbox/app_main/000077500000000000000000000000001470131117100201415ustar00rootroot00000000000000django-pgschemas-1.0.1/sandbox/app_main/__init__.py000066400000000000000000000000001470131117100222400ustar00rootroot00000000000000django-pgschemas-1.0.1/sandbox/app_main/migrations/000077500000000000000000000000001470131117100223155ustar00rootroot00000000000000django-pgschemas-1.0.1/sandbox/app_main/migrations/0001_initial.py000066400000000000000000000010171470131117100247570ustar00rootroot00000000000000# Generated by Django 2.1.4 on 2019-01-13 04:41 from django.db import migrations, models class Migration(migrations.Migration): initial = True dependencies = [] operations = [ migrations.CreateModel( name="MainData", fields=[ ( "id", 
models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID" ), ), ], ), ] django-pgschemas-1.0.1/sandbox/app_main/migrations/__init__.py000066400000000000000000000000001470131117100244140ustar00rootroot00000000000000django-pgschemas-1.0.1/sandbox/app_main/models.py000066400000000000000000000001061470131117100217730ustar00rootroot00000000000000from django.db import models class MainData(models.Model): pass django-pgschemas-1.0.1/sandbox/app_main/urls.py000066400000000000000000000003631470131117100215020ustar00rootroot00000000000000from django.contrib import admin from django.urls import path from sandbox.views import generic urlpatterns = [ path("", generic, name="main-home"), path("register/", generic, name="register"), path("admin/", admin.site.urls), ] django-pgschemas-1.0.1/sandbox/app_main/ws_urls.py000066400000000000000000000002321470131117100222060ustar00rootroot00000000000000from django.urls import path from sandbox.consumers import EchoConsumer urlpatterns = [ path("ws/main/", EchoConsumer.as_asgi(), name="main-ws"), ] django-pgschemas-1.0.1/sandbox/app_tenants/000077500000000000000000000000001470131117100206715ustar00rootroot00000000000000django-pgschemas-1.0.1/sandbox/app_tenants/__init__.py000066400000000000000000000000001470131117100227700ustar00rootroot00000000000000django-pgschemas-1.0.1/sandbox/app_tenants/migrations/000077500000000000000000000000001470131117100230455ustar00rootroot00000000000000django-pgschemas-1.0.1/sandbox/app_tenants/migrations/0001_initial.py000066400000000000000000000024471470131117100255170ustar00rootroot00000000000000# Generated by Django 2.1.4 on 2019-01-13 03:07 import django.db.models.deletion from django.conf import settings from django.db import migrations, models class Migration(migrations.Migration): initial = True dependencies = [ ("shared_public", "0001_initial"), migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel( name="TenantData", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID" ), ), ( "catalog", models.ForeignKey( on_delete=django.db.models.deletion.CASCADE, related_name="tenant_objects", to="shared_public.Catalog", ), ), ( "user", models.ForeignKey( on_delete=django.db.models.deletion.CASCADE, related_name="tenant_objects", to=settings.AUTH_USER_MODEL, ), ), ], ) ] django-pgschemas-1.0.1/sandbox/app_tenants/migrations/0002_tenantdata_active.py000066400000000000000000000005761470131117100275460ustar00rootroot00000000000000# Generated by Django 3.0.5 on 2021-02-22 21:07 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ("app_tenants", "0001_initial"), ] operations = [ migrations.AddField( model_name="tenantdata", name="active", field=models.BooleanField(default=True), ), ] django-pgschemas-1.0.1/sandbox/app_tenants/migrations/__init__.py000066400000000000000000000000001470131117100251440ustar00rootroot00000000000000django-pgschemas-1.0.1/sandbox/app_tenants/models.py000066400000000000000000000006241470131117100225300ustar00rootroot00000000000000from django.contrib.auth import get_user_model from django.db import models class TenantData(models.Model): catalog = models.ForeignKey( "shared_public.Catalog", on_delete=models.CASCADE, related_name="tenant_objects" ) user = models.ForeignKey( get_user_model(), on_delete=models.CASCADE, related_name="tenant_objects" ) active = models.BooleanField(default=True) 
django-pgschemas-1.0.1/sandbox/app_tenants/urls.py000066400000000000000000000006511470131117100222320ustar00rootroot00000000000000from django.contrib import admin from django.contrib.auth.decorators import login_required from django.urls import path from sandbox.views import generic urlpatterns = [ path("", generic, name="tenant-home"), path("profile/", generic, name="profile"), path("profile/advanced/", login_required(generic), name="advanced-profile"), path("login/", generic, name="login"), path("admin/", admin.site.urls), ] django-pgschemas-1.0.1/sandbox/app_tenants/ws_urls.py000066400000000000000000000002361470131117100227420ustar00rootroot00000000000000from django.urls import path from sandbox.consumers import EchoConsumer urlpatterns = [ path("ws/tenant/", EchoConsumer.as_asgi(), name="tenant-ws"), ] django-pgschemas-1.0.1/sandbox/asgi.py000066400000000000000000000002501470131117100176470ustar00rootroot00000000000000import os from django.core.asgi import get_asgi_application os.environ.setdefault("DJANGO_SETTINGS_MODULE", "sandbox.settings") application = get_asgi_application() django-pgschemas-1.0.1/sandbox/consumers.py000066400000000000000000000007541470131117100207530ustar00rootroot00000000000000import json from channels.generic.websocket import AsyncWebsocketConsumer class EchoConsumer(AsyncWebsocketConsumer): async def connect(self): await self.accept() async def disconnect(self, close_code): pass async def receive(self, text_data): data = json.loads(text_data) message = data["message"] schema_name = self.scope["tenant"].schema_name await self.send(text_data=json.dumps({"message": f"{schema_name}: {message}"})) django-pgschemas-1.0.1/sandbox/manage.py000077500000000000000000000010031470131117100201540ustar00rootroot00000000000000#!/usr/bin/env python import os import sys if __name__ == "__main__": os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings") try: from django.core.management import execute_from_command_line except ImportError: raise ImportError( "Couldn't import Django. Are you sure it's installed and " "available on your PYTHONPATH environment variable? Did you " "forget to activate a virtual environment?" ) execute_from_command_line(sys.argv) django-pgschemas-1.0.1/sandbox/routing.py000066400000000000000000000012151470131117100204150ustar00rootroot00000000000000from channels.auth import AuthMiddlewareStack from channels.routing import ProtocolTypeRouter from django_pgschemas.contrib.channels import ( DomainRoutingMiddleware, HeadersRoutingMiddleware, TenantURLRouter, ) domain_application = ProtocolTypeRouter( { "websocket": DomainRoutingMiddleware( AuthMiddlewareStack( TenantURLRouter(), ), ), } ) headers_application = ProtocolTypeRouter( { "websocket": HeadersRoutingMiddleware( AuthMiddlewareStack( TenantURLRouter(), ), ), } ) application = domain_application django-pgschemas-1.0.1/sandbox/settings.py000066400000000000000000000116241470131117100205730ustar00rootroot00000000000000""" Django settings for sandbox project. Generated by 'django-admin startproject' using Django 2.1.4. For more information on this file, see https://docs.djangoproject.com/en/2.1/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/2.1/ref/settings/ """ import os # Build paths inside the project like this: os.path.join(BASE_DIR, ...) 
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/2.1/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = "asd#$#ae)^gegm6m9omvic^ct@*@bkf!0afe*+4h$5-zmf^h&$u4(1vr" # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True ALLOWED_HOSTS = [".localhost"] TENANTS = { "public": { "APPS": [ "sandbox.shared_public", "django.contrib.auth", "django.contrib.contenttypes", "django.contrib.staticfiles", "django.contrib.messages", ], }, "www": { "APPS": [ "sandbox.shared_common", "sandbox.app_main", "django.contrib.sessions", "django.contrib.admin", ], "URLCONF": "sandbox.app_main.urls", "WS_URLCONF": "sandbox.app_main.ws_urls", "DOMAINS": ["localhost"], "SESSION_KEY": "main", "HEADER": "main", "FALLBACK_DOMAINS": ["tenants.localhost"], }, "blog": { "APPS": [ "sandbox.shared_common", "sandbox.app_blog", "django.contrib.sessions", "django.contrib.admin", ], "URLCONF": "sandbox.app_blog.urls", "DOMAINS": ["blog.localhost"], }, "default": { "TENANT_MODEL": "shared_public.Tenant", "DOMAIN_MODEL": "shared_public.Domain", "APPS": [ "sandbox.shared_common", "sandbox.app_tenants", "django.contrib.sessions", "django.contrib.admin", ], "URLCONF": "sandbox.app_tenants.urls", "WS_URLCONF": "sandbox.app_tenants.ws_urls", "CLONE_REFERENCE": "sample", }, } # Application definition INSTALLED_APPS = ["django_pgschemas"] for schema in TENANTS: INSTALLED_APPS += [app for app in TENANTS[schema]["APPS"] if app not in INSTALLED_APPS] ROOT_URLCONF = TENANTS["default"]["URLCONF"] ASGI_APPLICATION = "routing.application" AUTH_USER_MODEL = "shared_common.User" LOGIN_URL = "login" MIDDLEWARE = [ "django_pgschemas.routing.middleware.DomainRoutingMiddleware", "django.middleware.security.SecurityMiddleware", "django.contrib.sessions.middleware.SessionMiddleware", "django.middleware.common.CommonMiddleware", "django.middleware.csrf.CsrfViewMiddleware", "django.contrib.auth.middleware.AuthenticationMiddleware", "django.contrib.messages.middleware.MessageMiddleware", "django.middleware.clickjacking.XFrameOptionsMiddleware", ] TEMPLATES = [ { "BACKEND": "django.template.backends.django.DjangoTemplates", "DIRS": [ f"{BASE_DIR}/sandbox/templates", ], "APP_DIRS": True, "OPTIONS": { "context_processors": [ "django.template.context_processors.debug", "django.template.context_processors.request", "django.contrib.auth.context_processors.auth", "django.contrib.messages.context_processors.messages", ] }, } ] # Database # https://docs.djangoproject.com/en/2.1/ref/settings/#databases DATABASES = { "default": { "ENGINE": "django_pgschemas.postgresql", "NAME": "sandbox", "USER": "postgres", "PASSWORD": os.environ.get("DATABASE_PASSWORD", "postgres"), "HOST": os.environ.get("DATABASE_HOST", "localhost"), "PORT": "", } } DATABASE_ROUTERS = ("django_pgschemas.routers.TenantAppsRouter",) CACHES = { "default": { "BACKEND": "django.core.cache.backends.locmem.LocMemCache", "KEY_FUNCTION": "django_pgschemas.contrib.cache.make_key", } } # Password validation # https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ {"NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator"}, {"NAME": "django.contrib.auth.password_validation.MinimumLengthValidator"}, {"NAME": "django.contrib.auth.password_validation.CommonPasswordValidator"}, {"NAME": 
"django.contrib.auth.password_validation.NumericPasswordValidator"}, ] # Internationalization # https://docs.djangoproject.com/en/2.1/topics/i18n/ LANGUAGE_CODE = "en-us" TIME_ZONE = "UTC" USE_I18N = True USE_TZ = True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/2.1/howto/static-files/ STATIC_URL = "/static/" DEFAULT_AUTO_FIELD = "django.db.models.AutoField" django-pgschemas-1.0.1/sandbox/shared_common/000077500000000000000000000000001470131117100211735ustar00rootroot00000000000000django-pgschemas-1.0.1/sandbox/shared_common/__init__.py000066400000000000000000000000001470131117100232720ustar00rootroot00000000000000django-pgschemas-1.0.1/sandbox/shared_common/management/000077500000000000000000000000001470131117100233075ustar00rootroot00000000000000django-pgschemas-1.0.1/sandbox/shared_common/management/__init__.py000066400000000000000000000000001470131117100254060ustar00rootroot00000000000000django-pgschemas-1.0.1/sandbox/shared_common/management/commands/000077500000000000000000000000001470131117100251105ustar00rootroot00000000000000django-pgschemas-1.0.1/sandbox/shared_common/management/commands/__init__.py000066400000000000000000000000001470131117100272070ustar00rootroot00000000000000django-pgschemas-1.0.1/sandbox/shared_common/management/commands/reverse_url.py000066400000000000000000000020731470131117100300210ustar00rootroot00000000000000from django.conf import settings from django.urls import reverse from django_pgschemas.management.commands import SchemaCommand from django_pgschemas.routing.info import DomainInfo from django_pgschemas.routing.models import get_primary_domain_for_tenant from django_pgschemas.routing.urlresolvers import get_urlconf_from_schema from django_pgschemas.schema import Schema class Command(SchemaCommand): def add_arguments(self, parser): super().add_arguments(parser) parser.add_argument( dest="url_name", help="Url name to resolve in the specified schema", ) def handle_schema(self, schema: Schema, *args, **options): if schema.is_dynamic: primary_domain = get_primary_domain_for_tenant(schema) schema.routing = DomainInfo(domain=primary_domain.domain, folder=primary_domain.folder) else: schema.routing = DomainInfo(domain=settings.TENANTS[schema.schema_name]["DOMAINS"][0]) self.stdout.write(reverse(options["url_name"], urlconf=get_urlconf_from_schema(schema))) django-pgschemas-1.0.1/sandbox/shared_common/migrations/000077500000000000000000000000001470131117100233475ustar00rootroot00000000000000django-pgschemas-1.0.1/sandbox/shared_common/migrations/0001_initial.py000066400000000000000000000017351470131117100260200ustar00rootroot00000000000000# Generated by Django 2.1.4 on 2019-01-13 03:07 from django.db import migrations, models class Migration(migrations.Migration): initial = True dependencies = [] operations = [ migrations.CreateModel( name="User", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID" ), ), ("password", models.CharField(max_length=128, verbose_name="password")), ( "last_login", models.DateTimeField(blank=True, null=True, verbose_name="last login"), ), ("email", models.EmailField(max_length=254, unique=True)), ("display_name", models.CharField(max_length=50)), ], options={ "abstract": False, }, ), ] 
django-pgschemas-1.0.1/sandbox/shared_common/migrations/__init__.py000066400000000000000000000000001470131117100254460ustar00rootroot00000000000000django-pgschemas-1.0.1/sandbox/shared_common/models.py000066400000000000000000000005661470131117100230370ustar00rootroot00000000000000from django.contrib.auth.models import AbstractBaseUser, BaseUserManager from django.db import models class UserManager(BaseUserManager): pass class User(AbstractBaseUser): email = models.EmailField(unique=True) display_name = models.CharField(max_length=50) USERNAME_FIELD = "email" REQUIRED_FIELDS = ("display_name",) objects = UserManager() django-pgschemas-1.0.1/sandbox/shared_public/000077500000000000000000000000001470131117100211615ustar00rootroot00000000000000django-pgschemas-1.0.1/sandbox/shared_public/__init__.py000066400000000000000000000000001470131117100232600ustar00rootroot00000000000000django-pgschemas-1.0.1/sandbox/shared_public/migrations/000077500000000000000000000000001470131117100233355ustar00rootroot00000000000000django-pgschemas-1.0.1/sandbox/shared_public/migrations/0001_initial.py000066400000000000000000000044151470131117100260040ustar00rootroot00000000000000# Generated by Django 2.1.4 on 2019-01-13 03:07 import django.db.models.deletion from django.db import migrations, models import django_pgschemas.schema import django_pgschemas.utils class Migration(migrations.Migration): initial = True dependencies = [] operations = [ migrations.CreateModel( name="Domain", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID" ), ), ("domain", models.CharField(db_index=True, max_length=253)), ("folder", models.SlugField(blank=True, max_length=253)), ("is_primary", models.BooleanField(default=True)), ], options={"abstract": False}, ), migrations.CreateModel( name="Catalog", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID" ), ) ], ), migrations.CreateModel( name="Tenant", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID" ), ), ( "schema_name", models.CharField( max_length=63, unique=True, validators=[django_pgschemas.utils.check_schema_name], ), ), ], options={"abstract": False}, bases=(django_pgschemas.schema.Schema, models.Model), ), migrations.AddField( model_name="domain", name="tenant", field=models.ForeignKey( on_delete=django.db.models.deletion.CASCADE, related_name="domains", to="shared_public.Tenant", ), ), migrations.AlterUniqueTogether(name="domain", unique_together={("domain", "folder")}), ] django-pgschemas-1.0.1/sandbox/shared_public/migrations/0002_domain_redirect_to_primary.py000066400000000000000000000006121470131117100317440ustar00rootroot00000000000000# Generated by Django 3.2.4 on 2021-10-27 19:53 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ("shared_public", "0001_initial"), ] operations = [ migrations.AddField( model_name="domain", name="redirect_to_primary", field=models.BooleanField(default=False), ), ] django-pgschemas-1.0.1/sandbox/shared_public/migrations/__init__.py000066400000000000000000000000001470131117100254340ustar00rootroot00000000000000django-pgschemas-1.0.1/sandbox/shared_public/models.py000066400000000000000000000005621470131117100230210ustar00rootroot00000000000000from django.conf import settings from django.db import models from django_pgschemas.models import TenantModel from django_pgschemas.routing.models import DomainModel class Tenant(TenantModel): 
    pass


if settings.TENANTS.get("default", {}).get("DOMAIN_MODEL", None) is not None:

    class Domain(DomainModel):
        pass


class Catalog(models.Model):
    pass
django-pgschemas-1.0.1/sandbox/templates/000077500000000000000000000000001470131117100203535ustar00rootroot00000000000000django-pgschemas-1.0.1/sandbox/templates/index.html000066400000000000000000000003661470131117100223550ustar00rootroot00000000000000
Path:
{{ path }}
User:
{{ user }}
Schema:
{{ schema }}
Routing:
{{ routing }}
Admin URL:
{{ admin_url }}
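# A minimal sketch of a view that could render the index.html template above; the
# view name, the admin URL wiring and the use of request.user are assumptions, while
# the template variables and request.tenant -- set by the routing middleware -- come
# from this project.
from django.shortcuts import render
from django.urls import reverse


def index(request):
    context = {
        "path": request.path,
        "user": request.user,
        "schema": request.tenant,           # attached by DomainRoutingMiddleware
        "routing": request.tenant.routing,  # DomainInfo / SessionInfo / HeadersInfo
        "admin_url": reverse("admin:index"),
    }
    return render(request, "index.html", context)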
django-pgschemas-1.0.1/sandbox/tests/000077500000000000000000000000001470131117100175175ustar00rootroot00000000000000django-pgschemas-1.0.1/sandbox/tests/__init__.py000066400000000000000000000000001470131117100216160ustar00rootroot00000000000000django-pgschemas-1.0.1/sandbox/tests/bugs/000077500000000000000000000000001470131117100204575ustar00rootroot00000000000000django-pgschemas-1.0.1/sandbox/tests/bugs/__init__.py000066400000000000000000000000001470131117100225560ustar00rootroot00000000000000django-pgschemas-1.0.1/sandbox/tests/bugs/test_migrations_in_base_models.py000066400000000000000000000056301470131117100272730ustar00rootroot00000000000000import warnings from unittest.mock import patch import pytest from django.apps import apps from django.core import management from django.core.management.base import CommandError from django.db import models from django.db.utils import ProgrammingError from django_pgschemas.checks import check_schema_names from django_pgschemas.models import TenantModel as BaseTenantModel from django_pgschemas.utils import get_tenant_model def patched_get_tenant_model(*args, **kwargs): if RealTenantModel := get_tenant_model(): class TenantModel(BaseTenantModel): dummy = models.TextField() class Meta: app_label = RealTenantModel._meta.app_label return TenantModel return None @pytest.mark.bug def test_database_checks_with_zero_migrations(transactional_db): """ Provoke a handled ProgrammingError by migrating models from empty database. """ management.call_command("migrate", "shared_public", "zero", verbosity=0) # The goal is that the next line doesn't raise ProgrammingError check_schema_names(apps.get_app_config("django_pgschemas")) management.call_command("migrate", verbosity=0) @pytest.mark.bug @patch("django_pgschemas.management.commands.get_tenant_model", patched_get_tenant_model) def test_whowill_with_pending_migrations(TenantModel, db): """ Provoke a handled ProgrammingError by running tenant command with pending model changes. """ if TenantModel is None: pytest.skip("Dynamic tenants are not in use") with warnings.catch_warnings(): warnings.simplefilter("ignore") # Avoid warnings about model being registered twice with pytest.raises(CommandError) as ctx: management.call_command("whowill", all_schemas=True, verbosity=0) assert str(ctx.value) == ( "Error while attempting to retrieve dynamic schemas. " "Perhaps you need to migrate the 'public' schema first?" 
) @pytest.mark.bug def test_migrate_with_exclusions(TenantModel, db): if TenantModel is None: pytest.skip("Dynamic tenants are not in use") # We first unapply a migration with fake so we can reapply it without fake # This should work without errors management.call_command( "migrate", "app_tenants", "0001_initial", fake=True, schemas=["tenant1"], verbosity=0 ) # We then migrate on all schemas except for tenant1, THIS IS THE CASE WE WANT TO TEST # This should work without errors management.call_command("migrate", all_schemas=True, excluded_schemas=["tenant1"], verbosity=0) # If we try to global migrate now, we should get a ProgrammingError with pytest.raises(ProgrammingError): management.call_command("migrate", all_schemas=True, verbosity=0) # We finally apply the migration again with fake # This should work without errors management.call_command("migrate", fake=True, all_schemas=True, verbosity=0) django-pgschemas-1.0.1/sandbox/tests/bugs/test_url_cache.py000066400000000000000000000031251470131117100240160ustar00rootroot00000000000000import pytest from django.apps import apps from django.test import Client @pytest.fixture def UserModel(): return apps.get_model("shared_common.User") @pytest.fixture(autouse=True) def _setup(tenant1, tenant2, DomainModel, UserModel): if DomainModel is None: pytest.skip("Domain model is not in use") DomainModel.objects.create( tenant=tenant1, domain="everyone.localhost", folder="tenant1", is_primary=True ) DomainModel.objects.create( tenant=tenant2, domain="everyone.localhost", folder="tenant2", is_primary=True ) with tenant1: UserModel.objects.create(email="user1@localhost", display_name="Admin") with tenant2: UserModel.objects.create(email="user2@localhost", display_name="Admin") @pytest.fixture def client(): return Client(headers={"host": "everyone.localhost"}) @pytest.mark.bug def test_bug_in_cached_urls_1(client): # Provoke redirect to login on tenant2 client.get("/tenant2/profile/advanced/") # Provoke redirect to login on tenant1 buggy_response = client.get("/tenant1/profile/advanced/") assert buggy_response.status_code == 302 assert buggy_response.url == "/tenant1/login/?next=/tenant1/profile/advanced/" @pytest.mark.bug def test_bug_in_cached_urls_2(client): # Provoke redirect to login on tenant1 client.get("/tenant1/profile/advanced/") # Provoke redirect to login on tenant2 buggy_response = client.get("/tenant2/profile/advanced/") assert buggy_response.status_code == 302 assert buggy_response.url == "/tenant2/login/?next=/tenant2/profile/advanced/" django-pgschemas-1.0.1/sandbox/tests/commands/000077500000000000000000000000001470131117100213205ustar00rootroot00000000000000django-pgschemas-1.0.1/sandbox/tests/commands/__init__.py000066400000000000000000000000001470131117100234170ustar00rootroot00000000000000django-pgschemas-1.0.1/sandbox/tests/commands/test_executors.py000066400000000000000000000017071470131117100247570ustar00rootroot00000000000000import pytest from django.core import management @pytest.fixture(autouse=True) def _setup(TenantModel, DomainModel, db): if TenantModel is None: pytest.skip("Dynamic tenants are not in use") tenants = [] for i in range(10, 20): tenant = TenantModel(schema_name=f"tenant{i + 1}") tenant.save(verbosity=0) if DomainModel: DomainModel.objects.create( tenant=tenant, domain=f"tenant{i + 1}.localhost", is_primary=True ) tenants.append(tenant) yield for tenant in tenants: tenant.delete(force_drop=True) def test_all_schemas_in_sequential(): # If there are no errors, then this test passed 
management.call_command("migrate", all_schemas=True, parallel=False, verbosity=0) def test_all_schemas_in_parallel(): # If there are no errors, then this test passed management.call_command("migrate", all_schemas=True, parallel=True, verbosity=0) django-pgschemas-1.0.1/sandbox/tests/commands/test_reverse_url.py000066400000000000000000000014441470131117100252710ustar00rootroot00000000000000import pytest from django.core import management from django.urls.exceptions import NoReverseMatch def test_urls_for_main_error(db): with pytest.raises(NoReverseMatch): management.call_command("reverse_url", "entries", schemas=["www"]) def test_urls_for_main_success(stdout, db): management.call_command("reverse_url", "register", schemas=["www"], stdout=stdout) stdout.seek(0) assert stdout.read().strip() == "/register/" def test_urls_for_blog_error(db): with pytest.raises(NoReverseMatch): management.call_command("reverse_url", "register", schemas=["blog"]) def test_urls_for_blog_success(stdout, db): management.call_command("reverse_url", "entries", schemas=["blog"], stdout=stdout) stdout.seek(0) assert stdout.read().strip() == "/entries/" django-pgschemas-1.0.1/sandbox/tests/commands/test_schema_creation.py000066400000000000000000000037451470131117100260660ustar00rootroot00000000000000from unittest.mock import patch import pytest from django.core.management import call_command from django.core.management.base import CommandError from django_pgschemas import utils @pytest.fixture(autouse=True) def _setup(TenantModel): if TenantModel is None: pytest.skip("Dynamic tenants are not in use") def test_cloneschema(transactional_db): utils._create_clone_schema_function() assert not utils.schema_exists("cloned") call_command("cloneschema", "sample", "cloned", verbosity=0) # All good assert utils.schema_exists("cloned") with pytest.raises(CommandError): # Existing destination call_command("cloneschema", "sample", "cloned", verbosity=0) with pytest.raises(CommandError): # Not existing source call_command("cloneschema", "nonexisting", "newschema", verbosity=0) utils.drop_schema("cloned") def test_createrefschema(transactional_db): utils.drop_schema("cloned") call_command("createrefschema", verbosity=0) # All good assert utils.schema_exists("sample") utils.drop_schema("cloned") call_command("createrefschema", recreate=True, verbosity=0) # All good too assert utils.schema_exists("sample") utils.drop_schema("cloned") call_command("createrefschema", recreate=True, verbosity=0) # All good too assert utils.schema_exists("sample") def test_interactive_cloneschema(transactional_db): answer_provider = ( n for n in [ "y", # Would you like to create a database entry? 
"", # Domain name, simulated wrong answer "tenant1copy.localhost", # Domain name, good answer ] ) def patched_input(*args, **kwargs): return next(answer_provider) with patch("builtins.input", patched_input): call_command( "cloneschema", "tenant1", "tenant1copy", verbosity=0, ) assert utils.schema_exists("tenant1copy") utils.drop_schema("tenant1copy") django-pgschemas-1.0.1/sandbox/tests/commands/test_tenant_related.py000066400000000000000000000147121470131117100257270ustar00rootroot00000000000000from unittest.mock import patch import pytest from django.core import management from django.core.management.base import CommandError from django_pgschemas.management.commands import CommandScope from django_pgschemas.management.commands.whowill import Command as WhoWillCommand @pytest.fixture(autouse=True) def _setup(tenant1, tenant2, TenantModel, DomainModel): if TenantModel is None: pytest.skip("Dynamic tenants are not in use") if DomainModel: DomainModel.objects.create( tenant=tenant1, domain="tenant1.localhost", is_primary=True, ) DomainModel.objects.create( tenant=tenant1, domain="everyone.localhost", folder="tenant1", is_primary=False, ) DomainModel.objects.create( tenant=tenant2, domain="tenant2.localhost", is_primary=True, ) DomainModel.objects.create( tenant=tenant2, domain="everyone.localhost", folder="tenant2", is_primary=False, ) def test_no_schema_provided(): command = WhoWillCommand() with pytest.raises(CommandError) as ctx: management.call_command(command, interactive=False, verbosity=0) assert str(ctx.value) == "No schema provided" def test_no_all_schemas_allowed(): command = WhoWillCommand() command.allow_wildcards = False with pytest.raises(TypeError): management.call_command(command, all_schemas=True, verbosity=0) def test_no_static_schemas_allowed(): command = WhoWillCommand() command.scope = CommandScope.DYNAMIC with pytest.raises(CommandError) as ctx: management.call_command(command, static_schemas=True, verbosity=0) assert str(ctx.value) == "Including static schemas is NOT allowed" command = WhoWillCommand() command.allow_wildcards = False with pytest.raises(TypeError): management.call_command(command, static_schemas=True, verbosity=0) def test_no_dynamic_schemas_allowed(): command = WhoWillCommand() command.scope = CommandScope.STATIC with pytest.raises(CommandError) as ctx: management.call_command(command, dynamic_schemas=True, verbosity=0) assert str(ctx.value) == "Including dynamic schemas is NOT allowed" command = WhoWillCommand() command.allow_wildcards = False with pytest.raises(TypeError): management.call_command(command, dynamic_schemas=True, verbosity=0) def test_no_tenant_like_schemas_allowed(): command = WhoWillCommand() command.scope = CommandScope.STATIC with pytest.raises(CommandError) as ctx: management.call_command(command, tenant_schemas=True, verbosity=0) assert str(ctx.value) == "Including tenant-like schemas is NOT allowed" command = WhoWillCommand() command.allow_wildcards = False with pytest.raises(TypeError): management.call_command(command, tenant_schemas=True, verbosity=0) def test_nonexisting_schema(): with pytest.raises(CommandError) as ctx: management.call_command("whowill", schemas=["unknown"], verbosity=0) assert str(ctx.value) == "No schema found for 'unknown'" def test_ambiguous_schema(DomainModel): if DomainModel is None: pytest.skip("Domain model is not in use") with pytest.raises(CommandError) as ctx: management.call_command("whowill", schemas=["tenant"], verbosity=0) assert ( str(ctx.value) == "More than one tenant found for schema 
'tenant' by domain, please, narrow down the filter" ) def test_specific_schemas(): command = WhoWillCommand() command.specific_schemas = ["blog"] with pytest.raises(CommandError) as ctx: management.call_command(command, schemas=["www"], verbosity=0) assert str(ctx.value) == "This command can only run in ['blog']" def test_nonexisting_schema_excluded(): with pytest.raises(CommandError) as ctx: management.call_command( "whowill", all_schemas=True, excluded_schemas=["unknown"], verbosity=0 ) assert str(ctx.value) == "No schema found for 'unknown' (excluded)" def test_ambiguous_schema_excluded(DomainModel): if DomainModel is None: pytest.skip("Domain model is not in use") with pytest.raises(CommandError) as ctx: management.call_command( "whowill", all_schemas=True, excluded_schemas=["tenant"], verbosity=0 ) assert ( str(ctx.value) == "More than one tenant found for schema 'tenant' by domain (excluded), please, narrow down the filter" ) def test_existing_schema_excluded_ok(): management.call_command("whowill", all_schemas=True, excluded_schemas=["tenant1"], verbosity=0) def test_interactive_ok(): def patched_input(*args, **kwargs): return "blog" with patch("builtins.input", patched_input): management.call_command("whowill", schemas=[], verbosity=0) def test_interactive_nonexisting(): def patched_input(*args, **kwargs): return "unknown" with patch("builtins.input", patched_input): with pytest.raises(CommandError) as ctx: management.call_command("whowill", schemas=[], verbosity=0) assert str(ctx.value) == "No schema found for 'unknown'" def test_mixed_ok(DomainModel): if DomainModel is None: pytest.skip("Domain model is not in use") management.call_command("whowill", all_schemas=True, verbosity=0) management.call_command("whowill", static_schemas=True, verbosity=0) management.call_command("whowill", dynamic_schemas=True, verbosity=0) management.call_command("whowill", tenant_schemas=True, verbosity=0) management.call_command("whowill", schemas=["public", "sample"], verbosity=0) management.call_command( "whowill", all_schemas=True, static_schemas=True, dynamic_schemas=True, tenant_schemas=True, schemas=["public", "sample"], verbosity=0, ) management.call_command( "whowill", all_schemas=True, excluded_schemas=["public", "sample"], verbosity=0 ) management.call_command("whowill", schemas=["everyone.localhost/tenant1"], verbosity=0) management.call_command("whowill", schemas=["tenant1"], verbosity=0) management.call_command( "whowill", all_schemas=True, excluded_schemas=["everyone.localhost/tenant1"], verbosity=0, ) management.call_command("whowill", all_schemas=True, excluded_schemas=["tenant1"], verbosity=0) django-pgschemas-1.0.1/sandbox/tests/commands/test_whowill.py000066400000000000000000000101241470131117100244140ustar00rootroot00000000000000import pytest from django.core import management @pytest.fixture(autouse=True) def _setup(tenant1, tenant2, tenant3, TenantModel, DomainModel): if TenantModel is None: pytest.skip("Dynamic tenants are not in use") if DomainModel: for tenant in [tenant1, tenant2, tenant3]: DomainModel.objects.create( tenant=tenant, domain=f"{tenant.schema_name}.localhost", is_primary=True ) def split_output(buffer): buffer.seek(0) return set(buffer.read().strip().splitlines()) def test_all_schemas(DomainModel, stdout): management.call_command("whowill", all_schemas=True, stdout=stdout) expected_dynamic = ( {"tenant1.localhost", "tenant2.localhost", "tenant3.localhost"} if DomainModel else {"tenant1", "tenant2", "tenant3"} ) assert ( split_output(stdout) == {"public", 
"sample", "localhost", "blog.localhost"} | expected_dynamic ) def test_static_schemas(stdout): management.call_command("whowill", static_schemas=True, stdout=stdout) assert split_output(stdout) == {"public", "sample", "localhost", "blog.localhost"} def test_tenant_like_schemas(DomainModel, stdout): management.call_command("whowill", tenant_schemas=True, stdout=stdout) expected_dynamic = ( {"tenant1.localhost", "tenant2.localhost", "tenant3.localhost"} if DomainModel else {"tenant1", "tenant2", "tenant3"} ) assert split_output(stdout) == {"sample"} | expected_dynamic def test_dynamic_schemas(DomainModel, stdout): management.call_command("whowill", dynamic_schemas=True, stdout=stdout) expected_dynamic = ( {"tenant1.localhost", "tenant2.localhost", "tenant3.localhost"} if DomainModel else {"tenant1", "tenant2", "tenant3"} ) assert split_output(stdout) == expected_dynamic def test_specific_schemas(DomainModel, stdout): management.call_command("whowill", schemas=["www", "blog", "tenant1"], stdout=stdout) expected_dynamic = {"tenant1.localhost"} if DomainModel else {"tenant1"} assert split_output(stdout) == {"localhost", "blog.localhost"} | expected_dynamic # Same test cases as before, but excluding one def test_all_schemas_minus_one(DomainModel, stdout): management.call_command("whowill", all_schemas=True, excluded_schemas=["blog"], stdout=stdout) expected_dynamic = ( { "tenant1.localhost", "tenant2.localhost", "tenant3.localhost", } if DomainModel else {"tenant1", "tenant2", "tenant3"} ) assert split_output(stdout) == {"public", "sample", "localhost"} | expected_dynamic def test_static_schemas_minus_one(stdout): management.call_command( "whowill", static_schemas=True, excluded_schemas=["sample"], stdout=stdout ) assert split_output(stdout) == {"public", "localhost", "blog.localhost"} def test_tenant_like_schemas_minus_one(DomainModel, stdout): management.call_command( "whowill", tenant_schemas=True, excluded_schemas=["tenant1"], stdout=stdout ) expected_dynamic = ( {"tenant2.localhost", "tenant3.localhost"} if DomainModel else {"tenant2", "tenant3"} ) assert split_output(stdout) == {"sample"} | expected_dynamic def test_dynamic_schemas_minus_one(DomainModel, stdout): management.call_command( "whowill", dynamic_schemas=True, excluded_schemas=["public"], stdout=stdout ) expected_dynamic = ( { "tenant1.localhost", "tenant2.localhost", "tenant3.localhost", } if DomainModel else { "tenant1", "tenant2", "tenant3", } ) assert split_output(stdout) == expected_dynamic def test_specific_schemas_minus_one(DomainModel, stdout): management.call_command( "whowill", schemas=["www", "blog", "tenant1"], excluded_schemas=["www"], stdout=stdout, ) expected_dynamic = {"tenant1.localhost"} if DomainModel else {"tenant1"} assert split_output(stdout) == {"blog.localhost"} | expected_dynamic django-pgschemas-1.0.1/sandbox/tests/conftest.py000066400000000000000000000031651470131117100217230ustar00rootroot00000000000000from io import StringIO import pytest @pytest.fixture(scope="session", autouse=True) def setup(django_db_setup, django_db_blocker): from sandbox.shared_public.models import Tenant with django_db_blocker.unblock(): Tenant.objects.get_or_create(schema_name="tenant1") Tenant.objects.get_or_create(schema_name="tenant2") Tenant.objects.get_or_create(schema_name="tenant3") @pytest.fixture(autouse=True, params=["static-only", "tenants-no-domains", "tenants-and-domains"]) def tenants_settings(request, settings): from copy import deepcopy current = deepcopy(settings.TENANTS) if request.param == "static-only": del 
settings.TENANTS["default"] if request.param == "tenants-no-domains": del settings.TENANTS["default"]["DOMAIN_MODEL"] yield settings.TENANTS settings.TENANTS.clear() settings.TENANTS.update(current) @pytest.fixture def TenantModel(): from django_pgschemas.utils import get_tenant_model return get_tenant_model() @pytest.fixture def DomainModel(): from django_pgschemas.utils import get_domain_model return get_domain_model() @pytest.fixture def tenant1(db): from sandbox.shared_public.models import Tenant return Tenant.objects.get(schema_name="tenant1") @pytest.fixture def tenant2(db): from sandbox.shared_public.models import Tenant return Tenant.objects.get(schema_name="tenant2") @pytest.fixture def tenant3(db): from sandbox.shared_public.models import Tenant return Tenant.objects.get(schema_name="tenant3") @pytest.fixture def stdout(): with StringIO() as buffer: yield buffer django-pgschemas-1.0.1/sandbox/tests/contrib/000077500000000000000000000000001470131117100211575ustar00rootroot00000000000000django-pgschemas-1.0.1/sandbox/tests/contrib/__init__.py000066400000000000000000000000001470131117100232560ustar00rootroot00000000000000django-pgschemas-1.0.1/sandbox/tests/contrib/test_cache.py000066400000000000000000000013301470131117100236300ustar00rootroot00000000000000from django_pgschemas.contrib.cache import make_key, reverse_key from django_pgschemas.schema import Schema def test_make_key_with_dynamic_tenant(tenant1): with tenant1: key = make_key(key="foo", key_prefix="", version=1) chunks = key.split(":") assert len(chunks) == 4 assert str(tenant1.schema_name) == chunks[0] def test_make_key_with_static_tenant(): with Schema.create(schema_name="www"): key = make_key(key="foo", key_prefix="", version=1) chunks = key.split(":") assert len(chunks) == 4 assert "www" == chunks[0] def test_reverse_key(tenant1): key = "some-key" with tenant1: assert key == reverse_key(make_key(key=key, key_prefix="", version=1)) django-pgschemas-1.0.1/sandbox/tests/contrib/test_channels.py000066400000000000000000000065511470131117100243720ustar00rootroot00000000000000import pytest from channels.testing import WebsocketCommunicator from sandbox.routing import domain_application, headers_application @pytest.mark.asyncio async def test_on_main_domain(): communicator = WebsocketCommunicator( domain_application, "/ws/main/", headers=[(b"host", b"localhost")], ) connected, subprotocol = await communicator.connect() assert connected # Send a message await communicator.send_json_to({"message": "hello"}) # Receive the message response = await communicator.receive_json_from() assert response["message"] == "www: hello" # Close the connection await communicator.disconnect() @pytest.mark.asyncio async def test_on_tenant_subdomain(tenant3, DomainModel): if DomainModel is None: pytest.skip("Domain model is not in use") await DomainModel.objects.acreate(tenant=tenant3, domain="ws_tenant3.localhost") communicator = WebsocketCommunicator( domain_application, "/ws/tenant/", headers=[(b"host", b"ws_tenant3.localhost")], ) connected, subprotocol = await communicator.connect() assert connected # Send a message await communicator.send_json_to({"message": "hello"}) # Receive the message response = await communicator.receive_json_from() assert response["message"] == "tenant3: hello" # Close the connection await communicator.disconnect() @pytest.mark.asyncio async def test_on_tenant_subfolder(tenant3, DomainModel): if DomainModel is None: pytest.skip("Domain model is not in use") await DomainModel.objects.acreate( tenant=tenant3, 
domain="ws_tenants.localhost", folder="tenant3" ) communicator = WebsocketCommunicator( domain_application, "/tenant3/ws/tenant/", headers=[(b"host", b"ws_tenants.localhost")], ) connected, subprotocol = await communicator.connect() assert connected # Send a message await communicator.send_json_to({"message": "hello"}) # Receive the message response = await communicator.receive_json_from() assert response["message"] == "tenant3: hello" # Close the connection await communicator.disconnect() @pytest.mark.asyncio async def test_on_main_header(): communicator = WebsocketCommunicator( headers_application, "/ws/main/", headers=[(b"tenant", b"main")], ) connected, subprotocol = await communicator.connect() assert connected # Send a message await communicator.send_json_to({"message": "hello"}) # Receive the message response = await communicator.receive_json_from() assert response["message"] == "www: hello" # Close the connection await communicator.disconnect() @pytest.mark.asyncio async def test_on_tenant_header(TenantModel, db): if TenantModel is None: pytest.skip("Dynamic tenants are not in use") communicator = WebsocketCommunicator( headers_application, "/ws/tenant/", headers=[(b"tenant", b"tenant3")], ) connected, subprotocol = await communicator.connect() assert connected # Send a message await communicator.send_json_to({"message": "hello"}) # Receive the message response = await communicator.receive_json_from() assert response["message"] == "tenant3: hello" # Close the connection await communicator.disconnect() django-pgschemas-1.0.1/sandbox/tests/contrib/test_storage.py000066400000000000000000000067741470131117100242520ustar00rootroot00000000000000import os import shutil import tempfile import pytest from django.core.files.base import ContentFile from django_pgschemas.contrib.storage import TenantFileSystemStorage from django_pgschemas.routing.info import DomainInfo from django_pgschemas.schema import Schema STORAGE_BASE_URL = "/base-url/" @pytest.fixture def temp_dir(): value = tempfile.mkdtemp() yield value shutil.rmtree(value) @pytest.fixture def storage(temp_dir): return TenantFileSystemStorage(location=temp_dir, base_url=STORAGE_BASE_URL) @pytest.fixture def settings_pathname(settings): settings.PGSCHEMAS_PATHNAME_FUNCTION = lambda tenant: f"custom-pathname-{tenant.schema_name}" class TestPathIdentifier: def test_basic_dynamic(self, storage, tenant1): with tenant1: assert storage.get_schema_path_identifier() == tenant1.schema_name def test_basic_static(self, storage): with Schema.create(schema_name="www"): assert storage.get_schema_path_identifier() == "www" def test_method_in_tenant(self, storage, tenant1): tenant1.schema_pathname = lambda: "custom-pathname" with tenant1: assert storage.get_schema_path_identifier() == "custom-pathname" del tenant1.schema_pathname def test_function_in_settings(self, tenant1, storage, settings_pathname): with tenant1: assert storage.get_schema_path_identifier() == f"custom-pathname-{tenant1.schema_name}" def test_base_location(storage, temp_dir, tenant1, settings_pathname): with tenant1: assert storage.base_location == f"{temp_dir}/custom-pathname-{tenant1.schema_name}/" def test_base_url(storage, tenant1): tenant1.routing = DomainInfo(domain="irrelevant", folder="tenant1") with tenant1: assert storage.base_url == STORAGE_BASE_URL tenant1.routing = None def test_file_path(storage, temp_dir, tenant1): assert not storage.exists("test.file") with tenant1: f = ContentFile("random content") f_name = storage.save("test.file", f) assert os.path.join(temp_dir, 
tenant1.schema_name, f_name) == storage.path(f_name) storage.delete(f_name) assert not storage.exists("test.file") def test_file_save_with_path(storage, temp_dir, tenant1): assert not storage.exists("path/to") with tenant1: storage.save("path/to/test.file", ContentFile("file saved with path")) assert storage.exists("path/to") with storage.open("path/to/test.file") as f: assert f.read() == b"file saved with path" assert os.path.exists( os.path.join(temp_dir, tenant1.schema_name, "path", "to", "test.file") ) storage.delete("path/to/test.file") assert not storage.exists("test.file") def test_file_url_simple(storage, tenant1): tenant1.routing = DomainInfo(domain="irrelevant", folder="tenant1") with tenant1: assert storage.url("test.file") == "/base-url/test.file" tenant1.routing = None def test_file_url_complex(storage, tenant1): with tenant1: assert ( storage.url(r"~!*()'@#$%^&*abc`+ =.file") == f"/base-url/{tenant1.schema_name}/~!*()'%40%23%24%25%5E%26*abc%60%2B%20%3D.file" ) assert storage.url("ab\0c") == f"/base-url/{tenant1.schema_name}/ab%00c" assert storage.url("a/b\\c.file") == f"/base-url/{tenant1.schema_name}/a/b/c.file" assert storage.url("") == f"/base-url/{tenant1.schema_name}/" assert storage.url(None) == f"/base-url/{tenant1.schema_name}/" django-pgschemas-1.0.1/sandbox/tests/routing/000077500000000000000000000000001470131117100212065ustar00rootroot00000000000000django-pgschemas-1.0.1/sandbox/tests/routing/__init__.py000066400000000000000000000000001470131117100233050ustar00rootroot00000000000000django-pgschemas-1.0.1/sandbox/tests/routing/test_middleware.py000066400000000000000000000212431470131117100247360ustar00rootroot00000000000000from itertools import permutations from unittest.mock import MagicMock import pytest from django.http import Http404 from django_pgschemas.routing.info import DomainInfo, HeadersInfo, SessionInfo from django_pgschemas.routing.middleware import ( DomainRoutingMiddleware, HeadersRoutingMiddleware, SessionRoutingMiddleware, strip_tenant_from_path_factory, ) @pytest.mark.parametrize( "path, prefix, expected", [ ("/some/path/", "", "/some/path/"), ("/some/path/", "path", "/some/path/"), ("/some/path/", "some", "/path/"), ], ) def test_strip_tenant_from_path_factory(path, prefix, expected): actual = strip_tenant_from_path_factory(prefix)(path) assert actual == expected class FakeRequest: def __init__( self, *, domain: str = "", path: str = "", session_tenant_ref: str | None = None, headers_tenant_ref: str | None = None, ) -> None: self.domain = domain self.path = path self.session_tenant_ref = session_tenant_ref self.headers_tenant_ref = headers_tenant_ref def get_host(self) -> str: return self.domain @property def session(self) -> dict: return { "tenant": self.session_tenant_ref, } @property def headers(self) -> dict: return { "tenant": self.headers_tenant_ref, } class TestDomainRoutingMiddleware: @pytest.fixture(autouse=True) def _setup(self, tenant1, tenant2, DomainModel): if DomainModel is None: pytest.skip("Domain model is not in use") DomainModel.objects.create( tenant=tenant1, domain="tenant1.localhost", is_primary=True, ) DomainModel.objects.create( tenant=tenant1, domain="tenants.localhost", folder="tenant1", is_primary=False, ) DomainModel.objects.create( tenant=tenant2, domain="tenant2.localhost", is_primary=True, ) DomainModel.objects.create( tenant=tenant2, domain="tenants.localhost", folder="tenant2", is_primary=False, ) @pytest.mark.parametrize( "domain, path, schema_name", [ ("tenant1.localhost", "", "tenant1"), ("tenants.localhost", 
"tenant1", "tenant1"), ("tenant2.localhost", "", "tenant2"), ("tenants.localhost", "tenant2", "tenant2"), ("tenant3.localhost", "", None), ("localhost", "", "www"), ("blog.localhost", "", "blog"), ("tenants.localhost", "", "www"), # fallback domains ], ) def test_tenant_matching(self, domain, path, schema_name, db): request = FakeRequest(domain=domain, path=f"/{path}/some/path/") get_response = MagicMock() handler = DomainRoutingMiddleware(get_response) if schema_name is None: with pytest.raises(Http404): handler(request) else: handler(request) assert request.tenant is not None assert request.tenant.schema_name == schema_name assert isinstance(request.tenant.routing, DomainInfo) assert request.tenant.routing.domain == domain assert request.tenant.routing.folder == (path if path else None) class TestDomainRoutingMiddlewareRedirection: @pytest.fixture(autouse=True) def _setup(self, tenant1, tenant2, DomainModel): if DomainModel is None: pytest.skip("Domain model is not in use") DomainModel(domain="tenant1.localhost", tenant=tenant1).save() DomainModel( domain="tenant1redirect.localhost", tenant=tenant1, is_primary=False, redirect_to_primary=True, ).save() DomainModel( domain="everyone.localhost", folder="tenant1redirect", tenant=tenant1, is_primary=False, redirect_to_primary=True, ).save() DomainModel(domain="everyone.localhost", folder="tenant2", tenant=tenant2).save() DomainModel( domain="tenant2redirect.localhost", tenant=tenant2, is_primary=False, redirect_to_primary=True, ).save() DomainModel( domain="everyone.localhost", folder="tenant2redirect", tenant=tenant2, is_primary=False, redirect_to_primary=True, ).save() @pytest.mark.parametrize( "domain, path, expected_redirection", [ ( "tenant1redirect.localhost", "/some/random/url/", "//tenant1.localhost/some/random/url/", ), ( "everyone.localhost", "/tenant1redirect/some/random/url/", "//tenant1.localhost/some/random/url/", ), ( "tenant2redirect.localhost", "/some/random/url/", "//everyone.localhost/tenant2/some/random/url/", ), ( "everyone.localhost", "/tenant2redirect/some/random/url/", "//everyone.localhost/tenant2/some/random/url/", ), ], ) def test_redirection(self, domain, path, expected_redirection): request = FakeRequest(domain=domain, path=path) get_response = MagicMock() response = DomainRoutingMiddleware(get_response)(request) assert response.status_code == 301 assert response.url == expected_redirection assert response["Location"] == expected_redirection class TestSessionRoutingMiddleware: @pytest.mark.parametrize( "session_key, schema_name", [ ("www", "www"), ("main", "www"), ("blog", "blog"), ("tenant1", "tenant1"), ], ) def test_tenant_matching(self, DomainModel, session_key, schema_name, db): if DomainModel is None and "tenant" in schema_name: pytest.skip("Domain model is not in use") request = FakeRequest(session_tenant_ref=session_key) get_response = MagicMock() handler = SessionRoutingMiddleware(get_response) handler(request) assert request.tenant is not None assert request.tenant.schema_name == schema_name assert isinstance(request.tenant.routing, SessionInfo) assert request.tenant.routing.reference == session_key class TestHeadersRoutingMiddleware: @pytest.mark.parametrize( "header, schema_name", [ ("www", "www"), ("main", "www"), ("blog", "blog"), ("tenant1", "tenant1"), ], ) def test_tenant_matching(self, DomainModel, header, schema_name, db): if DomainModel is None and "tenant" in schema_name: pytest.skip("Domain model is not in use") request = FakeRequest(headers_tenant_ref=header) get_response = MagicMock() 
handler = HeadersRoutingMiddleware(get_response) handler(request) assert request.tenant is not None assert request.tenant.schema_name == schema_name assert isinstance(request.tenant.routing, HeadersInfo) assert request.tenant.routing.reference == header @pytest.mark.parametrize( "first_middleware, second_middleware, last_middleware", permutations([DomainRoutingMiddleware, SessionRoutingMiddleware, HeadersRoutingMiddleware]), ) def test_last_middleware_prevails( first_middleware, second_middleware, last_middleware, tenant1, tenant2, tenant3, DomainModel ): if DomainModel is None: pytest.skip("Domain model is not in use") DomainModel.objects.create( domain="tenants.localhost", tenant=tenant1, folder="tenant1", is_primary=True, ) request = FakeRequest( domain="tenants.localhost", path="/tenant1/some/path/", session_tenant_ref=tenant2.schema_name, headers_tenant_ref=tenant3.schema_name, ) get_response = MagicMock() handler = first_middleware(second_middleware(last_middleware(get_response))) handler(request) if last_middleware is DomainRoutingMiddleware: assert request.tenant == tenant1 assert isinstance(request.tenant.routing, DomainInfo) if last_middleware is SessionRoutingMiddleware: assert request.tenant == tenant2 assert isinstance(request.tenant.routing, SessionInfo) if last_middleware is HeadersRoutingMiddleware: assert request.tenant == tenant3 assert isinstance(request.tenant.routing, HeadersInfo) django-pgschemas-1.0.1/sandbox/tests/routing/test_models.py000066400000000000000000000053611470131117100241070ustar00rootroot00000000000000import pytest from django_pgschemas.routing.models import get_primary_domain_for_tenant @pytest.fixture(autouse=True) def _setup(DomainModel): if DomainModel is None: pytest.skip("Domain model is not in use") @pytest.mark.parametrize( "domain, folder, expected", [ ("tenants.localhost", "", "tenants.localhost"), ("tenants.localhost", "tenant1", "tenants.localhost/tenant1"), ], ) def test_str(tenant1, domain, folder, expected, DomainModel): item = DomainModel.objects.create( tenant=tenant1, domain=domain, folder=folder, ) assert str(item) == expected def test_only_one_primary(tenant1, DomainModel): domain1 = DomainModel.objects.create( tenant=tenant1, domain="tenant1.localhost", folder="", is_primary=True, ) assert domain1.is_primary domain2 = DomainModel.objects.create( tenant=tenant1, domain="tenants.localhost", folder="tenant1", is_primary=True, ) domain1.refresh_from_db() assert not domain1.is_primary assert domain2.is_primary @pytest.mark.parametrize("is_primary", [True, False]) def test_redirect_to_primary_if_primary(tenant1, is_primary, DomainModel): domain1 = DomainModel.objects.create( domain="tenant1.localhost", folder="", is_primary=is_primary, redirect_to_primary=True, tenant=tenant1, ) assert domain1.redirect_to_primary is not domain1.is_primary @pytest.mark.parametrize( "domain, folder, path, expected", [ ("tenants.localhost", "", "", "//tenants.localhost/"), ("tenants.localhost", "tenant1", "", "//tenants.localhost/tenant1/"), ("tenants.localhost", "tenant1", "some/path", "//tenants.localhost/tenant1/some/path"), ("tenants.localhost", "tenant1", "/some/path", "//tenants.localhost/tenant1/some/path"), ("tenants.localhost", "tenant1", "/some/path/", "//tenants.localhost/tenant1/some/path/"), ], ) def test_absolute_url(tenant1, domain, folder, path, expected, DomainModel): item = DomainModel.objects.create( tenant=tenant1, domain=domain, folder=folder, ) assert item.absolute_url(path) == expected @pytest.mark.parametrize("is_primary", [True, 
False, None]) def test_get_primary_domain_for_tenant(tenant1, is_primary, DomainModel): if is_primary is not None: item = DomainModel.objects.create( tenant=tenant1, domain="tenant1.localhost", ) DomainModel.objects.update(is_primary=is_primary) item.refresh_from_db() if is_primary: assert get_primary_domain_for_tenant(tenant1) == item else: assert get_primary_domain_for_tenant(tenant1) is None django-pgschemas-1.0.1/sandbox/tests/routing/test_urlresolvers.py000066400000000000000000000014701470131117100253700ustar00rootroot00000000000000from django.conf import settings from django.urls import reverse from django_pgschemas.routing.info import DomainInfo from django_pgschemas.routing.urlresolvers import get_dynamic_tenant_prefixed_urlconf def test_no_tenant(): url = reverse("profile") assert url == "/profile/" def test_tenant_with_no_folder(tenant1): with tenant1: url = reverse("profile") assert url == "/profile/" def test_tenant_with_folder(tenant1): tenant1.routing = DomainInfo(domain="irrelevant", folder="tenant1") dynamic_path = settings.ROOT_URLCONF + "_dynamically_tenant_prefixed" urlconf = get_dynamic_tenant_prefixed_urlconf(settings.ROOT_URLCONF, dynamic_path) with tenant1: url = reverse("profile", urlconf=urlconf) tenant1.routing = None assert url == "/tenant1/profile/" django-pgschemas-1.0.1/sandbox/tests/test_apps.py000066400000000000000000000010121470131117100220650ustar00rootroot00000000000000from unittest.mock import patch from django.apps import apps @patch("django_pgschemas.checks.ensure_tenant_dict") @patch("django_pgschemas.checks.ensure_public_schema") @patch("django_pgschemas.checks.ensure_default_schemas") @patch("django_pgschemas.checks.ensure_overall_schemas") @patch("django_pgschemas.checks.ensure_extra_search_paths") def test_all_checkers_called(*checkers): config = apps.get_app_config("django_pgschemas") config.ready() for checker in checkers: checker.assert_called() django-pgschemas-1.0.1/sandbox/tests/test_checks.py000066400000000000000000000242241470131117100223740ustar00rootroot00000000000000import pytest from django.apps import apps from django.core.checks import Critical, Error, Warning from django.core.exceptions import ImproperlyConfigured from django_pgschemas import checks @pytest.fixture def app_config(): return apps.get_app_config("django_pgschemas") @pytest.fixture def tenant_manager(TenantModel, db): if TenantModel is None: yield None else: backup = TenantModel.auto_create_schema TenantModel.auto_create_schema = False yield TenantModel._default_manager TenantModel.auto_create_schema = backup def test_get_tenant_app(tenants_settings, TenantModel): if TenantModel: assert checks.get_tenant_app() == "sandbox.shared_public" del tenants_settings["default"] assert checks.get_tenant_app() is None def test_get_domain_app(tenants_settings, DomainModel): if DomainModel: assert checks.get_domain_app() == "sandbox.shared_public" del tenants_settings["default"]["DOMAIN_MODEL"] assert checks.get_domain_app() is None def test_get_user_app(settings): assert checks.get_user_app() == "sandbox.shared_common" del settings.AUTH_USER_MODEL assert checks.get_user_app() is None def test_get_session_app(settings): assert checks.get_session_app() == "django.contrib.sessions" def test_ensure_tenant_dict(settings): backup = settings.TENANTS del settings.TENANTS with pytest.raises(ImproperlyConfigured) as ctx: checks.ensure_tenant_dict() assert str(ctx.value) == "TENANTS dict setting not set." 
settings.TENANTS = backup class TestEnsurePublicSchema: @pytest.mark.parametrize("value", [None, "", 1]) def test_no_dict(self, tenants_settings, value): tenants_settings["public"] = value with pytest.raises(ImproperlyConfigured) as ctx: checks.ensure_public_schema() assert str(ctx.value) == "TENANTS must contain a 'public' dict." @pytest.mark.parametrize( "member", [ "URLCONF", "WS_URLCONF", "DOMAINS", "FALLBACK_DOMAINS", "SESSION_KEY", "HEADER", ], ) def test_invalid_members(self, tenants_settings, member): tenants_settings["public"][member] = None with pytest.raises(ImproperlyConfigured) as ctx: checks.ensure_public_schema() assert str(ctx.value) == f"TENANTS['public'] cannot contain a '{member}' key." class TestEnsureDefaultSchema: def test_static_only(self, tenants_settings): if "default" in tenants_settings: del tenants_settings["default"] checks.ensure_default_schemas() @pytest.mark.parametrize("value", [None, "", 1]) def test_no_dict(self, tenants_settings, value): tenants_settings["default"] = value with pytest.raises(ImproperlyConfigured) as ctx: checks.ensure_default_schemas() assert str(ctx.value) == "TENANTS must contain a 'default' dict." @pytest.mark.parametrize( "member", [ "DOMAINS", "FALLBACK_DOMAINS", "SESSION_KEY", "HEADER", ], ) def test_invalid_members(self, tenants_settings, member): if "default" not in tenants_settings: pytest.skip("default not in tenant settings") tenants_settings["default"][member] = None with pytest.raises(ImproperlyConfigured) as ctx: checks.ensure_default_schemas() assert str(ctx.value) == f"TENANTS['default'] cannot contain a '{member}' key." @pytest.mark.parametrize( "member", [ "TENANT_MODEL", "URLCONF", ], ) def test_required_members(self, tenants_settings, member): if "default" not in tenants_settings: pytest.skip("default not in tenant settings") del tenants_settings["default"][member] with pytest.raises(ImproperlyConfigured) as ctx: checks.ensure_default_schemas() assert str(ctx.value) == f"TENANTS['default'] must contain a '{member}' key." @pytest.mark.parametrize( "name", [ "public", "default", "blog", "www", ], ) def test_clone_reference_invalid_name(self, tenants_settings, name): if "default" not in tenants_settings: pytest.skip("default not in tenant settings") tenants_settings["default"]["CLONE_REFERENCE"] = name with pytest.raises(ImproperlyConfigured) as ctx: checks.ensure_default_schemas() assert ( str(ctx.value) == "TENANTS['default']['CLONE_REFERENCE'] must be a unique schema name." ) class TestEnsureOverallSchema: @pytest.mark.parametrize( "name", [ "pg_something", "1something", ".something", "-something", "tomanycharacters0123456789abcdef0123456789abcef0123456789abcdef0", ], ) def test_invalid_names(self, tenants_settings, name): tenants_settings[name] = {} with pytest.raises(ImproperlyConfigured) as ctx: checks.ensure_overall_schemas() assert str(ctx.value) == f"'{name}' is not a valid schema name." @pytest.mark.parametrize( "extra", [ "public", "blog", "www", # "default", ], ) def test_ensure_extra_search_paths(settings, extra, db): settings.PGSCHEMAS_EXTRA_SEARCH_PATHS = [extra] with pytest.raises(ImproperlyConfigured) as ctx: checks.ensure_extra_search_paths() invalid = ", ".join([extra]) assert str(ctx.value) == f"Do not include '{invalid}' on PGSCHEMAS_EXTRA_SEARCH_PATHS." 
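# A minimal sketch (the wrapper name is hypothetical) showing that the checks
# exercised in this module are plain functions taking the app config, so they can
# also be invoked ad hoc outside of Django's system-check framework.
def run_principal_checks() -> list:
    app_config = apps.get_app_config("django_pgschemas")
    # Each checker returns a (possibly empty) list of django.core.checks messages.
    return [
        *checks.check_principal_apps(app_config),
        *checks.check_other_apps(app_config),
    ]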
class TestCheckPrincipalApps: BASE_DEFAULT = {"TENANT_MODEL": "shared_public.Tenant", "DOMAIN_MODEL": "shared_public.DOMAIN"} def test_location_wrong(self, tenants_settings, app_config): tenants_settings.update( { "public": {"APPS": []}, "default": self.BASE_DEFAULT, } ) expected_errors = [ Error( "Your tenant app 'sandbox.shared_public' must be on the 'public' schema.", id="pgschemas.W001", ), Error( "Your domain app 'sandbox.shared_public' must be on the 'public' schema.", id="pgschemas.W001", ), ] errors = checks.check_principal_apps(app_config) assert errors == expected_errors def test_location_twice(self, tenants_settings, app_config): tenants_settings.update( { "public": {"APPS": ["sandbox.shared_public"]}, "default": {**self.BASE_DEFAULT, "APPS": ["sandbox.shared_public"]}, } ) expected_errors = [ Error( "Your tenant app 'sandbox.shared_public' in TENANTS['default']['APPS'] " "must be on the 'public' schema only.", id="pgschemas.W001", ), Error( "Your domain app 'sandbox.shared_public' in TENANTS['default']['APPS'] " "must be on the 'public' schema only.", id="pgschemas.W001", ), ] errors = checks.check_principal_apps(app_config) assert errors == expected_errors class TestCheckOtherApps: def test_contenttypes_location_wrong(self, tenants_settings, app_config): tenants_settings.update( { "default": {"APPS": ["django.contrib.contenttypes"]}, } ) expected_errors = [ Warning( "'django.contrib.contenttypes' in TENANTS['default']['APPS'] " "must be on 'public' schema only.", id="pgschemas.W002", ) ] errors = checks.check_other_apps(app_config) assert errors == expected_errors def test_contenttypes_location_twice(self, tenants_settings, app_config): tenants_settings.update( { "default": {}, "www": {"APPS": ["django.contrib.contenttypes"]}, } ) expected_errors = [ Warning( "'django.contrib.contenttypes' in TENANTS['www']['APPS'] " "must be on 'public' schema only.", id="pgschemas.W002", ) ] errors = checks.check_other_apps(app_config) assert errors == expected_errors def test_user_location_wrong(self, tenants_settings, app_config): user_app = checks.get_user_app() tenants_settings.update( { "default": {"APPS": ["django.contrib.sessions"]}, } ) expected_errors = [ Warning( f"'{user_app}' must be together with 'django.contrib.sessions' " "in TENANTS['default']['APPS'].", id="pgschemas.W003", ) ] errors = checks.check_other_apps(app_config) assert errors == expected_errors def test_session_location_wrong(self, tenants_settings, app_config): user_app = checks.get_user_app() tenants_settings.update( { "www": {"APPS": ["shared_common", user_app]}, "default": {"APPS": ["shared_common"]}, } ) expected_errors = [ Warning( f"'django.contrib.sessions' must be together with '{user_app}' " "in TENANTS['www']['APPS'].", id="pgschemas.W003", ) ] errors = checks.check_other_apps(app_config) assert errors == expected_errors @pytest.mark.parametrize("schema", ["public", "www", "blog", "sample"]) def test_check_schema_names(schema, app_config, tenant_manager): if tenant_manager is None: pytest.skip("Dynamic tenants are not in use") tenant_manager.create(schema_name=schema) expected_errors = [ Critical( f"Name clash found between static and dynamic tenants: {{'{schema}'}}", id="pgschemas.W004", ), ] errors = checks.check_schema_names(app_config) assert errors == expected_errors django-pgschemas-1.0.1/sandbox/tests/test_log.py000066400000000000000000000027001470131117100217100ustar00rootroot00000000000000from django_pgschemas.log import SchemaContextFilter from django_pgschemas.routing.info import DomainInfo, 
HeadersInfo, SessionInfo from django_pgschemas.schema import Schema class FakeRecord: pass class TestSchemaContextFilter: def test_filter_with_domain(self): record = FakeRecord() scf = SchemaContextFilter() with Schema.create( schema_name="some-tenant", routing=DomainInfo( domain="some-tenant.some-url.com", folder="folder1", ), ): scf.filter(record) assert record.schema_name == "some-tenant" assert record.domain == "some-tenant.some-url.com" assert record.folder == "folder1" def test_filter_with_session(self): record = FakeRecord() scf = SchemaContextFilter() with Schema.create( schema_name="some-tenant", routing=SessionInfo(reference="tenant1"), ): scf.filter(record) assert record.schema_name == "some-tenant" assert record.reference == "tenant1" def test_filter_with_header(self): record = FakeRecord() scf = SchemaContextFilter() with Schema.create( schema_name="some-tenant", routing=HeadersInfo(reference="tenant1"), ): scf.filter(record) assert record.schema_name == "some-tenant" assert record.reference == "tenant1" django-pgschemas-1.0.1/sandbox/tests/test_schema.py000066400000000000000000000065221470131117100223750ustar00rootroot00000000000000import pytest from django.template import Context, Template from django_pgschemas.routing.info import DomainInfo, HeadersInfo, SessionInfo from django_pgschemas.schema import ( Schema, deactivate, get_current_schema, get_default_schema, override, shallow_equal, ) @pytest.mark.parametrize( "schema1, schema2, equals", [ ( get_default_schema(), get_default_schema(), True, ), ( Schema.create("test1"), Schema.create("test1"), True, ), ( Schema.create("test1"), Schema.create("test2"), False, ), ( Schema.create("test1"), Schema.create("test1", SessionInfo("ref1")), False, ), ( Schema.create("test1", HeadersInfo("ref1")), Schema.create("test1", SessionInfo("ref1")), False, ), ( Schema.create("test1", HeadersInfo("ref1")), Schema.create("test1", HeadersInfo("ref1")), True, ), ( Schema.create("test1", DomainInfo("domain1")), Schema.create("test1", DomainInfo("domain1", "folder")), False, ), ( Schema.create("test1", DomainInfo("domain1", "folder")), Schema.create("test1", DomainInfo("domain1", "folder")), True, ), ], ) def test_shallow_equal(schema1, schema2, equals): assert shallow_equal(schema1, schema2) == equals def test_nested_override(): deactivate() schema1 = Schema.create(schema_name="schema_1") schema2 = Schema.create(schema_name="schema_2") assert get_current_schema().schema_name == get_default_schema().schema_name with override(schema1): assert get_current_schema().schema_name == schema1.schema_name with override(schema2): assert get_current_schema().schema_name == schema2.schema_name with override(schema1): assert get_current_schema().schema_name == schema1.schema_name assert get_current_schema().schema_name == schema2.schema_name assert get_current_schema().schema_name == schema1.schema_name assert get_current_schema().schema_name == get_default_schema().schema_name def test_nested_class_override(): deactivate() schema1 = Schema.create(schema_name="schema_1") schema2 = Schema.create(schema_name="schema_2") assert get_current_schema().schema_name == get_default_schema().schema_name with schema1: assert get_current_schema().schema_name == schema1.schema_name with schema2: assert get_current_schema().schema_name == schema2.schema_name with schema1: assert get_current_schema().schema_name == schema1.schema_name assert get_current_schema().schema_name == schema2.schema_name assert get_current_schema().schema_name == schema1.schema_name assert 
get_current_schema().schema_name == get_default_schema().schema_name def test_schema_is_template_renderable(): schema = Schema.create(schema_name="template_schema") context = Context({"schema": schema}) template = Template("{{ schema.schema_name }}") rendered = template.render(context) assert rendered == "template_schema" django-pgschemas-1.0.1/sandbox/tests/test_signals.py000066400000000000000000000042641470131117100225760ustar00rootroot00000000000000from unittest.mock import MagicMock import pytest from django_pgschemas.schema import Schema, activate, deactivate, get_default_schema from django_pgschemas.signals import schema_activate from django_pgschemas.utils import schema_exists def test_schema_activate(): deactivate() schema = Schema.create(schema_name="test") receiver = MagicMock() schema_activate.connect(receiver) activate(schema) schema_activate.disconnect(receiver) receiver.assert_called_once_with(signal=schema_activate, sender=Schema, schema=schema) def test_schema_double_activate(): deactivate() schema = Schema.create(schema_name="test") receiver = MagicMock() schema_activate.connect(receiver) activate(schema) activate(schema) schema_activate.disconnect(receiver) receiver.assert_called_once_with(signal=schema_activate, sender=Schema, schema=schema) def test_schema_deactivate(): schema = Schema.create(schema_name="test") activate(schema) receiver = MagicMock() schema_activate.connect(receiver) deactivate() schema_activate.disconnect(receiver) receiver.assert_called_once_with( signal=schema_activate, sender=Schema, schema=get_default_schema() ) def test_schema_override(): deactivate() schema = Schema.create(schema_name="test") receiver = MagicMock() schema_activate.connect(receiver) with schema: pass schema_activate.disconnect(receiver) receiver.assert_called_once_with(signal=schema_activate, sender=Schema, schema=schema) def test_tenant_delete_callback(TenantModel, db): if TenantModel is None: pytest.skip("Dynamic tenants are not in use") backup_create, backup_drop = TenantModel.auto_create_schema, TenantModel.auto_drop_schema TenantModel.auto_create_schema = False TenantModel.auto_drop_schema = True tenant = TenantModel(schema_name="tenant_signal") tenant.save() tenant.create_schema(sync_schema=False) assert schema_exists("tenant_signal") TenantModel.objects.all().delete() assert not schema_exists("tenant_signal") TenantModel.auto_create_schema, TenantModel.auto_drop_schema = backup_create, backup_drop django-pgschemas-1.0.1/sandbox/tests/test_tenant_integration.py000066400000000000000000000223771470131117100250370ustar00rootroot00000000000000from contextlib import contextmanager import pytest from django.apps import apps from django.contrib.auth import authenticate from django.db import ProgrammingError, transaction from django_pgschemas.schema import Schema, deactivate from django_pgschemas.signals import dynamic_tenant_post_sync from django_pgschemas.utils import schema_exists @pytest.fixture def BlogEntryModel(): return apps.get_model("app_blog.BlogEntry") @pytest.fixture def CatalogModel(): return apps.get_model("shared_public.Catalog") @pytest.fixture def MainDataModel(): return apps.get_model("app_main.MainData") @pytest.fixture def UserModel(): return apps.get_model("shared_common.User") @pytest.fixture def TenantDataModel(TenantModel): return apps.get_model("app_tenants.TenantData") if TenantModel is not None else None class ControlledException(Exception): pass @contextmanager def controlled_raises(exception): """ Since we are expecting database errors, we must use 


class TestTenantAutomaticOperations:
    @pytest.fixture(autouse=True)
    def _setup(self, TenantModel):
        if TenantModel is None:
            pytest.skip("Dynamic tenants are not in use")

    def test_new_creation_deletion(self, TenantModel, db):
        assert not schema_exists("new_tenant1")

        tenant = TenantModel(schema_name="new_tenant1")
        tenant.save(verbosity=0)

        assert schema_exists("new_tenant1")

        tenant.delete(force_drop=True)

        assert not schema_exists("new_tenant1")

    def test_existing_creation(self, TenantModel, db):
        assert not schema_exists("new_tenant1")

        tenant = TenantModel(schema_name="new_tenant1")
        tenant.auto_create_schema = False
        tenant.save(verbosity=0)

        assert not schema_exists("new_tenant1")

        tenant.auto_create_schema = True
        tenant.save(verbosity=0)

        assert schema_exists("new_tenant1")

        tenant.delete(force_drop=True)

        assert not schema_exists("new_tenant1")

    def test_new_aborted_creation(self, TenantModel, db):
        def signal_receiver(*args, **kwargs):
            raise ControlledException

        assert not schema_exists("new_tenant1")

        tenant = TenantModel(schema_name="new_tenant1")

        dynamic_tenant_post_sync.connect(signal_receiver)

        with pytest.raises(ControlledException):
            tenant.save(verbosity=0)

        assert not schema_exists("new_tenant1")
        assert not TenantModel.objects.filter(schema_name="new_tenant1").exists()

        dynamic_tenant_post_sync.disconnect(signal_receiver)

    def test_existing_aborted_creation(self, TenantModel, db):
        def signal_receiver(*args, **kwargs):
            raise ControlledException

        assert not schema_exists("new_tenant1")

        tenant = TenantModel(schema_name="new_tenant1")
        tenant.auto_create_schema = False
        tenant.save(verbosity=0)
        tenant.auto_create_schema = True

        dynamic_tenant_post_sync.connect(signal_receiver)

        with pytest.raises(ControlledException):
            tenant.save(verbosity=0)

        assert not schema_exists("new_tenant1")
        assert TenantModel.objects.filter(schema_name="new_tenant1").exists()

        dynamic_tenant_post_sync.disconnect(signal_receiver)

        tenant.delete(force_drop=True)

        assert not TenantModel.objects.filter(schema_name="new_tenant1").exists()


class TestTenantIntegration:
    @pytest.fixture(autouse=True)
    def _setup(
        self, tenant1, CatalogModel, UserModel, MainDataModel, BlogEntryModel, TenantDataModel
    ):
        catalog = CatalogModel.objects.create()
        CatalogModel.objects.create()

        with Schema.create(schema_name="www"):
            user = UserModel.objects.create(email="main@localhost", display_name="Main User")
            user.set_password("weakpassword")
            user.save()
            MainDataModel.objects.create()

        with Schema.create(schema_name="blog"):
            user = UserModel.objects.create(email="blog@localhost", display_name="Blog User")
            user.set_password("weakpassword")
            user.save()
            BlogEntryModel.objects.create(user=user)

        if TenantDataModel is not None:
            with tenant1:
                user = UserModel.objects.create(
                    email="tenant@localhost", display_name="Tenant User"
                )
                user.set_password("weakpassword")
                user.save()
                TenantDataModel.objects.create(user=user, catalog=catalog)

    def test_migrated_public_apps(
        self, CatalogModel, UserModel, MainDataModel, BlogEntryModel, TenantDataModel
    ):
        deactivate()

        # Apps expected to be migrated
        assert CatalogModel.objects.count() == 2

        # Apps expected to NOT be migrated
        with controlled_raises(ProgrammingError):
            list(UserModel.objects.all())
        with controlled_raises(ProgrammingError):
            list(MainDataModel.objects.all())
        with controlled_raises(ProgrammingError):
            list(BlogEntryModel.objects.all())
        if TenantDataModel is not None:
            with controlled_raises(ProgrammingError):
                list(TenantDataModel.objects.all())

    def test_migrated_main_apps(
        self, CatalogModel, UserModel, MainDataModel, BlogEntryModel, TenantDataModel
    ):
        with Schema.create(schema_name="www"):
            # Apps expected to be migrated
            assert CatalogModel.objects.count() == 2
            assert UserModel.objects.count() == 1
            assert MainDataModel.objects.count() == 1

            # Apps expected to NOT be migrated
            with controlled_raises(ProgrammingError):
                list(BlogEntryModel.objects.all())
            if TenantDataModel is not None:
                with controlled_raises(ProgrammingError):
                    list(TenantDataModel.objects.all())

    def test_migrated_blog_apps(
        self, CatalogModel, UserModel, MainDataModel, BlogEntryModel, TenantDataModel
    ):
        with Schema.create(schema_name="blog"):
            # Apps expected to be migrated
            assert CatalogModel.objects.count() == 2
            assert UserModel.objects.count() == 1
            assert BlogEntryModel.objects.count() == 1

            # Direct and reverse relations
            assert UserModel.objects.first() == BlogEntryModel.objects.first().user
            assert UserModel.objects.first().blogs.first() == BlogEntryModel.objects.first()

            # Apps expected to NOT be migrated
            with controlled_raises(ProgrammingError):
                list(MainDataModel.objects.all())
            if TenantDataModel is not None:
                with controlled_raises(ProgrammingError):
                    list(TenantDataModel.objects.all())

    def test_migrated_tenant_apps(
        self, tenant1, CatalogModel, UserModel, MainDataModel, BlogEntryModel, TenantDataModel
    ):
        if not TenantDataModel:
            pytest.skip("Dynamic tenants are not in use")

        with tenant1:
            # Apps expected to be migrated
            assert CatalogModel.objects.count() == 2
            assert UserModel.objects.count() == 1
            assert TenantDataModel.objects.count() == 1

            # Direct and reverse relations
            assert UserModel.objects.first() == TenantDataModel.objects.first().user
            assert (
                UserModel.objects.first().tenant_objects.first() == TenantDataModel.objects.first()
            )
            assert CatalogModel.objects.first() == TenantDataModel.objects.first().catalog
            assert (
                CatalogModel.objects.first().tenant_objects.first()
                == TenantDataModel.objects.first()
            )

            # Apps expected to NOT be migrated
            with controlled_raises(ProgrammingError):
                list(MainDataModel.objects.all())
            with controlled_raises(ProgrammingError):
                list(BlogEntryModel.objects.all())

    def test_cross_authentication(self, tenant1, TenantModel):
        with Schema.create(schema_name="www"):
            assert authenticate(email="main@localhost", password="weakpassword")
            assert not authenticate(email="blog@localhost", password="weakpassword")
            assert not authenticate(email="tenant@localhost", password="weakpassword")

        with Schema.create(schema_name="blog"):
            assert not authenticate(email="main@localhost", password="weakpassword")
            assert authenticate(email="blog@localhost", password="weakpassword")
            assert not authenticate(email="tenant@localhost", password="weakpassword")

        if TenantModel is not None:
            with tenant1:
                assert not authenticate(email="main@localhost", password="weakpassword")
                assert not authenticate(email="blog@localhost", password="weakpassword")
                assert authenticate(email="tenant@localhost", password="weakpassword")

        with controlled_raises(ProgrammingError):
            authenticate(email="irrelevant@localhost", password="irrelevant")
django-pgschemas-1.0.1/sandbox/tests/test_tenant_views.py000066400000000000000000000062771470131117100236500ustar00rootroot00000000000000import pytest
from django.apps import apps
from django.test import Client

from django_pgschemas.routing.info import DomainInfo
from django_pgschemas.schema import Schema


@pytest.fixture
def UserModel():
    return apps.get_model("shared_common.User")


@pytest.fixture(autouse=True)
def _setup(UserModel, db):
    with Schema.create("www"):
        UserModel.objects.create(email="user_www@localhost", display_name="Admin")
    with Schema.create("blog"):
        UserModel.objects.create(email="user_blog@localhost", display_name="Admin")


@pytest.fixture
def _setup_dynamic(tenant1, UserModel, DomainModel):
    if DomainModel is None:
        pytest.skip("Domain model is not in use")

    DomainModel.objects.create(tenant=tenant1, domain="tenant1.localhost", is_primary=True)
    DomainModel.objects.create(tenant=tenant1, domain="everyone.localhost", folder="tenant1")

    with tenant1:
        yield UserModel.objects.create(email="user1@localhost", display_name="Admin")


@pytest.mark.parametrize(
    "url, expected_status",
    [
        ("/", 200),
        ("/register/", 200),
        ("/admin/", 302),
        ("/non-existing/", 404),
    ],
)
def test_views_www(url, expected_status):
    client = Client(headers={"host": "localhost"})
    response = client.get(url)

    assert response.status_code == expected_status


@pytest.mark.parametrize(
    "url, expected_status",
    [
        ("/", 200),
        ("/entries/", 200),
        ("/admin/", 302),
        ("/non-existing/", 404),
    ],
)
def test_views_blog(url, expected_status):
    client = Client(headers={"host": "blog.localhost"})
    response = client.get(url)

    assert response.status_code == expected_status


@pytest.mark.parametrize(
    "url, expected_status",
    [
        ("/", 200),
        ("/profile/", 200),
        ("/profile/advanced/", 302),
        ("/login/", 200),
        ("/admin/", 302),
        ("/non-existing/", 404),
    ],
)
def test_tenants_domain(url, expected_status, _setup_dynamic):
    user = _setup_dynamic
    client = Client(headers={"host": "tenant1.localhost"})
    response = client.get(url)

    assert response.status_code == expected_status

    # Note: status codes are integers, so the comparison must be against 200,
    # not the string "200", for this branch to ever run.
    if expected_status == 200:
        assert response.context == {
            "path": url,
            "user": user,
            "schema": "tenant1",
            "routing": DomainInfo(domain="tenant1.localhost", folder=""),
            "admin_url": "/admin/",
        }


@pytest.mark.parametrize(
    "url, expected_status",
    [
        ("/tenant1/", 200),
        ("/tenant1/profile/", 200),
        ("/tenant1/profile/advanced/", 302),
        ("/tenant1/login/", 200),
        ("/tenant1/admin/", 302),
        ("/tenant1/non-existing/", 404),
    ],
)
def test_tenants_folder(url, expected_status, _setup_dynamic):
    user = _setup_dynamic
    client = Client(headers={"host": "everyone.localhost"})
    response = client.get(url)

    assert response.status_code == expected_status

    if expected_status == 200:
        assert response.context == {
            "path": url,
            "user": user,
            "schema": "tenant1",
            "routing": DomainInfo(domain="everyone.localhost", folder="tenant1"),
            "admin_url": "/tenant1/admin/",
        }
django-pgschemas-1.0.1/sandbox/tests/test_utils.py000066400000000000000000000123231470131117100222710ustar00rootroot00000000000000import pytest
from django.core.exceptions import ValidationError
from django.db import connection
from django.db.utils import DatabaseError

from django_pgschemas import schema, utils

VALID_IDENTIFIERS = ["___", "a_a0", "_a0_", "a" * 63]
INVALID_IDENTIFIERS = ["", " ", "^", ".", "&", "{", "(", "@", "!", "a" * 64]
VALID_SCHEMA_NAMES = ["a_pg", "w_pg_a", "_pg_awa", "pgwa"] + VALID_IDENTIFIERS
INVALID_SCHEMA_NAMES = ["pg_a", "pg_"] + INVALID_IDENTIFIERS


def test_get_tenant_model(tenants_settings):
    TenantModel = utils.get_tenant_model()

    if "default" in tenants_settings:
        assert TenantModel is not None
        assert TenantModel._meta.model_name == "tenant"
    else:
        assert TenantModel is None


def test_get_domain_model(tenants_settings):
    DomainModel = utils.get_domain_model()

    if "default" in tenants_settings and "DOMAIN_MODEL" in tenants_settings["default"]:
        assert DomainModel is not None
        assert DomainModel._meta.model_name == "domain"
    else:
        assert DomainModel is None


@pytest.mark.parametrize("has_value", [True, False])
def test_get_tenant_database_alias(settings, has_value):
    if has_value:
        settings.PGSCHEMAS_TENANT_DB_ALIAS = "something"
        assert utils.get_tenant_database_alias() == "something"
    else:
        assert utils.get_tenant_database_alias() == "default"


@pytest.mark.parametrize("has_value", [True, False])
def test_get_limit_set_calls(settings, has_value):
    if has_value:
        settings.PGSCHEMAS_LIMIT_SET_CALLS = True
        assert utils.get_limit_set_calls()
    else:
        assert not utils.get_limit_set_calls()


def test_get_clone_reference(tenants_settings):
    clone_reference = utils.get_clone_reference()

    if "default" in tenants_settings:
        assert clone_reference == "sample"
    else:
        assert clone_reference is None


@pytest.mark.parametrize(
    "identifier, is_valid",
    [(identifier, True) for identifier in VALID_IDENTIFIERS]
    + [(identifier, False) for identifier in INVALID_IDENTIFIERS],
)
def test_is_valid_identifier(identifier, is_valid):
    assert utils.is_valid_identifier(identifier) == is_valid


@pytest.mark.parametrize(
    "name, is_valid",
    [(name, True) for name in VALID_SCHEMA_NAMES]
    + [(name, False) for name in INVALID_SCHEMA_NAMES],
)
def test_is_valid_schema_name(name, is_valid):
    assert utils.is_valid_schema_name(name) == is_valid


@pytest.mark.parametrize(
    "name, is_valid",
    [(name, True) for name in VALID_SCHEMA_NAMES]
    + [(name, False) for name in INVALID_SCHEMA_NAMES],
)
def test_check_schema_name(name, is_valid):
    if is_valid:
        utils.check_schema_name(name)
    else:
        with pytest.raises(ValidationError):
            utils.check_schema_name(name)


def test_run_in_public_schema(db):
    @utils.run_in_public_schema
    def inner():
        with connection.cursor() as cursor:
            cursor.execute("SHOW search_path")
            assert cursor.fetchone() == ("public",)

    with schema.Schema.create(schema_name="test"):
        inner()
        with connection.cursor() as cursor:
            cursor.execute("SHOW search_path")
            # The comparison below was a bare expression; it only checks anything
            # if it is actually asserted.
            assert cursor.fetchone() == ("test, public",)


def test_schema_exists(db):
    assert utils.schema_exists("public")
    assert utils.schema_exists("www")
    assert utils.schema_exists("blog")
    assert not utils.schema_exists("default")


def test_dynamic_models_exist(tenants_settings, db):
    if "default" in tenants_settings:
        assert utils.dynamic_models_exist()
    else:
        assert not utils.dynamic_models_exist()

    utils.drop_schema("public")

    assert not utils.dynamic_models_exist()


def test_create_drop_schema(db):
    assert not utils.create_schema("public", check_if_exists=True)  # Schema existed already
    assert utils.schema_exists("public")  # Schema exists
    assert utils.drop_schema("public")  # Schema was dropped
    assert not utils.drop_schema("public")  # Schema no longer exists
    assert not utils.schema_exists("public")  # Schema doesn't exist
    assert utils.create_schema("public", sync_schema=False)  # Schema was created
    assert utils.schema_exists("public")  # Schema exists


def test_clone_schema(db):
    utils._create_clone_schema_function()

    assert not utils.schema_exists("sample2")  # Schema doesn't exist previously

    utils.clone_schema("sample", "sample2", dry_run=True)  # Dry run

    assert not utils.schema_exists("sample2")  # Schema won't exist, dry run

    utils.clone_schema("sample", "sample2")  # Real run, schema was cloned

    assert utils.schema_exists("sample2")  # Schema exists

    with pytest.raises(DatabaseError):
        utils.clone_schema("sample", "sample2")  # Schema already exists, error

    assert utils.schema_exists("sample2")  # Schema still exists


def test_create_or_clone_schema(db):
    assert not utils.create_or_clone_schema("sample")  # Schema existed


@pytest.mark.parametrize(
    "path, expected",
    [
        ("", ""),
        ("www", "www"),
        ("www.", ""),
        ("www.test.com", "test.com"),
        ("www.test.com/complex/path", "test.com/complex/path"),
        ("1www.test.com", "1www.test.com"),
    ],
)
def test_remove_www(path, expected):
    actual = utils.remove_www(path)

    assert actual == expected
django-pgschemas-1.0.1/sandbox/views.py000066400000000000000000000005611470131117100200660ustar00rootroot00000000000000from django.shortcuts import render
from django.urls import reverse


def generic(request):
    context = {
        "path": request.get_full_path(),
        "user": request.user,
        "schema": request.tenant.schema_name,
        "routing": request.tenant.routing,
        "admin_url": reverse("admin:index"),
    }
    return render(request, "index.html", context)
django-pgschemas-1.0.1/sandbox/wsgi.py000066400000000000000000000002501470131117100176750ustar00rootroot00000000000000import os

from django.core.wsgi import get_wsgi_application

os.environ.setdefault("DJANGO_SETTINGS_MODULE", "sandbox.settings")

application = get_wsgi_application()