==== django-pgschemas-0.15.2/.dockerignore ====

.git
.github
.mypy_cache
.postgres
.ruff_cache
.venv
.vscode
.coverage
.DS_Store

==== django-pgschemas-0.15.2/.editorconfig ====

root = true

[*]
end_of_line = lf
insert_final_newline = true

[*.{toml,rst,yml,sh,py}]
charset = utf-8
indent_size = 4
indent_style = space
trim_trailing_whitespace = true

==== django-pgschemas-0.15.2/.flake8 ====

[flake8]
max-line-length = 120
ignore = B007,B305,E203,E731,F405,W503

==== django-pgschemas-0.15.2/.github/FUNDING.yml ====

# These are supported funding model platforms

github: [lorinkoz]
#patreon: # Replace with a single Patreon username
#open_collective: # Replace with a single Open Collective username
#ko_fi: # Replace with a single Ko-fi username
#tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
#community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
#liberapay: # Replace with a single Liberapay username
#issuehunt: # Replace with a single IssueHunt username
#otechie: # Replace with a single Otechie username
#custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2']

==== django-pgschemas-0.15.2/.github/workflows/code.yaml ====

name: code

on:
  pull_request:
  push:
    branches:
      - master
    paths:
      - pyproject.toml
      - poetry.lock
      - "**.py"

jobs:
  django-tests:
    runs-on: ubuntu-latest
    strategy:
      max-parallel: 4
      matrix:
        python-version: ["3.8", "3.9", "3.10", "3.11"]
        django-version: ["~=4.0.0", "~=4.1.0", "~=4.2.0"]
        psycopg-version: ["psycopg", "psycopg2"]
        exclude:
          - django-version: "~=4.0.0"
            psycopg-version: "psycopg"
          - django-version: "~=4.1.0"
            psycopg-version: "psycopg"
    services:
      postgres:
        image: postgres:latest
        env:
          POSTGRES_USER: postgres
          POSTGRES_PASSWORD: postgres
          POSTGRES_DB: dpgs_sandbox
        ports:
          - 5432:5432
        options: --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5
    steps:
      - uses: actions/checkout@v3
      - name: Install poetry
        run: pipx install poetry
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install Dependencies
        run: poetry install
      - name: Install Django specific version ${{ matrix.django-version }}
        run: poetry run pip install "Django${{ matrix.django-version }}"
      - name: Install psycopg specific version ${{ matrix.psycopg-version }}
        run: poetry run pip install ${{ matrix.psycopg-version }}
      - name: Run Tests
        run: |
          poetry run coverage run dpgs_sandbox/manage.py test tests -r
          poetry run dpgs_sandbox/manage.py test tests --settings settings.static_only -r
          poetry run coverage lcov -o ./coverage/lcov.info
      - name: Upload coverage to Coveralls in parallel
        uses: coverallsapp/github-action@master
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          flag-name: run-py${{ matrix.python-version }}-Django${{ matrix.django-version }}
          parallel: true

  finish:
    needs: django-tests
    runs-on: ubuntu-latest
    steps:
      - name: Finish report to Coveralls
        uses: coverallsapp/github-action@master
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          parallel-finished: true

==== django-pgschemas-0.15.2/.github/workflows/deploy.yaml ====

name: deploy

on:
  push:
    tags:
      - "v*"

jobs:
  build-n-publish:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Install poetry
        run: pipx install poetry
      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: "3.x"
      - name: Install Dependencies
        run: poetry install
      - name: Build Package
        run: poetry build
      - name: Publish distribution to PyPI
        uses: pypa/gh-action-pypi-publish@master
        with:
          user: __token__
          password: ${{ secrets.PYPI_TOKEN }}

==== django-pgschemas-0.15.2/.github/workflows/linters.yaml ====

name: linters

on: [push]

jobs:
  pre-commit:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Install poetry
        run: pipx install poetry
      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: "3.x"
      - name: Install Dependencies
        run: poetry install
      - name: Run linters
        uses: pre-commit/action@v2.0.0

==== django-pgschemas-0.15.2/.github/workflows/postgres.yaml ====

name: postgres

on:
  pull_request:
  push:
    branches:
      - master
    paths:
      - pyproject.toml
      - poetry.lock
      - "**.py"

jobs:
  postgres-version:
    runs-on: ubuntu-latest
    strategy:
      max-parallel: 4
      matrix:
        postgres-version: [12, 13, 14, 15]
        psycopg-version: ["psycopg", "psycopg2"]
    services:
      postgres:
        image: postgres:${{ matrix.postgres-version }}
        env:
          POSTGRES_USER: postgres
          POSTGRES_PASSWORD: postgres
          POSTGRES_DB: dpgs_sandbox
        ports:
          - 5432:5432
        options: --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5
    steps:
      - uses: actions/checkout@v3
      - name: Install poetry
        run: pipx install poetry
      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: "3.x"
      - name: Install Dependencies
        run: poetry install
      - name: Install psycopg specific version ${{ matrix.psycopg-version }}
        run: poetry run pip install ${{ matrix.psycopg-version }}
      - name: Run Tests
        run: poetry run coverage run dpgs_sandbox/manage.py test tests
==== django-pgschemas-0.15.2/.gitignore ====

# MacOS
.DS_Store

# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
#  Usually these files are written by a python script from a template
#  before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
.hypothesis/
.pytest_cache/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
target/

# Jupyter Notebook
.ipynb_checkpoints

# pyenv
.python-version

# celery beat schedule file
celerybeat-schedule

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/

# vscode
.vscode/

# local only by design
setup.py
.postgres/

==== django-pgschemas-0.15.2/.pre-commit-config.yaml ====

repos:
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v3.4.0
    hooks:
      - id: check-added-large-files
      - id: check-case-conflict
      - id: check-merge-conflict
      - id: check-symlinks
      - id: check-toml
      - id: check-yaml
      - id: end-of-file-fixer
      - id: mixed-line-ending
        args: ["--fix=lf"]
      - id: trailing-whitespace
  - repo: local
    hooks:
      - id: ruff
        name: ruff
        entry: bash -c 'poetry run ruff check --fix $0 $@'
        language: system
        types: [python]
      - id: black
        name: black
        entry: bash -c 'poetry run black $0 $@'
        language: system
        types: [python]
  - repo: https://github.com/adamchainz/django-upgrade
    rev: "1.13.0"
    hooks:
      - id: django-upgrade
        args: [--target-version, "4.2"]

==== django-pgschemas-0.15.2/.readthedocs.yml ====

version: 2

python:
  version: "3.8"
  install:
    - requirements: docs/requirements.txt
    - method: pip
      path: .

==== django-pgschemas-0.15.2/CODE_OF_CONDUCT.md ====

# Contributor Covenant Code of Conduct

## Our Pledge

In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, sex characteristics, gender identity and expression, level of experience, education, socio-economic status, nationality, personal appearance, race, religion, or sexual identity and orientation.

## Our Standards

Examples of behavior that contributes to creating a positive environment include:

* Using welcoming and inclusive language
* Being respectful of differing viewpoints and experiences
* Gracefully accepting constructive criticism
* Focusing on what is best for the community
* Showing empathy towards other community members

Examples of unacceptable behavior by participants include:

* The use of sexualized language or imagery and unwelcome sexual attention or advances
* Trolling, insulting/derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or electronic address, without explicit permission
* Other conduct which could reasonably be considered inappropriate in a professional setting

## Our Responsibilities

Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior.

Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful.

## Scope

This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers.

## Enforcement

Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at lorinkoz@gmail.com. All complaints will be reviewed and investigated and will result in a response that is deemed necessary and appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately.

Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership.

## Attribution

This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html

[homepage]: https://www.contributor-covenant.org

For answers to common questions about this code of conduct, see https://www.contributor-covenant.org/faq

==== django-pgschemas-0.15.2/Dockerfile ====

FROM python:3.10.4-slim

WORKDIR /app

RUN apt update && \
    apt install -y gcc libpq-dev python3-dev libffi-dev musl-dev python3-pip && \
    apt autoremove -y && apt autoclean -y

ENV PYTHONDONTWRITEBYTECODE 1
ENV PYTHONUNBUFFERED 1
ENV POETRY_VIRTUALENVS_CREATE 0

RUN pip install poetry gunicorn uvicorn

COPY . .

RUN poetry install
RUN poetry run pip install psycopg

==== django-pgschemas-0.15.2/LICENSE ====

MIT License

Copyright (c) 2020 Lorenzo Peña

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
==== django-pgschemas-0.15.2/Makefile ====

# Makefile for django-pgschemas

.PHONY: test
test:
	poetry run dpgs_sandbox/manage.py test tests

.PHONY: coverage
coverage:
	poetry run coverage run dpgs_sandbox/manage.py test tests

.PHONY: coverage-html
coverage-html:
	poetry run coverage run dpgs_sandbox/manage.py test tests && poetry run coverage html

.PHONY: update-clone-schema
update-clone-schema:
	curl https://raw.githubusercontent.com/denishpatel/pg-clone-schema/master/clone_schema.sql -o django_pgschemas/clone_schema.sql

==== django-pgschemas-0.15.2/README.rst ====

django-pgschemas
================

.. image:: https://img.shields.io/badge/packaging-poetry-purple.svg
    :alt: Packaging: poetry
    :target: https://github.com/sdispater/poetry

.. image:: https://img.shields.io/badge/code%20style-black-black.svg
    :alt: Code style: black
    :target: https://github.com/ambv/black

.. image:: https://github.com/lorinkoz/django-pgschemas/workflows/code/badge.svg
    :alt: Build status
    :target: https://github.com/lorinkoz/django-pgschemas/actions

.. image:: https://readthedocs.org/projects/django-pgschemas/badge/?version=latest
    :alt: Documentation status
    :target: https://django-pgschemas.readthedocs.io/

.. image:: https://coveralls.io/repos/github/lorinkoz/django-pgschemas/badge.svg?branch=master
    :alt: Code coverage
    :target: https://coveralls.io/github/lorinkoz/django-pgschemas?branch=master

.. image:: https://badge.fury.io/py/django-pgschemas.svg
    :alt: PyPi version
    :target: http://badge.fury.io/py/django-pgschemas

.. image:: https://pepy.tech/badge/django-pgschemas/month
    :alt: Downloads
    :target: https://pepy.tech/project/django-pgschemas/

|

This app uses PostgreSQL schemas to support data multi-tenancy in a single Django project. It is a fork of `django-tenants`_ with some conceptual changes:

- There are static tenants and dynamic tenants. Static tenants can have their own apps and urlconf.
- Tenants can be simultaneously routed via subdomain and via subfolder on a shared subdomain.
- Public is no longer the schema for storing the main site data. Public should be used only for data that is truly shared across all tenants. Table "overriding" via search path is no longer encouraged.
- Management commands can be run on multiple schemas via wildcards - the multiproc behavior of migrations was extended to any tenant command.

.. _django-tenants: https://github.com/tomturner/django-tenants

Documentation
-------------

https://django-pgschemas.readthedocs.io/

Contributing
------------

- Join the discussion at https://github.com/lorinkoz/django-pgschemas/discussions.
- PRs are welcome! If you have questions or comments, please use the discussions link above.
- To run the test suite run ``make`` or ``make coverage``. The tests for this project live inside a small django project called ``dpgs_sandbox``. Database password and database host can be set through the environment variables ``DATABASE_PASSWORD`` and ``DATABASE_HOST``.

Credits
-------

* Tom Turner for `django-tenants`_.
* Bernardo Pires for `django-tenant-schemas`_.

.. _django-tenant-schemas: https://github.com/bernardopires/django-tenant-schemas
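Example configuration
---------------------

To make the conceptual changes above concrete, here is a minimal sketch of the ``TENANTS`` setting, derived from the configuration checks in ``django_pgschemas/apps.py`` (see below in this archive). The app and model names (``main``, ``customers``, ``tenants.Tenant``, ``tenants.Domain``, ``mydomain.com``) are illustrative placeholders, not names shipped by the package:

.. code-block:: python

    # settings.py (sketch only; app/model names are hypothetical)
    TENANTS = {
        # "public" holds only truly shared data; the checks forbid
        # URLCONF, WS_URLCONF, DOMAINS and FALLBACK_DOMAINS here.
        "public": {
            "APPS": [
                "django.contrib.contenttypes",
                "django_pgschemas",
                "tenants",  # the app providing the tenant and domain models
            ],
        },
        # A static tenant: routed by its own DOMAINS list and urlconf.
        "www": {
            "APPS": ["django.contrib.auth", "django.contrib.sessions", "main"],
            "DOMAINS": ["mydomain.com"],
            "URLCONF": "main.urls",
        },
        # "default" describes dynamic tenants; the checks require
        # TENANT_MODEL, DOMAIN_MODEL and URLCONF, and forbid DOMAINS.
        "default": {
            "TENANT_MODEL": "tenants.Tenant",
            "DOMAIN_MODEL": "tenants.Domain",
            "APPS": ["django.contrib.auth", "django.contrib.sessions", "customers"],
            "URLCONF": "customers.urls",
        },
    }

    # Required by the checks in django_pgschemas.apps:
    DATABASE_ROUTERS = ["django_pgschemas.routers.SyncRouter"]

Every constraint shown in the comments is enforced at startup by ``DjangoPGSchemasConfig.ready()``; refer to the documentation for the full set of options (e.g. ``CLONE_REFERENCE``, ``FALLBACK_DOMAINS``).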
==== django-pgschemas-0.15.2/SECURITY.md ====

# Security Policy

## Supported Versions

| Version | Supported          |
| ------- | ------------------ |
| < 1.0   | :white_check_mark: |

As soon as we reach a stable version, we will not support beta releases anymore.

## Reporting a Vulnerability

In order to report a vulnerability, please DO NOT create an issue on this repository.
Instead, write an email to lorinkoz@gmail.com with full details of the finding.

Expect a response in 24 to 48 hours. If your report is accepted, we will work to publish a patch as soon as possible. We will also provide an advisory covering the details of the vulnerability, as well as the affected versions.

==== django-pgschemas-0.15.2/compose.yaml ====

version: "3.9"

services:
  postgres:
    image: postgres:13-alpine
    environment:
      POSTGRES_USER: postgres
      POSTGRES_PASSWORD: postgres
      POSTGRES_DB: dpgs_sandbox
    ports:
      - 5432:5432
    volumes:
      - ./.postgres:/var/lib/postgresql/data
    healthcheck:
      test: ["CMD", "pg_isready", "-U", "postgres"]

  build_image:
    image: app
    build: .
    working_dir: /app/dpgs_sandbox
    environment:
      DATABASE_HOST: postgres
    command: python3 manage.py migrate
    depends_on:
      - postgres

  app_wsgi:
    image: app
    working_dir: /app/dpgs_sandbox
    environment:
      DATABASE_HOST: postgres
    command: gunicorn wsgi:application --bind 0.0.0.0:8001
    ports:
      - 8001:8001
    depends_on:
      - build_image
      - postgres

  app_asgi:
    image: app
    working_dir: /app/dpgs_sandbox
    environment:
      DATABASE_HOST: postgres
    command: gunicorn asgi:application --bind 0.0.0.0:8002 -k uvicorn.workers.UvicornWorker
    ports:
      - 8002:8002
    depends_on:
      - build_image
      - postgres

==== django-pgschemas-0.15.2/django_pgschemas/__init__.py ====

from .schema import (
    SchemaDescriptor,
    activate,
    activate_public,
    deactivate,
    get_current_schema,
)

__all__ = [
    "SchemaDescriptor",
    "activate",
    "activate_public",
    "deactivate",
    "get_current_schema",
]
==== django-pgschemas-0.15.2/django_pgschemas/apps.py ====

from django.apps import AppConfig
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.db import connection

from .utils import get_tenant_model, is_valid_schema_name


class DjangoPGSchemasConfig(AppConfig):
    name = "django_pgschemas"
    verbose_name = "Django PostgreSQL Schemas"

    def _check_tenant_dict(self):
        if not isinstance(getattr(settings, "TENANTS", None), dict):
            raise ImproperlyConfigured("TENANTS dict setting not set.")

    def _check_public_schema(self):
        if not isinstance(settings.TENANTS.get("public"), dict):
            raise ImproperlyConfigured("TENANTS must contain a 'public' dict.")
        if "URLCONF" in settings.TENANTS["public"]:
            raise ImproperlyConfigured("TENANTS['public'] cannot contain a 'URLCONF' key.")
        if "WS_URLCONF" in settings.TENANTS["public"]:
            raise ImproperlyConfigured("TENANTS['public'] cannot contain a 'WS_URLCONF' key.")
        if "DOMAINS" in settings.TENANTS["public"]:
            raise ImproperlyConfigured("TENANTS['public'] cannot contain a 'DOMAINS' key.")
        if "FALLBACK_DOMAINS" in settings.TENANTS["public"]:
            raise ImproperlyConfigured("TENANTS['public'] cannot contain a 'FALLBACK_DOMAINS' key.")

    def _check_default_schemas(self):
        if "default" not in settings.TENANTS:
            return  # Escape hatch for static only configs
        if not isinstance(settings.TENANTS["default"], dict):
            raise ImproperlyConfigured("TENANTS must contain a 'default' dict.")
        if "TENANT_MODEL" not in settings.TENANTS["default"]:
            raise ImproperlyConfigured("TENANTS['default'] must contain a 'TENANT_MODEL' key.")
        if "DOMAIN_MODEL" not in settings.TENANTS["default"]:
            raise ImproperlyConfigured("TENANTS['default'] must contain a 'DOMAIN_MODEL' key.")
        if "URLCONF" not in settings.TENANTS["default"]:
            raise ImproperlyConfigured("TENANTS['default'] must contain a 'URLCONF' key.")
        if "DOMAINS" in settings.TENANTS["default"]:
            raise ImproperlyConfigured("TENANTS['default'] cannot contain a 'DOMAINS' key.")
        if "FALLBACK_DOMAINS" in settings.TENANTS["default"]:
            raise ImproperlyConfigured(
                "TENANTS['default'] cannot contain a 'FALLBACK_DOMAINS' key."
            )
        if (
            "CLONE_REFERENCE" in settings.TENANTS["default"]
            and settings.TENANTS["default"]["CLONE_REFERENCE"] in settings.TENANTS
        ):
            raise ImproperlyConfigured(
                "TENANTS['default']['CLONE_REFERENCE'] must be a unique schema name."
            )

    def _check_overall_schemas(self):
        for schema in settings.TENANTS:
            if schema not in ["public", "default"]:
                if not is_valid_schema_name(schema):
                    raise ImproperlyConfigured("'%s' is not a valid schema name." % schema)
                if not isinstance(settings.TENANTS[schema].get("DOMAINS"), list):
                    raise ImproperlyConfigured(
                        "TENANTS['%s'] must contain a 'DOMAINS' list." % schema
                    )

    def _check_complementary_settings(self):
        if "django_pgschemas.routers.SyncRouter" not in settings.DATABASE_ROUTERS:
            raise ImproperlyConfigured(
                "DATABASE_ROUTERS setting must contain 'django_pgschemas.routers.SyncRouter'."
            )

    def _check_extra_search_paths(self):
        if hasattr(settings, "PGSCHEMAS_EXTRA_SEARCH_PATHS"):
            TenantModel = get_tenant_model()
            if TenantModel is None:
                return
            cursor = connection.cursor()
            cursor.execute(
                "SELECT 1 FROM information_schema.tables WHERE table_name = %s;",
                [TenantModel._meta.db_table],
            )
            dynamic_tenants = []
            if "CLONE_REFERENCE" in settings.TENANTS["default"]:
                dynamic_tenants.append(settings.TENANTS["default"]["CLONE_REFERENCE"])
            if cursor.fetchone():
                dynamic_tenants += list(
                    TenantModel.objects.all().values_list("schema_name", flat=True)
                )
            cursor.close()
            invalid_schemas = set(settings.PGSCHEMAS_EXTRA_SEARCH_PATHS).intersection(
                set(settings.TENANTS.keys()).union(dynamic_tenants)
            )
            if invalid_schemas:
                raise ImproperlyConfigured(
                    "Do not include '%s' on PGSCHEMAS_EXTRA_SEARCH_PATHS."
                    % ", ".join(invalid_schemas)
                )

    def ready(self):
        from . import checks  # noqa

        self._check_tenant_dict()
        self._check_public_schema()
        self._check_default_schemas()
        self._check_overall_schemas()
        self._check_complementary_settings()
        self._check_extra_search_paths()

==== django-pgschemas-0.15.2/django_pgschemas/checks.py ====

from typing import Any, Optional

from django.conf import settings
from django.contrib.auth import get_user_model
from django.contrib.sessions.base_session import AbstractBaseSession
from django.core import checks
from django.core.exceptions import ImproperlyConfigured
from django.db.utils import ProgrammingError
from django.utils.module_loading import import_module

from .utils import get_clone_reference, get_domain_model, get_tenant_model


def get_tenant_app() -> Optional[str]:
    TenantModel = get_tenant_model(require_ready=False)
    if TenantModel is None:
        return None
    return TenantModel._meta.app_config.name


def get_domain_app() -> Optional[str]:
    DomainModel = get_domain_model(require_ready=False)
    if DomainModel is None:
        return None
    return DomainModel._meta.app_config.name


def get_user_app() -> Optional[str]:
    try:
        return get_user_model()._meta.app_config.name
    except ImproperlyConfigured:
        return None


def get_session_app() -> Optional[str]:
    engine = import_module(settings.SESSION_ENGINE)
    store = engine.SessionStore
    if hasattr(store, "get_model_class"):
        session_model = store.get_model_class()
        if issubclass(session_model, AbstractBaseSession):
            return session_model._meta.app_config.name
    return None


@checks.register()
def check_principal_apps(app_configs: Any, **kwargs: Any) -> list:
    errors = []
    tenant_app = get_tenant_app()
    domain_app = get_domain_app()

    if tenant_app is None or domain_app is None:
        return []

    if tenant_app not in settings.TENANTS["public"].get("APPS", []):
        errors.append(
            checks.Error(
                "Your tenant app '%s' must be on the 'public' schema." % tenant_app,
                id="pgschemas.W001",
            )
        )
    if domain_app not in settings.TENANTS["public"].get("APPS", []):
        errors.append(
            checks.Error(
                "Your domain app '%s' must be on the 'public' schema." % domain_app,
                id="pgschemas.W001",
            )
        )

    for schema in settings.TENANTS:
        schema_apps = settings.TENANTS[schema].get("APPS", [])
        if schema == "public":
            continue
        if tenant_app in schema_apps:
            errors.append(
                checks.Error(
                    "Your tenant app '%s' in TENANTS['%s']['APPS'] must be on the 'public' schema only."
                    % (tenant_app, schema),
                    id="pgschemas.W001",
                )
            )
        if domain_app in schema_apps:
            errors.append(
                checks.Error(
                    "Your domain app '%s' in TENANTS['%s']['APPS'] must be on the 'public' schema only."
                    % (domain_app, schema),
                    id="pgschemas.W001",
                )
            )

    return errors


@checks.register()
def check_other_apps(app_configs: Any, **kwargs: Any) -> list:
    errors = []
    user_app = get_user_app()
    session_app = get_session_app()

    if "django.contrib.contenttypes" in settings.TENANTS.get("default", {}).get("APPS", []):
        errors.append(
            checks.Warning(
                "'django.contrib.contenttypes' in TENANTS['default']['APPS'] must be on 'public' schema only.",
                id="pgschemas.W002",
            )
        )
    for schema in settings.TENANTS:
        schema_apps = settings.TENANTS[schema].get("APPS", [])
        if schema not in ["public", "default"]:
            if "django.contrib.contenttypes" in schema_apps:
                errors.append(
                    checks.Warning(
                        "'django.contrib.contenttypes' in TENANTS['%s']['APPS'] must be on 'public' schema only."
                        % schema,
                        id="pgschemas.W002",
                    )
                )
        if user_app and session_app:
            if session_app in schema_apps and user_app not in schema_apps:
                errors.append(
                    checks.Warning(
                        "'%s' must be together with '%s' in TENANTS['%s']['APPS']."
                        % (user_app, session_app, schema),
                        id="pgschemas.W003",
                    )
                )
            elif (
                user_app in schema_apps
                and session_app not in schema_apps
                and session_app in settings.INSTALLED_APPS
            ):
                errors.append(
                    checks.Warning(
                        "'%s' must be together with '%s' in TENANTS['%s']['APPS']."
                        % (session_app, user_app, schema),
                        id="pgschemas.W003",
                    )
                )

    return errors


@checks.register(checks.Tags.database)
def check_schema_names(app_configs: Any, **kwargs: Any) -> list:
    errors = []
    static_names = set(settings.TENANTS.keys())
    clone_reference = get_clone_reference()
    TenantModel = get_tenant_model()

    if TenantModel is None:
        return []

    if clone_reference:
        static_names.add(clone_reference)

    try:
        dynamic_names = set(TenantModel.objects.values_list("schema_name", flat=True))
    except ProgrammingError:
        # This happens on the first run of migrate, with empty database.
        # It can also happen when the tenant model contains unapplied migrations that break.
        dynamic_names = set()

    intersection = static_names & dynamic_names

    if intersection:
        errors.append(
            checks.Critical(
                "Name clash found between static and dynamic tenants: %s" % intersection,
                id="pgschemas.W004",
            )
        )

    return errors

==== django-pgschemas-0.15.2/django_pgschemas/clone_schema.sql ====

-- Change History:
-- 2021-03-03  MJV FIX: Fixed population of tables with rows section. "buffer" variable was not initialized correctly. Used new variable, tblname, to fix it.
-- 2021-03-03  MJV FIX: Fixed Issue#34 where user-defined types in declare section of functions caused runtime errors.
-- 2021-03-04  MJV FIX: Fixed Issue#35 where privileges for functions were not being set correctly causing the program to bomb and giving privileges to other users that should not have gotten them.
-- 2021-03-05  MJV FIX: Fixed Issue#36 Fixed table and other object permissions
-- 2021-03-05  MJV FIX: Fixed Issue#37 Fixed function grants again for case where parameters have default values.
-- 2021-03-08  MJV FIX: Fixed Issue#38 fixed issue where source schema specified for executed trigger function action
-- 2021-03-08  MJV FIX: Fixed Issue#39 Add warnings for table columns that are user-defined since they probably refer back to the source schema! No fix for it at this time.
-- 2021-03-09  MJV FIX: Fixed Issue#40 Rewrote trigger SQL instead to simplify things for all cases
-- 2021-03-19  MJV FIX: Fixed Issue#39 Added new function to generate table ddl instead of using the CREATE TABLE LIKE statement only for use cases with user-defined column datatypes.
-- 2021-04-02  MJV FIX: Fixed Issue#43 Fixed views case where view was created successfully in target schema, but referenced table was not.
-- 2021-06-30  MJV FIX: Fixed Issue#46 Invalid record reference, tbl_ddl. Changed to tbl_dcl in PRIVS section.
-- 2021-06-30  MJV FIX: Fixed Issue#46 Invalid record reference, tbl_ddl. Changed to tbl_dcl in PRIVS section. Thanks to dpmillerau for this fix.
-- 2021-07-21  MJV FIX: Fixed Issue#47 Fixed resetting search path to what it was before. Thanks to dpmillerau for this fix.
-- 2022-03-01  MJV FIX: Fixed Issue#61 Fixed more search_path problems. Modified get_table_ddl() to hard code search_path to public. Using set_config() for empty string instead of trying to set empty string directly and incorrectly.
-- 2022-03-01  MJV FIX: Fixed Issue#62 Added comments for indexes only (Thanks to @guignonv). Still need to add comments for other objects.
-- 2022-03-24  MJV FIX: Fixed Issue#63 Use last used value for sequence not the start value
-- 2022-03-24  MJV FIX: Fixed Issue#59 Implement Rules
-- 2022-03-26  MJV FIX: Fixed Issue#65 Check column availability in selecting query to use for pg_proc table. Also do some explicit datatype mappings for certain aggregate functions. Also fixed inheritance derived tables.
-- 2022-03-31  MJV FIX: Fixed Issue#66 Implement Security Policies for RLS
-- 2022-04-02  MJV FIX: Fixed Issue#62 Fixed all comments and reworked the way we generate index comments by @guignonv
-- 2022-04-02  MJV FIX: Fixed Issue#67 Reworked get_table_ddl() so we are not dependent on outside function, pg_get_tabledef().
-- 2022-04-02  MJV FIX: Fixed Issue#42 Fixed copying rows logic with exception of tables with user-defined datatypes in them that have to be done manually, documented in README.
-- 2022-05-01  MJV FIX: Fixed Issue#53 Applied coding style fixes, using pgFormatter as basis for SQL.
-- 2022-05-02  MJV FIX: Fixed Issue#72 Remove original schema references from materialized view definition
-- 2022-05-14  MJV FIX: Fixed Issue#73 Fix dependency order for views depending on other views. Also removed duplicate comment logic for views.
-- 2022-06-12  MJV FIX: Fixed Issue#74 Change comments ddl from source_schema to dest_schema. Policies fix using quote_literal(d.description) instead of hard-coded ticks and escape ticks.
-- 2022-06-13  MJV FIX: Fixed Issue#75 Rows were not being copied correctly for parents. Needed to move copy rows logic to end, after all DDL is done.
-- 2022-06-15  MJV FIX: Fixed Issue#76 RLS is not being enabled for cloned tables. Enable it right after the policy for the table is created
-- 2022-06-16  MJV FIX: Fixed Issue#78 Fix case-sensitive object names by using quote_ident() all over the place. Also added restriction to not allow case-sensitive target schemas.
-- 2022-06-16  MJV FIX: Fixed Issue#78 Also, since we deferred row copies until the end, we must also defer foreign key constraints to the end as well.
-- 2022-06-18  MJV FIX: Fixed Issue#79 Fix copying of rows in tables with user-defined column datatypes using COPY method.
-- 2022-06-29  MJV FIX: Fixed Issue#80 Fix copying of rows reported error due to arrays not being initialized properly.
-- 2022-07-15  MJV FIX: Fixed Issue#81 Fix COPY import format for handling NULLs correctly.
-- 2022-09-16  MJV FIX: Fixed Issue#82 Set search_path to public when creating user-defined columns in tables to handle public datatypes like PostGIS. Also fixed a bug in DDL only mode.
-- 2022-09-19  MJV FIX: Fixed Issue#83 Tables with CONSTRAINT DEFs are duplicated as CREATE INDEX statements. Removed CREATE INDEX statements if already defined as CONSTRAINTS.
-- 2022-09-27  MJV FIX: Fixed Issue#85 v13 postgres needs stricter type casting than v14
-- 2022-09-29  MJV FIX: Fixed Issue#86 v12+ handle generated columns by not trying to insert rows into them
-- 2022-09-29  MJV FIX: Fixed Issue#87 v10 requires double quotes around collation name, 11+ doesnt care
-- 2022-12-02  MJV FIX: Fixed Issue#90 Clone functions before views to avoid cloning error for views that call functions.
-- 2022-12-02  MJV FIX: Fixed Issue#91 Fix ownership of objects. Currently it is defaulting to the one running this script. Let it be the same owner as the source schema to preserve access control.
-- 2022-12-02  MJV FIX: Fixed Issue#92 Default privileges error: Must set the role before executing the command.
-- 2022-12-03  MJV FIX: Fixed Issue#94 Make parameters variadic
-- 2022-12-04  MJV FIX: Fixed Issue#96 PG15 may not populate the collcollate and collctype columns of the pg_collation table. Handle this.
-- 2022-12-04  MJV FIX: Fixed Issue#97 Regression testing: invalid CASE STATEMENT syntax found. PG13 is stricter than PG14 and up. Remove CASE from END CASE to terminate CASE statements.
-- 2022-12-05  MJV FIX: Fixed Issue#95 Implemented owner/ACL rules.
-- 2022-12-06  MJV FIX: Fixed Issue#98 Materialized Views are not populated because they are created before the regular tables are populated. Defer until after tables are populated.
-- 2022-12-07  MJV FIX: Fixed Issue#99 Tables and indexes should mimic the same tablespace used in the source schema. Only indexes were doing this. Fixed now so both use the same source tablespace.
-- 2022-12-22  MJV FIX: Fixed Issue#100 Fixed case for user-defined type in public schema not handled: citext. See #82 issue that missed this one.
-- 2022-12-22  MJV FIX: Fixed Issue#101 Enhancement: More debugging info, exceptions print out version.

do $$
<<first_block>>
DECLARE
    cnt int;
BEGIN
    SELECT count(*) into cnt
    FROM pg_catalog.pg_type t
        LEFT JOIN pg_catalog.pg_namespace n ON n.oid = t.typnamespace
    WHERE (t.typrelid = 0 OR (SELECT c.relkind = 'c' FROM pg_catalog.pg_class c WHERE c.oid = t.typrelid))
        AND NOT EXISTS(SELECT 1 FROM pg_catalog.pg_type el WHERE el.oid = t.typelem AND el.typarray = t.oid)
        AND n.nspname <> 'pg_catalog'
        AND n.nspname <> 'information_schema'
        AND pg_catalog.pg_type_is_visible(t.oid)
        AND pg_catalog.format_type(t.oid, NULL) in ('cloneparms');
    IF cnt = 0 THEN
        RAISE NOTICE 'Creating custom types.';
        CREATE TYPE public.cloneparms AS ENUM ('DATA', 'NODATA','DDLONLY','NOOWNER','NOACL','VERBOSE','DEBUG');
    END IF;
end first_block $$;

-- SELECT * FROM public.get_table_ddl('sample', 'address', True);
CREATE OR REPLACE FUNCTION public.get_table_ddl(
    in_schema varchar,
    in_table varchar,
    bfkeys boolean
)
RETURNS text
LANGUAGE plpgsql
VOLATILE
AS
$$
DECLARE
    -- the ddl we're building
    v_table_ddl text;
    -- data about the target table
    v_table_oid int;
    -- records for looping
    v_colrec record;
    v_constraintrec record;
    v_indexrec record;
    v_primary boolean := False;
    v_constraint_name text;
    v_src_path_old text := '';
    v_src_path_new text := '';
    v_dummy text;
    v_partbound text;
    v_pgversion int;
    v_parent text := '';
    v_relopts text := '';
    v_tablespace text;
    v_partition_key text := '';
    v_temp text;
    bPartitioned bool := False;
    bInheritance bool := False;
    bRelispartition bool;
    constraintarr text[] := '{}';
    constraintelement text;
    bSkip boolean;
BEGIN
    SELECT c.oid, (SELECT setting FROM pg_settings WHERE name = 'server_version_num')
    INTO v_table_oid, v_pgversion
    FROM pg_catalog.pg_class c
        LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace
    WHERE c.relkind IN ('r', 'p')
        AND c.relname = in_table
        AND n.nspname = in_schema;
    IF (v_table_oid IS NULL) THEN
        RAISE EXCEPTION 'table does not exist';
    END IF;

    -- get user-defined tablespaces if applicable
    SELECT TABLESPACE INTO v_temp
    FROM pg_tables
    WHERE schemaname = in_schema
        AND tablename = in_table
        AND TABLESPACE IS NOT NULL;
    -- Issue#99 Fix: simple coding error!
    -- IF v_tablespace IS NULL THEN
    IF v_temp IS NULL THEN
        v_tablespace := 'TABLESPACE pg_default';
    ELSE
        v_tablespace := 'TABLESPACE ' || v_temp;
    END IF;

    -- also see if there are any SET commands for this table, ie, autovacuum_enabled=off, fillfactor=70
    WITH relopts AS (
        SELECT unnest(c.reloptions) relopts
        FROM pg_class c, pg_namespace n
        WHERE n.nspname = in_schema
            AND n.oid = c.relnamespace
            AND c.relname = in_table
    )
    SELECT string_agg(r.relopts, ', ') AS relopts INTO v_temp
    FROM relopts r;
    IF v_temp IS NULL THEN
        v_relopts := '';
    ELSE
        v_relopts := ' WITH (' || v_temp || ')';
    END IF;

    -- Issue#61 FIX: set search_path = public before we do anything to force explicit schema qualification but don't forget to set it back before exiting...
    SELECT setting INTO v_src_path_old FROM pg_settings WHERE name = 'search_path';
    SELECT REPLACE(REPLACE(setting, '"$user"', '$user'), '$user', '"$user"') INTO v_src_path_old
    FROM pg_settings
    WHERE name = 'search_path';
    -- RAISE INFO 'DEBUG tableddl: saving old search_path: ***%***', v_src_path_old;
    EXECUTE 'SET search_path = "public"';
    SELECT setting INTO v_src_path_new FROM pg_settings WHERE name = 'search_path';

    -- grab the oid of the table; https://www.postgresql.org/docs/8.3/catalog-pg-class.html
    SELECT c.oid INTO v_table_oid
    FROM pg_catalog.pg_class c
        LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace
    WHERE 1 = 1
        AND c.relkind = 'r'
        AND c.relname = in_table
        AND n.nspname = in_schema;
    IF (v_table_oid IS NULL) THEN
        -- Dont give up yet. It might be a partitioned table
        SELECT c.oid INTO v_table_oid
        FROM pg_catalog.pg_class c
            LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace
        WHERE 1 = 1
            AND c.relkind = 'p'
            AND c.relname = in_table
            AND n.nspname = in_schema;
        IF (v_table_oid IS NULL) THEN
            RAISE EXCEPTION 'table does not exist';
        END IF;
        bPartitioned := True;
    END IF;
    IF v_pgversion < 100000 THEN
        SELECT c2.relname parent INTO v_parent
        FROM pg_class c1, pg_namespace n, pg_inherits i, pg_class c2
        WHERE n.nspname = in_schema
            AND n.oid = c1.relnamespace
            AND c1.relname = in_table
            AND c1.oid = i.inhrelid
            AND i.inhparent = c2.oid
            AND c1.relkind = 'r';
        IF (v_parent IS NOT NULL) THEN
            bPartitioned := True;
            bInheritance := True;
        END IF;
    ELSE
        SELECT c2.relname parent, c1.relispartition, pg_get_expr(c1.relpartbound, c1.oid, TRUE)
        INTO v_parent, bRelispartition, v_partbound
        FROM pg_class c1, pg_namespace n, pg_inherits i, pg_class c2
        WHERE n.nspname = in_schema
            AND n.oid = c1.relnamespace
            AND c1.relname = in_table
            AND c1.oid = i.inhrelid
            AND i.inhparent = c2.oid
            AND c1.relkind = 'r';
        IF (v_parent IS NOT NULL) THEN
            bPartitioned := True;
            IF bRelispartition THEN
                bInheritance := False;
            ELSE
                bInheritance := True;
            END IF;
        END IF;
    END IF;
    -- RAISE NOTICE 'version=% schema=% parent=% relopts=% tablespace=% partitioned=% inherited=% relispartition=%', v_pgversion, in_schema, v_parent, v_relopts, v_tablespace, bPartitioned, bInheritance, bRelispartition;

    -- start the create definition
    v_table_ddl := 'CREATE TABLE ' || in_schema || '.' || in_table || ' (' || E'\n';

    -- define all of the columns in the table; https://stackoverflow.com/a/8153081/3068233
    FOR v_colrec IN
        SELECT c.column_name, c.data_type, c.udt_name, c.character_maximum_length, c.is_nullable,
            c.column_default, c.numeric_precision, c.numeric_scale, c.is_identity, c.identity_generation
        FROM information_schema.columns c
        WHERE (table_schema, table_name) = (in_schema, in_table)
        ORDER BY ordinal_position
    LOOP
        v_table_ddl := v_table_ddl || '  ' -- note: two char spacer to start, to indent the column
            || v_colrec.column_name || ' '
            --FIX #82, FIX #100 as well by adding 'citext' to the list
            -- EXECUTE. 42704. type ".citext" does not exist
            -- || CASE WHEN v_colrec.data_type = 'USER-DEFINED' THEN in_schema || '.' || v_colrec.udt_name ELSE v_colrec.data_type END
            -- || CASE WHEN v_colrec.udt_name in ('geometry', 'box2d', 'box2df', 'box3d', 'geography', 'geometry_dump', 'gidx', 'spheroid', 'valid_detail')
            || CASE WHEN v_colrec.udt_name in ('geometry', 'box2d', 'box2df', 'box3d', 'geography', 'geometry_dump', 'gidx', 'spheroid', 'valid_detail','citext')
                THEN v_colrec.udt_name
                WHEN v_colrec.data_type = 'USER-DEFINED'
                THEN in_schema || '.' || v_colrec.udt_name
                ELSE v_colrec.data_type
                END
            || CASE WHEN v_colrec.is_identity = 'YES'
                THEN
                    CASE WHEN v_colrec.identity_generation = 'ALWAYS'
                    THEN ' GENERATED ALWAYS AS IDENTITY'
                    ELSE ' GENERATED BY DEFAULT AS IDENTITY'
                    END
                ELSE ''
                END
            || CASE WHEN v_colrec.character_maximum_length IS NOT NULL
                THEN ('(' || v_colrec.character_maximum_length || ')')
                WHEN v_colrec.numeric_precision > 0 AND v_colrec.numeric_scale > 0
                THEN '(' || v_colrec.numeric_precision || ',' || v_colrec.numeric_scale || ')'
                ELSE ''
                END
            || ' '
            || CASE WHEN v_colrec.is_nullable = 'NO'
                THEN 'NOT NULL'
                ELSE 'NULL'
                END
            || CASE WHEN v_colrec.column_default IS NOT null
                THEN (' DEFAULT ' || v_colrec.column_default)
                ELSE ''
                END
            || ',' || E'\n';
    END LOOP;

    -- define all of the constraints in the table; https://www.postgresql.org/docs/9.1/catalog-pg-constraint.html && https://dba.stackexchange.com/a/214877/75296
    FOR v_constraintrec IN
        SELECT con.conname as constraint_name,
            con.contype as constraint_type,
            CASE
                WHEN con.contype = 'p' THEN 1 -- primary key constraint
                WHEN con.contype = 'u' THEN 2 -- unique constraint
                WHEN con.contype = 'f' THEN 3 -- foreign key constraint
                WHEN con.contype = 'c' THEN 4
                ELSE 5
            END as type_rank,
            pg_get_constraintdef(con.oid) as constraint_definition
        FROM pg_catalog.pg_constraint con
            JOIN pg_catalog.pg_class rel ON rel.oid = con.conrelid
            JOIN pg_catalog.pg_namespace nsp ON nsp.oid = connamespace
        WHERE nsp.nspname = in_schema
            AND rel.relname = in_table
        ORDER BY type_rank
    LOOP
        -- Issue#85 fix
        -- constraintarr := constraintarr || v_constraintrec.constraint_name;
        constraintarr := constraintarr || v_constraintrec.constraint_name::text;
        IF v_constraintrec.type_rank = 1 THEN
            v_primary := True;
            v_constraint_name := v_constraintrec.constraint_name;
        END IF;
        IF NOT bfkeys AND v_constraintrec.constraint_type = 'f' THEN
            continue;
        END IF;
        v_table_ddl := v_table_ddl || '  ' -- note: two char spacer to start, to indent the column
            || 'CONSTRAINT' || ' '
            || v_constraintrec.constraint_name || ' '
            || v_constraintrec.constraint_definition
            || ',' || E'\n';
    END LOOP;

    -- drop the last comma before ending the create statement
    v_table_ddl = substr(v_table_ddl, 0, length(v_table_ddl) - 1) || E'\n';

    -- end the create table def but add inherits clause if valid
    IF bPartitioned and bInheritance THEN
        v_table_ddl := v_table_ddl || ') INHERITS (' || in_schema || '.' || v_parent || ') ' || v_relopts || ' ' || v_tablespace || ';' || E'\n';
    ELSIF v_pgversion >= 100000 AND bPartitioned and NOT bInheritance THEN
        -- See if this is a partitioned table (pg_class.relkind = 'p') and add the partitioned key
        SELECT pg_get_partkeydef (c1.oid) AS partition_key INTO v_partition_key
        FROM pg_class c1
            JOIN pg_namespace n ON (n.oid = c1.relnamespace)
            LEFT JOIN pg_partitioned_table p ON (c1.oid = p.partrelid)
        WHERE n.nspname = in_schema
            AND n.oid = c1.relnamespace
            AND c1.relname = in_table
            AND c1.relkind = 'p';
    END IF;
    IF v_partition_key IS NOT NULL AND v_partition_key <> '' THEN
        -- add partition clause
        -- NOTE: cannot specify default tablespace for partitioned relations
        v_table_ddl := v_table_ddl || ') PARTITION BY ' || v_partition_key || ';' || E'\n';
    ELSIF bPartitioned AND not bInheritance THEN
        IF v_relopts <> '' THEN
            v_table_ddl := 'CREATE TABLE ' || in_schema || '.' || in_table || ' PARTITION OF ' || in_schema || '.' || v_parent || ' ' || v_partbound || v_relopts || ' ' || v_tablespace || '; ' || E'\n';
        ELSE
            v_table_ddl := 'CREATE TABLE ' || in_schema || '.' || in_table || ' PARTITION OF ' || in_schema || '.' || v_parent || ' ' || v_partbound || ' ' || v_tablespace || '; ' || E'\n';
        END IF;
    ELSIF bPartitioned and bInheritance THEN
        -- we already did this above
        v_table_ddl := v_table_ddl;
    ELSIF v_relopts <> '' THEN
        v_table_ddl := v_table_ddl || ') ' || v_relopts || ' ' || v_tablespace || ';' || E'\n';
    ELSE
        v_table_ddl := v_table_ddl || ') ' || v_tablespace || ';' || E'\n';
    END IF;

    -- suffix create statement with all of the indexes on the table
    FOR v_indexrec IN
        SELECT indexdef, indexname
        FROM pg_indexes
        WHERE (schemaname, tablename) = (in_schema, in_table)
    LOOP
        -- Issue#83 fix: loop through constraints and skip ones already defined
        bSkip = False;
        FOREACH constraintelement IN ARRAY constraintarr
        LOOP
            IF constraintelement = v_indexrec.indexname THEN
                bSkip = True;
                EXIT;
            END IF;
        END LOOP;
        if bSkip THEN CONTINUE; END IF;
        v_table_ddl := v_table_ddl || v_indexrec.indexdef || ';' || E'\n';
    END LOOP;

    -- reset search_path back to what it was
    IF v_src_path_old = '' THEN
        SELECT set_config('search_path', '', false) into v_dummy;
    ELSE
        EXECUTE 'SET search_path = ' || v_src_path_old;
    END IF;
    -- RAISE NOTICE 'DEBUG tableddl: reset search_path back to ***%***', v_src_path_old;

    -- return the ddl
    RETURN v_table_ddl;
END;
$$;

-- Function: clone_schema(text, text, boolean, boolean, boolean)
-- DROP FUNCTION clone_schema(text, text, boolean, boolean, boolean);
DROP FUNCTION IF EXISTS public.clone_schema(text, text, boolean, boolean);

CREATE OR REPLACE FUNCTION public.clone_schema(
    source_schema text,
    dest_schema text,
    VARIADIC arr public.cloneparms[] DEFAULT '{}'::public.cloneparms[])
RETURNS void AS
$BODY$
-- This function will clone all sequences, tables, data, views & functions from any existing schema to a new one
-- SAMPLE CALL:
-- SELECT clone_schema('sample', 'sample_clone2');
DECLARE
    src_oid oid;
    tbl_oid oid;
    func_oid oid;
    object text;
    buffer text;
    buffer2 text;
    buffer3 text;
    srctbl text;
    aname text;
    default_ text;
    column_ text;
    qry text;
    ix_old_name text;
    ix_new_name text;
    relpersist text;
    udt_name text;
    bRelispart bool;
    bChild bool;
    relknd text;
    data_type text;
    ocomment text;
    adef text;
    dest_qry text;
    v_def text;
    part_range text;
    src_path_old text;
    src_path_new text;
    aclstr text;
    -- issue#80 initialize arrays properly
    tblarray text[] := '{}';
    tblarray2 text[] := '{}';
    tblelement text;
    grantor text;
    grantee text;
    privs text;
    seqval bigint;
    sq_last_value bigint;
    sq_max_value bigint;
    sq_start_value bigint;
    sq_increment_by bigint;
    sq_min_value bigint;
    sq_cache_value bigint;
    sq_is_called boolean := True;
    sq_is_cycled boolean;
    is_prokind boolean;
    abool boolean;
    sq_data_type text;
    sq_cycled char(10);
    sq_owned text;
    sq_version text;
    sq_server_version text;
    sq_server_version_num integer;
    bWindows boolean;
    arec RECORD;
    cnt integer;
    cnt2 integer;
    cnt3 integer;
    pos integer;
    tblscopied integer := 0;
    l_child integer;
    action text := 'N/A';
    tblname text;
    v_ret text;
    v_diag1 text;
    v_diag2 text;
    v_diag3 text;
    v_diag4 text;
    v_diag5 text;
    v_diag6 text;
    v_dummy text;
    -- issue#86 fix
    isGenerated text;
    -- issue#91 fix
    tblowner text;
    func_owner text;
    func_name text;
    func_args text;
    func_argno integer;
    view_owner text;
    -- issue#92
    calleruser text;
    -- issue#94
    bData boolean := False;
    bDDLOnly boolean := False;
    bVerbose boolean := False;
    bDebug boolean := False;
    bNoACL boolean := False;
    bNoOwner boolean := False;
    arglen integer;
    vargs text;
    avarg public.cloneparms;
    -- issue#98
    mvarray text[] := '{}';
    mvscopied integer := 0;
    -- issue#99 tablespaces
    tblspace text;
    t timestamptz := clock_timestamp();
    r timestamptz;
    s timestamptz;
    v_version text := '1.15  December 22, 2022';
BEGIN
    -- Make sure NOTICE are shown
    SET client_min_messages = 'notice';
    RAISE NOTICE 'clone_schema version %', v_version;

    IF 'DEBUG' = ANY ($3) THEN bDebug = True; END IF;
    IF 'VERBOSE' = ANY ($3) THEN bVerbose = True; END IF;

    -- IF bVerbose THEN RAISE NOTICE 'START: %',clock_timestamp() - t; END IF;

    arglen := array_length($3, 1);
    IF arglen IS NULL THEN
        -- nothing to do, so defaults are assumed
        NULL;
    ELSE
        -- loop thru args
        -- IF 'NO_TRIGGERS' = ANY ($3)
        -- select array_to_string($3, ',', '***') INTO vargs;
        IF bDebug THEN RAISE NOTICE 'arguments=%', $3; END IF;
        FOREACH avarg IN ARRAY $3
        LOOP
            IF bDebug THEN RAISE NOTICE 'arg=%', avarg; END IF;
            IF avarg = 'DATA' THEN
                bData = True;
            ELSEIF avarg = 'NODATA' THEN
                -- already set to that by default
                bData = False;
            ELSEIF avarg = 'DDLONLY' THEN
                bDDLOnly = True;
            ELSEIF avarg = 'NOACL' THEN
                bNoACL = True;
            ELSEIF avarg = 'NOOWNER' THEN
                bNoOwner = True;
            END IF;
        END LOOP;
        IF bData and bDDLOnly THEN
            RAISE WARNING 'You can only specify DDLONLY or DATA, but not both.';
            RETURN;
        END IF;
    END IF;

    -- Get server version info to handle certain things differently based on the version.
    SELECT setting INTO sq_server_version
    FROM pg_settings
    WHERE name = 'server_version';
    SELECT version() INTO sq_version;

    IF POSITION('compiled by Visual C++' IN sq_version) > 0 THEN
        bWindows = True;
        RAISE NOTICE 'Windows: %', sq_version;
    ELSE
        bWindows = False;
        RAISE NOTICE 'Linux: %', sq_version;
    END IF;
    SELECT setting INTO sq_server_version_num
    FROM pg_settings
    WHERE name = 'server_version_num';

    IF sq_server_version_num < 100000 THEN
        RAISE WARNING 'Server Version:%  Number:%  PG Versions older than v10 are not supported.', sq_server_version, sq_server_version_num;
        RETURN;
    END IF;

    -- Check that source_schema exists
    SELECT oid INTO src_oid
    FROM pg_namespace
    WHERE nspname = quote_ident(source_schema);

    IF NOT FOUND THEN
        RAISE NOTICE ' source schema % does not exist!', source_schema;
        RETURN;
    END IF;

    -- Check for case-sensitive target schemas and reject them for now.
    SELECT lower(dest_schema) = dest_schema INTO abool;
    IF not abool THEN
        RAISE NOTICE 'Case-sensitive target schemas are not supported at this time.';
        RETURN;
    END IF;

    -- Check that dest_schema does not yet exist
    PERFORM nspname
    FROM pg_namespace
    WHERE nspname = quote_ident(dest_schema);

    IF FOUND THEN
        RAISE NOTICE ' dest schema % already exists!', dest_schema;
        RETURN;
    END IF;
    IF bDDLOnly and bData THEN
        RAISE WARNING 'You cannot specify to clone data and generate ddl at the same time.';
        RETURN;
    END IF;

    -- Issue#92
    SELECT current_user into calleruser;

    -- Set the search_path to source schema. Before exiting set it back to what it was before.
    -- In order to avoid issues with the special schema name "$user" that may be
    -- returned unquoted by some applications, we ensure it remains double quoted.
    -- MJV FIX: #47
    SELECT setting INTO v_dummy FROM pg_settings WHERE name = 'search_path';
    IF bDebug THEN RAISE NOTICE 'search_path=%', v_dummy; END IF;
    SELECT REPLACE(REPLACE(setting, '"$user"', '$user'), '$user', '"$user"') INTO src_path_old
    FROM pg_settings
    WHERE name = 'search_path';
    IF bDebug THEN RAISE NOTICE 'src_path_old=%', src_path_old; END IF;

    EXECUTE 'SET search_path = ' || quote_ident(source_schema);
    SELECT setting INTO src_path_new FROM pg_settings WHERE name = 'search_path';
    IF bDebug THEN RAISE NOTICE 'new search_path=%', src_path_new; END IF;

    -- Validate required types exist. If not, create them.
    SELECT a.objtypecnt, b.permtypecnt INTO cnt, cnt2
    FROM (
        SELECT count(*) AS objtypecnt
        FROM pg_catalog.pg_type t
            LEFT JOIN pg_catalog.pg_namespace n ON n.oid = t.typnamespace
        WHERE (t.typrelid = 0 OR (SELECT c.relkind = 'c' FROM pg_catalog.pg_class c WHERE c.oid = t.typrelid))
            AND NOT EXISTS (SELECT 1 FROM pg_catalog.pg_type el WHERE el.oid = t.typelem AND el.typarray = t.oid)
            AND n.nspname <> 'pg_catalog'
            AND n.nspname <> 'information_schema'
            AND pg_catalog.pg_type_is_visible(t.oid)
            AND pg_catalog.format_type(t.oid, NULL) = 'obj_type') a,
    (
        SELECT count(*) AS permtypecnt
        FROM pg_catalog.pg_type t
            LEFT JOIN pg_catalog.pg_namespace n ON n.oid = t.typnamespace
        WHERE (t.typrelid = 0 OR (SELECT c.relkind = 'c' FROM pg_catalog.pg_class c WHERE c.oid = t.typrelid))
            AND NOT EXISTS (SELECT 1 FROM pg_catalog.pg_type el WHERE el.oid = t.typelem AND el.typarray = t.oid)
            AND n.nspname <> 'pg_catalog'
            AND n.nspname <> 'information_schema'
            AND pg_catalog.pg_type_is_visible(t.oid)
            AND pg_catalog.format_type(t.oid, NULL) = 'perm_type') b;
    IF cnt = 0 THEN
        CREATE TYPE obj_type AS ENUM ('TABLE','VIEW','COLUMN','SEQUENCE','FUNCTION','SCHEMA','DATABASE');
    END IF;
    IF cnt2 = 0 THEN
        CREATE TYPE perm_type AS ENUM ('SELECT','INSERT','UPDATE','DELETE','TRUNCATE','REFERENCES','TRIGGER','USAGE','CREATE','EXECUTE','CONNECT','TEMPORARY');
    END IF;

    -- Issue#95
    SELECT pg_catalog.pg_get_userbyid(nspowner) INTO buffer
    FROM pg_namespace
    WHERE nspname = quote_ident(source_schema);

    IF bDDLOnly THEN
        RAISE NOTICE ' Only generating DDL, not actually creating anything...';
        -- issue#95
        IF bNoOwner THEN
            RAISE INFO 'CREATE SCHEMA %;', quote_ident(dest_schema);
        ELSE
            RAISE INFO 'CREATE SCHEMA % AUTHORIZATION %;', quote_ident(dest_schema), buffer;
        END IF;
        RAISE NOTICE 'SET search_path=%;', quote_ident(dest_schema);
    ELSE
        -- issue#95
        IF bNoOwner THEN
            EXECUTE 'CREATE SCHEMA ' || quote_ident(dest_schema);
        ELSE
            EXECUTE 'CREATE SCHEMA ' || quote_ident(dest_schema) || ' AUTHORIZATION ' || buffer;
        END IF;
    END IF;

    -- Do system table validations for subsequent system table queries
    -- Issue#65 Fix
    SELECT count(*) into cnt
    FROM pg_attribute
    WHERE attrelid = 'pg_proc'::regclass AND attname = 'prokind';

    IF cnt = 0 THEN
        is_prokind = False;
    ELSE
        is_prokind = True;
    END IF;

    -- MV: Create Collations
    action := 'Collations';
    cnt := 0;
    -- Issue#96 Handle differently based on PG Versions (PG15 relies on colliculocale, not collcollate)
    -- perhaps use this logic instead: COALESCE(c.collcollate, c.colliculocale) AS lc_collate, COALESCE(c.collctype, c.colliculocale) AS lc_type
    IF sq_server_version_num < 100000 THEN
        RAISE NOTICE '  Collation cloning is not supported in PG versions older than v10. Current version is %-%', sq_server_version, sq_server_version_num;
    ELSEIF sq_server_version_num > 150000 THEN
        FOR arec IN
            SELECT n.nspname AS schemaname, a.rolname AS ownername, c.collname, c.collprovider, c.collcollate AS locale,
                'CREATE COLLATION ' || quote_ident(dest_schema) || '."' || c.collname || '" (provider = '
                || CASE WHEN c.collprovider = 'i' THEN 'icu' WHEN c.collprovider = 'c' THEN 'libc' ELSE '' END
                || ', locale = ''' || c.colliculocale || ''');' AS COLL_DDL
            FROM pg_collation c
                JOIN pg_namespace n ON (c.collnamespace = n.oid)
                JOIN pg_roles a ON (c.collowner = a.oid)
            WHERE n.nspname = quote_ident(source_schema)
            ORDER BY c.collname
        LOOP
            BEGIN
                cnt := cnt + 1;
                IF bDDLOnly THEN
                    RAISE INFO '%', arec.coll_ddl;
                ELSE
                    EXECUTE arec.coll_ddl;
                END IF;
            END;
        END LOOP;
    ELSE
        FOR arec IN
            SELECT n.nspname AS schemaname, a.rolname AS ownername, c.collname, c.collprovider, c.collcollate AS locale,
                'CREATE COLLATION ' || quote_ident(dest_schema) || '."' || c.collname || '" (provider = '
                || CASE WHEN c.collprovider = 'i' THEN 'icu' WHEN c.collprovider = 'c' THEN 'libc' ELSE '' END
                || ', locale = ''' || c.collcollate || ''');' AS COLL_DDL
            FROM pg_collation c
                JOIN pg_namespace n ON (c.collnamespace = n.oid)
                JOIN pg_roles a ON (c.collowner = a.oid)
            WHERE n.nspname = quote_ident(source_schema)
            ORDER BY c.collname
        LOOP
            BEGIN
                cnt := cnt + 1;
                IF bDDLOnly THEN
                    RAISE INFO '%', arec.coll_ddl;
                ELSE
                    EXECUTE arec.coll_ddl;
                END IF;
            END;
        END LOOP;
    END IF;
    RAISE NOTICE '  COLLATIONS cloned: %', LPAD(cnt::text, 5, ' ');

    -- MV: Create Domains
    action := 'Domains';
    cnt := 0;
    FOR arec IN
        SELECT n.nspname AS "Schema", t.typname AS "Name",
            pg_catalog.format_type(t.typbasetype, t.typtypmod) AS "Type",
            (SELECT c.collname
             FROM pg_catalog.pg_collation c, pg_catalog.pg_type bt
             WHERE c.oid = t.typcollation
                 AND bt.oid = t.typbasetype
                 AND t.typcollation <> bt.typcollation) AS "Collation",
            CASE WHEN t.typnotnull THEN 'not null' END AS "Nullable",
            t.typdefault AS "Default",
            pg_catalog.array_to_string(ARRAY (
                SELECT pg_catalog.pg_get_constraintdef(r.oid, TRUE)
                FROM pg_catalog.pg_constraint r
                -- Issue#78 FIX: handle case-sensitive names with quote_ident() on t.typname
                WHERE t.oid = r.contypid), ' ') AS "Check",
            'CREATE DOMAIN ' || quote_ident(dest_schema) || '.' || quote_ident(t.typname) || ' AS '
            || pg_catalog.format_type(t.typbasetype, t.typtypmod)
            || CASE WHEN t.typnotnull IS NOT NULL THEN ' NOT NULL ' ELSE ' ' END
            || CASE WHEN t.typdefault IS NOT NULL THEN 'DEFAULT ' || t.typdefault || ' ' ELSE ' ' END
            || pg_catalog.array_to_string(ARRAY (
                SELECT pg_catalog.pg_get_constraintdef(r.oid, TRUE)
                FROM pg_catalog.pg_constraint r
                WHERE t.oid = r.contypid), ' ') || ';' AS DOM_DDL
        FROM pg_catalog.pg_type t
            LEFT JOIN pg_catalog.pg_namespace n ON n.oid = t.typnamespace
        WHERE t.typtype = 'd'
            AND n.nspname = quote_ident(source_schema)
            AND pg_catalog.pg_type_is_visible(t.oid)
        ORDER BY 1, 2
    LOOP
        BEGIN
            cnt := cnt + 1;
            IF bDDLOnly THEN
                RAISE INFO '%', arec.dom_ddl;
            ELSE
                EXECUTE arec.dom_ddl;
            END IF;
        END;
    END LOOP;
    RAISE NOTICE '  DOMAINS cloned: %', LPAD(cnt::text, 5, ' ');

    -- MV: Create types
    action := 'Types';
    cnt := 0;
    FOR arec IN
        SELECT c.relkind, n.nspname AS schemaname, t.typname AS typname, t.typcategory,
            pg_catalog.pg_get_userbyid(t.typowner) AS owner,
            CASE WHEN t.typcategory = 'C' THEN
                'CREATE TYPE ' || quote_ident(dest_schema) || '.' || t.typname || ' AS ('
                || array_to_string(array_agg(a.attname || ' ' || pg_catalog.format_type(a.atttypid, a.atttypmod) ORDER BY c.relname, a.attnum), ', ') || ');'
            WHEN t.typcategory = 'E' THEN
                'CREATE TYPE ' || quote_ident(dest_schema) || '.' || t.typname || ' AS ENUM ('
                || REPLACE(quote_literal(array_to_string(array_agg(e.enumlabel ORDER BY e.enumsortorder), ',')), ',', ''',''') || ');'
            ELSE ''
            END AS type_ddl
        FROM pg_type t
            JOIN pg_namespace n ON (n.oid = t.typnamespace)
            LEFT JOIN pg_enum e ON (t.oid = e.enumtypid)
            LEFT JOIN pg_class c ON (c.reltype = t.oid)
            LEFT JOIN pg_attribute a ON (a.attrelid = c.oid)
        WHERE n.nspname = quote_ident(source_schema)
            AND (c.relkind IS NULL OR c.relkind = 'c')
            AND t.typcategory IN ('C', 'E')
        GROUP BY 1, 2, 3, 4, 5
        ORDER BY n.nspname, t.typcategory, t.typname
    LOOP
        BEGIN
            cnt := cnt + 1;
            -- Keep composite and enum types in separate branches for fine tuning later if needed.
            IF arec.typcategory = 'E' THEN
                IF bDDLOnly THEN
                    RAISE INFO '%', arec.type_ddl;
                    --issue#95
                    IF NOT bNoOwner THEN
                        RAISE INFO 'ALTER TYPE % OWNER TO %;', quote_ident(dest_schema) || '.' || arec.typname, arec.owner;
                    END IF;
                ELSE
                    EXECUTE arec.type_ddl;
                    --issue#95
                    IF NOT bNoOwner THEN
                        EXECUTE 'ALTER TYPE ' || quote_ident(dest_schema) || '.' || arec.typname || ' OWNER TO ' || arec.owner;
                    END IF;
                END IF;
            ELSIF arec.typcategory = 'C' THEN
                IF bDDLOnly THEN
                    RAISE INFO '%', arec.type_ddl;
                    --issue#95
                    IF NOT bNoOwner THEN
                        RAISE INFO 'ALTER TYPE % OWNER TO %;', quote_ident(dest_schema) || '.' || arec.typname, arec.owner;
                    END IF;
                ELSE
                    EXECUTE arec.type_ddl;
                    --issue#95
                    IF NOT bNoOwner THEN
                        EXECUTE 'ALTER TYPE ' || quote_ident(dest_schema) || '.' || arec.typname || ' OWNER TO ' || arec.owner;
                    END IF;
                END IF;
            ELSE
                RAISE NOTICE '  Unhandled type:%-%', arec.typcategory, arec.typname;
            END IF;
        END;
    END LOOP;
    RAISE NOTICE '  TYPES cloned: %', LPAD(cnt::text, 5, ' ');

    -- Create sequences
    action := 'Sequences';
    cnt := 0;
    -- fix#63 get from pg_sequences not information_schema
    -- fix#63 take 2: get it from information_schema.sequences since we need to treat IDENTITY columns differently.
    -- fix#95 get owner as well by joining to pg_sequences
    FOR object, buffer IN
        SELECT s1.sequence_name::text, s2.sequenceowner
        FROM information_schema.sequences s1
            JOIN pg_sequences s2 ON (s1.sequence_schema = s2.schemaname AND s1.sequence_name = s2.sequencename)
            AND s1.sequence_schema = quote_ident(source_schema)
    LOOP
        cnt := cnt + 1;
        IF bDDLOnly THEN
            -- issue#95
            RAISE INFO '%', 'CREATE SEQUENCE ' || quote_ident(dest_schema) || '.' || quote_ident(object) || ';';
            IF NOT bNoOwner THEN
                RAISE INFO '%', 'ALTER SEQUENCE ' || quote_ident(dest_schema) || '.' || quote_ident(object) || ' OWNER TO ' || buffer || ';';
            END IF;
        ELSE
            EXECUTE 'CREATE SEQUENCE ' || quote_ident(dest_schema) || '.' || quote_ident(object);
            -- issue#95
            IF NOT bNoOwner THEN
                EXECUTE 'ALTER SEQUENCE ' || quote_ident(dest_schema) || '.' || quote_ident(object) || ' OWNER TO ' || buffer;
            END IF;
        END IF;
        srctbl := quote_ident(source_schema) || '.' || quote_ident(object);

        IF sq_server_version_num < 100000 THEN
            EXECUTE 'SELECT last_value, is_called FROM ' || quote_ident(source_schema) || '.' || quote_ident(object) || ';'
            INTO sq_last_value, sq_is_called;

            EXECUTE 'SELECT maximum_value, start_value, increment, minimum_value, 1 cache_size, cycle_option, data_type
                FROM information_schema.sequences WHERE sequence_schema=' || quote_literal(source_schema) || ' AND sequence_name=' || quote_literal(object) || ';'
            INTO sq_max_value, sq_start_value, sq_increment_by, sq_min_value, sq_cache_value, sq_is_cycled, sq_data_type;

            IF sq_is_cycled THEN
                sq_cycled := 'CYCLE';
            ELSE
                sq_cycled := 'NO CYCLE';
            END IF;

            qry := 'ALTER SEQUENCE ' || quote_ident(dest_schema) || '.' || quote_ident(object)
                || ' INCREMENT BY ' || sq_increment_by
                || ' MINVALUE ' || sq_min_value
                || ' MAXVALUE ' || sq_max_value
                -- will update current sequence value after this
                || ' START WITH ' || sq_start_value
                || ' RESTART ' || sq_min_value
                || ' CACHE ' || sq_cache_value
                || ' ' || sq_cycled || ' ;';
        ELSE
            EXECUTE 'SELECT max_value, start_value, increment_by, min_value, cache_size, cycle, data_type, COALESCE(last_value, 1)
                FROM pg_catalog.pg_sequences WHERE schemaname=' || quote_literal(source_schema) || ' AND sequencename=' || quote_literal(object) || ';'
            INTO sq_max_value, sq_start_value, sq_increment_by, sq_min_value, sq_cache_value, sq_is_cycled, sq_data_type, sq_last_value;

            IF sq_is_cycled THEN
                sq_cycled := 'CYCLE';
            ELSE
                sq_cycled := 'NO CYCLE';
            END IF;

            qry := 'ALTER SEQUENCE ' || quote_ident(dest_schema) || '.' || quote_ident(object)
                || ' AS ' || sq_data_type
                || ' INCREMENT BY ' || sq_increment_by
                || ' MINVALUE ' || sq_min_value
                || ' MAXVALUE ' || sq_max_value
                -- will update current sequence value after this
                || ' START WITH ' || sq_start_value
                || ' RESTART ' || sq_min_value
                || ' CACHE ' || sq_cache_value
                || ' ' || sq_cycled || ' ;';
        END IF;

        IF bDDLOnly THEN
            RAISE INFO '%', qry;
        ELSE
            EXECUTE qry;
        END IF;

        buffer := quote_ident(dest_schema) || '.' || quote_ident(object);
        IF bData THEN
            EXECUTE 'SELECT setval( ''' || buffer || ''', ' || sq_last_value || ', ' || sq_is_called || ');';
        ELSE
            if bDDLOnly THEN
                -- fix#63
                -- RAISE INFO '%', 'SELECT setval( ''' || buffer || ''', ' || sq_start_value || ', ' || sq_is_called || ');' ;
                RAISE INFO '%', 'SELECT setval( ''' || buffer || ''', ' || sq_last_value || ', ' || sq_is_called || ');';
            ELSE
                -- fix#63
                -- EXECUTE 'SELECT setval( ''' || buffer || ''', ' || sq_start_value || ', ' || sq_is_called || ');' ;
                EXECUTE 'SELECT setval( ''' || buffer || ''', ' || sq_last_value || ', ' || sq_is_called || ');';
            END IF;
        END IF;
    END LOOP;
    RAISE NOTICE '  SEQUENCES cloned: %', LPAD(cnt::text, 5, ' ');

    -- Create tables including partitioned ones (parent/children) and unlogged ones. Order by is critical since child partition range logic is dependent on it.
    action := 'Tables';
    SELECT setting INTO v_dummy FROM pg_settings WHERE name = 'search_path';
    IF bDebug THEN RAISE NOTICE 'search_path=%', v_dummy; END IF;
    cnt := 0;
    -- Issue#61 FIX: use set_config for empty string
    -- SET search_path = '';
    SELECT set_config('search_path', '', false) into v_dummy;
    IF bDebug THEN RAISE NOTICE 'setting search_path to empty string:%', v_dummy; END IF;
    -- Fix#86 add isgenerated to column list
    -- Fix#91 add tblowner for setting the table ownership to that of the source
    -- Fix#99 added join to pg_tablespace
    FOR tblname, relpersist, bRelispart, relknd, data_type, udt_name, ocomment, l_child, isGenerated, tblowner, tblspace IN
        -- 2021-03-08 MJV #39 fix: change sql to get indicator of user-defined columns to issue warnings
        -- select c.relname, c.relpersistence, c.relispartition, c.relkind
        -- FROM pg_class c, pg_namespace n where n.oid = c.relnamespace and n.nspname = quote_ident(source_schema) and c.relkind in ('r','p') and
        -- order by c.relkind desc, c.relname
        -- Fix#65 add another left join to distinguish child tables by inheritance
        -- Fix#86 add is_generated to column select
        -- Fix#91 add tblowner to the select
        SELECT DISTINCT c.relname, c.relpersistence, c.relispartition, c.relkind, co.data_type, co.udt_name,
            obj_description(c.oid), i.inhrelid,
            COALESCE(co.is_generated, ''),
            pg_catalog.pg_get_userbyid(c.relowner) as "Owner",
            CASE WHEN reltablespace = 0 THEN 'pg_default' ELSE ts.spcname END as tablespace
        FROM pg_class c
            JOIN pg_namespace n ON (n.oid = c.relnamespace
                AND n.nspname = quote_ident(source_schema)
                AND c.relkind IN ('r', 'p'))
            LEFT JOIN information_schema.columns co ON (co.table_schema = n.nspname
                AND co.table_name = c.relname
                -- AND co.data_type = 'USER-DEFINED')
                AND (co.data_type = 'USER-DEFINED' OR co.is_generated = 'ALWAYS'))
            LEFT JOIN pg_inherits i ON (c.oid = i.inhrelid)
            -- issue#99 added join
            LEFT JOIN pg_tablespace ts ON (c.reltablespace = ts.oid)
        ORDER BY c.relkind DESC, c.relname
    LOOP
        cnt := cnt + 1;
        IF l_child IS NULL THEN
            bChild := False;
        ELSE
            bChild := True;
        END IF;
        -- RAISE NOTICE 'table=% bRelispart=% relkind=% bChild=%',tblname, bRelispart, relknd, bChild;

        IF data_type = 'USER-DEFINED' THEN
            -- RAISE NOTICE ' Table (%) has column(s) with user-defined types so using get_table_ddl() instead of CREATE TABLE LIKE construct.',tblname;
            cnt := cnt;
        END IF;
        buffer := quote_ident(dest_schema) || '.'
|| quote_ident(tblname); buffer2 := ''; IF relpersist = 'u' THEN buffer2 := 'UNLOGGED '; END IF; IF relknd = 'r' THEN IF bDDLOnly THEN IF data_type = 'USER-DEFINED' THEN -- FIXED #65, #67 -- SELECT * INTO buffer3 FROM public.pg_get_tabledef(quote_ident(source_schema), tblname); SELECT * INTO buffer3 FROM public.get_table_ddl(quote_ident(source_schema), tblname, False); buffer3 := REPLACE(buffer3, quote_ident(source_schema) || '.', quote_ident(dest_schema) || '.'); RAISE INFO '%', buffer3; -- issue#91 fix -- issue#95 IF NOT bNoOwner THEN RAISE INFO 'ALTER TABLE IF EXISTS % OWNER TO %;', quote_ident(dest_schema) || '.' || tblname, tblowner; END IF; ELSE IF NOT bChild THEN RAISE INFO '%', 'CREATE ' || buffer2 || 'TABLE ' || buffer || ' (LIKE ' || quote_ident(source_schema) || '.' || quote_ident(tblname) || ' INCLUDING ALL);'; -- issue#91 fix -- issue#95 IF NOT bNoOwner THEN RAISE INFO 'ALTER TABLE IF EXISTS % OWNER TO %;', quote_ident(dest_schema) || '.' || tblname, tblowner; END IF; -- issue#99 IF tblspace <> 'pg_default' THEN -- replace with user-defined tablespace -- ALTER TABLE myschema.mytable SET TABLESPACE usrtblspc; RAISE INFO 'ALTER TABLE IF EXISTS % SET TABLESPACE %;', quote_ident(dest_schema) || '.' || tblname, tblspace; END IF; ELSE -- FIXED #65, #67 -- SELECT * INTO buffer3 FROM public.pg_get_tabledef(quote_ident(source_schema), tblname); SELECT * INTO buffer3 FROM public.get_table_ddl(quote_ident(source_schema), tblname, False); buffer3 := REPLACE(buffer3, quote_ident(source_schema) || '.', quote_ident(dest_schema) || '.'); RAISE INFO '%', buffer3; -- issue#91 fix -- issue#95 IF NOT bNoOwner THEN RAISE INFO 'ALTER TABLE IF EXISTS % OWNER TO %;', quote_ident(dest_schema) || '.' || tblname, tblowner; END IF; END IF; END IF; ELSE IF data_type = 'USER-DEFINED' THEN -- FIXED #65, #67 -- SELECT * INTO buffer3 FROM public.pg_get_tabledef(quote_ident(source_schema), tblname); SELECT * INTO buffer3 FROM public.get_table_ddl(quote_ident(source_schema), tblname, False); buffer3 := REPLACE(buffer3, quote_ident(source_schema) || '.', quote_ident(dest_schema) || '.'); IF bDebug THEN RAISE NOTICE 'tabledef01:%', buffer3; END IF; -- #82: Table def should be fully qualified with target schema, -- so just make search path = public to handle extension types that should reside in public schema v_dummy = 'public'; SELECT set_config('search_path', v_dummy, false) into v_dummy; EXECUTE buffer3; -- issue#91 fix -- issue#95 IF NOT bNoOwner THEN buffer3 = 'ALTER TABLE IF EXISTS ' || quote_ident(dest_schema) || '.' || tblname || ' OWNER TO ' || tblowner; EXECUTE buffer3; END IF; ELSE IF (NOT bChild OR bRelispart) THEN buffer3 := 'CREATE ' || buffer2 || 'TABLE ' || buffer || ' (LIKE ' || quote_ident(source_schema) || '.' || quote_ident(tblname) || ' INCLUDING ALL)'; IF bDebug THEN RAISE NOTICE 'tabledef02:%', buffer3; END IF; EXECUTE buffer3; -- issue#91 fix -- issue#95 IF NOT bNoOwner THEN buffer3 = 'ALTER TABLE IF EXISTS ' || quote_ident(dest_schema) || '.' || quote_ident(tblname) || ' OWNER TO ' || tblowner; EXECUTE buffer3; END IF; -- issue#99 IF tblspace <> 'pg_default' THEN -- replace with user-defined tablespace -- ALTER TABLE myschema.mytable SET TABLESPACE usrtblspc; buffer3 = 'ALTER TABLE IF EXISTS ' || quote_ident(dest_schema) || '.' 
|| tblname || ' SET TABLESPACE ' || tblspace; EXECUTE buffer3; END IF; ELSE -- FIXED #65, #67 -- SELECT * INTO buffer3 FROM public.pg_get_tabledef(quote_ident(source_schema), tblname); SELECT * INTO buffer3 FROM public.get_table_ddl(quote_ident(source_schema), tblname, False); buffer3 := REPLACE(buffer3, quote_ident(source_schema) || '.', quote_ident(dest_schema) || '.'); -- set client_min_messages higher to avoid messages like this: -- NOTICE: merging column "city_id" with inherited definition set client_min_messages = 'WARNING'; IF bDebug THEN RAISE NOTICE 'tabledef03:%', buffer3; END IF; EXECUTE buffer3; -- issue#91 fix -- issue#95 IF NOT bNoOwner THEN buffer3 = 'ALTER TABLE IF EXISTS ' || quote_ident(dest_schema) || '.' || tblname || ' OWNER TO ' || tblowner; EXECUTE buffer3; END IF; -- reset it back, only get these for inheritance-based tables set client_min_messages = 'notice'; END IF; END IF; -- Add table comment. IF ocomment IS NOT NULL THEN EXECUTE 'COMMENT ON TABLE ' || buffer || ' IS ' || quote_literal(ocomment); END IF; END IF; ELSIF relknd = 'p' THEN -- define parent table and assume child tables have already been created based on top level sort order. SELECT 'CREATE TABLE ' || quote_ident(dest_schema) || '.' || pc.relname || E'(\n' || string_agg( pa.attname || ' ' || pg_catalog.format_type(pa.atttypid, pa.atttypmod) || coalesce( ' DEFAULT ' || ( SELECT pg_catalog.pg_get_expr(d.adbin, d.adrelid) FROM pg_catalog.pg_attrdef d WHERE d.adrelid = pa.attrelid AND d.adnum = pa.attnum AND pa.atthasdef ), '' ) || ' ' || CASE pa.attnotnull WHEN TRUE THEN 'NOT NULL' ELSE 'NULL' END, E',\n' ) || coalesce( ( SELECT E',\n' || string_agg( 'CONSTRAINT ' || pc1.conname || ' ' || pg_get_constraintdef(pc1.oid), E',\n' ORDER BY pc1.conindid ) FROM pg_constraint pc1 WHERE pc1.conrelid = pa.attrelid ), '' ) INTO buffer FROM pg_catalog.pg_attribute pa JOIN pg_catalog.pg_class pc ON pc.oid = pa.attrelid AND pc.relname = quote_ident(tblname) JOIN pg_catalog.pg_namespace pn ON pn.oid = pc.relnamespace AND pn.nspname = quote_ident(source_schema) WHERE pa.attnum > 0 AND NOT pa.attisdropped GROUP BY pn.nspname, pc.relname, pa.attrelid; -- append partition keyword to it SELECT pg_catalog.pg_get_partkeydef(c.oid::pg_catalog.oid) into buffer2 FROM pg_catalog.pg_class c LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace WHERE c.relname = quote_ident(tblname) COLLATE pg_catalog.default AND n.nspname = quote_ident(source_schema) COLLATE pg_catalog.default; qry := buffer || ') PARTITION BY ' || buffer2 || ';'; IF bDDLOnly THEN RAISE INFO '%', qry; -- issue#95 IF NOT bNoOwner THEN RAISE INFO 'ALTER TABLE IF EXISTS % OWNER TO %;', quote_ident(dest_schema) || '.' || quote_ident(tblname), tblowner; END IF; ELSE IF bDebug THEN RAISE NOTICE 'tabledef04:%', buffer3; END IF; EXECUTE qry; -- issue#91 fix -- issue#95 IF NOT bNoOwner THEN buffer3 = 'ALTER TABLE IF EXISTS ' || quote_ident(dest_schema) || '.' || quote_ident(tblname) || ' OWNER TO ' || tblowner; EXECUTE buffer3; END IF; END IF; -- loop for child tables and alter them to attach to parent for specific partition method. FOR aname, part_range, object IN SELECT quote_ident(dest_schema) || '.' || c1.relname as tablename, pg_catalog.pg_get_expr(c1.relpartbound, c1.oid) as partrange, quote_ident(dest_schema) || '.' 
|| c2.relname as object FROM pg_catalog.pg_class c1, pg_namespace n, pg_catalog.pg_inherits i, pg_class c2 WHERE n.nspname = quote_ident(source_schema) AND c1.relnamespace = n.oid AND c1.relkind = 'r' AND c1.relispartition AND c1.oid=i.inhrelid AND i.inhparent = c2.oid AND c2.relnamespace = n.oid ORDER BY pg_catalog.pg_get_expr(c1.relpartbound, c1.oid) = 'DEFAULT', c1.oid::pg_catalog.regclass::pg_catalog.text LOOP qry := 'ALTER TABLE ONLY ' || object || ' ATTACH PARTITION ' || aname || ' ' || part_range || ';'; -- issue#91, not sure if we need to do this for child tables -- issue#95 we don't set ownership here IF bDDLOnly THEN RAISE INFO '%', qry; IF NOT bNoOwner THEN NULL; END IF; ELSE EXECUTE qry; IF NOT bNoOwner THEN NULL; END IF; END IF; END LOOP; END IF; -- INCLUDING ALL creates new index names, we restore them to the old name. -- There should be no conflicts since they live in different schemas. FOR ix_old_name, ix_new_name IN SELECT old.indexname, new.indexname FROM pg_indexes old, pg_indexes new WHERE old.schemaname = source_schema AND new.schemaname = dest_schema AND old.tablename = new.tablename AND old.tablename = tblname AND old.indexname <> new.indexname AND regexp_replace(old.indexdef, E'.*USING','') = regexp_replace(new.indexdef, E'.*USING','') ORDER BY old.indexdef, new.indexdef LOOP IF bDDLOnly THEN RAISE INFO '%', 'ALTER INDEX ' || quote_ident(dest_schema) || '.' || quote_ident(ix_new_name) || ' RENAME TO ' || quote_ident(ix_old_name) || ';'; ELSE -- The SELECT query above may return duplicate names when a column is -- indexed twice in the same manner with 2 different names. Therefore, to -- avoid a 'relation "xxx" already exists' error we test if the index name -- is in use or free. Skipping an existing index will fall back on unused -- ones and every duplicate will be mapped to distinct old names. IF NOT EXISTS ( SELECT TRUE FROM pg_indexes WHERE schemaname = dest_schema AND tablename = tblname AND indexname = quote_ident(ix_old_name)) AND EXISTS ( SELECT TRUE FROM pg_indexes WHERE schemaname = dest_schema AND tablename = tblname AND indexname = quote_ident(ix_new_name)) THEN EXECUTE 'ALTER INDEX ' || quote_ident(dest_schema) || '.' || quote_ident(ix_new_name) || ' RENAME TO ' || quote_ident(ix_old_name) || ';'; END IF; END IF; END LOOP; IF bData THEN -- Insert records from source table -- 2021-03-03 MJV FIX buffer := dest_schema || '.' || quote_ident(tblname); -- 2020/06/18 - Issue #31 fix: add "OVERRIDING SYSTEM VALUE" for IDENTITY columns marked as GENERATED ALWAYS. select count(*) into cnt2 from pg_class c, pg_attribute a, pg_namespace n where a.attrelid = c.oid and c.relname = quote_ident(tblname) and n.oid = c.relnamespace and n.nspname = quote_ident(source_schema) and a.attidentity = 'a'; buffer3 := ''; IF cnt2 > 0 THEN buffer3 := ' OVERRIDING SYSTEM VALUE'; END IF; -- BUG for inserting rows from tables with user-defined columns -- INSERT INTO sample_clone.address OVERRIDING SYSTEM VALUE SELECT * FROM sample.address; -- ERROR: column "id2" is of type sample_clone.udt_myint but expression is of type udt_myint -- Issue#86 fix: -- IF data_type = 'USER-DEFINED' THEN IF bDebug THEN RAISE NOTICE 'includerecs branch table=% data_type=% isgenerated=%', tblname, data_type, isGenerated; END IF; IF data_type = 'USER-DEFINED' OR isGenerated = 'ALWAYS' THEN -- RAISE WARNING 'Bypassing copying rows for table (%) with user-defined data types.
You must copy them manually.', tblname; -- won't work --> INSERT INTO sample_clone1.address (id2, id3, addr) SELECT cast(id2 as sample_clone1.udt_myint), cast(id3 as sample_clone1.udt_myint), addr FROM sample.address; -- Issue#79 implementation follows -- COPY sample.statuses(id, s) TO '/tmp/statuses.txt' WITH DELIMITER AS ','; -- COPY sample_clone1.statuses FROM '/tmp/statuses.txt' (DELIMITER ',', NULL ''); IF bWindows THEN buffer2 := 'COPY ' || quote_ident(source_schema) || '.' || quote_ident(tblname) || ' TO ''C:\WINDOWS\TEMP\cloneschema.tmp'' WITH DELIMITER AS '','';'; tblarray2 := tblarray2 || buffer2; -- Issue #81 reformat COPY command for upload -- buffer2:= 'COPY ' || quote_ident(dest_schema) || '.' || quote_ident(tblname) || ' FROM ''C:\WINDOWS\TEMP\cloneschema.tmp'' (DELIMITER '','', NULL '''');'; buffer2 := 'COPY ' || quote_ident(dest_schema) || '.' || quote_ident(tblname) || ' FROM ''C:\WINDOWS\TEMP\cloneschema.tmp'' (DELIMITER '','', NULL ''\N'', FORMAT CSV);'; tblarray2 := tblarray2 || buffer2; ELSE buffer2 := 'COPY ' || quote_ident(source_schema) || '.' || quote_ident(tblname) || ' TO ''/tmp/cloneschema.tmp'' WITH DELIMITER AS '','';'; tblarray2 := tblarray2 || buffer2; -- Issue #81 reformat COPY command for upload -- buffer2 := 'COPY ' || quote_ident(dest_schema) || '.' || quote_ident(tblname) || ' FROM ''/tmp/cloneschema.tmp'' (DELIMITER '','', NULL '''');'; -- works--> COPY sample.timestamptbl2 FROM '/tmp/cloneschema.tmp' WITH (DELIMITER ',', NULL '\N', FORMAT CSV) ; buffer2 := 'COPY ' || quote_ident(dest_schema) || '.' || quote_ident(tblname) || ' FROM ''/tmp/cloneschema.tmp'' (DELIMITER '','', NULL ''\N'', FORMAT CSV);'; tblarray2 := tblarray2 || buffer2; END IF; ELSE -- bypass child tables since we populate them when we populate the parents -- RAISE NOTICE 'tblname=% bRelispart=% relknd=% l_child=% bChild=%', tblname, bRelispart, relknd, l_child, bChild; IF NOT bRelispart AND NOT bChild THEN -- Issue#75: Must defer population of tables until child tables have been added to parents buffer2 := 'INSERT INTO ' || buffer || buffer3 || ' SELECT * FROM ' || quote_ident(source_schema) || '.' || quote_ident(tblname) || ';'; tblarray := tblarray || buffer2; END IF; END IF; END IF; -- Issue#61 FIX: use set_config for empty string -- SET search_path = ''; SELECT set_config('search_path', '', false) into v_dummy; FOR column_, default_ IN SELECT column_name::text, REPLACE(column_default::text, quote_ident(source_schema) || '.', quote_ident(dest_schema) || '.') FROM information_schema.COLUMNS WHERE table_schema = source_schema AND TABLE_NAME = tblname AND column_default LIKE 'nextval(%' || quote_ident(source_schema) || '%::regclass)' LOOP -- Issue#78 FIX: handle case-sensitive names with quote_ident() on column name buffer2 = 'ALTER TABLE ' || buffer || ' ALTER COLUMN ' || quote_ident(column_) || ' SET DEFAULT ' || default_ || ';'; IF bDDLOnly THEN -- May need to come back and revisit this since the previous SQL will not return anything because no schema was created! RAISE INFO '%', buffer2; ELSE EXECUTE buffer2; END IF; END LOOP; EXECUTE 'SET search_path = ' || quote_ident(source_schema) ; END LOOP; RAISE NOTICE ' TABLES cloned: %', LPAD(cnt::text, 5, ' '); SELECT setting INTO v_dummy FROM pg_settings WHERE name = 'search_path'; IF bDebug THEN RAISE NOTICE 'search_path=%', v_dummy; END IF; -- Assigning sequences to table columns.
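/* The pass below restores the OWNED BY link between each cloned sequence and its owning column,
   so that dropping a cloned table also drops its sequence, mirroring the source schema.
   Example of the generated statement (hypothetical names, for illustration only):
     ALTER SEQUENCE clone1.orders_id_seq OWNED BY clone1.orders.id;
*/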
action := 'Sequences assigning'; cnt := 0; FOR object IN SELECT sequence_name::text FROM information_schema.sequences WHERE sequence_schema = quote_ident(source_schema) LOOP cnt := cnt + 1; srctbl := quote_ident(source_schema) || '.' || quote_ident(object); -- Get owning column, inspired from Sadique Ali post at: -- https://sadique.io/blog/2019/05/07/viewing-sequence-ownership-information-in-postgres/ SELECT ' OWNED BY ' || quote_ident(dest_schema) || '.' || quote_ident(dc.relname) || '.' || quote_ident(a.attname) INTO sq_owned FROM pg_class AS c JOIN pg_depend AS d ON (c.relfilenode = d.objid) JOIN pg_class AS dc ON (d.refobjid = dc.relfilenode) JOIN pg_attribute AS a ON ( a.attnum = d.refobjsubid AND a.attrelid = d.refobjid ) JOIN pg_namespace n ON c.relnamespace = n.oid WHERE n.nspname = quote_ident(source_schema) AND c.relkind = 'S' AND c.relname = object; IF sq_owned IS NOT NULL THEN qry := 'ALTER SEQUENCE ' || quote_ident(dest_schema) || '.' || quote_ident(object) || sq_owned || ';'; IF bDDLOnly THEN RAISE INFO '%', qry; ELSE EXECUTE qry; END IF; END IF; END LOOP; RAISE NOTICE ' SEQUENCES set: %', LPAD(cnt::text, 2, ' '); -- Update IDENTITY sequences to the last value action := 'Identity updating'; cnt := 0; FOR object, sq_last_value IN SELECT sequencename::text, COALESCE(last_value, -999) from pg_sequences where schemaname = quote_ident(source_schema) AND NOT EXISTS (select 1 from information_schema.sequences where sequence_schema = quote_ident(source_schema) and sequence_name = sequencename) LOOP IF sq_last_value = -999 THEN continue; END IF; cnt := cnt + 1; buffer := quote_ident(dest_schema) || '.' || quote_ident(object); IF bData THEN EXECUTE 'SELECT setval( ''' || buffer || ''', ' || sq_last_value || ', ' || sq_is_called || ');' ; ELSE if bDDLOnly THEN -- fix#63 RAISE INFO '%', 'SELECT setval( ''' || buffer || ''', ' || sq_last_value || ', ' || sq_is_called || ');' ; ELSE -- fix#63 EXECUTE 'SELECT setval( ''' || buffer || ''', ' || sq_last_value || ', ' || sq_is_called || ');' ; END IF; END IF; END LOOP; RAISE NOTICE ' IDENTITIES set: %', LPAD(cnt::text, 2, ' '); -- Issue#78 forces us to defer FKeys until the end since we previously did row copies before FKeys -- add FK constraint -- action := 'FK Constraints'; -- Issue#62: Add comments on indexes, and then removed them from here and reworked later below. -- Issue 90: moved functions to here, before views or MVs that might use them -- Create functions action := 'Functions'; cnt := 0; -- MJV FIX per issue# 34 -- SET search_path = ''; EXECUTE 'SET search_path = ' || quote_ident(source_schema) ; -- Fixed Issue#65 -- Fixed Issue#97 -- FOR func_oid IN SELECT oid FROM pg_proc WHERE pronamespace = src_oid AND prokind != 'a' IF is_prokind THEN FOR func_oid, func_owner, func_name, func_args, func_argno, buffer3 IN SELECT p.oid, pg_catalog.pg_get_userbyid(p.proowner), p.proname, oidvectortypes(p.proargtypes), p.pronargs, CASE WHEN prokind = 'p' THEN 'PROCEDURE' WHEN prokind = 'f' THEN 'FUNCTION' ELSE '' END FROM pg_proc p WHERE p.pronamespace = src_oid AND p.prokind != 'a' LOOP cnt := cnt + 1; SELECT pg_get_functiondef(func_oid) INTO qry; SELECT replace(qry, quote_ident(source_schema) || '.', quote_ident(dest_schema) || '.') INTO dest_qry; IF bDDLOnly THEN RAISE INFO '%;', dest_qry; -- Issue#91 Fix -- issue#95 IF NOT bNoOwner THEN IF func_argno = 0 THEN RAISE INFO 'ALTER % %() OWNER TO %', buffer3, quote_ident(dest_schema) || '.' 
|| quote_ident(func_name), func_owner || ';'; ELSE RAISE INFO 'ALTER % % OWNER TO %', buffer3, quote_ident(dest_schema) || '.' || quote_ident(func_name) || '(' || func_args || ')', func_owner || ';'; END IF; END IF; ELSE IF bDebug THEN RAISE NOTICE '%', dest_qry; END IF; EXECUTE dest_qry; -- Issue#91 Fix -- issue#95 IF NOT bNoOwner THEN IF func_argno = 0 THEN dest_qry = 'ALTER ' || buffer3 || ' ' || quote_ident(dest_schema) || '.' || quote_ident(func_name) || '() OWNER TO ' || func_owner || ';'; ELSE dest_qry = 'ALTER ' || buffer3 || ' ' || quote_ident(dest_schema) || '.' || quote_ident(func_name) || '(' || func_args || ') OWNER TO ' || func_owner || ';'; END IF; END IF; EXECUTE dest_qry; END IF; END LOOP; ELSE FOR func_oid IN SELECT oid FROM pg_proc WHERE pronamespace = src_oid AND not proisagg LOOP cnt := cnt + 1; SELECT pg_get_functiondef(func_oid) INTO qry; SELECT replace(qry, quote_ident(source_schema) || '.', quote_ident(dest_schema) || '.') INTO dest_qry; IF bDDLOnly THEN RAISE INFO '%;', dest_qry; ELSE EXECUTE dest_qry; END IF; END LOOP; END IF; -- Create aggregate functions. -- Fixed Issue#65 -- FOR func_oid IN SELECT oid FROM pg_proc WHERE pronamespace = src_oid AND prokind = 'a' IF is_prokind THEN FOR func_oid IN SELECT oid FROM pg_proc WHERE pronamespace = src_oid AND prokind = 'a' LOOP cnt := cnt + 1; SELECT 'CREATE AGGREGATE ' || dest_schema || '.' || p.proname || '(' -- || format_type(a.aggtranstype, NULL) -- Issue#65 Fixes for specific datatype mappings || CASE WHEN format_type(a.aggtranstype, NULL) = 'double precision[]' THEN 'float8' WHEN format_type(a.aggtranstype, NULL) = 'anyarray' THEN 'anyelement' ELSE format_type(a.aggtranstype, NULL) END || ') (sfunc = ' || regexp_replace(a.aggtransfn::text, '(^|\W)' || quote_ident(source_schema) || '\.', '\1' || quote_ident(dest_schema) || '.') || ', stype = ' -- || format_type(a.aggtranstype, NULL) -- Issue#65 Fixes for specific datatype mappings || CASE WHEN format_type(a.aggtranstype, NULL) = 'double precision[]' THEN 'float8[]' ELSE format_type(a.aggtranstype, NULL) END || CASE WHEN op.oprname IS NULL THEN '' ELSE ', sortop = ' || op.oprname END || CASE WHEN a.agginitval IS NULL THEN '' ELSE ', initcond = ''' || a.agginitval || '''' END || ')' INTO dest_qry FROM pg_proc p JOIN pg_aggregate a ON a.aggfnoid = p.oid LEFT JOIN pg_operator op ON op.oid = a.aggsortop WHERE p.oid = func_oid; IF bDDLOnly THEN RAISE INFO '%;', dest_qry; ELSE EXECUTE dest_qry; END IF; END LOOP; RAISE NOTICE ' FUNCTIONS cloned: %', LPAD(cnt::text, 5, ' '); ELSE FOR func_oid IN SELECT oid FROM pg_proc WHERE pronamespace = src_oid AND proisagg LOOP cnt := cnt + 1; SELECT 'CREATE AGGREGATE ' || dest_schema || '.' 
|| p.proname || '(' -- || format_type(a.aggtranstype, NULL) -- Issue#65 Fixes for specific datatype mappings || CASE WHEN format_type(a.aggtranstype, NULL) = 'double precision[]' THEN 'float8' WHEN format_type(a.aggtranstype, NULL) = 'anyarray' THEN 'anyelement' ELSE format_type(a.aggtranstype, NULL) END || ') (sfunc = ' || regexp_replace(a.aggtransfn::text, '(^|\W)' || quote_ident(source_schema) || '\.', '\1' || quote_ident(dest_schema) || '.') || ', stype = ' -- || format_type(a.aggtranstype, NULL) -- Issue#65 Fixes for specific datatype mappings || CASE WHEN format_type(a.aggtranstype, NULL) = 'double precision[]' THEN 'float8[]' ELSE format_type(a.aggtranstype, NULL) END || CASE WHEN op.oprname IS NULL THEN '' ELSE ', sortop = ' || op.oprname END || CASE WHEN a.agginitval IS NULL THEN '' ELSE ', initcond = ''' || a.agginitval || '''' END || ')' INTO dest_qry FROM pg_proc p JOIN pg_aggregate a ON a.aggfnoid = p.oid LEFT JOIN pg_operator op ON op.oid = a.aggsortop WHERE p.oid = func_oid; IF bDDLOnly THEN RAISE INFO '%;', dest_qry; ELSE EXECUTE dest_qry; END IF; END LOOP; RAISE NOTICE ' FUNCTIONS cloned: %', LPAD(cnt::text, 5, ' '); END IF; -- Create views action := 'Views'; -- Issue#61 FIX: use set_config for empty string -- MJV FIX #43: also had to reset search_path from source schema to empty. -- SET search_path = ''; SELECT set_config('search_path', '', false) INTO v_dummy; cnt := 0; --FOR object IN -- SELECT table_name::text, view_definition -- FROM information_schema.views -- WHERE table_schema = quote_ident(source_schema) -- Issue#73 replace loop query to handle dependencies -- Issue#91 get view_owner FOR srctbl, aname, view_owner, object IN WITH RECURSIVE views AS ( SELECT n.nspname as schemaname, v.relname as tablename, v.oid::regclass AS viewname, v.relkind = 'm' AS is_materialized, pg_catalog.pg_get_userbyid(v.relowner) as owner, 1 AS level FROM pg_depend AS d JOIN pg_rewrite AS r ON r.oid = d.objid JOIN pg_class AS v ON v.oid = r.ev_class JOIN pg_namespace n ON n.oid = v.relnamespace -- WHERE v.relkind IN ('v', 'm') WHERE v.relkind IN ('v') AND d.classid = 'pg_rewrite'::regclass AND d.refclassid = 'pg_class'::regclass AND d.deptype = 'n' UNION -- add the views that depend on these SELECT n.nspname as schemaname, v.relname as tablename, v.oid::regclass AS viewname, v.relkind = 'm', pg_catalog.pg_get_userbyid(v.relowner) as owner, views.level + 1 FROM views JOIN pg_depend AS d ON d.refobjid = views.viewname JOIN pg_rewrite AS r ON r.oid = d.objid JOIN pg_class AS v ON v.oid = r.ev_class JOIN pg_namespace n ON n.oid = v.relnamespace -- WHERE v.relkind IN ('v', 'm') WHERE v.relkind IN ('v') AND d.classid = 'pg_rewrite'::regclass AND d.refclassid = 'pg_class'::regclass AND d.deptype = 'n' AND v.oid <> views.viewname ) SELECT tablename, viewname, owner, format('CREATE OR REPLACE%s VIEW %s AS%s', CASE WHEN is_materialized THEN ' MATERIALIZED' ELSE '' END, viewname, pg_get_viewdef(viewname)) FROM views WHERE schemaname = quote_ident(source_schema) GROUP BY schemaname, tablename, viewname, owner, is_materialized ORDER BY max(level), schemaname, tablename LOOP cnt := cnt + 1; -- Issue#73 replace logic based on new loop sql buffer := quote_ident(dest_schema) || '.' 
|| quote_ident(aname); -- MJV FIX: #43 -- SELECT view_definition INTO v_def -- SELECT REPLACE(view_definition, quote_ident(source_schema) || '.', quote_ident(dest_schema) || '.') INTO v_def -- FROM information_schema.views -- WHERE table_schema = quote_ident(source_schema) -- AND table_name = quote_ident(object); SELECT REPLACE(object, quote_ident(source_schema) || '.', quote_ident(dest_schema) || '.') INTO v_def; -- NOTE: definition already includes the closing statement semicolon SELECT REPLACE(aname, quote_ident(source_schema) || '.', quote_ident(dest_schema) || '.') INTO buffer3; IF bDDLOnly THEN RAISE INFO '%', v_def; -- Issue#91 Fix -- issue#95 IF NOT bNoOwner THEN RAISE INFO 'ALTER TABLE % OWNER TO %', buffer3, view_owner || ';'; END IF; ELSE -- EXECUTE 'CREATE OR REPLACE VIEW ' || buffer || ' AS ' || v_def; EXECUTE v_def; -- Issue#73: commented out comment logic for views since we do it elsewhere now. -- Issue#91 Fix -- issue#95 IF NOT bNoOwner THEN v_def = 'ALTER TABLE ' || buffer3 || ' OWNER TO ' || view_owner || ';'; EXECUTE v_def; END IF; END IF; END LOOP; RAISE NOTICE ' VIEWS cloned: %', LPAD(cnt::text, 5, ' '); -- Create Materialized views action := 'Mat. Views'; cnt := 0; -- Issue#91 get view_owner FOR object, view_owner, v_def IN SELECT matviewname::text, matviewowner::text, replace(definition,';','') FROM pg_catalog.pg_matviews WHERE schemaname = quote_ident(source_schema) LOOP cnt := cnt + 1; -- Issue#78 FIX: handle case-sensitive names with quote_ident() on target schema and object buffer := quote_ident(dest_schema) || '.' || quote_ident(object); -- MJV FIX: #72 remove source schema in MV def SELECT REPLACE(v_def, quote_ident(source_schema) || '.', quote_ident(dest_schema) || '.') INTO buffer2; IF bData THEN -- issue#98 defer creation until after regular tables are populated. Defer the ownership as well. -- EXECUTE 'CREATE MATERIALIZED VIEW ' || buffer || ' AS ' || buffer2 || ' WITH DATA;' ; buffer3 = 'CREATE MATERIALIZED VIEW ' || buffer || ' AS ' || buffer2 || ' WITH DATA;'; mvarray := mvarray || buffer3; -- issue#95 IF NOT bNoOwner THEN -- buffer3 = 'ALTER MATERIALIZED VIEW ' || buffer || ' OWNER TO ' || view_owner || ';' ; -- EXECUTE buffer3; buffer3 = 'ALTER MATERIALIZED VIEW ' || buffer || ' OWNER TO ' || view_owner || ';' ; mvarray := mvarray || buffer3; END IF; ELSE IF bDDLOnly THEN RAISE INFO '%', 'CREATE MATERIALIZED VIEW ' || buffer || ' AS ' || buffer2 || ' WITH NO DATA;' ; -- Issue#91 -- issue#95 IF NOT bNoOwner THEN RAISE INFO '%', 'ALTER MATERIALIZED VIEW ' || buffer || ' OWNER TO ' || view_owner || ';' ; END IF; ELSE EXECUTE 'CREATE MATERIALIZED VIEW ' || buffer || ' AS ' || buffer2 || ' WITH NO DATA;' ; -- Issue#91 -- issue#95 IF NOT bNoOwner THEN buffer3 = 'ALTER MATERIALIZED VIEW ' || buffer || ' OWNER TO ' || view_owner || ';' ; EXECUTE buffer3; END IF; END IF; END IF; SELECT coalesce(obj_description(oid), '') into adef from pg_class where relkind = 'm' and relname = object; IF adef <> '' THEN IF bDDLOnly THEN RAISE INFO '%', 'COMMENT ON MATERIALIZED VIEW ' || quote_ident(dest_schema) || '.' || object || ' IS ''' || adef || ''';'; ELSE -- Issue#98: also defer if copy rows is on since we defer MVIEWS in that case IF bData THEN buffer3 = 'COMMENT ON MATERIALIZED VIEW ' || quote_ident(dest_schema) || '.' || object || ' IS ''' || adef || ''';'; mvarray = mvarray || buffer3; ELSE EXECUTE 'COMMENT ON MATERIALIZED VIEW ' || quote_ident(dest_schema) || '.'
|| object || ' IS ''' || adef || ''';'; END IF; END IF; END IF; FOR aname, adef IN SELECT indexname, replace(indexdef, quote_ident(source_schema) || '.', quote_ident(dest_schema) || '.') as newdef FROM pg_indexes where schemaname = quote_ident(source_schema) and tablename = object order by indexname LOOP IF bDDLOnly THEN RAISE INFO '%', adef || ';'; ELSE EXECUTE adef || ';'; END IF; END LOOP; END LOOP; RAISE NOTICE ' MAT VIEWS cloned: %', LPAD(cnt::text, 5, ' '); -- Issue 90 Move create functions to before views -- MV: Create Triggers -- MJV FIX: #38 -- EXECUTE 'SET search_path = ' || quote_ident(source_schema) ; -- Issue#61 FIX: use set_config for empty string -- SET search_path = ''; SELECT set_config('search_path', '', false) into v_dummy; action := 'Triggers'; cnt := 0; FOR arec IN -- 2021-03-09 MJV FIX: #40 fixed sql to get the def using pg_get_triggerdef() sql SELECT n.nspname, c.relname, t.tgname, p.proname, REPLACE(pg_get_triggerdef(t.oid), quote_ident(source_schema), quote_ident(dest_schema)) || ';' AS trig_ddl FROM pg_trigger t, pg_class c, pg_namespace n, pg_proc p WHERE n.nspname = quote_ident(source_schema) AND n.oid = c.relnamespace AND c.relkind in ('r','p') AND n.oid = p.pronamespace AND c.oid = t.tgrelid AND p.oid = t.tgfoid ORDER BY c.relname, t.tgname LOOP BEGIN cnt := cnt + 1; IF bDDLOnly THEN RAISE INFO '%', arec.trig_ddl; ELSE EXECUTE arec.trig_ddl; END IF; END; END LOOP; RAISE NOTICE ' TRIGGERS cloned: %', LPAD(cnt::text, 5, ' '); -- MV: Create Rules -- Fixes Issue#59 Implement Rules action := 'Rules'; cnt := 0; FOR arec IN SELECT regexp_replace(definition, E'[\\n\\r]+', ' ', 'g' ) as definition FROM pg_rules WHERE schemaname = quote_ident(source_schema) LOOP cnt := cnt + 1; buffer := REPLACE(arec.definition, quote_ident(source_schema) || '.', quote_ident(dest_schema) || '.'); IF bDDLOnly THEN RAISE INFO '%', buffer; ELSE EXECUTE buffer; END IF; END LOOP; RAISE NOTICE ' RULES cloned: %', LPAD(cnt::text, 5, ' '); -- MV: Create Policies -- Fixes Issue#66 Implement Security policies for RLS action := 'Policies'; cnt := 0; FOR arec IN -- Issue#78 FIX: handle case-sensitive names with quote_ident() on policy, tablename SELECT schemaname as schemaname, tablename as tablename, 'CREATE POLICY ' || policyname || ' ON ' || quote_ident(dest_schema) || '.' || quote_ident(tablename) || ' AS ' || permissive || ' FOR ' || cmd || ' TO ' || array_to_string(roles, ',', '*') || ' USING (' || regexp_replace(qual, E'[\\n\\r]+', ' ', 'g' ) || ')' || CASE WHEN with_check IS NOT NULL THEN ' WITH CHECK (' ELSE '' END || coalesce(with_check, '') || CASE WHEN with_check IS NOT NULL THEN ');' ELSE ';' END as definition FROM pg_policies WHERE schemaname = quote_ident(source_schema) ORDER BY policyname LOOP cnt := cnt + 1; IF bDDLOnly THEN RAISE INFO '%', arec.definition; ELSE EXECUTE arec.definition; END IF; -- Issue#76: Enable row security if indicated SELECT c.relrowsecurity INTO abool FROM pg_class c, pg_namespace n where n.nspname = quote_ident(arec.schemaname) AND n.oid = c.relnamespace AND c.relname = quote_ident(arec.tablename) and c.relkind = 'r'; IF abool THEN buffer = 'ALTER TABLE ' || dest_schema || '.' || arec.tablename || ' ENABLE ROW LEVEL SECURITY;'; IF bDDLOnly THEN RAISE INFO '%', buffer; ELSE EXECUTE buffer; END IF; END IF; END LOOP; RAISE NOTICE ' POLICIES cloned: %', LPAD(cnt::text, 5, ' '); -- MJV Fixed #62 for comments (PASS 1) action := 'Comments1'; cnt := 0; FOR qry IN -- Issue#74 Fix: Change schema from source to target. 
Also, do not include comments on foreign tables since we do not clone foreign tables at this time. SELECT 'COMMENT ON ' || CASE WHEN c.relkind in ('r','p') AND a.attname IS NULL THEN 'TABLE ' WHEN c.relkind in ('r','p') AND a.attname IS NOT NULL THEN 'COLUMN ' WHEN c.relkind = 'f' THEN 'FOREIGN TABLE ' WHEN c.relkind = 'm' THEN 'MATERIALIZED VIEW ' WHEN c.relkind = 'v' THEN 'VIEW ' WHEN c.relkind = 'i' THEN 'INDEX ' WHEN c.relkind = 'S' THEN 'SEQUENCE ' ELSE 'XX' END || quote_ident(dest_schema) || '.' || CASE WHEN c.relkind in ('r','p') AND -- Issue#78: handle case-sensitive names with quote_ident() a.attname IS NOT NULL THEN quote_ident(c.relname) || '.' || a.attname ELSE quote_ident(c.relname) END || -- Issue#74 Fix -- ' IS ''' || d.description || ''';' as ddl ' IS ' || quote_literal(d.description) || ';' as ddl FROM pg_class c JOIN pg_namespace n ON (n.oid = c.relnamespace) LEFT JOIN pg_description d ON (c.oid = d.objoid) LEFT JOIN pg_attribute a ON (c.oid = a.attrelid AND a.attnum > 0 and a.attnum = d.objsubid) WHERE c.relkind <> 'f' AND d.description IS NOT NULL AND n.nspname = quote_ident(source_schema) ORDER BY ddl LOOP cnt := cnt + 1; -- BAD : "COMMENT ON SEQUENCE sample_clone2.CaseSensitive_ID_seq IS 'just a comment on CaseSensitive sequence';" -- GOOD: "COMMENT ON SEQUENCE "CaseSensitive_ID_seq" IS 'just a comment on CaseSensitive sequence';" -- Issue#98 For MVs we create comments when we create the MVs IF substring(qry,1,28) = 'COMMENT ON MATERIALIZED VIEW' THEN IF bDebug THEN RAISE NOTICE 'deferring comments on MVs'; END IF; cnt = cnt - 1; continue; END IF; IF bDDLOnly THEN RAISE INFO '%', qry; ELSE EXECUTE qry; END IF; END LOOP; RAISE NOTICE ' COMMENTS(1) cloned: %', LPAD(cnt::text, 5, ' '); -- MJV Fixed #62 for comments (PASS 2) action := 'Comments2'; cnt2 := 0; IF is_prokind THEN FOR qry IN -- Issue#74 Fix: Change schema from source to target. SELECT 'COMMENT ON SCHEMA ' || dest_schema || -- Issue#74 Fix -- ' IS ''' || d.description || ''';' as ddl ' IS ' || quote_literal(d.description) || ';' as ddl from pg_namespace n, pg_description d where d.objoid = n.oid and n.nspname = quote_ident(source_schema) UNION -- Issue#74 Fix: need to replace source schema inline -- SELECT 'COMMENT ON TYPE ' || pg_catalog.format_type(t.oid, NULL) || ' IS ''' || pg_catalog.obj_description(t.oid, 'pg_type') || ''';' as ddl SELECT 'COMMENT ON TYPE ' || REPLACE(pg_catalog.format_type(t.oid, NULL), quote_ident(source_schema), quote_ident(dest_schema)) || ' IS ''' || pg_catalog.obj_description(t.oid, 'pg_type') || ''';' as ddl FROM pg_catalog.pg_type t JOIN pg_catalog.pg_namespace n ON n.oid = t.typnamespace WHERE (t.typrelid = 0 OR (SELECT c.relkind = 'c' FROM pg_catalog.pg_class c WHERE c.oid = t.typrelid)) AND NOT EXISTS(SELECT 1 FROM pg_catalog.pg_type el WHERE el.oid = t.typelem AND el.typarray = t.oid) AND n.nspname = quote_ident(source_schema) COLLATE pg_catalog.default AND pg_catalog.obj_description(t.oid, 'pg_type') IS NOT NULL and t.typtype = 'c' UNION -- Issue#78: handle case-sensitive names with quote_ident() SELECT 'COMMENT ON COLLATION ' || quote_ident(dest_schema) || '.' 
|| quote_ident(c.collname) || ' IS ''' || pg_catalog.obj_description(c.oid, 'pg_collation') || ''';' as ddl FROM pg_catalog.pg_collation c, pg_catalog.pg_namespace n WHERE n.oid = c.collnamespace AND c.collencoding IN (-1, pg_catalog.pg_char_to_encoding(pg_catalog.getdatabaseencoding())) AND n.nspname = quote_ident(source_schema) COLLATE pg_catalog.default AND pg_catalog.obj_description(c.oid, 'pg_collation') IS NOT NULL UNION SELECT 'COMMENT ON ' || CASE WHEN p.prokind = 'f' THEN 'FUNCTION ' WHEN p.prokind = 'p' THEN 'PROCEDURE ' WHEN p.prokind = 'a' THEN 'AGGREGATE ' END || dest_schema || '.' || p.proname || ' (' || oidvectortypes(p.proargtypes) || ')' -- Issue#74 Fix -- ' IS ''' || d.description || ''';' as ddl ' IS ' || quote_literal(d.description) || ';' as ddl FROM pg_catalog.pg_namespace n JOIN pg_catalog.pg_proc p ON p.pronamespace = n.oid JOIN pg_description d ON (d.objoid = p.oid) WHERE n.nspname = quote_ident(source_schema) UNION SELECT 'COMMENT ON POLICY ' || p1.policyname || ' ON ' || dest_schema || '.' || p1.tablename || -- Issue#74 Fix -- ' IS ''' || d.description || ''';' as ddl ' IS ' || quote_literal(d.description) || ';' as ddl FROM pg_policies p1, pg_policy p2, pg_class c, pg_namespace n, pg_description d WHERE p1.schemaname = n.nspname AND p1.tablename = c.relname AND n.oid = c.relnamespace AND c.relkind in ('r','p') AND p1.policyname = p2.polname AND d.objoid = p2.oid AND p1.schemaname = quote_ident(source_schema) UNION SELECT 'COMMENT ON DOMAIN ' || dest_schema || '.' || t.typname || -- Issue#74 Fix -- ' IS ''' || d.description || ''';' as ddl ' IS ' || quote_literal(d.description) || ';' as ddl FROM pg_catalog.pg_type t LEFT JOIN pg_catalog.pg_namespace n ON n.oid = t.typnamespace JOIN pg_catalog.pg_description d ON d.classoid = t.tableoid AND d.objoid = t.oid AND d.objsubid = 0 WHERE t.typtype = 'd' AND n.nspname = quote_ident(source_schema) COLLATE pg_catalog.default ORDER BY 1 LOOP cnt2 := cnt2 + 1; IF bDDLOnly THEN RAISE INFO '%', qry; ELSE EXECUTE qry; END IF; END LOOP; ELSE -- must be v 10 or less FOR qry IN -- Issue#74 Fix: Change schema from source to target. 
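/* Pre-v11 branch: pg_proc.proisagg is used instead of prokind, but the generated comment DDL has
   the same shape as the branch above, e.g. (hypothetical schema and comment text):
     COMMENT ON SCHEMA clone1 IS 'cloned from sample';
*/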
SELECT 'COMMENT ON SCHEMA ' || dest_schema || -- Issue#74 Fix -- ' IS ''' || d.description || ''';' as ddl ' IS ' || quote_literal(d.description) || ';' as ddl from pg_namespace n, pg_description d where d.objoid = n.oid and n.nspname = quote_ident(source_schema) UNION -- Issue#74 Fix: need to replace source schema inline -- SELECT 'COMMENT ON TYPE ' || pg_catalog.format_type(t.oid, NULL) || ' IS ''' || pg_catalog.obj_description(t.oid, 'pg_type') || ''';' as ddl SELECT 'COMMENT ON TYPE ' || REPLACE(pg_catalog.format_type(t.oid, NULL), quote_ident(source_schema), quote_ident(dest_schema)) || ' IS ''' || pg_catalog.obj_description(t.oid, 'pg_type') || ''';' as ddl FROM pg_catalog.pg_type t JOIN pg_catalog.pg_namespace n ON n.oid = t.typnamespace WHERE (t.typrelid = 0 OR (SELECT c.relkind = 'c' FROM pg_catalog.pg_class c WHERE c.oid = t.typrelid)) AND NOT EXISTS(SELECT 1 FROM pg_catalog.pg_type el WHERE el.oid = t.typelem AND el.typarray = t.oid) AND n.nspname = quote_ident(source_schema) COLLATE pg_catalog.default AND pg_catalog.obj_description(t.oid, 'pg_type') IS NOT NULL and t.typtype = 'c' UNION -- FIX Isse#87 by adding double quotes around collation name SELECT 'COMMENT ON COLLATION ' || dest_schema || '."' || c.collname || '" IS ''' || pg_catalog.obj_description(c.oid, 'pg_collation') || ''';' as ddl FROM pg_catalog.pg_collation c, pg_catalog.pg_namespace n WHERE n.oid = c.collnamespace AND c.collencoding IN (-1, pg_catalog.pg_char_to_encoding(pg_catalog.getdatabaseencoding())) AND n.nspname = quote_ident(source_schema) COLLATE pg_catalog.default AND pg_catalog.obj_description(c.oid, 'pg_collation') IS NOT NULL UNION SELECT 'COMMENT ON ' || CASE WHEN proisagg THEN 'AGGREGATE ' ELSE 'FUNCTION ' END || dest_schema || '.' || p.proname || ' (' || oidvectortypes(p.proargtypes) || ')' -- Issue#74 Fix -- ' IS ''' || d.description || ''';' as ddl ' IS ' || quote_literal(d.description) || ';' as ddl FROM pg_catalog.pg_namespace n JOIN pg_catalog.pg_proc p ON p.pronamespace = n.oid JOIN pg_description d ON (d.objoid = p.oid) WHERE n.nspname = quote_ident(source_schema) UNION SELECT 'COMMENT ON POLICY ' || p1.policyname || ' ON ' || dest_schema || '.' || p1.tablename || -- Issue#74 Fix -- ' IS ''' || d.description || ''';' as ddl ' IS ' || quote_literal(d.description) || ';' as ddl FROM pg_policies p1, pg_policy p2, pg_class c, pg_namespace n, pg_description d WHERE p1.schemaname = n.nspname AND p1.tablename = c.relname AND n.oid = c.relnamespace AND c.relkind in ('r','p') AND p1.policyname = p2.polname AND d.objoid = p2.oid AND p1.schemaname = quote_ident(source_schema) UNION SELECT 'COMMENT ON DOMAIN ' || dest_schema || '.' || t.typname || -- Issue#74 Fix -- ' IS ''' || d.description || ''';' as ddl ' IS ' || quote_literal(d.description) || ';' as ddl FROM pg_catalog.pg_type t LEFT JOIN pg_catalog.pg_namespace n ON n.oid = t.typnamespace JOIN pg_catalog.pg_description d ON d.classoid = t.tableoid AND d.objoid = t.oid AND d.objsubid = 0 WHERE t.typtype = 'd' AND n.nspname = quote_ident(source_schema) COLLATE pg_catalog.default ORDER BY 1 LOOP cnt2 := cnt2 + 1; IF bDDLOnly THEN RAISE INFO '%', qry; ELSE EXECUTE qry; END IF; END LOOP; END IF; RAISE NOTICE ' COMMENTS(2) cloned: %', LPAD(cnt2::text, 5, ' '); -- Issue#95 bypass if No ACL specified. 
IF NOT bNoACL THEN -- --------------------- -- MV: Permissions: Defaults -- --------------------- EXECUTE 'SET search_path = ' || quote_ident(source_schema) ; action := 'PRIVS: Defaults'; cnt := 0; FOR arec IN SELECT pg_catalog.pg_get_userbyid(d.defaclrole) AS "owner", n.nspname AS schema, CASE d.defaclobjtype WHEN 'r' THEN 'table' WHEN 'S' THEN 'sequence' WHEN 'f' THEN 'function' WHEN 'T' THEN 'type' WHEN 'n' THEN 'schema' END AS atype, d.defaclacl as defaclacl, pg_catalog.array_to_string(d.defaclacl, ',') as defaclstr FROM pg_catalog.pg_default_acl d LEFT JOIN pg_catalog.pg_namespace n ON (n.oid = d.defaclnamespace) WHERE n.nspname IS NOT NULL AND n.nspname = quote_ident(source_schema) ORDER BY 3, 2, 1 LOOP BEGIN -- RAISE NOTICE ' owner=% type=% defaclacl=% defaclstr=%', arec.owner, arec.atype, arec.defaclacl, arec.defaclstr; FOREACH aclstr IN ARRAY arec.defaclacl LOOP cnt := cnt + 1; -- RAISE NOTICE ' aclstr=%', aclstr; -- break up into grantor, grantee, and privs, mydb_update=rwU/mydb_owner SELECT split_part(aclstr, '=',1) INTO grantee; SELECT split_part(aclstr, '=',2) INTO grantor; SELECT split_part(grantor, '/',1) INTO privs; SELECT split_part(grantor, '/',2) INTO grantor; -- RAISE NOTICE ' grantor=% grantee=% privs=%', grantor, grantee, privs; IF arec.atype = 'function' THEN -- Just having execute is enough to grant all apparently. buffer := 'ALTER DEFAULT PRIVILEGES FOR ROLE ' || grantor || ' IN SCHEMA ' || quote_ident(dest_schema) || ' GRANT ALL ON FUNCTIONS TO "' || grantee || '";'; -- Issue#92 Fix -- set role = cm_stage_ro_grp; -- ALTER DEFAULT PRIVILEGES FOR ROLE cm_stage_ro_grp IN SCHEMA cm_stage GRANT REFERENCES, TRIGGER ON TABLES TO cm_stage_ro_grp; IF grantor = grantee THEN -- append set role to statement buffer = 'SET ROLE = ' || grantor || '; ' || buffer; END IF; IF bDDLOnly THEN RAISE INFO '%', buffer; ELSE EXECUTE buffer; END IF; -- Issue#92 Fix: EXECUTE 'SET ROLE = ' || calleruser; ELSIF arec.atype = 'sequence' THEN IF POSITION('r' IN privs) > 0 AND POSITION('w' IN privs) > 0 AND POSITION('U' IN privs) > 0 THEN -- arU is enough for all privs buffer := 'ALTER DEFAULT PRIVILEGES FOR ROLE ' || grantor || ' IN SCHEMA ' || quote_ident(dest_schema) || ' GRANT ALL ON SEQUENCES TO "' || grantee || '";'; -- Issue#92 Fix IF grantor = grantee THEN -- append set role to statement buffer = 'SET ROLE = ' || grantor || '; ' || buffer; END IF; IF bDDLOnly THEN RAISE INFO '%', buffer; ELSE EXECUTE buffer; END IF; -- Issue#92 Fix: EXECUTE 'SET ROLE = ' || calleruser; ELSE -- have to specify each priv individually buffer2 := ''; IF POSITION('r' IN privs) > 0 THEN buffer2 := 'SELECT'; END IF; IF POSITION('w' IN privs) > 0 THEN IF buffer2 = '' THEN buffer2 := 'UPDATE'; ELSE buffer2 := buffer2 || ', UPDATE'; END IF; END IF; IF POSITION('U' IN privs) > 0 THEN IF buffer2 = '' THEN buffer2 := 'USAGE'; ELSE buffer2 := buffer2 || ', USAGE'; END IF; END IF; buffer := 'ALTER DEFAULT PRIVILEGES FOR ROLE ' || grantor || ' IN SCHEMA ' || quote_ident(dest_schema) || ' GRANT ' || buffer2 || ' ON SEQUENCES TO "' || grantee || '";'; -- Issue#92 Fix IF grantor = grantee THEN -- append set role to statement buffer = 'SET ROLE = ' || grantor || '; ' || buffer; END IF; IF bDDLOnly THEN RAISE INFO '%', buffer; ELSE EXECUTE buffer; END IF; select current_user into buffer; -- Issue#92 Fix: EXECUTE 'SET ROLE = ' || calleruser; END IF; ELSIF arec.atype = 'table' THEN -- do each priv individually, jeeeesh! 
buffer2 := ''; IF POSITION('a' IN privs) > 0 THEN buffer2 := 'INSERT'; END IF; IF POSITION('r' IN privs) > 0 THEN IF buffer2 = '' THEN buffer2 := 'SELECT'; ELSE buffer2 := buffer2 || ', SELECT'; END IF; END IF; IF POSITION('w' IN privs) > 0 THEN IF buffer2 = '' THEN buffer2 := 'UPDATE'; ELSE buffer2 := buffer2 || ', UPDATE'; END IF; END IF; IF POSITION('d' IN privs) > 0 THEN IF buffer2 = '' THEN buffer2 := 'DELETE'; ELSE buffer2 := buffer2 || ', DELETE'; END IF; END IF; IF POSITION('t' IN privs) > 0 THEN IF buffer2 = '' THEN buffer2 := 'TRIGGER'; ELSE buffer2 := buffer2 || ', TRIGGER'; END IF; END IF; IF POSITION('D' IN privs) > 0 THEN /* TRUNCATE is 'D' in aclitem notation (arwdDxt) */ IF buffer2 = '' THEN buffer2 := 'TRUNCATE'; ELSE buffer2 := buffer2 || ', TRUNCATE'; END IF; END IF; buffer := 'ALTER DEFAULT PRIVILEGES FOR ROLE ' || grantor || ' IN SCHEMA ' || quote_ident(dest_schema) || ' GRANT ' || buffer2 || ' ON TABLES TO "' || grantee || '";'; -- Issue#92 Fix IF grantor = grantee THEN -- append set role to statement buffer = 'SET ROLE = ' || grantor || '; ' || buffer; END IF; IF bDDLOnly THEN RAISE INFO '%', buffer; ELSE EXECUTE buffer; END IF; select current_user into buffer; -- Issue#92 Fix: EXECUTE 'SET ROLE = ' || calleruser; ELSIF arec.atype = 'type' THEN IF POSITION('r' IN privs) > 0 AND POSITION('w' IN privs) > 0 AND POSITION('U' IN privs) > 0 THEN -- arU is enough for all privs buffer := 'ALTER DEFAULT PRIVILEGES FOR ROLE ' || grantor || ' IN SCHEMA ' || quote_ident(dest_schema) || ' GRANT ALL ON TYPES TO "' || grantee || '";'; -- Issue#92 Fix IF grantor = grantee THEN -- append set role to statement buffer = 'SET ROLE = ' || grantor || '; ' || buffer; END IF; IF bDDLOnly THEN RAISE INFO '%', buffer; ELSE EXECUTE buffer; END IF; -- Issue#92 Fix: EXECUTE 'SET ROLE = ' || calleruser; ELSIF POSITION('U' IN privs) > 0 THEN buffer := 'ALTER DEFAULT PRIVILEGES FOR ROLE ' || grantor || ' IN SCHEMA ' || quote_ident(dest_schema) || ' GRANT USAGE ON TYPES TO "' || grantee || '";'; -- Issue#92 Fix IF grantor = grantee THEN -- append set role to statement buffer = 'SET ROLE = ' || grantor || '; ' || buffer; END IF; IF bDDLOnly THEN RAISE INFO '%', buffer; ELSE EXECUTE buffer; END IF; -- Issue#92 Fix: EXECUTE 'SET ROLE = ' || calleruser; ELSE RAISE WARNING 'Unhandled TYPE Privs:: type=% privs=% owner=% defaclacl=% defaclstr=% grantor=% grantee=% ', arec.atype, privs, arec.owner, arec.defaclacl, arec.defaclstr, grantor, grantee; END IF; ELSE RAISE WARNING 'Unhandled Privs:: type=% privs=% owner=% defaclacl=% defaclstr=% grantor=% grantee=% ', arec.atype, privs, arec.owner, arec.defaclacl, arec.defaclstr, grantor, grantee; END IF; END LOOP; END; END LOOP; RAISE NOTICE ' DFLT PRIVS cloned: %', LPAD(cnt::text, 5, ' '); END IF; -- NO ACL BRANCH -- Issue#95 bypass if No ACL specified IF NOT bNoACL THEN -- MV: PRIVS: schema -- crunchy data extension, check_access -- SELECT role_path, base_role, as_role, objtype, schemaname, objname, array_to_string(array_agg(privname),',') as privs FROM all_access() -- WHERE base_role != CURRENT_USER and objtype = 'schema' and schemaname = 'public' group by 1,2,3,4,5,6; action := 'PRIVS: Schema'; cnt := 0; FOR arec IN SELECT 'GRANT ' || p.perm::perm_type || ' ON SCHEMA ' || quote_ident(dest_schema) || ' TO "' || r.rolname || '";' as schema_ddl FROM pg_catalog.pg_namespace AS n CROSS JOIN pg_catalog.pg_roles AS r CROSS JOIN (VALUES ('USAGE'), ('CREATE')) AS p(perm) WHERE n.nspname = quote_ident(source_schema) AND NOT r.rolsuper AND has_schema_privilege(r.oid, n.oid, p.perm) ORDER BY r.rolname, p.perm::perm_type LOOP
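/* Schema-level grants are reconstructed from has_schema_privilege() checks rather than by parsing
   nspacl directly; each loop iteration replays one grant, e.g. (hypothetical role):
     GRANT USAGE ON SCHEMA clone1 TO "app_user";
*/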
BEGIN cnt := cnt + 1; IF bDDLOnly THEN RAISE INFO '%', arec.schema_ddl; ELSE EXECUTE arec.schema_ddl; END IF; END; END LOOP; RAISE NOTICE 'SCHEMA PRIVS cloned: %', LPAD(cnt::text, 5, ' '); END IF; -- NO ACL BRANCH -- Issue#95 bypass if No ACL specified IF NOT bNoACL THEN -- MV: PRIVS: sequences action := 'PRIVS: Sequences'; cnt := 0; FOR arec IN -- Issue#78 FIX: handle case-sensitive names with quote_ident() on t.relname SELECT 'GRANT ' || p.perm::perm_type || ' ON ' || quote_ident(dest_schema) || '.' || quote_ident(t.relname::text) || ' TO "' || r.rolname || '";' as seq_ddl FROM pg_catalog.pg_class AS t CROSS JOIN pg_catalog.pg_roles AS r CROSS JOIN (VALUES ('SELECT'), ('USAGE'), ('UPDATE')) AS p(perm) WHERE t.relnamespace::regnamespace::name = quote_ident(source_schema) AND t.relkind = 'S' AND NOT r.rolsuper AND has_sequence_privilege(r.oid, t.oid, p.perm) LOOP BEGIN cnt := cnt + 1; -- IF bDebug THEN RAISE NOTICE 'DEBUG: ddl=%', arec.seq_ddl; END IF; IF bDDLOnly THEN RAISE INFO '%', arec.seq_ddl; ELSE EXECUTE arec.seq_ddl; END IF; END; END LOOP; RAISE NOTICE ' SEQ. PRIVS cloned: %', LPAD(cnt::text, 5, ' '); END IF; -- NO ACL BRANCH -- Issue#95 bypass if No ACL specified IF NOT bNoACL THEN -- MV: PRIVS: functions action := 'PRIVS: Functions/Procedures'; cnt := 0; -- Issue#61 FIX: use set_config for empty string -- SET search_path = ''; SELECT set_config('search_path', '', false) into v_dummy; -- RAISE NOTICE ' source_schema=% dest_schema=%',source_schema, dest_schema; FOR arec IN -- 2021-03-05 MJV FIX: issue#35: caused exception in some functions with parameters and gave privileges to other users that should not have gotten them. -- SELECT 'GRANT EXECUTE ON FUNCTION ' || quote_ident(dest_schema) || '.' || replace(regexp_replace(f.oid::regprocedure::text, '^((("[^"]*")|([^"][^.]*))\.)?', ''), source_schema, dest_schema) || ' TO "' || r.rolname || '";' as func_ddl -- FROM pg_catalog.pg_proc f CROSS JOIN pg_catalog.pg_roles AS r WHERE f.pronamespace::regnamespace::name = quote_ident(source_schema) AND NOT r.rolsuper AND has_function_privilege(r.oid, f.oid, 'EXECUTE') -- order by regexp_replace(f.oid::regprocedure::text, '^((("[^"]*")|([^"][^.]*))\.)?', '') -- 2021-03-05 MJV FIX: issue#37: defaults cause problems, use system function that returns args WITHOUT DEFAULTS -- COALESCE(r.routine_type, 'FUNCTION'): for aggregate functions, information_schema.routines contains NULL as routine_type value. -- Issue#78 FIX: handle case-sensitive names with quote_ident() on rp.routine_name SELECT 'GRANT ' || rp.privilege_type || ' ON ' || COALESCE(r.routine_type, 'FUNCTION') || ' ' || quote_ident(dest_schema) || '.' 
|| quote_ident(rp.routine_name) || ' (' || pg_get_function_identity_arguments(p.oid) || ') TO ' || string_agg(distinct rp.grantee, ',') || ';' as func_dcl FROM information_schema.routine_privileges rp, information_schema.routines r, pg_proc p, pg_namespace n WHERE rp.routine_schema = quote_ident(source_schema) AND rp.is_grantable = 'YES' AND rp.routine_schema = r.routine_schema AND rp.routine_name = r.routine_name AND rp.routine_schema = n.nspname AND n.oid = p.pronamespace AND p.proname = r.routine_name GROUP BY rp.privilege_type, r.routine_type, rp.routine_name, pg_get_function_identity_arguments(p.oid) LOOP BEGIN cnt := cnt + 1; IF bDDLOnly THEN RAISE INFO '%', arec.func_dcl; ELSE EXECUTE arec.func_dcl; END IF; END; END LOOP; EXECUTE 'SET search_path = ' || quote_ident(source_schema) ; RAISE NOTICE ' FUNC PRIVS cloned: %', LPAD(cnt::text, 5, ' '); END IF; -- NO ACL BRANCH -- Issue#95 bypass if No ACL specified IF NOT bNoACL THEN -- MV: PRIVS: tables action := 'PRIVS: Tables'; -- regular, partitioned, and foreign tables plus view and materialized view permissions. Ignored for now: implement foreign table defs. cnt := 0; FOR arec IN -- SELECT 'GRANT ' || p.perm::perm_type || CASE WHEN t.relkind in ('r', 'p', 'f') THEN ' ON TABLE ' WHEN t.relkind in ('v', 'm') THEN ' ON ' END || quote_ident(dest_schema) || '.' || t.relname::text || ' TO "' || r.rolname || '";' as tbl_ddl, -- has_table_privilege(r.oid, t.oid, p.perm) AS granted, t.relkind -- FROM pg_catalog.pg_class AS t CROSS JOIN pg_catalog.pg_roles AS r CROSS JOIN (VALUES (TEXT 'SELECT'), ('INSERT'), ('UPDATE'), ('DELETE'), ('TRUNCATE'), ('REFERENCES'), ('TRIGGER')) AS p(perm) -- WHERE t.relnamespace::regnamespace::name = quote_ident(source_schema) AND t.relkind in ('r', 'p', 'f', 'v', 'm') AND NOT r.rolsuper AND has_table_privilege(r.oid, t.oid, p.perm) order by t.relname::text, t.relkind -- 2021-03-05 MJV FIX: Fixed Issue#36 for tables SELECT c.relkind, 'GRANT ' || tb.privilege_type || CASE WHEN c.relkind in ('r', 'p') THEN ' ON TABLE ' WHEN c.relkind in ('v', 'm') THEN ' ON ' END || -- Issue#78 FIX: handle case-sensitive names with quote_ident() on t.relname quote_ident(dest_schema) || '.' || quote_ident(tb.table_name) || ' TO ' || string_agg(tb.grantee, ',') || ';' as tbl_dcl FROM information_schema.table_privileges tb, pg_class c, pg_namespace n WHERE tb.table_schema = quote_ident(source_schema) AND tb.table_name = c.relname AND c.relkind in ('r', 'p', 'v', 'm') AND c.relnamespace = n.oid AND n.nspname = quote_ident(source_schema) GROUP BY c.relkind, tb.privilege_type, tb.table_schema, tb.table_name LOOP BEGIN cnt := cnt + 1; -- IF bDebug THEN RAISE NOTICE 'DEBUG: ddl=%', arec.tbl_dcl; END IF; -- Issue#46. Fixed reference to invalid record name (tbl_ddl --> tbl_dcl). IF arec.relkind = 'f' THEN RAISE WARNING 'Foreign tables are not currently implemented, so skipping privs for them. ddl=%', arec.tbl_dcl; ELSE IF bDDLOnly THEN RAISE INFO '%', arec.tbl_dcl; ELSE EXECUTE arec.tbl_dcl; END IF; END IF; END; END LOOP; RAISE NOTICE ' TABLE PRIVS cloned: %', LPAD(cnt::text, 5, ' '); END IF; -- NO ACL BRANCH -- LOOP for regular tables and populate them if specified -- Issue#75 moved from big table loop above to here. 
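/* Deferred row copy (Issues #75/#79/#98): ordinary tables are populated from the INSERT ... SELECT
   statements queued in tblarray, tables with user-defined or generated columns from the COPY
   statement pairs queued in tblarray2, and materialized views from mvarray. A queued statement
   looks like (hypothetical names):
     INSERT INTO clone1.orders OVERRIDING SYSTEM VALUE SELECT * FROM sample.orders;
*/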
IF bData THEN r = clock_timestamp(); -- IF bVerbose THEN RAISE NOTICE 'START: copy rows %',clock_timestamp() - t; END IF; IF bVerbose THEN RAISE NOTICE 'Copying rows...'; END IF; EXECUTE 'SET search_path = ' || quote_ident(dest_schema) ; action := 'Copy Rows'; FOREACH tblelement IN ARRAY tblarray LOOP s = clock_timestamp(); EXECUTE tblelement; GET DIAGNOSTICS cnt = ROW_COUNT; buffer = substring(tblelement, 13); SELECT POSITION(' OVERRIDING SYSTEM VALUE SELECT ' IN buffer) INTO cnt2; IF cnt2 = 0 THEN SELECT POSITION(' SELECT ' IN buffer) INTO cnt2; buffer = substring(buffer,1, cnt2); ELSE buffer = substring(buffer,1, cnt2); END IF; SELECT RPAD(buffer, 35, ' ') INTO buffer; cnt2 := cast(extract(epoch from (clock_timestamp() - s)) as numeric(18,3)); IF bVerbose THEN RAISE NOTICE ' Populated cloned table, % Rows Copied: % seconds: %', buffer, LPAD(cnt::text, 10, ' '), LPAD(cnt2::text, 5, ' '); END IF; tblscopied := tblscopied + 1; END LOOP; -- Issue#79 implementation -- Do same for tables with user-defined elements FOREACH tblelement IN ARRAY tblarray2 LOOP s = clock_timestamp(); EXECUTE tblelement; GET DIAGNOSTICS cnt = ROW_COUNT; cnt2 = POSITION(' FROM ' IN tblelement::text); IF cnt2 > 0 THEN buffer = substring(tblelement, 1, cnt2); buffer = substring(buffer, 6); SELECT RPAD(buffer, 35, ' ') INTO buffer; cnt2 := cast(extract(epoch from (clock_timestamp() - s)) as numeric(18,3)); IF bVerbose THEN RAISE NOTICE ' Populated cloned table, % Rows Copied: % seconds: %', buffer, LPAD(cnt::text, 10, ' '), LPAD(cnt2::text, 5, ' '); END IF; tblscopied := tblscopied + 1; END IF; END LOOP; -- Issue#98 MVs deferred until now FOREACH tblelement IN ARRAY mvarray LOOP s = clock_timestamp(); EXECUTE tblelement; -- get diagnostics for MV creates or refreshes does not work, always returns 1 GET DIAGNOSTICS cnt = ROW_COUNT; buffer = substring(tblelement, 25); cnt2 = POSITION(' AS ' IN buffer); IF cnt2 > 0 THEN buffer = substring(buffer, 1, cnt2); SELECT RPAD(buffer, 36, ' ') INTO buffer; cnt2 := cast(extract(epoch from (clock_timestamp() - s)) as numeric(18,3)); IF bVerbose THEN RAISE NOTICE ' Populated Mat. View, % Rows Inserted: ? seconds: %', buffer, LPAD(cnt2::text, 5, ' '); END IF; mvscopied := mvscopied + 1; END IF; END LOOP; cnt := cast(extract(epoch from (clock_timestamp() - r)) as numeric(18,3)); IF bVerbose THEN RAISE NOTICE 'Copy rows duration: % seconds',cnt; END IF; END IF; RAISE NOTICE ' TABLES copied: %', LPAD(tblscopied::text, 5, ' '); RAISE NOTICE ' MATVIEWS refreshed: %', LPAD(mvscopied::text, 5, ' '); -- Issue#78 forces us to defer FKeys until the end since we previously did row copies before FKeys -- add FK constraint action := 'FK Constraints'; cnt := 0; -- Issue#61 FIX: use set_config for empty string -- SET search_path = ''; SELECT set_config('search_path', '', false) into v_dummy; FOR qry IN SELECT 'ALTER TABLE ' || quote_ident(dest_schema) || '.' 
|| quote_ident(rn.relname) || ' ADD CONSTRAINT ' || quote_ident(ct.conname) || ' ' || REPLACE(pg_get_constraintdef(ct.oid), 'REFERENCES ' || quote_ident(source_schema) || '.', 'REFERENCES ' || quote_ident(dest_schema) || '.') || ';' FROM pg_constraint ct JOIN pg_class rn ON rn.oid = ct.conrelid WHERE connamespace = src_oid AND rn.relkind = 'r' AND ct.contype = 'f' LOOP cnt := cnt + 1; IF bDDLOnly THEN RAISE INFO '%', qry; ELSE EXECUTE qry; END IF; END LOOP; EXECUTE 'SET search_path = ' || quote_ident(source_schema) ; RAISE NOTICE ' FKEYS cloned: %', LPAD(cnt::text, 5, ' '); IF src_path_old = '' OR src_path_old = '""' THEN -- RAISE NOTICE 'Restoring old search_path to empty string'; SELECT set_config('search_path', '', false) into v_dummy; ELSE -- RAISE NOTICE 'Restoring old search_path to:%', src_path_old; EXECUTE 'SET search_path = ' || src_path_old; END IF; SELECT setting INTO v_dummy FROM pg_settings WHERE name = 'search_path'; IF bDebug THEN RAISE NOTICE 'setting search_path back to what it was: %', v_dummy; END IF; cnt := cast(extract(epoch from (clock_timestamp() - t)) as numeric(18,3)); IF bVerbose THEN RAISE NOTICE 'clone_schema duration: % seconds',cnt; END IF; EXCEPTION WHEN others THEN BEGIN GET STACKED DIAGNOSTICS v_diag1 = MESSAGE_TEXT, v_diag2 = PG_EXCEPTION_DETAIL, v_diag3 = PG_EXCEPTION_HINT, v_diag4 = RETURNED_SQLSTATE, v_diag5 = PG_CONTEXT, v_diag6 = PG_EXCEPTION_CONTEXT; -- v_ret := 'line=' || v_diag6 || '. '|| v_diag4 || '. ' || v_diag1 || ' .' || v_diag2 || ' .' || v_diag3; v_ret := 'line=' || v_diag6 || '. '|| v_diag4 || '. ' || v_diag1; -- Issue#101: added version to exception output RAISE EXCEPTION 'Version: % Action: % Diagnostics: %',v_version, action, v_ret; IF src_path_old = '' THEN -- RAISE NOTICE 'setting old search_path to empty string'; SELECT set_config('search_path', '', false); ELSE -- RAISE NOTICE 'setting old search_path to:%', src_path_old; EXECUTE 'SET search_path = ' || src_path_old; END IF; RETURN; END; RETURN; END; $BODY$ LANGUAGE plpgsql VOLATILE COST 100; -- ALTER FUNCTION public.clone_schema(text, text, boolean, boolean, boolean) OWNER TO postgres; django-pgschemas-0.15.2/django_pgschemas/contrib/000077500000000000000000000000001463633566500217665ustar00rootroot00000000000000django-pgschemas-0.15.2/django_pgschemas/contrib/__init__.py000066400000000000000000000000001463633566500240650ustar00rootroot00000000000000django-pgschemas-0.15.2/django_pgschemas/contrib/cache.py000066400000000000000000000010631463633566500234030ustar00rootroot00000000000000from ..schema import get_current_schema def make_key(key, key_prefix, version): """ Tenant aware function to generate a cache key. Constructs the key used by all other methods. Prepends the tenant `schema_name` and `key_prefix'. """ current_schema = get_current_schema() return "%s:%s:%s:%s" % (current_schema.schema_name, key_prefix, version, key) def reverse_key(key): """ Tenant aware function to reverse a cache key. Required for django-redis REVERSE_KEY_FUNCTION setting. 
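
    A hypothetical cache configuration (assuming django-redis is installed)
    wires this function in together with ``make_key`` via the per-cache settings:

        CACHES = {
            "default": {
                "BACKEND": "django_redis.cache.RedisCache",
                "LOCATION": "redis://127.0.0.1:6379/1",
                "KEY_FUNCTION": "django_pgschemas.contrib.cache.make_key",
                "REVERSE_KEY_FUNCTION": "django_pgschemas.contrib.cache.reverse_key",
            }
        }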
""" return key.split(":", 3)[3] django-pgschemas-0.15.2/django_pgschemas/contrib/channels2/000077500000000000000000000000001463633566500236435ustar00rootroot00000000000000django-pgschemas-0.15.2/django_pgschemas/contrib/channels2/__init__.py000066400000000000000000000000611463633566500257510ustar00rootroot00000000000000from .router import TenantProtocolRouter # noqa django-pgschemas-0.15.2/django_pgschemas/contrib/channels2/auth.py000066400000000000000000000035621463633566500251640ustar00rootroot00000000000000from channels.auth import AuthMiddleware, CookieMiddleware, SessionMiddleware, _get_user_session_key from channels.db import database_sync_to_async from django.conf import settings from django.contrib.auth import BACKEND_SESSION_KEY, HASH_SESSION_KEY, load_backend from django.contrib.auth.models import AnonymousUser from django.utils.crypto import constant_time_compare @database_sync_to_async def get_user(scope): """ Return the user model instance associated with the given scope. If no user is retrieved, return an instance of `AnonymousUser`. """ if "session" not in scope: raise ValueError( "Cannot find session in scope. You should wrap your consumer in SessionMiddleware." ) user = None session = scope["session"] with scope["tenant"]: try: user_id = _get_user_session_key(session) backend_path = session[BACKEND_SESSION_KEY] except KeyError: pass else: if backend_path in settings.AUTHENTICATION_BACKENDS: backend = load_backend(backend_path) user = backend.get_user(user_id) # Verify the session if hasattr(user, "get_session_auth_hash"): session_hash = session.get(HASH_SESSION_KEY) session_hash_verified = session_hash and constant_time_compare( session_hash, user.get_session_auth_hash() ) if not session_hash_verified: session.flush() user = None return user or AnonymousUser() class TenantAuthMiddleware(AuthMiddleware): async def resolve_scope(self, scope): scope["user"]._wrapped = await get_user(scope) def TenantAuthMiddlewareStack(inner): return CookieMiddleware(SessionMiddleware(TenantAuthMiddleware(inner))) django-pgschemas-0.15.2/django_pgschemas/contrib/channels2/router.py000066400000000000000000000065321463633566500255430ustar00rootroot00000000000000from channels.routing import ProtocolTypeRouter, URLRouter from django.conf import settings from django.core.exceptions import ImproperlyConfigured from django.utils.encoding import force_str from django.utils.module_loading import import_string from ...schema import SchemaDescriptor from ...utils import get_domain_model, remove_www from .auth import TenantAuthMiddlewareStack class TenantAwareProtocolTypeRouter(ProtocolTypeRouter): def __init__(self, application_mapping, tenant_prefix): self.tenant_prefix = tenant_prefix super().__init__(application_mapping) def __call__(self, scope): if scope["type"] != "http": scope["path"] = scope["path"][len(self.tenant_prefix) + 1 :] return super().__call__(scope) class TenantProtocolRouter: """ ProtocolRouter that handles multi-tenancy. """ def __init__(self): self.root_ws_urlconf = settings.TENANTS["default"].get("WS_URLCONF") if self.root_ws_urlconf is None: raise ImproperlyConfigured( "TENANTS['default'] must contain a 'WS_URLCONF' key in order to use TenantProtocolRouter." ) def get_tenant_scope(self, scope): """ Get tenant and websockets urlconf based on scope host. 
""" hostname = force_str(dict(scope["headers"]).get(b"host", b"")) hostname = remove_www(hostname.split(":")[0]) tenant = None ws_urlconf = self.root_ws_urlconf # Checking for static tenants for schema, data in settings.TENANTS.items(): if schema in ["public", "default"]: continue if hostname in data["DOMAINS"]: tenant = SchemaDescriptor.create(schema_name=schema, domain_url=hostname) if "WS_URLCONF" in data: ws_urlconf = data["WS_URLCONF"] return tenant, "", import_string(ws_urlconf + ".urlpatterns") # Checking for dynamic tenants else: DomainModel = get_domain_model() prefix = scope["path"].split("/")[1] try: domain = DomainModel.objects.select_related("tenant").get( domain=hostname, folder=prefix ) except DomainModel.DoesNotExist: try: domain = DomainModel.objects.select_related("tenant").get( domain=hostname, folder="" ) except DomainModel.DoesNotExist: return None, "", [] tenant = domain.tenant tenant.domain_url = hostname ws_urlconf = settings.TENANTS["default"]["WS_URLCONF"] return ( tenant, prefix if prefix == domain.folder else "", import_string(ws_urlconf + ".urlpatterns"), ) def get_protocol_type_router(self, tenant_prefix, ws_urlconf): """ Subclasses can override this to include more protocols. """ return TenantAwareProtocolTypeRouter( {"websocket": TenantAuthMiddlewareStack(URLRouter(ws_urlconf))}, tenant_prefix ) def __call__(self, scope): tenant, tenant_prefix, ws_urlconf = self.get_tenant_scope(scope) scope.update({"tenant": tenant}) return self.get_protocol_type_router(tenant_prefix, ws_urlconf)(scope) django-pgschemas-0.15.2/django_pgschemas/contrib/channels3/000077500000000000000000000000001463633566500236445ustar00rootroot00000000000000django-pgschemas-0.15.2/django_pgschemas/contrib/channels3/__init__.py000066400000000000000000000000611463633566500257520ustar00rootroot00000000000000from .router import TenantProtocolRouter # noqa django-pgschemas-0.15.2/django_pgschemas/contrib/channels3/auth.py000066400000000000000000000035621463633566500251650ustar00rootroot00000000000000from channels.auth import AuthMiddleware, CookieMiddleware, SessionMiddleware, _get_user_session_key from channels.db import database_sync_to_async from django.conf import settings from django.contrib.auth import BACKEND_SESSION_KEY, HASH_SESSION_KEY, load_backend from django.contrib.auth.models import AnonymousUser from django.utils.crypto import constant_time_compare @database_sync_to_async def get_user(scope): """ Return the user model instance associated with the given scope. If no user is retrieved, return an instance of `AnonymousUser`. """ if "session" not in scope: raise ValueError( "Cannot find session in scope. You should wrap your consumer in SessionMiddleware." 
) user = None session = scope["session"] with scope["tenant"]: try: user_id = _get_user_session_key(session) backend_path = session[BACKEND_SESSION_KEY] except KeyError: pass else: if backend_path in settings.AUTHENTICATION_BACKENDS: backend = load_backend(backend_path) user = backend.get_user(user_id) # Verify the session if hasattr(user, "get_session_auth_hash"): session_hash = session.get(HASH_SESSION_KEY) session_hash_verified = session_hash and constant_time_compare( session_hash, user.get_session_auth_hash() ) if not session_hash_verified: session.flush() user = None return user or AnonymousUser() class TenantAuthMiddleware(AuthMiddleware): async def resolve_scope(self, scope): scope["user"]._wrapped = await get_user(scope) def TenantAuthMiddlewareStack(inner): return CookieMiddleware(SessionMiddleware(TenantAuthMiddleware(inner))) django-pgschemas-0.15.2/django_pgschemas/contrib/channels3/router.py000066400000000000000000000067771463633566500255570ustar00rootroot00000000000000from channels.db import database_sync_to_async from channels.routing import ProtocolTypeRouter, URLRouter from django.conf import settings from django.core.exceptions import ImproperlyConfigured from django.utils.encoding import force_str from django.utils.module_loading import import_string from ...schema import SchemaDescriptor from ...utils import get_domain_model, remove_www from .auth import TenantAuthMiddlewareStack class TenantAwareProtocolTypeRouter(ProtocolTypeRouter): def __init__(self, application_mapping, tenant_prefix): self.tenant_prefix = tenant_prefix super().__init__(application_mapping) async def __call__(self, scope, receive, send): if scope["type"] != "http": scope["path"] = scope["path"][len(self.tenant_prefix) + 1 :] return await super().__call__(scope, receive, send) class TenantProtocolRouter: """ ProtocolRouter that handles multi-tenancy. """ def __init__(self): self.root_ws_urlconf = settings.TENANTS["default"].get("WS_URLCONF") if self.root_ws_urlconf is None: raise ImproperlyConfigured( "TENANTS['default'] must contain a 'WS_URLCONF' key in order to use TenantProtocolRouter." ) @database_sync_to_async def get_tenant_scope(self, scope): """ Get tenant and websockets urlconf based on scope host. """ hostname = force_str(dict(scope["headers"]).get(b"host", b"")) hostname = remove_www(hostname.split(":")[0]) tenant = None ws_urlconf = self.root_ws_urlconf # Checking for static tenants for schema, data in settings.TENANTS.items(): if schema in ["public", "default"]: continue if hostname in data["DOMAINS"]: tenant = SchemaDescriptor.create(schema_name=schema, domain_url=hostname) if "WS_URLCONF" in data: ws_urlconf = data["WS_URLCONF"] return tenant, "", import_string(ws_urlconf + ".urlpatterns") # Checking for dynamic tenants else: DomainModel = get_domain_model() prefix = scope["path"].split("/")[1] try: domain = DomainModel.objects.select_related("tenant").get( domain=hostname, folder=prefix ) except DomainModel.DoesNotExist: try: domain = DomainModel.objects.select_related("tenant").get( domain=hostname, folder="" ) except DomainModel.DoesNotExist: return None, "", [] tenant = domain.tenant tenant.domain_url = hostname ws_urlconf = settings.TENANTS["default"]["WS_URLCONF"] return ( tenant, prefix if prefix == domain.folder else "", import_string(ws_urlconf + ".urlpatterns"), ) def get_protocol_type_router(self, tenant_prefix, ws_urlconf): """ Subclasses can override this to include more protocols. 
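
        A hypothetical override adding another protocol mapping might look like:

            def get_protocol_type_router(self, tenant_prefix, ws_urlconf):
                return TenantAwareProtocolTypeRouter(
                    {
                        "websocket": TenantAuthMiddlewareStack(URLRouter(ws_urlconf)),
                        # "other": SomeOtherApp(),  # illustrative
                    },
                    tenant_prefix,
                )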
""" return TenantAwareProtocolTypeRouter( {"websocket": TenantAuthMiddlewareStack(URLRouter(ws_urlconf))}, tenant_prefix ) async def __call__(self, scope, receive, send): tenant, tenant_prefix, ws_urlconf = await self.get_tenant_scope(scope) scope.update({"tenant": tenant}) return await self.get_protocol_type_router(tenant_prefix, ws_urlconf)(scope, receive, send) django-pgschemas-0.15.2/django_pgschemas/contrib/files/000077500000000000000000000000001463633566500230705ustar00rootroot00000000000000django-pgschemas-0.15.2/django_pgschemas/contrib/files/__init__.py000066400000000000000000000000651463633566500252020ustar00rootroot00000000000000from .storage import TenantFileSystemStorage # noqa django-pgschemas-0.15.2/django_pgschemas/contrib/files/storage.py000066400000000000000000000040171463633566500251100ustar00rootroot00000000000000import os from django.conf import settings from django.core.files.storage import FileSystemStorage from ...schema import get_current_schema class TenantFileSystemStorage(FileSystemStorage): """ Tenant aware file system storage. Appends the tenant identifier to the base location and base URL. """ def get_schema_path_identifier(self): current_schema = get_current_schema() if current_schema is None: return "" path_identifier = current_schema.schema_name if hasattr(current_schema, "schema_pathname"): path_identifier = current_schema.schema_pathname() elif hasattr(settings, "PGSCHEMAS_PATHNAME_FUNCTION"): path_identifier = settings.PGSCHEMAS_PATHNAME_FUNCTION(current_schema) return path_identifier @property # To avoid caching of tenant def base_location(self): """ Appends base location with the schema path identifier. """ file_folder = self.get_schema_path_identifier() location = os.path.join(super().base_location, file_folder) if not location.endswith("/"): location += "/" return location @property # To avoid caching of tenant def location(self): return super().location @property # To avoid caching of tenant def base_url(self): """ Optionally appends base URL with the schema path identifier. If the current schema is already using a folder, no path identifier is appended. """ current_schema = get_current_schema() url_folder = self.get_schema_path_identifier() if url_folder and current_schema and current_schema.folder: # Since we're already prepending all URLs with schema, there is no # need to make the differentiation here url_folder = "" parent_base_url = super().base_url.strip("/") url = "/".join(["", parent_base_url, url_folder]) if not url.endswith("/"): url += "/" return url django-pgschemas-0.15.2/django_pgschemas/log.py000066400000000000000000000006121463633566500214600ustar00rootroot00000000000000import logging from .schema import get_current_schema class SchemaContextFilter(logging.Filter): """ Add the current ``schema_name`` and ``domain_url`` to log records. 
""" def filter(self, record): current_schema = get_current_schema() record.schema_name = current_schema.schema_name record.domain_url = current_schema.domain_url return True django-pgschemas-0.15.2/django_pgschemas/management/000077500000000000000000000000001463633566500224425ustar00rootroot00000000000000django-pgschemas-0.15.2/django_pgschemas/management/__init__.py000066400000000000000000000000001463633566500245410ustar00rootroot00000000000000django-pgschemas-0.15.2/django_pgschemas/management/commands/000077500000000000000000000000001463633566500242435ustar00rootroot00000000000000django-pgschemas-0.15.2/django_pgschemas/management/commands/__init__.py000066400000000000000000000255601463633566500263640ustar00rootroot00000000000000import enum from django.conf import settings from django.core.management.base import BaseCommand, CommandError from django.db.models import CharField, Q, Value as V from django.db.models.functions import Concat from django.db.utils import ProgrammingError from ...schema import get_current_schema from ...utils import create_schema, dynamic_models_exist, get_clone_reference, get_tenant_model from ._executors import parallel, sequential class CommandScope(enum.Enum): ALL = "all" DYNAMIC = "dynamic" STATIC = "static" @classmethod def allow_static(cls): return [cls.ALL, cls.STATIC] @classmethod def allow_dynamic(cls): return [cls.ALL, cls.DYNAMIC] EXECUTORS = { "sequential": sequential, "parallel": parallel, } class WrappedSchemaOption: scope = CommandScope.ALL specific_schemas = None allow_interactive = True allow_wildcards = True def add_arguments(self, parser): if self.allow_interactive: parser.add_argument( "--noinput", "--no-input", action="store_false", dest="interactive", help="Tells Django to NOT prompt the user for input of any kind.", ) parser.add_argument( "-s", "--schema", nargs="+", dest="schemas", help="Schema(s) to execute the current command", ) parser.add_argument( "-x", "--exclude-schema", nargs="+", dest="excluded_schemas", help="Schema(s) to exclude when executing the current command", ) if self.allow_wildcards: parser.add_argument( "-as", "--include-all-schemas", action="store_true", dest="all_schemas", help="Include all schemas when executing the current command", ) parser.add_argument( "-ss", "--include-static-schemas", action="store_true", dest="static_schemas", help="Include all static schemas when executing the current command", ) parser.add_argument( "-ds", "--include-dynamic-schemas", action="store_true", dest="dynamic_schemas", help="Include all dynamic schemas when executing the current command", ) parser.add_argument( "-ts", "--include-tenant-schemas", action="store_true", dest="tenant_schemas", help="Include all tenant-like schemas when executing the current command", ) parser.add_argument( "--parallel", dest="parallel", action="store_true", help="Run command in parallel mode", ) parser.add_argument( "--no-create-schemas", dest="skip_schema_creation", action="store_true", help="Skip automatic creation of non-existing schemas", ) def get_schemas_from_options(self, **options): skip_schema_creation = options.get("skip_schema_creation", False) try: schemas = self._get_schemas_from_options(**options) except ProgrammingError: # This happens with unmigrated database. # It can also happen when the tenant model contains unapplied migrations that break. raise CommandError( "Error while attempting to retrieve dynamic schemas. " "Perhaps you need to migrate the 'public' schema first?" 
) if self.specific_schemas is not None: schemas = [x for x in schemas if x in self.specific_schemas] if not schemas: raise CommandError("This command can only run in %s" % self.specific_schemas) if not skip_schema_creation: for schema in schemas: create_schema(schema, check_if_exists=True, sync_schema=False, verbosity=0) return schemas def get_executor_from_options(self, **options): return EXECUTORS["parallel"] if options.get("parallel") else EXECUTORS["sequential"] def get_scope_display(self): return "|".join(self.specific_schemas or []) or self.scope.value def _get_schemas_from_options(self, **options): schemas = options.get("schemas") or [] excluded_schemas = options.get("excluded_schemas") or [] include_all_schemas = options.get("all_schemas") or False include_static_schemas = options.get("static_schemas") or False include_dynamic_schemas = options.get("dynamic_schemas") or False include_tenant_schemas = options.get("tenant_schemas") or False dynamic_ready = dynamic_models_exist() allow_static = self.scope in CommandScope.allow_static() allow_dynamic = self.scope in CommandScope.allow_dynamic() clone_reference = get_clone_reference() if ( not schemas and not include_all_schemas and not include_static_schemas and not include_dynamic_schemas and not include_tenant_schemas ): if not self.allow_interactive: include_all_schemas = True elif options.get("interactive", True): schema = input( "Enter schema to run command (leave blank for running on '%s' schemas): " % self.get_scope_display() ).strip() if schema: schemas.append(schema) else: include_all_schemas = True else: raise CommandError("No schema provided") TenantModel = get_tenant_model() static_schemas = ( [x for x in settings.TENANTS.keys() if x != "default"] if allow_static else [] ) dynamic_schemas = ( TenantModel.objects.values_list("schema_name", flat=True) if TenantModel is not None and dynamic_ready and allow_dynamic else [] ) if clone_reference and allow_static: static_schemas.append(clone_reference) schemas_to_return = set() if include_all_schemas: if not allow_static and not allow_dynamic: raise CommandError("Including all schemas is NOT allowed") schemas_to_return = schemas_to_return.union(static_schemas + list(dynamic_schemas)) if include_static_schemas: if not allow_static: raise CommandError("Including static schemas is NOT allowed") schemas_to_return = schemas_to_return.union(static_schemas) if include_dynamic_schemas: if not allow_dynamic: raise CommandError("Including dynamic schemas is NOT allowed") schemas_to_return = schemas_to_return.union(dynamic_schemas) if include_tenant_schemas: if not allow_dynamic: raise CommandError("Including tenant-like schemas is NOT allowed") schemas_to_return = schemas_to_return.union(dynamic_schemas) if clone_reference: schemas_to_return.add(clone_reference) def find_schema_by_reference(reference, as_excluded=False): if reference in settings.TENANTS and reference != "default" and allow_static: return reference elif reference == clone_reference: return reference elif ( TenantModel is not None and dynamic_ready and TenantModel.objects.filter(schema_name=reference).exists() and allow_dynamic ): return reference else: local = [] if allow_static: local += [ schema_name for schema_name, data in settings.TENANTS.items() if schema_name not in ["public", "default"] and any(x for x in data["DOMAINS"] if x.startswith(reference)) ] if TenantModel is not None and dynamic_ready and allow_dynamic: local += ( TenantModel.objects.annotate( route=Concat( "domains__domain", V("/"), "domains__folder", 
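                            # The annotated "route" takes the form "<domain>/<folder>", so a
                            # tenant can also be referenced by its folder-routed URL below.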
output_field=CharField(), ) ) .filter( Q(schema_name=reference) | Q(domains__domain__istartswith=reference) | Q(route=reference) ) .distinct() .values_list("schema_name", flat=True) ) if not local: message = ( "No schema found for '%s' (excluded)" if as_excluded else "No schema found for '%s'" ) raise CommandError(message % reference) if len(local) > 1: message = ( "More than one tenant found for schema '%s' by domain (excluded), " "please, narrow down the filter" if as_excluded else "More than one tenant found for schema '%s' by domain, please, narrow down the filter" ) raise CommandError(message % reference) return local[0] for schema in schemas: included = find_schema_by_reference(schema, as_excluded=False) schemas_to_return.add(included) for schema in excluded_schemas: excluded = find_schema_by_reference(schema, as_excluded=True) schemas_to_return -= {excluded} return ( list(schemas_to_return) if "public" not in schemas_to_return else ["public"] + list(schemas_to_return - {"public"}) ) class TenantCommand(WrappedSchemaOption, BaseCommand): def handle(self, *args, **options): schemas = self.get_schemas_from_options(**options) executor = self.get_executor_from_options(**options) executor(schemas, self, "_raw_handle_tenant", args, options, pass_schema_in_kwargs=True) def _raw_handle_tenant(self, *args, **kwargs): kwargs.pop("schema_name") self.handle_tenant(get_current_schema(), *args, **kwargs) def handle_tenant(self, tenant, *args, **options): pass class StaticTenantCommand(TenantCommand): scope = CommandScope.STATIC class DynamicTenantCommand(TenantCommand): scope = CommandScope.DYNAMIC django-pgschemas-0.15.2/django_pgschemas/management/commands/_executors.py000066400000000000000000000101311463633566500267710ustar00rootroot00000000000000import functools import multiprocessing from django.conf import settings from django.core.management import call_command from django.core.management.base import BaseCommand, CommandError, OutputWrapper from django.db import connection, connections, transaction from ...schema import SchemaDescriptor, activate from ...utils import get_clone_reference, get_tenant_model def run_on_schema( schema_name, executor_codename, command, function_name=None, args=None, kwargs=None, pass_schema_in_kwargs=False, fork_db=False, ): if args is None: args = [] if kwargs is None: kwargs = {} if not isinstance(command, BaseCommand): # Parallel executor needs to pass command 'type' instead of 'instance' # Therefore, no customizations for the command can be done, nor using custom stdout, stderr command = command() command.stdout = kwargs.pop("stdout", command.stdout) if not isinstance(command.stdout, OutputWrapper): command.stdout = OutputWrapper(command.stdout) command.stderr = kwargs.pop("stderr", command.stderr) if not isinstance(command.stderr, OutputWrapper): command.stderr = OutputWrapper(command.stderr) # Since we are prepending every output with the schema_name and executor, we need to determine # whether we need to do so based on the last ending used to write. If the last write didn't end # in '\n' then we don't do the prefixing in order to keep the output looking good. 
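    # For example (illustrative): migrate writes "Applying app.0001..." with no trailing
    # newline and then " OK"; only the first fragment gets the "[executor:schema]" prefix.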
class StyleFunc: last_message = None def __call__(self, message): last_message = self.last_message self.last_message = message if last_message is None or last_message.endswith("\n"): return "[%s:%s] %s" % ( command.style.NOTICE(executor_codename), command.style.NOTICE(schema_name), message, ) return message command.stdout.style_func = StyleFunc() command.stderr.style_func = StyleFunc() if fork_db: connections.close_all() if schema_name in settings.TENANTS: domains = settings.TENANTS[schema_name].get("DOMAINS", []) schema = SchemaDescriptor.create( schema_name=schema_name, domain_url=domains[0] if domains else None ) elif schema_name == get_clone_reference(): schema = SchemaDescriptor.create(schema_name=schema_name) elif (TenantModel := get_tenant_model()) is not None: schema = TenantModel.objects.get(schema_name=schema_name) else: raise CommandError(f"Unable to find schema {schema_name}!") activate(schema) if pass_schema_in_kwargs: kwargs.update({"schema_name": schema_name}) if function_name == "special:call_command": call_command(command, *args, **kwargs) elif function_name == "special:run_from_argv": command.run_from_argv(args) else: getattr(command, function_name)(*args, **kwargs) if fork_db: transaction.commit() connection.close() return schema_name def sequential( schemas, command, function_name, args=None, kwargs=None, pass_schema_in_kwargs=False ): runner = functools.partial( run_on_schema, executor_codename="sequential", command=command, function_name=function_name, args=args, kwargs=kwargs, pass_schema_in_kwargs=pass_schema_in_kwargs, fork_db=False, ) for schema in schemas: runner(schema) return schemas def parallel(schemas, command, function_name, args=None, kwargs=None, pass_schema_in_kwargs=False): processes = getattr(settings, "PGSCHEMAS_PARALLEL_MAX_PROCESSES", None) pool = multiprocessing.Pool(processes=processes) runner = functools.partial( run_on_schema, executor_codename="parallel", command=type(command), # Can't pass streams to children processes function_name=function_name, args=args, kwargs=kwargs, pass_schema_in_kwargs=pass_schema_in_kwargs, fork_db=True, ) return pool.map(runner, schemas) django-pgschemas-0.15.2/django_pgschemas/management/commands/cloneschema.py000066400000000000000000000123511463633566500271000ustar00rootroot00000000000000from distutils.util import strtobool from django.core.checks import Tags, run_checks from django.core.management.base import BaseCommand, CommandError from ...utils import clone_schema, get_domain_model, get_tenant_model class Command(BaseCommand): help = "Clones a schema" def _run_checks(self, **kwargs): # pragma: no cover issues = run_checks(tags=[Tags.database]) issues.extend(super()._run_checks(**kwargs)) return issues def add_arguments(self, parser): super().add_arguments(parser) parser.add_argument( "source", help="The name of the schema you want to clone", ) parser.add_argument( "destination", help="The name of the schema you want to create as clone", ) parser.add_argument( "--noinput", "--no-input", action="store_false", dest="interactive", help="Tells Django to NOT prompt the user for input of any kind.", ) parser.add_argument( "--dry-run", dest="dry_run", action="store_true", help="Just show what clone would do; without actually cloning.", ) def _ask(self, question): answer = None while answer is None: try: raw_answer = input(f"{question.strip()} [Y/n] ").strip() or "y" answer = strtobool(raw_answer) except ValueError: self.stderr.write(f"{raw_answer} is not a valid answer.") pass return answer def 
_check_required_field(self, field, exclude=None): if exclude is None: exclude = [] return ( field.editable and not field.primary_key and not field.is_relation and not ( field.null or field.has_default() or (field.blank and field.empty_strings_allowed) or getattr(field, "auto_now", False) or getattr(field, "auto_now_add", False) ) and field.name not in exclude ) def _get_constructed_instance(self, model_class, data): fields = [ field for field in model_class._meta.fields if self._check_required_field(field, data.keys()) ] instance = model_class(**data) if fields: self.stdout.write( self.style.WARNING(f"We need some data for model '{model_class._meta.model_name}':") ) for field in fields: while field.name not in data: raw_value = input(f"Value for field '{field.name}': ") try: data[field.name] = field.clean(raw_value, None) instance = model_class(**data) instance.clean() except Exception as e: if hasattr(e, "message"): self.stderr.write(e.message) # noqa elif hasattr(e, "messages"): self.stderr.write(" ".join(e.messages)) # noqa else: self.stderr.write(e) data.pop(field.name, None) return instance def get_dynamic_tenant(self, **options): tenant = None domain = None if self._ask( "You are cloning a schema for a dynamic tenant. Would you like to create a database entry for it?" ): tenant = self._get_constructed_instance( get_tenant_model(), {"schema_name": options["destination"]} ) domain = self._get_constructed_instance(get_domain_model(), {"is_primary": True}) if options["verbosity"] >= 1: self.stdout.write(self.style.WARNING("Looks good! Let's get to it!")) return tenant, domain def handle(self, *args, **options): tenant = None domain = None dry_run = options.get("dry_run") if options.get("interactive", True): TenantModel = get_tenant_model() if ( TenantModel is not None and TenantModel.objects.filter(schema_name=options["source"]).exists() ): tenant, domain = self.get_dynamic_tenant(**options) try: clone_schema(options["source"], options["destination"], dry_run) if tenant and domain: if options["verbosity"] >= 1: self.stdout.write("Schema cloned.") if not dry_run: tenant.save() domain.tenant = tenant if not dry_run: domain.save() if options["verbosity"] >= 1: self.stdout.write("Tenant and domain successfully saved.") if options["verbosity"] >= 1: self.stdout.write("All done!") except Exception as e: if hasattr(e, "message"): raise CommandError(e.message) # noqa elif hasattr(e, "messages"): raise CommandError(" ".join(e.messages)) # noqa else: raise CommandError(e) django-pgschemas-0.15.2/django_pgschemas/management/commands/createrefschema.py000066400000000000000000000033271463633566500277430ustar00rootroot00000000000000from django.core.checks import Tags, run_checks from django.core.management.base import BaseCommand, CommandError from ...utils import create_schema, drop_schema, get_clone_reference class Command(BaseCommand): help = "Creates the reference schema for faster dynamic tenant creation" def _run_checks(self, **kwargs): # pragma: no cover issues = run_checks(tags=[Tags.database]) issues.extend(super()._run_checks(**kwargs)) return issues def add_arguments(self, parser): super().add_arguments(parser) parser.add_argument( "--recreate", action="store_true", dest="recreate", help="Recreate reference schema.", ) def handle(self, *args, **options): clone_reference = get_clone_reference() if not clone_reference: raise CommandError("There is no reference schema configured.") if options.get("recreate", False): drop_schema(clone_reference, check_if_exists=True, 
verbosity=options["verbosity"]) if options["verbosity"] >= 1: self.stdout.write("Destroyed existing reference schema.") created = create_schema( clone_reference, check_if_exists=True, verbosity=options["verbosity"] ) if options["verbosity"] >= 1: if created: self.stdout.write("Reference schema successfully created!") else: self.stdout.write("Reference schema already exists.") self.stdout.write( self.style.WARNING( "Run this command again with --recreate if you want to recreate the reference schema." ) ) django-pgschemas-0.15.2/django_pgschemas/management/commands/migrate.py000066400000000000000000000001201463633566500262360ustar00rootroot00000000000000from .migrateschema import MigrateSchemaCommand Command = MigrateSchemaCommand django-pgschemas-0.15.2/django_pgschemas/management/commands/migrateschema.py000066400000000000000000000020141463633566500274230ustar00rootroot00000000000000from django.core.checks import Tags, run_checks from django.core.management.base import BaseCommand from django.core.management.commands.migrate import Command as MigrateCommand from . import WrappedSchemaOption from .runschema import Command as RunSchemaCommand class NonInteractiveRunSchemaCommand(RunSchemaCommand): allow_interactive = False class MigrateSchemaCommand(WrappedSchemaOption, BaseCommand): allow_interactive = False requires_system_checks = [] def _run_checks(self, **kwargs): # pragma: no cover issues = run_checks(tags=[Tags.database]) issues.extend(super()._run_checks(**kwargs)) return issues def add_arguments(self, parser): super().add_arguments(parser) MigrateCommand.add_arguments(self, parser) def handle(self, *args, **options): runschema = NonInteractiveRunSchemaCommand() options.pop("run_syncdb", False) runschema.execute(command_name="django.core.migrate", *args, **options) Command = MigrateSchemaCommand django-pgschemas-0.15.2/django_pgschemas/management/commands/runschema.py000066400000000000000000000064701463633566500266110ustar00rootroot00000000000000import argparse import sys from django.core.management import get_commands, load_command_class from django.core.management.base import BaseCommand, CommandError, SystemCheckError from . import WrappedSchemaOption class Command(WrappedSchemaOption, BaseCommand): help = "Wrapper around Django commands for use with an individual schema" def add_arguments(self, parser): super().add_arguments(parser) parser.add_argument("command_name", help="The command name you want to run") def get_command_from_arg(self, arg): *chunks, command = arg.split(".") path = ".".join(chunks) if not path: path = get_commands().get(command) try: cmd = load_command_class(path, command) except Exception: raise CommandError("Unknown command: %s" % arg) if isinstance(cmd, WrappedSchemaOption): raise CommandError("Command '%s' cannot be used in runschema" % arg) return cmd def run_from_argv(self, argv): # pragma: no cover """ Changes the option_list to use the options from the wrapped command. Adds schema parameter to specify which schema will be used when executing the wrapped command. """ try: # load the command object. if len(argv) <= 2: raise CommandError("No command to run") target_class = self.get_command_from_arg(argv[2]) # Ugly, but works. Delete command_name from the argv, parse the schemas manually # and forward the rest of the arguments to the actual command being wrapped. 
del argv[1] schema_parser = argparse.ArgumentParser() super().add_arguments(schema_parser) schema_ns, args = schema_parser.parse_known_args(argv) schemas = self.get_schemas_from_options( schemas=schema_ns.schemas, all_schemas=schema_ns.all_schemas, static_schemas=schema_ns.static_schemas, dynamic_schemas=schema_ns.dynamic_schemas, tenant_schemas=schema_ns.tenant_schemas, ) executor = self.get_executor_from_options(parallel=schema_ns.parallel) except Exception as e: if not isinstance(e, CommandError): raise # SystemCheckError takes care of its own formatting. if isinstance(e, SystemCheckError): self.stderr.write(str(e), lambda x: x) else: self.stderr.write("%s: %s" % (e.__class__.__name__, e)) sys.exit(1) executor(schemas, target_class, "special:run_from_argv", args) def handle(self, *args, **options): target = self.get_command_from_arg(options.pop("command_name")) schemas = self.get_schemas_from_options(**options) executor = self.get_executor_from_options(**options) options.pop("schemas") options.pop("excluded_schemas") options.pop("all_schemas") options.pop("static_schemas") options.pop("dynamic_schemas") options.pop("tenant_schemas") options.pop("parallel") options.pop("skip_schema_creation") if self.allow_interactive: options.pop("interactive") executor(schemas, target, "special:call_command", args, options) django-pgschemas-0.15.2/django_pgschemas/management/commands/whowill.py000066400000000000000000000006611463633566500263050ustar00rootroot00000000000000from . import TenantCommand class Command(TenantCommand): help = "Displays which schemas would be used based on the passed schema selectors" def handle_tenant(self, tenant, *args, **options): if options["verbosity"] >= 1: self.stdout.write( str(tenant.get_primary_domain()) if tenant.is_dynamic else tenant.domain_url or tenant.schema_name ) django-pgschemas-0.15.2/django_pgschemas/middleware.py000066400000000000000000000075651463633566500230320ustar00rootroot00000000000000import re from asgiref.sync import iscoroutinefunction, sync_to_async from django.conf import settings from django.http import Http404 from django.shortcuts import redirect from django.urls import clear_url_caches, set_urlconf from django.utils.decorators import sync_and_async_middleware from .schema import SchemaDescriptor, activate, activate_public from .urlresolvers import get_urlconf_from_schema from .utils import get_domain_model, remove_www def strip_tenant_from_path_factory(prefix): def strip_tenant_from_path(path): return re.sub(r"^/{}/".format(prefix), "/", path) return strip_tenant_from_path @sync_and_async_middleware def TenantMiddleware(get_response): """ This middleware should be placed at the very top of the middleware stack. Selects the proper static/dynamic tenant using the request host. 
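
    A typical configuration (sketch) simply lists it first:

        MIDDLEWARE = [
            "django_pgschemas.middleware.TenantMiddleware",
            # ... the rest of the stack ...
        ]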
""" def logic(request): hostname = remove_www(request.get_host().split(":")[0]) activate_public() tenant = None # Checking for static tenants for schema, data in settings.TENANTS.items(): if schema in ["public", "default"]: continue if hostname in data["DOMAINS"]: tenant = SchemaDescriptor.create(schema_name=schema, domain_url=hostname) break # Checking for dynamic tenants else: DomainModel = get_domain_model() prefix = request.path.split("/")[1] domain = None if DomainModel is not None: try: domain = DomainModel.objects.select_related("tenant").get( domain=hostname, folder=prefix ) except DomainModel.DoesNotExist: try: domain = DomainModel.objects.select_related("tenant").get( domain=hostname, folder="" ) except DomainModel.DoesNotExist: pass if domain is not None: tenant = domain.tenant tenant.domain_url = hostname tenant.folder = None request.strip_tenant_from_path = lambda x: x if prefix and domain.folder == prefix: tenant.folder = prefix request.strip_tenant_from_path = strip_tenant_from_path_factory(prefix) clear_url_caches() # Required to remove previous tenant prefix from cache (#8) if domain.redirect_to_primary: primary_domain = tenant.domains.get(is_primary=True) path = request.strip_tenant_from_path(request.path) return redirect(primary_domain.absolute_url(path), permanent=True) # Checking fallback domains if not tenant: for schema, data in settings.TENANTS.items(): if schema in ["public", "default"]: continue if hostname in data.get("FALLBACK_DOMAINS", []): tenant = SchemaDescriptor.create(schema_name=schema, domain_url=hostname) break # No tenant found from domain / folder if not tenant: raise Http404("No tenant for hostname '%s'" % hostname) request.tenant = tenant urlconf = get_urlconf_from_schema(tenant) request.urlconf = urlconf set_urlconf(urlconf) activate(tenant) if iscoroutinefunction(get_response): async_logic = sync_to_async(logic) async def middleware(request): if response := await async_logic(request): return response return await get_response(request) else: def middleware(request): if response := logic(request): return response return get_response(request) return middleware django-pgschemas-0.15.2/django_pgschemas/models.py000066400000000000000000000123151463633566500221650ustar00rootroot00000000000000from django.conf import settings from django.db import models, transaction from .postgresql_backend.base import check_schema_name from .schema import SchemaDescriptor from .signals import dynamic_tenant_needs_sync, dynamic_tenant_post_sync, dynamic_tenant_pre_drop from .utils import create_or_clone_schema, drop_schema, get_domain_model, schema_exists class TenantMixin(SchemaDescriptor, models.Model): """ All tenant models must inherit this class. """ auto_create_schema = True """ Set this flag to ``False`` on a parent class if you don't want the schema to be automatically created upon save. """ auto_drop_schema = False """ **USE THIS WITH CAUTION!** Set this flag to ``True`` on a parent class if you want the schema to be automatically deleted if the tenant row gets deleted. """ is_dynamic = True """ Leave this as ``True``. Denotes it's a database controlled tenant. 
""" schema_name = models.CharField(max_length=63, unique=True, validators=[check_schema_name]) class Meta: abstract = True def save(self, *args, **kwargs): verbosity = kwargs.pop("verbosity", 1) is_new = self.pk is None super().save(*args, **kwargs) if is_new and self.auto_create_schema: try: self.create_schema(verbosity=verbosity) dynamic_tenant_post_sync.send(sender=TenantMixin, tenant=self.serializable_fields()) except Exception: # We failed creating the tenant, delete what we created and re-raise the exception self.delete(force_drop=True) raise elif is_new: # Although we are not using the schema functions directly, the signal might be registered by a listener dynamic_tenant_needs_sync.send(sender=TenantMixin, tenant=self.serializable_fields()) elif not is_new and self.auto_create_schema and not schema_exists(self.schema_name): # Create schemas for existing models, deleting only the schema on failure try: self.create_schema(verbosity=verbosity) dynamic_tenant_post_sync.send(sender=TenantMixin, tenant=self.serializable_fields()) except Exception: # We failed creating the schema, delete what we created and re-raise the exception self.drop_schema() raise def delete(self, force_drop=False, *args, **kwargs): """ Deletes this row. Drops the tenant's schema if the attribute ``auto_drop_schema`` is ``True``. """ if force_drop or self.auto_drop_schema: dynamic_tenant_pre_drop.send(sender=TenantMixin, tenant=self.serializable_fields()) self.drop_schema() super().delete(*args, **kwargs) def serializable_fields(self): """ In certain cases the user model isn't serializable so you may want to only send the id. """ return self def create_schema(self, sync_schema=True, verbosity=1): """ Creates or clones the schema ``schema_name`` for this tenant. """ return create_or_clone_schema(self.schema_name, sync_schema, verbosity) def drop_schema(self): """ Drops the schema. """ return drop_schema(self.schema_name) def get_primary_domain(self): try: domain = self.domains.get(is_primary=True) return domain except get_domain_model().DoesNotExist: return None class DomainMixin(models.Model): """ All models that store the domains must inherit this class. 
""" tenant = ( models.ForeignKey( settings.TENANTS["default"]["TENANT_MODEL"], db_index=True, related_name="domains", on_delete=models.CASCADE, ) if "default" in settings.TENANTS else None ) domain = models.CharField(max_length=253, db_index=True) folder = models.SlugField(max_length=253, blank=True, db_index=True) is_primary = models.BooleanField(default=True) redirect_to_primary = models.BooleanField(default=False) class Meta: abstract = True unique_together = (("domain", "folder"),) def __str__(self): return "/".join([self.domain, self.folder]) if self.folder else self.domain @transaction.atomic def save(self, *args, **kwargs): using = kwargs.get("using") domain_list = self.__class__.objects if using: domain_list = domain_list.using(using) domain_list = domain_list.filter(tenant=self.tenant, is_primary=True).exclude(pk=self.pk) self.is_primary = self.is_primary or (not domain_list.exists()) if self.is_primary: domain_list.update(is_primary=False) if self.redirect_to_primary: self.redirect_to_primary = False super().save(*args, **kwargs) def absolute_url(self, path): """ Constructs an absolute url for this domain / folder and a given path """ folder = self.folder and "/" + self.folder if not path.startswith("/"): path = "/" + path return "//" + self.domain + folder + path django-pgschemas-0.15.2/django_pgschemas/postgresql_backend/000077500000000000000000000000001463633566500242005ustar00rootroot00000000000000django-pgschemas-0.15.2/django_pgschemas/postgresql_backend/__init__.py000066400000000000000000000000001463633566500262770ustar00rootroot00000000000000django-pgschemas-0.15.2/django_pgschemas/postgresql_backend/_constraints.py000066400000000000000000000105531463633566500272640ustar00rootroot00000000000000from django.db.models.indexes import Index from ..schema import get_current_schema def get_constraints(self, cursor, table_name): """ Retrieve any constraints or keys (unique, pk, fk, check, index) across one or more columns. Also retrieve the definition of expression-based indexes. """ constraints = {} # Loop over the key table, collecting things as constraints. The column # array must return column names in the same order in which they were # created. # The subquery containing generate_series can be replaced with # "WITH ORDINALITY" when support for PostgreSQL 9.3 is dropped. cursor.execute( """ SELECT c.conname, array( SELECT attname FROM ( SELECT unnest(c.conkey) AS colid, generate_series(1, array_length(c.conkey, 1)) AS arridx ) AS cols JOIN pg_attribute AS ca ON cols.colid = ca.attnum WHERE ca.attrelid = c.conrelid ORDER BY cols.arridx ), c.contype, (SELECT fkc.relname || '.' || fka.attname FROM pg_attribute AS fka JOIN pg_class AS fkc ON fka.attrelid = fkc.oid WHERE fka.attrelid = c.confrelid AND fka.attnum = c.confkey[1]), cl.reloptions FROM pg_constraint AS c JOIN pg_class AS cl ON c.conrelid = cl.oid JOIN pg_namespace AS ns ON cl.relnamespace = ns.oid WHERE ns.nspname = %s AND cl.relname = %s """, [get_current_schema().schema_name, table_name], ) for constraint, columns, kind, used_cols, options in cursor.fetchall(): constraints[constraint] = { "columns": columns, "primary_key": kind == "p", "unique": kind in ["p", "u"], "foreign_key": tuple(used_cols.split(".", 1)) if kind == "f" else None, "check": kind == "c", "index": False, "definition": None, "options": options, } # Now get indexes # The row_number() function for ordering the index fields can be # replaced by WITH ORDINALITY in the unnest() functions when support # for PostgreSQL 9.3 is dropped. 
cursor.execute( """ SELECT indexname, array_agg(attname ORDER BY rnum), indisunique, indisprimary, array_agg(ordering ORDER BY rnum), amname, exprdef, s2.attoptions FROM ( SELECT row_number() OVER () as rnum, c2.relname as indexname, idx.*, attr.attname, am.amname, CASE WHEN idx.indexprs IS NOT NULL THEN pg_get_indexdef(idx.indexrelid) END AS exprdef, CASE am.amname WHEN 'btree' THEN CASE (option & 1) WHEN 1 THEN 'DESC' ELSE 'ASC' END END as ordering, c2.reloptions as attoptions FROM ( SELECT *, unnest(i.indkey) as key, unnest(i.indoption) as option FROM pg_index i ) idx LEFT JOIN pg_class c ON idx.indrelid = c.oid LEFT JOIN pg_namespace n ON n.oid = c.relnamespace LEFT JOIN pg_class c2 ON idx.indexrelid = c2.oid LEFT JOIN pg_am am ON c2.relam = am.oid LEFT JOIN pg_attribute attr ON attr.attrelid = c.oid AND attr.attnum = idx.key WHERE c.relname = %s and n.nspname = %s ) s2 GROUP BY indexname, indisunique, indisprimary, amname, exprdef, attoptions; """, [table_name, get_current_schema().schema_name], ) for index, columns, unique, primary, orders, type_, definition, options in cursor.fetchall(): if index not in constraints: constraints[index] = { "columns": columns if columns != [None] else [], "orders": orders if orders != [None] else [], "primary_key": primary, "unique": unique, "foreign_key": None, "check": False, "index": True, "type": Index.suffix if type_ == "btree" else type_, "definition": definition, "options": options, } return constraints django-pgschemas-0.15.2/django_pgschemas/postgresql_backend/base.py000066400000000000000000000051001463633566500254600ustar00rootroot00000000000000from django.core.exceptions import ImproperlyConfigured from django.db.utils import DatabaseError from ..schema import get_current_schema, get_default_schema from ..utils import check_schema_name, get_limit_set_calls from .introspection import DatabaseSchemaIntrospection from .settings import EXTRA_SEARCH_PATHS, original_backend try: try: import psycopg as _psycopg except ImportError: import psycopg2 as _psycopg except ImportError: raise ImproperlyConfigured("Error loading psycopg2 or psycopg module") IntegrityError = _psycopg.IntegrityError def get_search_path(schema=None): if schema is None: schema = get_default_schema() search_path = ["public"] if schema.schema_name == "public" else [schema.schema_name, "public"] search_path.extend(EXTRA_SEARCH_PATHS) for part in search_path: check_schema_name(part) return ", ".join(search_path) class DatabaseWrapper(original_backend.DatabaseWrapper): def __init__(self, *args, **kwargs): self._search_path = None self._setting_search_path = False super().__init__(*args, **kwargs) # Patched version of DatabaseIntrospection that only returns the table list for the currently selected schema self.introspection = DatabaseSchemaIntrospection(self) def close(self): self._search_path = None self._setting_search_path = False super().close() def _handle_search_path(self, cursor=None): search_path_for_current_schema = get_search_path(get_current_schema()) skip = self._setting_search_path or ( self._search_path == search_path_for_current_schema and get_limit_set_calls() ) if not skip: self._setting_search_path = True cursor_for_search_path = self.connection.cursor() if cursor is None else cursor try: cursor_for_search_path.execute( f"SET search_path = {search_path_for_current_schema}" ) except (DatabaseError, _psycopg.InternalError): self._search_path = None else: self._search_path = search_path_for_current_schema finally: self._setting_search_path = False if cursor is None: 
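                    # Only close the cursor if it was opened here; a caller-provided
                    # cursor must remain open for the caller.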
cursor_for_search_path.close() def _cursor(self, name=None): cursor = super()._cursor(name=name) cursor_for_search_path = cursor if name is None else None # Named cursors cannot be reused self._handle_search_path(cursor_for_search_path) return cursor django-pgschemas-0.15.2/django_pgschemas/postgresql_backend/introspection.py000066400000000000000000000135631463633566500274600ustar00rootroot00000000000000from django.db.backends.base.introspection import FieldInfo, TableInfo from django.utils.encoding import force_str from ..schema import get_current_schema from . import _constraints from .settings import base_backend, original_backend try: DatabaseIntrospection = original_backend.DatabaseIntrospection except AttributeError: DatabaseIntrospection = base_backend.DatabaseIntrospection class DatabaseSchemaIntrospection(DatabaseIntrospection): # pragma: no cover """ Database schema introspection class. """ _get_indexes_query = """ SELECT attr.attname, idx.indkey, idx.indisunique, idx.indisprimary FROM pg_catalog.pg_class c INNER JOIN pg_catalog.pg_index idx ON c.oid = idx.indrelid INNER JOIN pg_catalog.pg_class c2 ON idx.indexrelid = c2.oid INNER JOIN pg_catalog.pg_attribute attr ON attr.attrelid = c.oid and attr.attnum = idx.indkey[0] INNER JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace WHERE c.relname = %s AND n.nspname = %s """ def get_table_list(self, cursor): """ Returns a list of table names in the current database and schema. """ cursor.execute( """ SELECT c.relname, c.relkind FROM pg_catalog.pg_class c LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace WHERE c.relkind IN ('r', 'v', '') AND n.nspname = '%s' AND pg_catalog.pg_table_is_visible(c.oid)""" % get_current_schema().schema_name ) return [ TableInfo(row[0], {"r": "t", "v": "v"}.get(row[1])) for row in cursor.fetchall() if row[0] not in self.ignored_tables ] def get_table_description(self, cursor, table_name): "Returns a description of the table, with the DB-API cursor.description interface." # As cursor.description does not return reliably the nullable property, # we have to query the information_schema (#7783) cursor.execute( """ SELECT column_name, is_nullable, column_default FROM information_schema.columns WHERE table_schema = %s and table_name = %s""", [get_current_schema().schema_name, table_name], ) field_map = {line[0]: line[1:] for line in cursor.fetchall()} cursor.execute("SELECT * FROM %s LIMIT 1" % self.connection.ops.quote_name(table_name)) return [ FieldInfo( *( (force_str(line[0]),) + line[1:6] + (field_map[force_str(line[0])][0] == "YES", field_map[force_str(line[0])][1]) ) ) for line in cursor.description ] def get_indexes(self, cursor, table_name): # This query retrieves each index on the given table, including the # first associated field name cursor.execute(self._get_indexes_query, [table_name, get_current_schema().schema_name]) indexes = {} for row in cursor.fetchall(): # row[1] (idx.indkey) is stored in the DB as an array. It comes out as # a string of space-separated integers. This designates the field # indexes (1-based) of the fields that have indexes on the table. # Here, we skip any indexes across multiple fields. if " " in row[1]: continue if row[0] not in indexes: indexes[row[0]] = {"primary_key": False, "unique": False} # It's possible to have the unique and PK constraints in separate indexes.
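            # row[2] is indisunique and row[3] is indisprimary (see _get_indexes_query above).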
if row[3]: indexes[row[0]]["primary_key"] = True if row[2]: indexes[row[0]]["unique"] = True return indexes def get_relations(self, cursor, table_name): """ Returns a dictionary of {field_name: (field_name_other_table, other_table)} representing all relationships to the given table. """ cursor.execute( """ SELECT c2.relname, a1.attname, a2.attname FROM pg_constraint con LEFT JOIN pg_class c1 ON con.conrelid = c1.oid LEFT JOIN pg_namespace n ON n.oid = c1.relnamespace LEFT JOIN pg_class c2 ON con.confrelid = c2.oid LEFT JOIN pg_attribute a1 ON c1.oid = a1.attrelid AND a1.attnum = con.conkey[1] LEFT JOIN pg_attribute a2 ON c2.oid = a2.attrelid AND a2.attnum = con.confkey[1] WHERE c1.relname = %s and n.nspname = %s AND con.contype = 'f'""", [table_name, get_current_schema().schema_name], ) relations = {} for row in cursor.fetchall(): relations[row[1]] = (row[2], row[0]) return relations get_constraints = _constraints.get_constraints def get_key_columns(self, cursor, table_name): key_columns = [] cursor.execute( """ SELECT kcu.column_name, ccu.table_name AS referenced_table, ccu.column_name AS referenced_column FROM information_schema.constraint_column_usage ccu LEFT JOIN information_schema.key_column_usage kcu ON ccu.constraint_catalog = kcu.constraint_catalog AND ccu.constraint_schema = kcu.constraint_schema AND ccu.constraint_name = kcu.constraint_name LEFT JOIN information_schema.table_constraints tc ON ccu.constraint_catalog = tc.constraint_catalog AND ccu.constraint_schema = tc.constraint_schema AND ccu.constraint_name = tc.constraint_name WHERE kcu.table_name = %s AND tc.constraint_type = 'FOREIGN KEY' AND tc.table_schema = %s """, [table_name, get_current_schema().schema_name], ) key_columns.extend(cursor.fetchall()) return key_columns django-pgschemas-0.15.2/django_pgschemas/postgresql_backend/settings.py000066400000000000000000000006051463633566500264130ustar00rootroot00000000000000from importlib import import_module from django.conf import settings BASE_BACKEND = "django.db.backends.postgresql" ORIGINAL_BACKEND = getattr(settings, "PGSCHEMAS_ORIGINAL_BACKEND", BASE_BACKEND) EXTRA_SEARCH_PATHS = getattr(settings, "PGSCHEMAS_EXTRA_SEARCH_PATHS", []) base_backend = import_module(BASE_BACKEND + ".base") original_backend = import_module(ORIGINAL_BACKEND + ".base") django-pgschemas-0.15.2/django_pgschemas/routers.py000066400000000000000000000022731463633566500224070ustar00rootroot00000000000000from django.apps import apps from django.conf import settings from .schema import get_current_schema from .utils import get_tenant_database_alias class SyncRouter: """ A router to control which applications will be synced depending on the schema we're syncing. 
""" def app_in_list(self, app_label, app_list): app_config = apps.get_app_config(app_label) app_config_full_name = f"{app_config.__module__}.{app_config.__class__.__name__}" return (app_config.name in app_list) or (app_config_full_name in app_list) def allow_migrate(self, db, app_label, model_name=None, **hints): current_schema = get_current_schema() if db != get_tenant_database_alias() or current_schema is None: return False app_list = [] if current_schema.schema_name == "public": app_list = settings.TENANTS["public"]["APPS"] elif current_schema.schema_name in settings.TENANTS: app_list = settings.TENANTS[current_schema.schema_name]["APPS"] else: app_list = settings.TENANTS["default"]["APPS"] if not app_list: return None return self.app_in_list(app_label, app_list) django-pgschemas-0.15.2/django_pgschemas/schema.py000066400000000000000000000027761463633566500221540ustar00rootroot00000000000000from typing import Optional from asgiref.local import Local from .signals import schema_activate _active = Local() def get_default_schema() -> "SchemaDescriptor": return SchemaDescriptor.create("public") def get_current_schema() -> "SchemaDescriptor": current_schema = getattr(_active, "value", None) return current_schema or get_default_schema() def activate(schema: "SchemaDescriptor"): if not isinstance(schema, SchemaDescriptor): raise RuntimeError("'activate' must be called with a SchemaDescriptor descendant") _active.value = schema schema_activate.send(sender=SchemaDescriptor, schema=schema) def deactivate(): if hasattr(_active, "value"): del _active.value schema_activate.send(sender=SchemaDescriptor, schema=SchemaDescriptor.create("public")) activate_public = deactivate class SchemaDescriptor: schema_name = None domain_url = None folder = None is_dynamic = False @staticmethod def create(schema_name: str, domain_url: Optional[str] = None, folder: Optional[str] = None): schema = SchemaDescriptor() schema.schema_name = schema_name schema.domain_url = domain_url schema.folder = folder return schema def __enter__(self): self.previous_schema = get_current_schema() activate(self) def __exit__(self, exc_type, exc_val, exc_tb): previous_schema = getattr(self, "previous_schema", None) activate(previous_schema) if previous_schema else deactivate() django-pgschemas-0.15.2/django_pgschemas/signals.py000066400000000000000000000020721463633566500223410ustar00rootroot00000000000000from django.db.models.signals import pre_delete from django.dispatch import Signal, receiver from .utils import get_tenant_model, schema_exists schema_activate = Signal() schema_activate.__doc__ = "Sent after a schema has been activated" dynamic_tenant_needs_sync = Signal() dynamic_tenant_needs_sync.__doc__ = "Sent when a schema from a dynamic tenant needs to be synced" dynamic_tenant_post_sync = Signal() dynamic_tenant_post_sync.__doc__ = ( "Sent after a tenant has been saved, its schema created and synced" ) dynamic_tenant_pre_drop = Signal() dynamic_tenant_pre_drop.__doc__ = "Sent when a schema from a dynamic tenant is about to be dropped" @receiver(pre_delete) def tenant_delete_callback(sender, instance, **kwargs): TenantModel = get_tenant_model() if TenantModel is None: return if not isinstance(instance, TenantModel): return if instance.auto_drop_schema and schema_exists(instance.schema_name): dynamic_tenant_pre_drop.send(sender=TenantModel, tenant=instance.serializable_fields()) instance.drop_schema() 
django-pgschemas-0.15.2/django_pgschemas/test/000077500000000000000000000000001463633566500213055ustar00rootroot00000000000000
django-pgschemas-0.15.2/django_pgschemas/test/__init__.py000066400000000000000000000000001463633566500234040ustar00rootroot00000000000000
django-pgschemas-0.15.2/django_pgschemas/test/cases.py000066400000000000000000000134341463633566500227620ustar00rootroot00000000000000from django.conf import settings
from django.core.management import call_command
from django.test import TestCase

from ..schema import SchemaDescriptor, activate, activate_public
from ..utils import get_clone_reference, get_domain_model, get_tenant_model

ALLOWED_TEST_DOMAIN = ".localhost"


class BaseTenantTestCaseMixin:
    @classmethod
    def get_verbosity(cls):
        return 0

    @classmethod
    def add_allowed_test_domain(cls):
        cls.BACKUP_ALLOWED_HOSTS = settings.ALLOWED_HOSTS
        # ALLOWED_HOSTS is a special setting of Django setup_test_environment so we can't modify it with helpers
        if ALLOWED_TEST_DOMAIN not in settings.ALLOWED_HOSTS:
            settings.ALLOWED_HOSTS += [ALLOWED_TEST_DOMAIN]

    @classmethod
    def remove_allowed_test_domain(cls):
        settings.ALLOWED_HOSTS = cls.BACKUP_ALLOWED_HOSTS

    @classmethod
    def sync_public(cls):
        call_command("migrateschema", schemas=["public"], verbosity=0)


class StaticTenantTestCase(BaseTenantTestCaseMixin, TestCase):
    schema_name = None  # Meant to be set by subclasses
    tenant = None

    @classmethod
    def setUpClass(cls):
        assert (
            cls.schema_name in settings.TENANTS
        ), f"{cls.__name__}.schema_name must be set to a valid static tenant"
        assert (
            cls.schema_name not in ["public", "default"] and cls.schema_name != get_clone_reference()
        ), f"{cls.__name__}.schema_name must be set to a valid static tenant"
        # Skip TestCase.setUpClass so class-level atomics can be entered manually below
        super(TestCase, cls).setUpClass()
        cls.sync_public()
        cls.add_allowed_test_domain()
        domain = (
            settings.TENANTS[cls.schema_name]["DOMAINS"][0]
            if settings.TENANTS[cls.schema_name]["DOMAINS"]
            else cls.schema_name + ALLOWED_TEST_DOMAIN
        )
        cls.tenant = SchemaDescriptor.create(schema_name=cls.schema_name, domain_url=domain)
        activate(cls.tenant)
        cls.cls_atomics = cls._enter_atomics()
        try:
            cls.setUpTestData()
        except Exception:
            cls._rollback_atomics(cls.cls_atomics)
            raise

    @classmethod
    def tearDownClass(cls):
        super().tearDownClass()
        activate_public()
        cls.remove_allowed_test_domain()


class DynamicTenantTestCase(BaseTenantTestCaseMixin, TestCase):
    tenant = None
    domain = None

    @classmethod
    def setup_tenant(cls, tenant):
        """
        Add any additional settings to the tenant before it gets saved.
        This is required if you have required fields.

        :param tenant:
        :return:
        """
        pass

    @classmethod
    def setup_domain(cls, domain):
        """
        Add any additional settings to the domain before it gets saved.
        This is required if you have required fields.

        :param domain:
        :return:
        """
        pass

    @classmethod
    def setUpClass(cls):
        # Skip TestCase.setUpClass so class-level atomics can be entered manually below
        super(TestCase, cls).setUpClass()
        cls.sync_public()
        cls.add_allowed_test_domain()
        cls.tenant = get_tenant_model()(schema_name=cls.get_test_schema_name())
        cls.setup_tenant(cls.tenant)
        cls.tenant.save(verbosity=cls.get_verbosity())
        tenant_domain = cls.get_test_tenant_domain()
        cls.domain = get_domain_model()(tenant=cls.tenant, domain=tenant_domain)
        cls.setup_domain(cls.domain)
        cls.domain.save()
        activate(cls.tenant)
        cls.cls_atomics = cls._enter_atomics()
        try:
            cls.setUpTestData()
        except Exception:
            cls._rollback_atomics(cls.cls_atomics)
            raise

    @classmethod
    def tearDownClass(cls):
        super().tearDownClass()
        activate_public()
        cls.domain.delete()
        cls.tenant.delete(force_drop=True)
        cls.remove_allowed_test_domain()

    @classmethod
    def get_test_tenant_domain(cls):
        return "tenant.localhost"

    @classmethod
    def get_test_schema_name(cls):
        return "test"


class TenantTestCase(DynamicTenantTestCase):
    pass


class FastDynamicTenantTestCase(DynamicTenantTestCase):
    @classmethod
    def flush_data(cls):
        """
        Whether to flush the data out of the tenant database.

        :return: bool
        """
        return True

    @classmethod
    def use_existing_tenant(cls):
        """
        Gets called if an existing tenant is found in the database.
        """
        pass

    @classmethod
    def use_new_tenant(cls):
        """
        Gets called if a new tenant is created in the database.
        """
        pass

    @classmethod
    def setup_test_tenant_and_domain(cls):
        cls.tenant = get_tenant_model()(schema_name=cls.get_test_schema_name())
        cls.setup_tenant(cls.tenant)
        cls.tenant.save(verbosity=cls.get_verbosity())

        # Set up domain
        tenant_domain = cls.get_test_tenant_domain()
        cls.domain = get_domain_model()(tenant=cls.tenant, domain=tenant_domain)
        cls.setup_domain(cls.domain)
        cls.domain.save()
        cls.use_new_tenant()

    @classmethod
    def setUpClass(cls):
        TenantModel = get_tenant_model()
        test_schema_name = cls.get_test_schema_name()
        if TenantModel.objects.filter(schema_name=test_schema_name).exists():
            cls.tenant = TenantModel.objects.filter(schema_name=test_schema_name).first()
            cls.use_existing_tenant()
        else:
            cls.setup_test_tenant_and_domain()
        activate(cls.tenant)

    @classmethod
    def tearDownClass(cls):
        TenantModel = get_tenant_model()
        test_schema_name = cls.get_test_schema_name()
        TenantModel.objects.filter(schema_name=test_schema_name).delete()
        activate_public()

    def _fixture_teardown(self):
        if self.flush_data():
            super()._fixture_teardown()


class FastTenantTestCase(FastDynamicTenantTestCase):
    pass
django-pgschemas-0.15.2/django_pgschemas/test/client.py000066400000000000000000000047421463633566500231430ustar00rootroot00000000000000from django.test import Client, RequestFactory


def get_domain(tenant):
    if tenant.is_dynamic:
        return tenant.get_primary_domain().domain
    return tenant.domain_url or tenant.schema_name


class TenantRequestFactory(RequestFactory):
    def __init__(self, tenant, **defaults):
        super().__init__(**defaults)
        self.tenant = tenant

    def get(self, path, data=None, **extra):
        if "HTTP_HOST" not in extra:
            extra["HTTP_HOST"] = get_domain(self.tenant)
        return super().get(path, data, **extra)

    def post(self, path, data=None, **extra):
        if "HTTP_HOST" not in extra:
            extra["HTTP_HOST"] = get_domain(self.tenant)
        return super().post(path, data, **extra)

    def patch(self, path, data=None, **extra):
        if "HTTP_HOST" not in extra:
            extra["HTTP_HOST"] = get_domain(self.tenant)
        return super().patch(path, data, **extra)

    def put(self, path, data=None, **extra):
        if "HTTP_HOST" not in extra:
            extra["HTTP_HOST"] = get_domain(self.tenant)
        return super().put(path, data, **extra)

    def delete(self, path,
data=None, content_type="application/octet-stream", **extra): if "HTTP_HOST" not in extra: extra["HTTP_HOST"] = get_domain(self.tenant) return super().delete(path, data, **extra) class TenantClient(Client): def __init__(self, tenant, enforce_csrf_checks=False, **defaults): super().__init__(enforce_csrf_checks, **defaults) self.tenant = tenant def get(self, path, data=None, **extra): if "HTTP_HOST" not in extra: extra["HTTP_HOST"] = get_domain(self.tenant) return super().get(path, data, **extra) def post(self, path, data=None, **extra): if "HTTP_HOST" not in extra: extra["HTTP_HOST"] = get_domain(self.tenant) return super().post(path, data, **extra) def patch(self, path, data=None, **extra): if "HTTP_HOST" not in extra: extra["HTTP_HOST"] = get_domain(self.tenant) return super().patch(path, data, **extra) def put(self, path, data=None, **extra): if "HTTP_HOST" not in extra: extra["HTTP_HOST"] = get_domain(self.tenant) return super().put(path, data, **extra) def delete(self, path, data=None, content_type="application/octet-stream", **extra): if "HTTP_HOST" not in extra: extra["HTTP_HOST"] = get_domain(self.tenant) return super().delete(path, data, **extra) django-pgschemas-0.15.2/django_pgschemas/urlresolvers.py000066400000000000000000000052501463633566500234510ustar00rootroot00000000000000import re import sys from django.conf import settings from django.urls import URLResolver from .schema import SchemaDescriptor, get_current_schema class TenantPrefixPattern: converters = {} @property def tenant_prefix(self): current_schema = get_current_schema() return f"{current_schema.folder}/" if current_schema.folder else "/" @property def regex(self): # This is only used by reverse() and cached in _reverse_dict. return re.compile(self.tenant_prefix) def match(self, path): tenant_prefix = self.tenant_prefix if path.startswith(tenant_prefix): return path[len(tenant_prefix) :], (), {} return None def check(self): return [] def describe(self): return f"'{self}'" def __str__(self): return self.tenant_prefix def tenant_patterns(*urls): """ Add the tenant prefix to every URL pattern within this function. This may only be used in the root URLconf, not in an included URLconf. """ return [URLResolver(TenantPrefixPattern(), list(urls))] def get_dynamic_tenant_prefixed_urlconf(urlconf, dynamic_path): """ Generates a new URLConf module with all patterns prefixed with tenant. """ from types import ModuleType from django.utils.module_loading import import_string class LazyURLConfModule(ModuleType): def __getattr__(self, attr): imported = import_string(f"{urlconf}.{attr}") if attr == "urlpatterns": return tenant_patterns(*imported) return imported return LazyURLConfModule(dynamic_path) def get_urlconf_from_schema(schema): """ Returns the proper URLConf depending on the schema. The schema must come with ``domain_url`` and ``folder`` members set. 
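    Returns ``None`` when no URLConf applies, e.g. if the schema has no ``domain_url``.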
""" assert isinstance(schema, SchemaDescriptor) if not schema.domain_url: return None # Checking for static tenants if not schema.is_dynamic: for schema_name, data in settings.TENANTS.items(): if schema_name in ["public", "default"]: continue if schema.domain_url in data["DOMAINS"]: return data["URLCONF"] if schema.domain_url in data.get("FALLBACK_DOMAINS", []): return data["URLCONF"] return None # Checking for dynamic tenants urlconf = settings.TENANTS["default"]["URLCONF"] if schema.folder: dynamic_path = urlconf + "_dynamically_tenant_prefixed" if not sys.modules.get(dynamic_path): sys.modules[dynamic_path] = get_dynamic_tenant_prefixed_urlconf(urlconf, dynamic_path) urlconf = dynamic_path return urlconf django-pgschemas-0.15.2/django_pgschemas/utils.py000066400000000000000000000163421463633566500220460ustar00rootroot00000000000000import os import re from typing import Optional from django.apps import apps from django.conf import settings from django.core.exceptions import ValidationError from django.core.management import call_command from django.db import DEFAULT_DB_ALIAS, ProgrammingError, connection, transaction from django.db.models import Model def get_tenant_model(require_ready: bool = True) -> Optional[Model]: "Returns the tenant model." if "default" not in settings.TENANTS: return None return apps.get_model(settings.TENANTS["default"]["TENANT_MODEL"], require_ready=require_ready) def get_domain_model(require_ready: bool = True) -> Optional[Model]: "Returns the domain model." if "default" not in settings.TENANTS: return None return apps.get_model(settings.TENANTS["default"]["DOMAIN_MODEL"], require_ready=require_ready) def get_tenant_database_alias() -> str: return getattr(settings, "PGSCHEMAS_TENANT_DB_ALIAS", DEFAULT_DB_ALIAS) def get_limit_set_calls() -> bool: return getattr(settings, "PGSCHEMAS_LIMIT_SET_CALLS", False) def get_clone_reference() -> Optional[str]: if "default" not in settings.TENANTS: return None return settings.TENANTS["default"].get("CLONE_REFERENCE", None) def is_valid_identifier(identifier: str) -> bool: "Checks the validity of identifier." SQL_IDENTIFIER_RE = re.compile(r"^[_a-zA-Z][_a-zA-Z0-9]{,62}$") return bool(SQL_IDENTIFIER_RE.match(identifier)) def is_valid_schema_name(name: str) -> bool: "Checks the validity of a schema name." SQL_SCHEMA_NAME_RESERVED_RE = re.compile(r"^pg_", re.IGNORECASE) return is_valid_identifier(name) and not SQL_SCHEMA_NAME_RESERVED_RE.match(name) def check_schema_name(name: str): """ Checks schema name and raises ``ValidationError`` if ``name`` is not a valid identifier. """ if not is_valid_schema_name(name): raise ValidationError("Invalid string used for the schema name.") def remove_www(hostname: str) -> str: """ Removes ``www``. from the beginning of the address. Only for routing purposes. ``www.test.com/login/`` and ``test.com/login/`` should find the same tenant. """ if hostname.startswith("www."): return hostname[4:] return hostname def django_is_in_test_mode() -> bool: """ I know this is very ugly! I'm looking for more elegant solutions. See: http://stackoverflow.com/questions/6957016/detect-django-testing-mode """ from django.core import mail return hasattr(mail, "outbox") def run_in_public_schema(func): "Decorator that makes decorated function to be run in the public schema." 
def wrapper(*args, **kwargs): from .schema import SchemaDescriptor with SchemaDescriptor.create(schema_name="public"): return func(*args, **kwargs) return wrapper def schema_exists(schema_name: str) -> bool: "Checks if a schema exists in database." sql = """ SELECT EXISTS( SELECT 1 FROM pg_catalog.pg_namespace WHERE LOWER(nspname) = LOWER(%s) ) """ cursor = connection.cursor() cursor.execute(sql, (schema_name,)) row = cursor.fetchone() if row: exists = row[0] else: # pragma: no cover exists = False cursor.close() return exists @run_in_public_schema def dynamic_models_exist() -> bool: "Checks if tenant model and domain model have been synced." sql = """ SELECT count(*) FROM information_schema.tables WHERE table_schema = 'public' AND table_name in ('%s', '%s'); """ TenantModel = get_tenant_model() DomainModel = get_domain_model() if TenantModel is None or DomainModel is None: return False cursor = connection.cursor() cursor.execute(sql % (TenantModel._meta.db_table, DomainModel._meta.db_table)) value = cursor.fetchone() == (2,) cursor.close() return value @run_in_public_schema def create_schema( schema_name: str, check_if_exists: bool = False, sync_schema: bool = True, verbosity: int = 1, ) -> bool: """ Creates the schema ``schema_name``. Optionally checks if the schema already exists before creating it. Returns ``True`` if the schema was created, ``False`` otherwise. """ check_schema_name(schema_name) if check_if_exists and schema_exists(schema_name): return False cursor = connection.cursor() cursor.execute("CREATE SCHEMA %s" % schema_name) cursor.close() if sync_schema: call_command("migrateschema", schemas=[schema_name], verbosity=verbosity) return True @run_in_public_schema def drop_schema(schema_name: str, check_if_exists: bool = True, verbosity: int = 1) -> bool: """ Drops the schema. Optionally checks if the schema already exists before dropping it. """ if check_if_exists and not schema_exists(schema_name): return False cursor = connection.cursor() cursor.execute("DROP SCHEMA %s CASCADE" % schema_name) cursor.close() return True class DryRunException(Exception): pass def _create_clone_schema_function(): """ Creates a postgres function `clone_schema` that copies a schema and its contents. Will replace any existing `clone_schema` functions owned by the `postgres` superuser. """ with open(os.path.join(os.path.dirname(os.path.abspath(__file__)), "clone_schema.sql")) as f: CLONE_SCHEMA_FUNCTION = ( f.read() .replace("RAISE NOTICE ' source schema", "RAISE EXCEPTION ' source schema") .replace("RAISE NOTICE ' dest schema", "RAISE EXCEPTION ' dest schema") ) cursor = connection.cursor() cursor.execute(CLONE_SCHEMA_FUNCTION) cursor.close() @run_in_public_schema def clone_schema(base_schema_name: str, new_schema_name: str, dry_run: bool = False): """ Creates a new schema ``new_schema_name`` as a clone of an existing schema ``base_schema_name``. 
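    The PostgreSQL ``clone_schema`` function is installed on first use, if not already present.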
""" check_schema_name(new_schema_name) cursor = connection.cursor() # check if the clone_schema function already exists in the db try: cursor.execute( "SELECT 'public.clone_schema(text, text, public.cloneparms[])'::regprocedure" ) except ProgrammingError: # pragma: no cover _create_clone_schema_function() transaction.commit() try: with transaction.atomic(): cursor.callproc("clone_schema", [base_schema_name, new_schema_name, "DATA"]) cursor.close() if dry_run: raise DryRunException except DryRunException: cursor.close() def create_or_clone_schema(schema_name: str, sync_schema: bool = True, verbosity: int = 1) -> bool: """ Creates the schema ``schema_name``. Optionally checks if the schema already exists before creating it. Returns ``True`` if the schema was created, ``False`` otherwise. """ check_schema_name(schema_name) if schema_exists(schema_name): return False clone_reference = get_clone_reference() if ( clone_reference and schema_exists(clone_reference) and not django_is_in_test_mode() ): # pragma: no cover clone_schema(clone_reference, schema_name) return True return create_schema(schema_name, sync_schema=sync_schema, verbosity=verbosity) django-pgschemas-0.15.2/docs/000077500000000000000000000000001463633566500157625ustar00rootroot00000000000000django-pgschemas-0.15.2/docs/Makefile000066400000000000000000000011051463633566500174170ustar00rootroot00000000000000# Minimal makefile for Sphinx documentation # # You can set these variables from the command line. SPHINXOPTS = SPHINXBUILD = sphinx-build SOURCEDIR = . BUILDDIR = _build # Put it first so that "make" without argument is like "make help". help: @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) .PHONY: help Makefile # Catch-all target: route all unknown targets to Sphinx using the new # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). %: Makefile @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) django-pgschemas-0.15.2/docs/advanced.rst000066400000000000000000000243521463633566500202670ustar00rootroot00000000000000Advanced configuration ====================== Fast dynamic tenant creation ---------------------------- Every time a instance of ``settings.TENANTS["default"]["TENANT_MODEL"]`` is created, by default, the corresponding schema is created and synchronized automatically. Depending on the number of migrations you already have in place, or the amount of time these could take, or whether you need to pre-populate the newly created schema with fixtures, this process could take a considerable amount of time. If you need a faster creation of dynamic schemas, you can do so by provisioning a "reference" schema that can cloned into new schemas. .. code-block:: python TENANTS = { # ... "default": { # ... "CLONE_REFERENCE": "sample", }, } Once you have this in your settings, you need to prepare your reference schema with everything a newly created dynamic schema will need. The first step is actually creating and synchronizing the reference schema. After that, you can run any command on it, or edit its tables via ``shell``. .. code-block:: bash python manage.py createrefschema python runschema loaddata tenant_app.products -s sample python runschema shell -s sample The ``runschema`` command is explained in :ref:`running management commands`. You don't need any extra step. As soon as a reference schema is configured, next time you create an instance of the tenant model, it will clone the reference schema instead of actually creating and synchronizing the schema. 
Most importantly, by default, migrations will include the reference schema, so that it is kept up to date for future tenant creation.

.. tip:: The reference schema will get apps from ``settings.TENANTS["default"]["APPS"]`` and may look like any other dynamic tenant, but it is considered a *static* tenant instead, as there is no corresponding database entry for it. It's a special case of a static tenant, and it cannot be routed.

Fallback domains
----------------

If there is only one domain available, and no possibility to use subdomain routing, the URLs for accessing your different tenants might look like::

    mydomain.com            -> main site
    mydomain.com/customer1  -> customer 1
    mydomain.com/customer2  -> customer 2

In this case, due to the order in which domains are tested, it is not possible to put ``mydomain.com`` as the domain for the main tenant without blocking all dynamic schemas from getting routed.

When ``django_pgschemas.middleware.TenantMiddleware`` is checking which tenant to route from the incoming domain, it checks for static tenants first, then for dynamic tenants. If ``mydomain.com`` is used for the main tenant (which is static), then URLs like ``mydomain.com/customer1/some/url/`` will always match the main tenant.

For a case like this, we provide a setting called ``FALLBACK_DOMAINS``. If no tenant is found for an incoming combination of domain and subfolder, then static tenants are checked again against their fallback domains. Something like this would be the proper configuration for the present case:

.. code-block:: python

    TENANTS = {
        "public": {
            "APPS": [
                "django.contrib.contenttypes",
                "django.contrib.staticfiles",
                # ...
                "django_pgschemas",
                "shared_app",
                # ...
            ],
        },
        "main": {
            "APPS": [
                "django.contrib.auth",
                "django.contrib.sessions",
                # ...
                "main_app",
            ],
            "DOMAINS": [],  # <--- No domain here
            "FALLBACK_DOMAINS": ["mydomain.com"],  # <--- This is checked last
            "URLCONF": "main_app.urls",
        },
        "default": {
            "TENANT_MODEL": "shared_app.Client",
            "DOMAIN_MODEL": "shared_app.Domain",
            "APPS": [
                "django.contrib.auth",
                "django.contrib.sessions",
                # ...
                "tenant_app",
                # ...
            ],
            "URLCONF": "tenant_app.urls",
        }
    }

This example assumes that dynamic tenants will get their domains set to ``mydomain.com`` with a tenant-specific subfolder, like ``client1`` or ``client2``.

Here, an incoming request for ``mydomain.com/client1/some/url/`` will fail for the main tenant, then match against an existing dynamic tenant. On the other hand, an incoming request for ``mydomain.com/some/url/`` will fail for all static tenants, then fail for all dynamic tenants, and will finally match against the fallback domains of the main tenant.

Static-only tenants
-------------------

It's also possible to have only static tenants. For this, the ``default`` key must be omitted:

.. code-block:: python

    TENANTS = {
        "public": {
            "APPS": [
                "django.contrib.contenttypes",
                "django.contrib.staticfiles",
                # ...
                "django_pgschemas",
                "shared_app",
                # ...
            ],
        },
        "www": {
            "APPS": [
                "django.contrib.auth",
                "django.contrib.sessions",
                # ...
                "main_app",
            ],
            "DOMAINS": ["mydomain.com"],
            "URLCONF": "main_app.urls",
        },
        "blog": {
            "APPS": [
                "django.contrib.auth",
                "django.contrib.sessions",
                # ...
                "blog_app",
            ],
            "DOMAINS": ["blog.mydomain.com", "help.mydomain.com"],
            "URLCONF": "blog_app.urls",
        }
    }

In this case, no model is expected to inherit from ``TenantMixin`` and ``DomainMixin``, and no clone reference schema can be created.
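
Whether your tenants are static or dynamic, a schema can be activated outside the request/response cycle. A minimal sketch, using the ``blog`` static tenant from the example above:

.. code-block:: python

    from django_pgschemas.schema import SchemaDescriptor

    with SchemaDescriptor.create(schema_name="blog"):
        # Any ORM query here runs with search path "blog", "public"
        ...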
Running management commands --------------------------- Since all management commands occur outside the request/response cycle, all commands from Django and any other third party apps are executed by default on the public schema. In order to work around this, we provide a ``runschema`` command that accepts any other command to be run on one or multiple schemas. A concise synopsis of the ``runschema`` command is as follows:: usage: manage.py runschema [-s SCHEMAS [SCHEMAS ...]] [-x EXCLUDED_SCHEMAS [EXCLUDED_SCHEMAS ...]] [-as] [-ss] [-ds] [-ts] [--parallel] [--no-create-schemas] [--noinput] command_name Wrapper around django commands for use with an individual schema positional arguments: command_name The command name you want to run optional arguments: --noinput, --no-input Tells Django to NOT prompt the user for input of any kind. -s SCHEMAS [SCHEMAS ...], --schema SCHEMAS [SCHEMAS ...] Schema(s) to execute the current command -as, --include-all-schemas Include all schemas when executing the current command -ss, --include-static-schemas Include all static schemas when executing the current command -ds, --include-dynamic-schemas Include all dynamic schemas when executing the current command -ts, --include-tenant-schemas Include all tenant-like schemas when executing the current command -x EXCLUDED_SCHEMAS [EXCLUDED_SCHEMAS ...], --exclude-schema EXCLUDED_SCHEMAS [EXCLUDED_SCHEMAS ...] Schema(s) to exclude when executing the current command --parallel Run command in parallel mode --no-create-schemas Skip automatic creation of non-existing schemas The ``--schema`` parameter accepts multiple inputs of different kinds: - The key of a static tenant or the ``schema_name`` of a dynamic tenant. - The prefix of any domain, provided only one corresponding tenant is found. - The ``domain/folder`` of a tenant, like ``customers.mydomain.com/client1`` The parameters ``-as``, ``-ss``, ``-ds`` and ``-ts`` act as wildcards for including all schemas, static schemas, dynamic schemas and tenant-like schemas, respectively. Tenant-like schemas are dynamic schemas plus the clone reference, if it exists. It's possible to exclude schemas via the ``-x`` parameter. Excluded schemas will take precedence over included ones. At least one schema is mandatory. If it's not provided with the command, either explicitly or via wildcard params, it will be asked interactively. One notable exception to this is when the option ``--noinput`` is passed, in which case the command will fail. If ``--parallel`` is passed, the command will be run asynchronously, spawning multiple threads controlled by the setting ``PGSCHEMAS_PARALLEL_MAX_PROCESSES``. It defaults to ``None``, in which case the number of CPUs will be used. By default, schemas that do not exist will be created (but not synchronized), except if ``--no-create-schemas`` is passed. Full details for this command can be found in :ref:`runschema-cmd`. Inheritable commands ++++++++++++++++++++ We also provide some base commands you can inherit, in order to mimic the behavior of ``runschema``. By inheriting these you will get the parameters we discussed in the previous section. The base commands provide a ``handle_tenant`` you must override in order to execute the actions you need on any given tenant. The base commands are: .. code-block:: python # django_pgschemas.management.commands.__init__.py class TenantCommand(WrappedSchemaOption, BaseCommand): # ... def handle_tenant(self, tenant, *args, **options): pass class StaticTenantCommand(TenantCommand): # ... 
class DynamicTenantCommand(TenantCommand): # ... .. attention:: Since these commands can work with both static and dynamic tenants, the parameter ``tenant`` will be an instance of ``django_pgschemas.schema.SchemaDescriptor``. Make sure you do the appropriate type checking before accessing the tenant members, as not every tenant will be an instance of ``settings.TENANTS["default"]["TENANT_MODEL"]``. django-pgschemas-0.15.2/docs/basic.rst000066400000000000000000000123071463633566500176000ustar00rootroot00000000000000Installation ============ This app requires: * Python (3.8.1+) * Django (4.0+) * Any version of psycopg You can install ``django-pgschemas`` via ``pip`` or any other installer. .. code-block:: bash pip install django-pgschemas Basic Configuration =================== Use ``django_pgschemas.postgresql_backend`` as your database engine. This enables the API for setting PostgreSQL search path .. code-block:: python DATABASES = { "default": { "ENGINE": "django_pgschemas.postgresql_backend", # ... } } Add the middleware ``django_pgschemas.middleware.TenantMiddleware`` to the top of ``MIDDLEWARE``, so that each request can be set to use the correct schema. .. code-block:: python MIDDLEWARE = ( "django_pgschemas.middleware.TenantMiddleware", # ... ) Add ``django_pgschemas.routers.SyncRouter`` to your ``DATABASE_ROUTERS``, so that the correct apps can be synced, depending on the target schema. .. code-block:: python DATABASE_ROUTERS = ( "django_pgschemas.routers.SyncRouter", # ... ) Add the minimal tenant configuration. .. code-block:: python TENANTS = { "public": { "APPS": [ "django.contrib.contenttypes", "django.contrib.staticfiles", # ... "django_pgschemas", "shared_app", # ... ], }, # ... "default": { "TENANT_MODEL": "shared_app.Client", "DOMAIN_MODEL": "shared_app.Domain", "APPS": [ "django.contrib.auth", "django.contrib.sessions", # ... "tenant_app", # ... ], "URLCONF": "tenant_app.urls", } } Each entry in the ``TENANTS`` dictionary represents a static tenant, except for ``default``, which controls the settings for all dynamic tenants. Notice how each tenant has the relevant ``APPS`` that will be synced in the corresponding schema. .. tip:: ``public`` is always treated as shared schema and cannot be routed directly. Every other tenant will get its search path set to its schema first, then the public schema. For Django to function properly, ``INSTALLED_APPS`` and ``ROOT_URLCONF`` settings must be defined. Just make them get their information from the ``TENANTS`` dictionary, for the sake of consistency. .. code-block:: python INSTALLED_APPS = [] for schema in TENANTS: INSTALLED_APPS += [app for app in TENANTS[schema]["APPS"] if app not in INSTALLED_APPS] ROOT_URLCONF = TENANTS["default"]["URLCONF"] Creating tenants ---------------- More static tenants can be added and routed. .. code-block:: python TENANTS = { # ... "www": { "APPS": [ "django.contrib.auth", "django.contrib.sessions", # ... "main_app", ], "DOMAINS": ["mydomain.com"], "URLCONF": "main_app.urls", }, "blog": { "APPS": [ "django.contrib.auth", "django.contrib.sessions", # ... "blog_app", ], "DOMAINS": ["blog.mydomain.com", "help.mydomain.com"], "URLCONF": "blog_app.urls", }, # ... } Dynamic tenants need to be created through instances of ``TENANTS["default"]["TENANT_MODEL"]`` and routed through instances of ``TENANTS["default"]["DOMAIN_MODEL"]``. .. 
code-block:: python # shared_app/models.py from django.db import models from django_pgschemas.models import TenantMixin, DomainMixin class Client(TenantMixin): name = models.CharField(max_length=100) paid_until = models.DateField(blank=True, null=True) on_trial = models.BooleanField(default=True) created_on = models.DateField(auto_now_add=True) class Domain(DomainMixin): pass Synchronizing tenants --------------------- As a first step, you must always synchronize the public schema in order to get the tenant and domain models created. You can then synchronize the rest of the schemas. .. code-block:: bash python manage.py migrate -s public python manage.py migrate Now you are ready to create your first dynamic tenant. In the example, the tenant is created through a ``python manage.py shell`` session. >>> from shared_app.models import Client, Domain >>> client1 = Client.objects.create(schema_name="client1") >>> Domain.objects.create(domain="client1.mydomain.com", tenant=client1, is_primary=True) >>> Domain.objects.create(domain="clients.mydomain.com", folder="client1", tenant=client1) Now any request made to ``client1.mydomain.com`` or ``clients.mydomain.com/client1/`` will automatically set PostgreSQL's search path to ``client1`` and ``public``, making shared apps available too. Also, at this point, any request to ``blog.mydomain.com`` or ``help.mydomain.com`` will set search path to ``blog`` and ``public``. This means that any call to the methods ``filter``, ``get``, ``save``, ``delete`` or any other function involving a database connection will be done at the correct schema, be it static or dynamic. django-pgschemas-0.15.2/docs/commands.rst000066400000000000000000000013671463633566500203240ustar00rootroot00000000000000Management commands =================== .. _runschema-cmd: ``runschema`` ------------- .. djcommand:: django_pgschemas.management.commands.runschema .. _migrateschema-cmd: ``migrateschema`` ----------------- .. djcommand:: django_pgschemas.management.commands.migrateschema .. _migrate-cmd: ``migrate`` ----------- Wrapper around ``migrateschema`` to override Django's default ``migrate``. .. _createrefschema-cmd: ``createrefschema`` ------------------- .. djcommand:: django_pgschemas.management.commands.createrefschema .. _cloneschema-cmd: ``cloneschema`` --------------- .. djcommand:: django_pgschemas.management.commands.cloneschema .. _whowill-cmd: ``whowill`` ----------- .. djcommand:: django_pgschemas.management.commands.whowill django-pgschemas-0.15.2/docs/conf.py000066400000000000000000000126151463633566500172660ustar00rootroot00000000000000# -*- coding: utf-8 -*- # # Configuration file for the Sphinx documentation builder. # # This file does only contain a selection of the most common options. For a # full list see the documentation: # http://www.sphinx-doc.org/en/master/config # -- Path setup -------------------------------------------------------------- # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. 
# # import os # import sys # sys.path.insert(0, os.path.abspath('.')) import os import sys import django sys.path.insert(0, os.path.abspath("../dpgs_sandbox/")) os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings") django.setup() # -- Project information ----------------------------------------------------- project = "django-pgschemas" copyright = "2020, Lorenzo Peña, Tom Turner & Bernardo Pires." author = "Lorenzo Peña" # The short X.Y version version = "" # The full version, including alpha/beta/rc tags release = "" # -- General configuration --------------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. # # needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = ["sphinx.ext.autosectionlabel", "sphinx.ext.autodoc", "djcommanddoc"] # Add any paths that contain templates here, relative to this directory. templates_path = ["_templates"] # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: # # source_suffix = ['.rst', '.md'] source_suffix = ".rst" # The master toctree document. master_doc = "index" # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. language = None # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. # This pattern also affects html_static_path and html_extra_path. exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] # The name of the Pygments (syntax highlighting) style to use. pygments_style = None # -- Options for HTML output ------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # html_theme = "sphinx_rtd_theme" # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. # # html_theme_options = {} # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ["_static"] # Custom sidebar templates, must be a dictionary that maps document names # to template names. # # The default sidebars (for documents that don't match any pattern) are # defined by theme itself. Builtin themes are using these templates by # default: ``['localtoc.html', 'relations.html', 'sourcelink.html', # 'searchbox.html']``. # # html_sidebars = {} # -- Options for HTMLHelp output --------------------------------------------- # Output file base name for HTML help builder. htmlhelp_basename = "django-pgschemasdoc" # -- Options for LaTeX output ------------------------------------------------ latex_elements = { # The paper size ('letterpaper' or 'a4paper'). # # 'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). # # 'pointsize': '10pt', # Additional stuff for the LaTeX preamble. # # 'preamble': '', # Latex figure (float) alignment # # 'figure_align': 'htbp', } # Grouping the document tree into LaTeX files. 
List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ (master_doc, "django-pgschemas.tex", "django-pgschemas Documentation", "Lorenzo Peña", "manual") ] # -- Options for manual page output ------------------------------------------ # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [(master_doc, "django-pgschemas", "django-pgschemas Documentation", [author], 1)] # -- Options for Texinfo output ---------------------------------------------- # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ ( master_doc, "django-pgschemas", "django-pgschemas Documentation", author, "django-pgschemas", "One line description of project.", "Miscellaneous", ) ] # -- Options for Epub output ------------------------------------------------- # Bibliographic Dublin Core info. epub_title = project # The unique identifier of the text. This can be a ISBN number # or the project homepage. # # epub_identifier = '' # A unique identification for the text. # # epub_uid = '' # A list of files that should not be packed into the epub file. epub_exclude_files = ["search.html"] django-pgschemas-0.15.2/docs/contrib.rst000066400000000000000000000100451463633566500201540ustar00rootroot00000000000000Contributions ============= All contributions and third party integrations live inside ``django_pgschemas.contrib``. If you want to implement an integration with other Django packages, please submit a pull request containing: * The code for your integration. * The tests for your integration. * The docs for your integration in this section of the documentation. We're striving to maintain/increase our code coverage, but please, make sure your integration is properly tested. Proper tests will always beat meaningless 100% coverage. Caching ------- In order to generate tenant aware cache keys, we provide ``django_pgschemas.contrib.cache.make_key`` which can be used as ``KEY_FUNCTION``: .. code-block:: python CACHES = { "default": { # ... "KEY_FUNCTION": "django_pgschemas.contrib.cache.make_key", } } Tenant aware file system storage -------------------------------- We provide a tenant aware file system storage at ``django_pgschemas.contrib.files.TenantFileSystemStorage``. It subclasses ``django.core.files.storage.FileSystemStorage`` and behaves like it in every aspect, except that it prepends a tenant identifier to the path and URL of all files. By default, the tenant identifier is the schema name of the current tenant. In order to override this behavior, it is possible to provide a different identifier. The storage will consider these options when looking for an identifier: * A method called ``schema_pathname`` in the current tenant. This method must accept no arguments and return an identifier. * A function specified in a setting called ``PGSCHEMAS_PATHNAME_FUNCTION``. This function must accept a schema descriptor and return an identifier. * Finally, the identifier will default to the schema name of the current tenant. In the case of the URL returned from the storage, if the storage detects that the current schema has been routed via subfolder, it won't prepend the schema identifier, because it considers that the path is properly disambiguated as is. 
This means that instead of something like:: /tenant1/static/tenant1/path/to/file.txt It will generate:: /tenant1/static/path/to/file.txt This storage class is a convenient way of storing media files in a folder structure organized at the top by tenants, as well as providing a perceived tenant centric organization in the URLs that are generated. However, this storage class does NOT provide any form of security, such as controlling that from one tenant, files from another tenant are not accessible. Such security requirements have other implications that fall out of the scope of this basic utility. .. tip:: In a project that requires airtight security, you might want to use and customize `django-private-storage`_. .. _django-private-storage: https://github.com/edoburu/django-private-storage Channels (websockets) --------------------- We provide a tenant aware protocol router for using with ``channels``. You can use it as follows: .. code-block:: python # routing.py from django_pgschemas.contrib.channels2 import TenantProtocolRouter application = TenantProtocolRouter() # settings.py ASGI_APPLICATION = "routing.application" It requires that you also route the websockets requests, at least for the dynamic tenants. If you don't route websocket requests for static tenants, the dynamic route will be used: .. code-block:: python TENANTS = { # ... "default": { # ... "URLCONF": "tenant_app.urls", "WS_URLCONF": "tenant_app.ws_urls", } } You still need to name your channel groups appropriately, taking the current tenant into account if you want to keep your groups tenant-specific. You will get the current tenant in ``scope["tenant"]``. For Channels 3 to use with Django 3+, you can use ``django_pgschemas.contrib.channels3``. .. attention:: This module is NOT included in the test battery of the package. Please, create a `GitHub issue`_ for any errors you may find. .. _GitHub issue: https://github.com/lorinkoz/django-pgschemas/issues django-pgschemas-0.15.2/docs/credits.rst000066400000000000000000000004461463633566500201550ustar00rootroot00000000000000Credits ======= This project stands on the shoulders of giants. * Tom Turner for `django-tenants`_ * Bernardo Pires for `django-tenant-schemas`_ .. _django-tenants: https://github.com/tomturner/django-tenants .. _django-tenant-schemas: https://github.com/bernardopires/django-tenant-schemas django-pgschemas-0.15.2/docs/index.rst000066400000000000000000000032601463633566500176240ustar00rootroot00000000000000.. django-pgschemas documentation master file, created by sphinx-quickstart on Thu Jan 3 12:14:01 2019. django-pgschemas ================ .. image:: https://img.shields.io/badge/packaging-poetry-purple.svg :alt: Packaging: poetry :target: https://github.com/sdispater/poetry .. image:: https://img.shields.io/badge/code%20style-black-black.svg :alt: Code style: black :target: https://github.com/ambv/black .. image:: https://badges.gitter.im/Join%20Chat.svg :alt: Join the chat at https://gitter.im/django-pgschemas :target: https://gitter.im/django-pgschemas/community?utm_source=share-link&utm_medium=link&utm_campaign=share-link .. image:: https://github.com/lorinkoz/django-pgschemas/workflows/code/badge.svg :alt: Build status :target: https://github.com/lorinkoz/django-pgschemas/actions .. image:: https://readthedocs.org/projects/django-pgschemas/badge/?version=latest :alt: Documentation status :target: https://django-pgschemas.readthedocs.io/ .. 
image:: https://coveralls.io/repos/github/lorinkoz/django-pgschemas/badge.svg?branch=master
    :alt: Code coverage
    :target: https://coveralls.io/github/lorinkoz/django-pgschemas?branch=master

.. image:: https://badge.fury.io/py/django-pgschemas.svg
    :alt: PyPi version
    :target: http://badge.fury.io/py/django-pgschemas

.. image:: https://img.shields.io/pypi/dm/django-pgschemas
    :alt: Downloads

-----

.. toctree::
    :maxdepth: 2
    :caption: Contents

    overview
    basic
    advanced
    commands
    contrib
    troubleshooting
    settings
    reference
    credits

.. Indices and tables
.. ==================

.. * :ref:`genindex`
.. * :ref:`modindex`
.. * :ref:`search`
django-pgschemas-0.15.2/docs/make.bat000066400000000000000000000013601463633566500173670ustar00rootroot00000000000000@ECHO OFF

pushd %~dp0

REM Command file for Sphinx documentation

if "%SPHINXBUILD%" == "" (
	set SPHINXBUILD=sphinx-build
)
set SOURCEDIR=.
set BUILDDIR=_build

if "%1" == "" goto help

%SPHINXBUILD% >NUL 2>NUL
if errorlevel 9009 (
	echo.
	echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
	echo.installed, then set the SPHINXBUILD environment variable to point
	echo.to the full path of the 'sphinx-build' executable. Alternatively you
	echo.may add the Sphinx directory to PATH.
	echo.
	echo.If you don't have Sphinx installed, grab it from
	echo.http://sphinx-doc.org/
	exit /b 1
)

%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
goto end

:help
%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%

:end
popd
django-pgschemas-0.15.2/docs/overview.rst000066400000000000000000000122561463633566500203670ustar00rootroot00000000000000Overview
========

This app uses PostgreSQL schemas to support data multi-tenancy in a single Django project. Schemas are a layer of separation between databases and tables, so that one database can have multiple schemas, which in turn can have multiple (and possibly identical) tables. For an accurate description of schemas, see `the official documentation on PostgreSQL schemas`_.

.. _the official documentation on PostgreSQL schemas: http://www.postgresql.org/docs/9.1/static/ddl-schemas.html

PostgreSQL uses a "search path" to denote in which schemas it should look for the appropriate tables. If there are three schemas: ``client1``, ``common`` and ``public`` and the search path is set to ``["client1", "public"]``, PostgreSQL will look for tables first on schema ``client1``, and then, if not found, will look on schema ``public``. The tables on schema ``common`` would never be searched. Also, if there is a table with the same name on both ``client1`` and ``public`` schemas (e.g. ``django_migrations``), only the table in ``client1`` will be found by that search path. Table creation always takes place on the first schema in the search path.

``django-pgschemas``, as well as its predecessors ``django-tenants`` and ``django-tenant-schemas``, takes advantage of PostgreSQL schemas to emulate multi-tenancy by mapping certain URL patterns to schemas and setting the search path accordingly. It also provides an API to smartly change the search path outside the request/response cycle, in order to perform schema-specific tasks.

Multi-tenancy
-------------

There are typically three solutions for solving the multi-tenancy problem.

1. Isolated approach: Separate databases. Each tenant has its own database.

2. Semi-isolated approach: Shared database, separate schemas. One database for all tenants, but one schema per tenant.

3. Shared approach: Shared database, shared schema. All tenants share the same database and schema.
   There is one main tenant table, to which all other tables are related via foreign keys.

Each solution has its upsides and downsides; for a more in-depth discussion, see Microsoft's excellent article on `Multi-Tenant Data Architecture`_.

.. _Multi-Tenant Data Architecture: https://docs.microsoft.com/en-us/azure/sql-database/saas-tenancy-app-design-patterns

This application implements the second approach, which, in our opinion, represents a good compromise between simplicity and performance.

.. tip:: If you are looking for an implementation of the third approach, you might be interested in `django-multitenant`_. For other solutions to the multi-tenancy problem, you could also look `here`_.

.. _django-multitenant: https://github.com/citusdata/django-multitenant
.. _here: https://djangopackages.org/grids/g/multi-tenancy/

The semi-isolated approach through PostgreSQL schemas has some advantages and disadvantages:

* Simplicity: barely make any changes to your current code to support multi-tenancy. Plus, you only manage one database.
* Performance: make use of shared connections, buffers and memory.

vs.

* Scalability: for a large number of tenants (thousands), the schema approach might not be feasible, and as of now, there is no clear way for implementing tenant sharding.

Schemas vs. Tenants
-------------------

The terms *schema* and *tenant* are used interchangeably all over the documentation. However, it is important to note some subtle differences between the two. We consider a *tenant* to be a subset of data that can be accessed with a URL (routed), and we use database *schemas* for that purpose. Still, there can be schemas that shouldn't be considered tenants according to our definition. One good example is the ``public`` schema, which most typically contains data shared across all tenants. Then, every tenant is a schema, but not every schema is a tenant.

Static vs. Dynamic
------------------

In a typical software-as-a-service (SaaS), there are a number of static sites that are related to enterprise level operations. Using ``mydomain.com`` as an example, one could think of these enterprise level sites::

    mydomain.com
    www.mydomain.com
    blog.mydomain.com
    help-center.mydomain.com

Likewise, there are going to be multiple sites for tenant-specific operations. Those sites are dynamic in nature, as they cannot be determined at the time of implementation -- and hopefully, there will be thousands of them ;)

The dynamic sites could follow a subdomain routing approach like::

    customer1.mydomain.com
    customer2.mydomain.com

A subfolder routing approach like::

    customers.mydomain.com/customer1
    customers.mydomain.com/customer2

Or a mixed approach, where even a-la-carte domains are used (say, for VIP clients) like::

    customer1.mydomain.com
    customers.mydomain.com/customer1
    customers.mydomain.com/customer2
    www.thevipcustomer.com

This app allows you to manage both static and dynamic tenants, and the three kinds of routing.

.. attention:: For static tenants, only the subdomain routing is available.

In order to manage dynamic tenants, we provide two model mixins you must inherit in your models: ``TenantMixin`` and ``DomainMixin``. The former controls the tenants and the latter controls the domain/folder combinations that will route each tenant.
django-pgschemas-0.15.2/docs/reference.rst000066400000000000000000000017671463633566500204630ustar00rootroot00000000000000Reference
=========

Models
------

``TenantMixin``
+++++++++++++++

.. autoclass:: django_pgschemas.models.TenantMixin
    :members: auto_create_schema, auto_drop_schema, create_schema, drop_schema

``DomainMixin``
+++++++++++++++

.. autoclass:: django_pgschemas.models.DomainMixin
    :members: absolute_url

Utils
-----

.. automodule:: django_pgschemas.utils
    :members: get_tenant_model, get_domain_model, is_valid_identifier, is_valid_schema_name,
        check_schema_name, remove_www, run_in_public_schema, schema_exists, dynamic_models_exist,
        create_schema, drop_schema, clone_schema, create_or_clone_schema

Signals
-------

.. autodata:: django_pgschemas.signals.schema_activate
.. autodata:: django_pgschemas.signals.dynamic_tenant_needs_sync
.. autodata:: django_pgschemas.signals.dynamic_tenant_post_sync
.. autodata:: django_pgschemas.signals.dynamic_tenant_pre_drop

URL resolvers
-------------

.. automodule:: django_pgschemas.urlresolvers
    :members: get_urlconf_from_schema
django-pgschemas-0.15.2/docs/requirements.txt000066400000000000000000000000361463633566500212450ustar00rootroot00000000000000sphinx-django-command
psycopg
django-pgschemas-0.15.2/docs/settings.rst000066400000000000000000000057361463633566500203660ustar00rootroot00000000000000Settings
========

``TENANTS``
-----------

Default: ``None``

The tenant configuration dictionary as explained in :ref:`Basic configuration`. A sample tenant configuration is:

.. code-block:: python

    TENANTS = {
        "public": {
            "APPS": [
                "django.contrib.contenttypes",
                "django.contrib.staticfiles",
                # ...
                "django_pgschemas",
                "shared_app",
                # ...
            ],
        },
        "www": {
            "APPS": [
                "django.contrib.auth",
                "django.contrib.sessions",
                # ...
                "main_app",
            ],
            "DOMAINS": ["mydomain.com"],
            "URLCONF": "main_app.urls",
        },
        "blog": {
            "APPS": [
                "django.contrib.auth",
                "django.contrib.sessions",
                # ...
                "blog_app",
            ],
            "DOMAINS": ["blog.mydomain.com", "help.mydomain.com"],
            "URLCONF": "blog_app.urls",
        },
        "default": {
            "TENANT_MODEL": "shared_app.Client",
            "DOMAIN_MODEL": "shared_app.Domain",
            "APPS": [
                "django.contrib.auth",
                "django.contrib.sessions",
                # ...
                "tenant_app",
                # ...
            ],
            "URLCONF": "tenant_app.urls",
            "CLONE_REFERENCE": "sample",
        }
    }

``PGSCHEMAS_EXTRA_SEARCH_PATHS``
--------------------------------

Default: ``[]``

Other schemas to include in the PostgreSQL search path. You cannot include the schema for any static or dynamic tenant. The public schema is included by default, so including it here will raise ``ImproperlyConfigured``.

``PGSCHEMAS_LIMIT_SET_CALLS``
-----------------------------

Default: ``False``

By default, the search path is set every time a database cursor is required. In some intense situations, this could slow down the queries. Set to ``True`` to limit the number of calls for setting the search path.

``PGSCHEMAS_ORIGINAL_BACKEND``
------------------------------

Default: ``"django.db.backends.postgresql"``

The base backend to inherit from. If you have a customized backend of PostgreSQL, you can specify it here.

``PGSCHEMAS_PARALLEL_MAX_PROCESSES``
------------------------------------

Default: ``None``

When ``--parallel`` is passed in any tenant command, this setting will control the max number of processes the parallel executor can spawn. By default, ``None`` means that the number of CPUs will be used.

``PGSCHEMAS_TENANT_DB_ALIAS``
-----------------------------

Default: ``"default"``

The database alias where the tenant configuration is going to take place.

``PGSCHEMAS_PATHNAME_FUNCTION``
-------------------------------

Default: ``None``

Function that takes a schema descriptor and returns a string identifier for the schema.
django-pgschemas-0.15.2/docs/requirements.txt000066400000000000000000000000361463633566500212450ustar00rootroot00000000000000sphinx-django-command
psycopg
django-pgschemas-0.15.2/docs/settings.rst000066400000000000000000000057361463633566500203660ustar00rootroot00000000000000Settings
========

``TENANTS``
-----------

Default: ``None``

The tenant configuration dictionary as explained in :ref:`Basic configuration`. A sample tenant configuration is:

.. code-block:: python

    TENANTS = {
        "public": {
            "APPS": [
                "django.contrib.contenttypes",
                "django.contrib.staticfiles",
                # ...
                "django_pgschemas",
                "shared_app",
                # ...
            ],
        },
        "www": {
            "APPS": [
                "django.contrib.auth",
                "django.contrib.sessions",
                # ...
                "main_app",
            ],
            "DOMAINS": ["mydomain.com"],
            "URLCONF": "main_app.urls",
        },
        "blog": {
            "APPS": [
                "django.contrib.auth",
                "django.contrib.sessions",
                # ...
                "blog_app",
            ],
            "DOMAINS": ["blog.mydomain.com", "help.mydomain.com"],
            "URLCONF": "blog_app.urls",
        },
        "default": {
            "TENANT_MODEL": "shared_app.Client",
            "DOMAIN_MODEL": "shared_app.Domain",
            "APPS": [
                "django.contrib.auth",
                "django.contrib.sessions",
                # ...
                "tenant_app",
                # ...
            ],
            "URLCONF": "tenant_app.urls",
            "CLONE_REFERENCE": "sample",
        }
    }

``PGSCHEMAS_EXTRA_SEARCH_PATHS``
--------------------------------

Default: ``[]``

Other schemas to include in the PostgreSQL search path. You cannot include the schema of any static or dynamic tenant. The public schema is included by default, so including it here will raise ``ImproperlyConfigured``.

``PGSCHEMAS_LIMIT_SET_CALLS``
-----------------------------

Default: ``False``

By default, the search path is set every time a database cursor is required. In some intense situations, this could slow down the queries. Set to ``True`` to limit the number of calls for setting the search path.

``PGSCHEMAS_ORIGINAL_BACKEND``
------------------------------

Default: ``"django.db.backends.postgresql"``

The base backend to inherit from. If you have a customized PostgreSQL backend, you can specify it here.

``PGSCHEMAS_PARALLEL_MAX_PROCESSES``
------------------------------------

Default: ``None``

When ``--executor parallel`` is passed to any tenant command, this setting controls the maximum number of processes the parallel executor can spawn. The default of ``None`` means that the number of CPUs will be used.

``PGSCHEMAS_TENANT_DB_ALIAS``
-----------------------------

Default: ``"default"``

The database alias where the tenant configuration is going to take place.

``PGSCHEMAS_PATHNAME_FUNCTION``
-------------------------------

Default: ``None``

Function that takes a schema descriptor and returns a string identifier for the schema. This identifier will be used in the ``TenantFileSystemStorage`` as the name of the tenant folder. A minimal sketch is shown below.
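A minimal sketch of such a function, assuming that subfolder-routed tenants should be identified by their folder and every other schema by its schema name (the function name is illustrative):

.. code-block:: python

    # settings.py
    def tenant_pathname(schema):
        # Prefer the routing folder when present, fall back to the schema name
        return schema.folder or schema.schema_name


    PGSCHEMAS_PATHNAME_FUNCTION = tenant_pathname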
django-pgschemas-0.15.2/docs/troubleshooting.rst000066400000000000000000000067301463633566500217510ustar00rootroot00000000000000Troubleshooting
===============

Schema for tenant and domain models
-----------------------------------

The application(s) that contain the tenant model and the domain model should live in the public schema only. Making those models available in other schemas will most likely cause serious bugs. This package will raise an error check if the tenant/domain application is missing from ``settings.TENANTS["public"]["APPS"]`` or present in the configuration of any other tenant. You can silence this check through the code ``pgschemas.W001``.

Content types
-------------

Installing ``django.contrib.contenttypes`` outside of the public schema can lead to problems when using other static or dynamic schemas. The recommended approach is to have this app in ``settings.TENANTS["public"]["APPS"]``. This package will raise a warning check if the content types app is found somewhere else. You can silence this check through the code ``pgschemas.W002``.

Session leaking
---------------

Configuring users in a multi-tenant application can be challenging, because the user model(s) can be installed on any schema. Depending on the scope of your desired authentication mechanism, you should decide whether the user app will live in the public schema or in each of the other static or dynamic schemas. If you do the latter, consider that the same user ID could be repeated in multiple schemas. The user ID is what makes authentication possible via the sessions app.

In order to prevent session leaking, the recommended approach is to always put the user app and the session app together. This package will raise a warning check if the user app and the session app are found to not be together in the same schemas. You can silence this check through the code ``pgschemas.W003``.

Moving apps between schemas
---------------------------

Regardless of which apps you have included in each schema, migrations will be tracked as being run on all of them. If you move an app between schemas, the tables will not be created in the destination schema, because migrations are considered to have been run there already. In order to overcome this, you must remove all migrations of said app via ``manage.py migrate app zero --fake -s schema`` and then run migrations again. In order to remove the tables from the source schema, you will have to actually migrate to zero (without faking) before removing the app from that schema's list of apps. A scripted version of this procedure is shown below.
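Assuming an app ``blog_app`` is being moved into a schema named ``tenant1`` (both names are illustrative), the procedure could be scripted as follows, using the same arguments the management command accepts on the command line:

.. code-block:: python

    from django.core import management

    # Mark the app's migrations as unapplied on the target schema,
    # without touching any tables
    management.call_command("migrate", "blog_app", "zero", fake=True, schemas=["tenant1"])

    # Run the migrations again, so that the tables are actually created
    management.call_command("migrate", "blog_app", schemas=["tenant1"])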
django-pgschemas-0.15.2/dpgs_sandbox/app_main/migrations/__init__.py000066400000000000000000000000001463633566500255440ustar00rootroot00000000000000django-pgschemas-0.15.2/dpgs_sandbox/app_main/models.py000066400000000000000000000001061463633566500231230ustar00rootroot00000000000000from django.db import models class MainData(models.Model): pass django-pgschemas-0.15.2/dpgs_sandbox/app_main/urls.py000066400000000000000000000004271463633566500226330ustar00rootroot00000000000000from django.http import HttpResponse from django.urls import path from dpgs_sandbox.views import generic urlpatterns = [ path("", generic, name="main-home"), path("register/", generic, name="register"), path("ping/", lambda request: HttpResponse(), name="ping"), ] django-pgschemas-0.15.2/dpgs_sandbox/app_main/ws_urls.py000066400000000000000000000002421463633566500233370ustar00rootroot00000000000000from channels.generic.websocket import JsonWebsocketConsumer from django.urls import path urlpatterns = [ path("", JsonWebsocketConsumer, name="main-ws"), ] django-pgschemas-0.15.2/dpgs_sandbox/app_tenants/000077500000000000000000000000001463633566500220215ustar00rootroot00000000000000django-pgschemas-0.15.2/dpgs_sandbox/app_tenants/__init__.py000066400000000000000000000000001463633566500241200ustar00rootroot00000000000000django-pgschemas-0.15.2/dpgs_sandbox/app_tenants/migrations/000077500000000000000000000000001463633566500241755ustar00rootroot00000000000000django-pgschemas-0.15.2/dpgs_sandbox/app_tenants/migrations/0001_initial.py000066400000000000000000000024471463633566500266470ustar00rootroot00000000000000# Generated by Django 2.1.4 on 2019-01-13 03:07 import django.db.models.deletion from django.conf import settings from django.db import migrations, models class Migration(migrations.Migration): initial = True dependencies = [ ("shared_public", "0001_initial"), migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel( name="TenantData", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID" ), ), ( "catalog", models.ForeignKey( on_delete=django.db.models.deletion.CASCADE, related_name="tenant_objects", to="shared_public.Catalog", ), ), ( "user", models.ForeignKey( on_delete=django.db.models.deletion.CASCADE, related_name="tenant_objects", to=settings.AUTH_USER_MODEL, ), ), ], ) ] django-pgschemas-0.15.2/dpgs_sandbox/app_tenants/migrations/0002_tenantdata_active.py000066400000000000000000000005761463633566500306760ustar00rootroot00000000000000# Generated by Django 3.0.5 on 2021-02-22 21:07 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ("app_tenants", "0001_initial"), ] operations = [ migrations.AddField( model_name="tenantdata", name="active", field=models.BooleanField(default=True), ), ] django-pgschemas-0.15.2/dpgs_sandbox/app_tenants/migrations/__init__.py000066400000000000000000000000001463633566500262740ustar00rootroot00000000000000django-pgschemas-0.15.2/dpgs_sandbox/app_tenants/models.py000066400000000000000000000006241463633566500236600ustar00rootroot00000000000000from django.contrib.auth import get_user_model from django.db import models class TenantData(models.Model): catalog = models.ForeignKey( "shared_public.Catalog", on_delete=models.CASCADE, related_name="tenant_objects" ) user = models.ForeignKey( get_user_model(), on_delete=models.CASCADE, related_name="tenant_objects" ) active = models.BooleanField(default=True) 
django-pgschemas-0.15.2/dpgs_sandbox/app_tenants/urls.py000066400000000000000000000005501463633566500233600ustar00rootroot00000000000000from django.contrib.auth.decorators import login_required from django.urls import path from dpgs_sandbox.views import generic urlpatterns = [ path("", generic, name="tenant-home"), path("profile/", generic, name="profile"), path("profile/advanced/", login_required(generic), name="advanced-profile"), path("login/", generic, name="login"), ] django-pgschemas-0.15.2/dpgs_sandbox/app_tenants/ws_urls.py000066400000000000000000000000211463633566500240620ustar00rootroot00000000000000urlpatterns = [] django-pgschemas-0.15.2/dpgs_sandbox/asgi.py000066400000000000000000000002551463633566500210040ustar00rootroot00000000000000import os from django.core.asgi import get_asgi_application os.environ.setdefault("DJANGO_SETTINGS_MODULE", "dpgs_sandbox.settings") application = get_asgi_application() django-pgschemas-0.15.2/dpgs_sandbox/manage.py000077500000000000000000000010101463633566500213020ustar00rootroot00000000000000#!/usr/bin/env python import os import sys if __name__ == "__main__": os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings.full") try: from django.core.management import execute_from_command_line except ImportError: raise ImportError( "Couldn't import Django. Are you sure it's installed and " "available on your PYTHONPATH environment variable? Did you " "forget to activate a virtual environment?" ) execute_from_command_line(sys.argv) django-pgschemas-0.15.2/dpgs_sandbox/settings/000077500000000000000000000000001463633566500213455ustar00rootroot00000000000000django-pgschemas-0.15.2/dpgs_sandbox/settings/__init__.py000066400000000000000000000000001463633566500234440ustar00rootroot00000000000000django-pgschemas-0.15.2/dpgs_sandbox/settings/base.py000066400000000000000000000063251463633566500226370ustar00rootroot00000000000000""" Django settings for dpgs_sandbox project. Generated by 'django-admin startproject' using Django 2.1.4. For more information on this file, see https://docs.djangoproject.com/en/2.1/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/2.1/ref/settings/ """ import os # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/2.1/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = "asd#$#ae)^gegm6m9omvic^ct@*@bkf!0afe*+4h$5-zmf^h&$u4(1vr" # SECURITY WARNING: don't run with debug turned on in production! 
DEBUG = True ALLOWED_HOSTS = [".localhost"] AUTH_USER_MODEL = "shared_common.User" LOGIN_URL = "login" MIDDLEWARE = [ "django_pgschemas.middleware.TenantMiddleware", "django.middleware.security.SecurityMiddleware", "django.contrib.sessions.middleware.SessionMiddleware", "django.middleware.common.CommonMiddleware", "django.middleware.csrf.CsrfViewMiddleware", "django.contrib.auth.middleware.AuthenticationMiddleware", "django.contrib.messages.middleware.MessageMiddleware", "django.middleware.clickjacking.XFrameOptionsMiddleware", ] TEMPLATES = [ { "BACKEND": "django.template.backends.django.DjangoTemplates", "DIRS": [], "APP_DIRS": True, "OPTIONS": { "context_processors": [ "django.template.context_processors.debug", "django.template.context_processors.request", "django.contrib.auth.context_processors.auth", "django.contrib.messages.context_processors.messages", ] }, } ] # Database # https://docs.djangoproject.com/en/2.1/ref/settings/#databases DATABASES = { "default": { "ENGINE": "django_pgschemas.postgresql_backend", "NAME": "dpgs_sandbox", "USER": "postgres", "PASSWORD": os.environ.get("DATABASE_PASSWORD", "postgres"), "HOST": os.environ.get("DATABASE_HOST", "localhost"), "PORT": "", } } DATABASE_ROUTERS = ("django_pgschemas.routers.SyncRouter",) CACHES = { "default": { "BACKEND": "django.core.cache.backends.locmem.LocMemCache", "KEY_FUNCTION": "django_pgschemas.contrib.cache.make_key", } } # Password validation # https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ {"NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator"}, {"NAME": "django.contrib.auth.password_validation.MinimumLengthValidator"}, {"NAME": "django.contrib.auth.password_validation.CommonPasswordValidator"}, {"NAME": "django.contrib.auth.password_validation.NumericPasswordValidator"}, ] # Internationalization # https://docs.djangoproject.com/en/2.1/topics/i18n/ LANGUAGE_CODE = "en-us" TIME_ZONE = "UTC" USE_I18N = True USE_TZ = True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/2.1/howto/static-files/ STATIC_URL = "/static/" DEFAULT_AUTO_FIELD = "django.db.models.AutoField" django-pgschemas-0.15.2/dpgs_sandbox/settings/full.py000066400000000000000000000023061463633566500226620ustar00rootroot00000000000000from .base import * # noqa: F403 TENANTS = { "public": { "APPS": [ "shared_public", "django.contrib.auth", "django.contrib.contenttypes", "django.contrib.staticfiles", ], }, "www": { "APPS": ["shared_common", "app_main", "django.contrib.sessions"], "URLCONF": "app_main.urls", "WS_URLCONF": "app_main.ws_urls", "DOMAINS": ["localhost"], "FALLBACK_DOMAINS": ["everyone.localhost"], }, "blog": { "APPS": ["shared_common", "app_blog", "django.contrib.sessions"], "URLCONF": "app_blog.urls", "DOMAINS": ["blog.localhost"], }, "default": { "TENANT_MODEL": "shared_public.Tenant", "DOMAIN_MODEL": "shared_public.Domain", "APPS": ["shared_common", "app_tenants", "django.contrib.sessions"], "URLCONF": "app_tenants.urls", "WS_URLCONF": "app_tenants.ws_urls", "CLONE_REFERENCE": "sample", }, } # Application definition INSTALLED_APPS = ["django_pgschemas"] for schema in TENANTS: INSTALLED_APPS += [app for app in TENANTS[schema]["APPS"] if app not in INSTALLED_APPS] ROOT_URLCONF = TENANTS["default"]["URLCONF"] django-pgschemas-0.15.2/dpgs_sandbox/settings/static_only.py000066400000000000000000000016041463633566500242500ustar00rootroot00000000000000from .base import * # noqa: F403 TENANTS = { "public": { "APPS": [ "shared_public", 
"django.contrib.auth", "django.contrib.contenttypes", "django.contrib.staticfiles", ], }, "www": { "APPS": ["shared_common", "app_main", "django.contrib.sessions"], "URLCONF": "app_main.urls", "WS_URLCONF": "app_main.ws_urls", "DOMAINS": ["localhost"], "FALLBACK_DOMAINS": ["everyone.localhost"], }, "blog": { "APPS": ["shared_common", "app_blog", "django.contrib.sessions"], "URLCONF": "app_blog.urls", "DOMAINS": ["blog.localhost"], }, } # Application definition INSTALLED_APPS = ["django_pgschemas"] for schema in TENANTS: INSTALLED_APPS += [app for app in TENANTS[schema]["APPS"] if app not in INSTALLED_APPS] ROOT_URLCONF = TENANTS["www"]["URLCONF"] django-pgschemas-0.15.2/dpgs_sandbox/shared_common/000077500000000000000000000000001463633566500223235ustar00rootroot00000000000000django-pgschemas-0.15.2/dpgs_sandbox/shared_common/__init__.py000066400000000000000000000000001463633566500244220ustar00rootroot00000000000000django-pgschemas-0.15.2/dpgs_sandbox/shared_common/management/000077500000000000000000000000001463633566500244375ustar00rootroot00000000000000django-pgschemas-0.15.2/dpgs_sandbox/shared_common/management/__init__.py000066400000000000000000000000001463633566500265360ustar00rootroot00000000000000django-pgschemas-0.15.2/dpgs_sandbox/shared_common/management/commands/000077500000000000000000000000001463633566500262405ustar00rootroot00000000000000django-pgschemas-0.15.2/dpgs_sandbox/shared_common/management/commands/__init__.py000066400000000000000000000000001463633566500303370ustar00rootroot00000000000000django-pgschemas-0.15.2/dpgs_sandbox/shared_common/management/commands/reverse_url.py000066400000000000000000000015531463633566500311530ustar00rootroot00000000000000from django.conf import settings from django.urls import reverse from django_pgschemas.management.commands import TenantCommand from django_pgschemas.urlresolvers import get_urlconf_from_schema class Command(TenantCommand): def add_arguments(self, parser): super().add_arguments(parser) parser.add_argument( dest="url_name", help="Url name to resolve in the specified schema", ) def handle_tenant(self, tenant, *args, **options): if tenant.is_dynamic: primary_domain = tenant.get_primary_domain() tenant.domain_url = primary_domain.domain tenant.folder = primary_domain.folder else: tenant.domain_url = settings.TENANTS[tenant.schema_name]["DOMAINS"][0] self.stdout.write(reverse(options["url_name"], urlconf=get_urlconf_from_schema(tenant))) django-pgschemas-0.15.2/dpgs_sandbox/shared_common/migrations/000077500000000000000000000000001463633566500244775ustar00rootroot00000000000000django-pgschemas-0.15.2/dpgs_sandbox/shared_common/migrations/0001_initial.py000066400000000000000000000017351463633566500271500ustar00rootroot00000000000000# Generated by Django 2.1.4 on 2019-01-13 03:07 from django.db import migrations, models class Migration(migrations.Migration): initial = True dependencies = [] operations = [ migrations.CreateModel( name="User", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID" ), ), ("password", models.CharField(max_length=128, verbose_name="password")), ( "last_login", models.DateTimeField(blank=True, null=True, verbose_name="last login"), ), ("email", models.EmailField(max_length=254, unique=True)), ("display_name", models.CharField(max_length=50)), ], options={ "abstract": False, }, ), ] 
django-pgschemas-0.15.2/dpgs_sandbox/shared_common/migrations/__init__.py000066400000000000000000000000001463633566500265760ustar00rootroot00000000000000django-pgschemas-0.15.2/dpgs_sandbox/shared_common/models.py000066400000000000000000000005661463633566500241670ustar00rootroot00000000000000from django.contrib.auth.models import AbstractBaseUser, BaseUserManager from django.db import models class UserManager(BaseUserManager): pass class User(AbstractBaseUser): email = models.EmailField(unique=True) display_name = models.CharField(max_length=50) USERNAME_FIELD = "email" REQUIRED_FIELDS = ("display_name",) objects = UserManager() django-pgschemas-0.15.2/dpgs_sandbox/shared_public/000077500000000000000000000000001463633566500223115ustar00rootroot00000000000000django-pgschemas-0.15.2/dpgs_sandbox/shared_public/__init__.py000066400000000000000000000000001463633566500244100ustar00rootroot00000000000000django-pgschemas-0.15.2/dpgs_sandbox/shared_public/migrations/000077500000000000000000000000001463633566500244655ustar00rootroot00000000000000django-pgschemas-0.15.2/dpgs_sandbox/shared_public/migrations/0001_initial.py000066400000000000000000000044271463633566500271370ustar00rootroot00000000000000# Generated by Django 2.1.4 on 2019-01-13 03:07 import django.db.models.deletion from django.db import migrations, models import django_pgschemas.schema import django_pgschemas.utils class Migration(migrations.Migration): initial = True dependencies = [] operations = [ migrations.CreateModel( name="Domain", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID" ), ), ("domain", models.CharField(db_index=True, max_length=253)), ("folder", models.SlugField(blank=True, max_length=253)), ("is_primary", models.BooleanField(default=True)), ], options={"abstract": False}, ), migrations.CreateModel( name="Catalog", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID" ), ) ], ), migrations.CreateModel( name="Tenant", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID" ), ), ( "schema_name", models.CharField( max_length=63, unique=True, validators=[django_pgschemas.utils.check_schema_name], ), ), ], options={"abstract": False}, bases=(django_pgschemas.schema.SchemaDescriptor, models.Model), ), migrations.AddField( model_name="domain", name="tenant", field=models.ForeignKey( on_delete=django.db.models.deletion.CASCADE, related_name="domains", to="shared_public.Tenant", ), ), migrations.AlterUniqueTogether(name="domain", unique_together={("domain", "folder")}), ] django-pgschemas-0.15.2/dpgs_sandbox/shared_public/migrations/0002_domain_redirect_to_primary.py000066400000000000000000000006121463633566500330740ustar00rootroot00000000000000# Generated by Django 3.2.4 on 2021-10-27 19:53 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ("shared_public", "0001_initial"), ] operations = [ migrations.AddField( model_name="domain", name="redirect_to_primary", field=models.BooleanField(default=False), ), ] django-pgschemas-0.15.2/dpgs_sandbox/shared_public/migrations/__init__.py000066400000000000000000000000001463633566500265640ustar00rootroot00000000000000django-pgschemas-0.15.2/dpgs_sandbox/shared_public/models.py000066400000000000000000000003171463633566500241470ustar00rootroot00000000000000from django.db import models from django_pgschemas.models import DomainMixin, TenantMixin class Tenant(TenantMixin): pass class 
Domain(DomainMixin): pass class Catalog(models.Model): pass django-pgschemas-0.15.2/dpgs_sandbox/tests/000077500000000000000000000000001463633566500206475ustar00rootroot00000000000000django-pgschemas-0.15.2/dpgs_sandbox/tests/__init__.py000066400000000000000000000000001463633566500227460ustar00rootroot00000000000000django-pgschemas-0.15.2/dpgs_sandbox/tests/test_apps.py000066400000000000000000000225551463633566500232340ustar00rootroot00000000000000from django.apps import apps from django.conf import settings from django.core.exceptions import ImproperlyConfigured from django.test import TestCase, override_settings BASE_DEFAULT = { "TENANT_MODEL": "shared_public.Tenant", "DOMAIN_MODEL": "shared_public.Domain", "URLCONF": "", } class AppConfigTestCase(TestCase): """ Tests TENANTS settings is properly defined. """ def setUp(self): self.app_config = apps.get_app_config("django_pgschemas") @override_settings() def test_missing_tenants(self): del settings.TENANTS with self.assertRaises(ImproperlyConfigured) as ctx: self.app_config._check_tenant_dict() self.assertEqual(str(ctx.exception), "TENANTS dict setting not set.") @override_settings(TENANTS=list) def test_wrong_type_tenants(self): with self.assertRaises(ImproperlyConfigured) as ctx: self.app_config._check_tenant_dict() self.assertEqual(str(ctx.exception), "TENANTS dict setting not set.") @override_settings(TENANTS={}) def test_no_public(self): with self.assertRaises(ImproperlyConfigured) as ctx: self.app_config._check_public_schema() self.assertEqual(str(ctx.exception), "TENANTS must contain a 'public' dict.") @override_settings(TENANTS={"public": None}) def test_wrong_type_public(self): with self.assertRaises(ImproperlyConfigured) as ctx: self.app_config._check_public_schema() self.assertEqual(str(ctx.exception), "TENANTS must contain a 'public' dict.") @override_settings(TENANTS={"public": 4}) def test_other_type_public(self): with self.assertRaises(ImproperlyConfigured) as ctx: self.app_config._check_public_schema() self.assertEqual(str(ctx.exception), "TENANTS must contain a 'public' dict.") @override_settings(TENANTS={"public": {"URLCONF": ""}}) def test_urlconf_on_public(self): with self.assertRaises(ImproperlyConfigured) as ctx: self.app_config._check_public_schema() self.assertEqual(str(ctx.exception), "TENANTS['public'] cannot contain a 'URLCONF' key.") @override_settings(TENANTS={"public": {"WS_URLCONF": ""}}) def test_wsurlconf_on_public(self): with self.assertRaises(ImproperlyConfigured) as ctx: self.app_config._check_public_schema() self.assertEqual(str(ctx.exception), "TENANTS['public'] cannot contain a 'WS_URLCONF' key.") @override_settings(TENANTS={"public": {"DOMAINS": ""}}) def test_domains_on_public(self): with self.assertRaises(ImproperlyConfigured) as ctx: self.app_config._check_public_schema() self.assertEqual(str(ctx.exception), "TENANTS['public'] cannot contain a 'DOMAINS' key.") @override_settings(TENANTS={"public": {"FALLBACK_DOMAINS": ""}}) def test_fallback_domains_on_public(self): with self.assertRaises(ImproperlyConfigured) as ctx: self.app_config._check_public_schema() self.assertEqual( str(ctx.exception), "TENANTS['public'] cannot contain a 'FALLBACK_DOMAINS' key." 
) @override_settings(TENANTS={"default": None}) def test_wrong_type_default(self): with self.assertRaises(ImproperlyConfigured) as ctx: self.app_config._check_default_schemas() self.assertEqual(str(ctx.exception), "TENANTS must contain a 'default' dict.") @override_settings(TENANTS={"default": "wawa"}) def test_other_type_default(self): with self.assertRaises(ImproperlyConfigured) as ctx: self.app_config._check_default_schemas() self.assertEqual(str(ctx.exception), "TENANTS must contain a 'default' dict.") @override_settings(TENANTS={"default": {"DOMAIN_MODEL": ""}}) def test_no_tenant_model_default(self): with self.assertRaises(ImproperlyConfigured) as ctx: self.app_config._check_default_schemas() self.assertEqual( str(ctx.exception), "TENANTS['default'] must contain a 'TENANT_MODEL' key." ) @override_settings(TENANTS={"default": {"TENANT_MODEL": ""}}) def test_no_domain_model_default(self): with self.assertRaises(ImproperlyConfigured) as ctx: self.app_config._check_default_schemas() self.assertEqual( str(ctx.exception), "TENANTS['default'] must contain a 'DOMAIN_MODEL' key." ) @override_settings(TENANTS={"default": {"TENANT_MODEL": None, "DOMAIN_MODEL": None}}) def test_no_urlconf_default(self): with self.assertRaises(ImproperlyConfigured) as ctx: self.app_config._check_default_schemas() self.assertEqual(str(ctx.exception), "TENANTS['default'] must contain a 'URLCONF' key.") @override_settings(TENANTS={"default": {**BASE_DEFAULT, "DOMAINS": ""}}) def test_domains_on_default(self): with self.assertRaises(ImproperlyConfigured) as ctx: self.app_config._check_default_schemas() self.assertEqual(str(ctx.exception), "TENANTS['default'] cannot contain a 'DOMAINS' key.") @override_settings(TENANTS={"default": {**BASE_DEFAULT, "FALLBACK_DOMAINS": ""}}) def test_fallback_domains_on_default(self): with self.assertRaises(ImproperlyConfigured) as ctx: self.app_config._check_default_schemas() self.assertEqual( str(ctx.exception), "TENANTS['default'] cannot contain a 'FALLBACK_DOMAINS' key." 
) def test_repeated_clone_reference(self): with override_settings( TENANTS={"public": {}, "default": {**BASE_DEFAULT, "CLONE_REFERENCE": "public"}} ): with self.assertRaises(ImproperlyConfigured) as ctx: self.app_config._check_default_schemas() self.assertEqual( str(ctx.exception), "TENANTS['default']['CLONE_REFERENCE'] must be a unique schema name.", ) with override_settings(TENANTS={"default": {**BASE_DEFAULT, "CLONE_REFERENCE": "default"}}): with self.assertRaises(ImproperlyConfigured) as ctx: self.app_config._check_default_schemas() self.assertEqual( str(ctx.exception), "TENANTS['default']['CLONE_REFERENCE'] must be a unique schema name.", ) def test_valid_schema_name(self): with override_settings(TENANTS={"pg_whatever": {}}): with self.assertRaises(ImproperlyConfigured) as ctx: self.app_config._check_overall_schemas() self.assertEqual(str(ctx.exception), "'pg_whatever' is not a valid schema name.") with override_settings(TENANTS={"&$&*": {}}): with self.assertRaises(ImproperlyConfigured) as ctx: self.app_config._check_overall_schemas() self.assertEqual(str(ctx.exception), "'&$&*' is not a valid schema name.") @override_settings(TENANTS={"www": {}}) def test_domains_on_others(self): with self.assertRaises(ImproperlyConfigured) as ctx: self.app_config._check_overall_schemas() self.assertEqual(str(ctx.exception), "TENANTS['www'] must contain a 'DOMAINS' list.") @override_settings(DATABASE_ROUTERS=()) def test_database_routers(self): with self.assertRaises(ImproperlyConfigured) as ctx: self.app_config._check_complementary_settings() self.assertEqual( str(ctx.exception), "DATABASE_ROUTERS setting must contain 'django_pgschemas.routers.SyncRouter'.", ) def test_extra_search_paths(self): with override_settings( TENANTS={"public": {}, "default": BASE_DEFAULT, "www": {}}, PGSCHEMAS_EXTRA_SEARCH_PATHS=["public"], ): with self.assertRaises(ImproperlyConfigured) as ctx: self.app_config._check_extra_search_paths() self.assertEqual( str(ctx.exception), "Do not include 'public' on PGSCHEMAS_EXTRA_SEARCH_PATHS." ) with override_settings( TENANTS={"public": {}, "default": BASE_DEFAULT, "www": {}}, PGSCHEMAS_EXTRA_SEARCH_PATHS=["default"], ): with self.assertRaises(ImproperlyConfigured) as ctx: self.app_config._check_extra_search_paths() self.assertEqual( str(ctx.exception), "Do not include 'default' on PGSCHEMAS_EXTRA_SEARCH_PATHS." ) with override_settings( TENANTS={"public": {}, "default": BASE_DEFAULT, "www": {}}, PGSCHEMAS_EXTRA_SEARCH_PATHS=["www"], ): with self.assertRaises(ImproperlyConfigured) as ctx: self.app_config._check_extra_search_paths() self.assertEqual( str(ctx.exception), "Do not include 'www' on PGSCHEMAS_EXTRA_SEARCH_PATHS." ) with override_settings( TENANTS={ "public": {}, "default": {**BASE_DEFAULT, "CLONE_REFERENCE": "sample"}, "www": {}, }, PGSCHEMAS_EXTRA_SEARCH_PATHS=["sample"], ): with self.assertRaises(ImproperlyConfigured) as ctx: self.app_config._check_extra_search_paths() self.assertEqual( str(ctx.exception), "Do not include 'sample' on PGSCHEMAS_EXTRA_SEARCH_PATHS." 
) @override_settings(TENANTS={"public": {}, "default": BASE_DEFAULT}) def test_all_good_here(self): self.app_config.ready() django-pgschemas-0.15.2/dpgs_sandbox/tests/test_bug_migrations_in_base_models.py000066400000000000000000000073621463633566500303240ustar00rootroot00000000000000import unittest import warnings from unittest.mock import patch from django.apps import apps from django.core import management from django.core.management.base import CommandError from django.db import models from django.db.utils import ProgrammingError from django.test import TestCase, TransactionTestCase, tag from django_pgschemas.checks import check_schema_names from django_pgschemas.models import TenantMixin from django_pgschemas.utils import get_tenant_model TenantModel = get_tenant_model() def patched_get_tenant_model(*args, **kwargs): class TenantModel(TenantMixin): dummy = models.TextField() class Meta: app_label = get_tenant_model()._meta.app_label return TenantModel @tag("bug") class MigrationZeroRoundTripTestCase(TransactionTestCase): """ Provoke a handled ProgrammingError by migrating models from empty database. """ def test_database_checks_with_zero_migrations(self): management.call_command("migrate", "shared_public", "zero", verbosity=0) # The goal is that the next line doesn't raise ProgrammingError check_schema_names(apps.get_app_config("django_pgschemas")) management.call_command("migrate", verbosity=0) @tag("bug") class UnappliedMigrationTestCase(TestCase): """ Provoke a handled ProgrammingError by running tenant command with pending model changes. """ @classmethod def setUpClass(cls): if TenantModel is None: raise unittest.SkipTest("Dynamic tenants are not being used") tenant1 = TenantModel(schema_name="tenant1") tenant1.save(verbosity=0) @classmethod def tearDownClass(cls): for tenant in TenantModel.objects.all(): tenant.delete(force_drop=True) @patch("django_pgschemas.management.commands.get_tenant_model", patched_get_tenant_model) def test_whowill_with_pending_migrations(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") # Avoid warnings about model being registered twice with self.assertRaises(CommandError) as ctx: management.call_command("whowill", all_schemas=True, verbosity=0) self.assertEqual( str(ctx.exception), "Error while attempting to retrieve dynamic schemas. 
" "Perhaps you need to migrate the 'public' schema first?", ) @tag("bug") class MigrateIgnoringExcludedSchemasTestCase(TestCase): @classmethod def setUpClass(cls): if TenantModel is None: raise unittest.SkipTest("Dynamic tenants are not being used") tenant1 = TenantModel(schema_name="tenant1") tenant1.save(verbosity=0) @classmethod def tearDownClass(cls): for tenant in TenantModel.objects.all(): tenant.delete(force_drop=True) def test_migrate_with_exclusions(self): # We first unapply a migration with fake so we can reapply it without fake # This should work without errors management.call_command( "migrate", "app_tenants", "0001_initial", fake=True, schemas=["tenant1"], verbosity=0 ) # We then migrate on all schemas except for tenant1, THIS IS THE CASE WE WANT TO TEST # This should work without errors management.call_command( "migrate", all_schemas=True, excluded_schemas=["tenant1"], verbosity=0 ) # If we try to global migrate now, we should get a ProgrammingError with self.assertRaises(ProgrammingError): management.call_command("migrate", all_schemas=True, verbosity=0) # We finally apply the migration again with fake # This should work without errors management.call_command("migrate", fake=True, all_schemas=True, verbosity=0) django-pgschemas-0.15.2/dpgs_sandbox/tests/test_bug_url_cache.py000066400000000000000000000046001463633566500250420ustar00rootroot00000000000000import unittest from django.apps import apps from django.test import TestCase, tag from django_pgschemas.test.client import TenantClient from django_pgschemas.utils import get_domain_model, get_tenant_model TenantModel = get_tenant_model() DomainModel = get_domain_model() User = apps.get_model("shared_common.User") @tag("bug") class CachedTenantSubfolderBugTestCase(TestCase): """ Tests the behavior of subfolder routing regarding caching of URL patterns. This test checks that a bug reported in issue #8. 
""" @classmethod def setUpClass(cls): if TenantModel is None: raise unittest.SkipTest("Dynamic tenants are not being used") tenant1 = TenantModel(schema_name="tenant1") tenant1.save(verbosity=0) tenant2 = TenantModel(schema_name="tenant2") tenant2.save(verbosity=0) DomainModel.objects.create( tenant=tenant1, domain="everyone.localhost", folder="tenant1", is_primary=True ) DomainModel.objects.create( tenant=tenant2, domain="everyone.localhost", folder="tenant2", is_primary=True ) with tenant1: cls.user1 = User.objects.create(email="user1@localhost", display_name="Admin") with tenant2: cls.user2 = User.objects.create(email="user2@localhost", display_name="Admin") cls.client1 = TenantClient(tenant1) cls.client2 = TenantClient(tenant2) @classmethod def tearDownClass(cls): for tenant in TenantModel.objects.all(): tenant.delete(force_drop=True) def test_bug_in_cached_urls_1(self): self.client1.get("/tenant2/profile/advanced/") # Provoke redirect to login on tenant2 buggy_response = self.client2.get( "/tenant1/profile/advanced/" ) # Provoke redirect to login on tenant1 self.assertEqual(buggy_response.status_code, 302) self.assertEqual(buggy_response.url, "/tenant1/login/?next=/tenant1/profile/advanced/") def test_bug_in_cached_urls_2(self): self.client1.get("/tenant1/profile/advanced/") # Provoke redirect to login on tenant1 buggy_response = self.client2.get( "/tenant2/profile/advanced/" ) # Provoke redirect to login on tenant2 self.assertEqual(buggy_response.status_code, 302) self.assertEqual(buggy_response.url, "/tenant2/login/?next=/tenant2/profile/advanced/") django-pgschemas-0.15.2/dpgs_sandbox/tests/test_cache.py000066400000000000000000000013611463633566500233240ustar00rootroot00000000000000import unittest from django.conf import settings from django_pgschemas.contrib.cache import make_key, reverse_key from django_pgschemas.test.cases import FastTenantTestCase class CacheHelperTestCase(FastTenantTestCase): @classmethod def setUpClass(cls): if "default" not in settings.TENANTS: raise unittest.SkipTest("Dynamic tenants are not being used") super().setUpClass() def test_make_key(self): key = make_key(key="foo", key_prefix="", version=1) tenant_prefix = key.split(":")[0] self.assertEqual(self.tenant.schema_name, tenant_prefix) def test_reverse_key(self): key = "foo" self.assertEqual(key, reverse_key(make_key(key=key, key_prefix="", version=1))) django-pgschemas-0.15.2/dpgs_sandbox/tests/test_checks.py000066400000000000000000000136031463633566500235230ustar00rootroot00000000000000from django.apps import apps from django.core import checks from django.test import TestCase, override_settings from django_pgschemas.checks import ( check_other_apps, check_principal_apps, check_schema_names, get_user_app, ) from django_pgschemas.utils import get_tenant_model TenantModel = get_tenant_model() BASE_DEFAULT = {"TENANT_MODEL": "shared_public.Tenant", "DOMAIN_MODEL": "shared_public.DOMAIN"} class AppChecksTestCase(TestCase): """ Tests multiple checks regarding applications in tenants. 
""" def setUp(self): self.app_config = apps.get_app_config("django_pgschemas") def test_core_apps_location(self): with override_settings(TENANTS={"public": {"APPS": []}, "default": BASE_DEFAULT}): errors = check_principal_apps(self.app_config) expected_errors = [ checks.Error( "Your tenant app 'shared_public' must be on the 'public' schema.", id="pgschemas.W001", ), checks.Error( "Your domain app 'shared_public' must be on the 'public' schema.", id="pgschemas.W001", ), ] self.assertEqual(errors, expected_errors) with override_settings( TENANTS={ "public": {"APPS": ["shared_public"]}, "default": {**BASE_DEFAULT, "APPS": ["shared_public"]}, } ): errors = check_principal_apps(self.app_config) expected_errors = [ checks.Error( "Your tenant app 'shared_public' in TENANTS['default']['APPS'] " "must be on the 'public' schema only.", id="pgschemas.W001", ), checks.Error( "Your domain app 'shared_public' in TENANTS['default']['APPS'] " "must be on the 'public' schema only.", id="pgschemas.W001", ), ] self.assertEqual(errors, expected_errors) def test_contenttypes_location(self): with override_settings(TENANTS={"default": {"APPS": ["django.contrib.contenttypes"]}}): errors = check_other_apps(self.app_config) expected_errors = [ checks.Warning( "'django.contrib.contenttypes' in TENANTS['default']['APPS'] must be on 'public' schema only.", id="pgschemas.W002", ) ] self.assertEqual(errors, expected_errors) with override_settings( TENANTS={"default": {}, "www": {"APPS": ["django.contrib.contenttypes"]}} ): errors = check_other_apps(self.app_config) expected_errors = [ checks.Warning( "'django.contrib.contenttypes' in TENANTS['www']['APPS'] must be on 'public' schema only.", id="pgschemas.W002", ) ] self.assertEqual(errors, expected_errors) def test_user_session_location(self): user_app = get_user_app() with override_settings(TENANTS={"default": {"APPS": ["django.contrib.sessions"]}}): errors = check_other_apps(self.app_config) expected_errors = [ checks.Warning( "'%s' must be together with '%s' in TENANTS['%s']['APPS']." % (user_app, "django.contrib.sessions", "default"), id="pgschemas.W003", ) ] self.assertEqual(errors, expected_errors) with override_settings( TENANTS={ "default": {"APPS": ["shared_common"]}, "www": {"APPS": ["shared_common", "django.contrib.sessions"]}, } ): errors = check_other_apps(self.app_config) expected_errors = [ checks.Warning( "'%s' must be together with '%s' in TENANTS['%s']['APPS']." % ("django.contrib.sessions", user_app, "default"), id="pgschemas.W003", ) ] self.assertEqual(errors, expected_errors) class NameClashCheckTestCase(TestCase): """ Tests checks regarding name clash between static and dynamic tenants. 
""" def setUp(self): if TenantModel is None: self.skipTest("Dynamic tenants are not being used") self.app_config = apps.get_app_config("django_pgschemas") def test_name_clash(self): backup_create = TenantModel.auto_create_schema TenantModel.auto_create_schema = False # public TenantModel.objects.create(schema_name="public") errors = check_schema_names(self.app_config) expected_errors = [ checks.Critical( "Name clash found between static and dynamic tenants: {'public'}", id="pgschemas.W004", ), ] self.assertEqual(errors, expected_errors) TenantModel.objects.all().delete() # www TenantModel.objects.create(schema_name="www") errors = check_schema_names(self.app_config) expected_errors = [ checks.Critical( "Name clash found between static and dynamic tenants: {'www'}", id="pgschemas.W004" ), ] self.assertEqual(errors, expected_errors) TenantModel.objects.all().delete() # sample TenantModel.objects.create(schema_name="sample") errors = check_schema_names(self.app_config) expected_errors = [ checks.Critical( "Name clash found between static and dynamic tenants: {'sample'}", id="pgschemas.W004", ), ] self.assertEqual(errors, expected_errors) TenantModel.objects.all().delete() TenantModel.auto_create_schema = backup_create django-pgschemas-0.15.2/dpgs_sandbox/tests/test_dynamic_tenant_test_case.py000066400000000000000000000027031463633566500273110ustar00rootroot00000000000000import unittest from django.apps import apps from django.conf import settings from django_pgschemas.test.cases import DynamicTenantTestCase Catalog = apps.get_model("shared_public.Catalog") User = apps.get_model("shared_common.User") TenantData = apps.get_model("app_tenants.TenantData") if "default" in settings.TENANTS else None class TestDynamicTenantTestCase(DynamicTenantTestCase): """ Tests the behavior of the DynamicTenantTestCase. """ @classmethod def setUpClass(cls): if "default" not in settings.TENANTS: raise unittest.SkipTest("Dynamic tenants are not being used") super().setUpClass() @classmethod def setUpTestData(cls): cls.user = User.objects.create(email="admin@localhost", display_name="Admin") cls.catalog = Catalog.objects.create() def test_random_operation1(self): if TenantData is None: self.skipTest("Dynamic tenants are not being used") TenantData.objects.create(user=self.user, catalog=self.catalog) self.assertEqual(TenantData.objects.count(), 1) def test_random_operation2(self): Catalog.objects.create() Catalog.objects.create() self.assertEqual(Catalog.objects.count(), 3) def test_random_operation3(self): Catalog.objects.all().delete() self.assertEqual(Catalog.objects.count(), 0) def test_random_operation4(self): self.assertEqual(Catalog.objects.count(), 1) django-pgschemas-0.15.2/dpgs_sandbox/tests/test_executors.py000066400000000000000000000025131463633566500243020ustar00rootroot00000000000000import unittest from django.core import management from django.db import connections from django.test import TransactionTestCase from django_pgschemas.utils import get_domain_model, get_tenant_model TenantModel = get_tenant_model() DomainModel = get_domain_model() class ExecutorsTestCase(TransactionTestCase): """ Tests the executors. 
""" @classmethod def setUpClass(cls): if TenantModel is None: raise unittest.SkipTest("Dynamic tenants are not being used") for i in range(10): tenant = TenantModel(schema_name=f"tenant{i + 1}") tenant.save(verbosity=0) DomainModel.objects.create( tenant=tenant, domain=f"tenant{i + 1}.localhost", is_primary=True ) @classmethod def tearDownClass(cls): for tenant in TenantModel.objects.all(): tenant.delete(force_drop=True) def test_all_schemas_in_sequential(self): # If there are no errors, then this test passed management.call_command("migrate", all_schemas=True, parallel=False, verbosity=0) connections.close_all() def test_all_schemas_in_parallel(self): # If there are no errors, then this test passed management.call_command("migrate", all_schemas=True, parallel=True, verbosity=0) connections.close_all() django-pgschemas-0.15.2/dpgs_sandbox/tests/test_external_url_resolution.py000066400000000000000000000024001463633566500272430ustar00rootroot00000000000000from io import StringIO from django.core import management from django.test import TestCase from django.urls.exceptions import NoReverseMatch class ExternalURLResolutionTestCase(TestCase): """ Tests whether URLs are properly resolved when outside the request/response cycle. In this case, we use a special management command designed to try and reverse a given URL in a given schema. """ def test_urls_for_main_error(self): with self.assertRaises(NoReverseMatch): management.call_command("reverse_url", "entries", schemas=["www"]) def test_urls_for_main_success(self): with StringIO() as buffer: management.call_command("reverse_url", "register", schemas=["www"], stdout=buffer) buffer.seek(0) self.assertEqual(buffer.read().strip(), "/register/") def test_urls_for_blog_error(self): with self.assertRaises(NoReverseMatch): management.call_command("reverse_url", "register", schemas=["blog"]) def test_urls_for_blog_success(self): with StringIO() as buffer: management.call_command("reverse_url", "entries", schemas=["blog"], stdout=buffer) buffer.seek(0) self.assertEqual(buffer.read().strip(), "/entries/") django-pgschemas-0.15.2/dpgs_sandbox/tests/test_file_storage.py000066400000000000000000000137741463633566500247370ustar00rootroot00000000000000import os import shutil import tempfile from django.core.files.base import ContentFile from django.test import TestCase, override_settings from django_pgschemas.contrib.files import TenantFileSystemStorage from django_pgschemas.schema import SchemaDescriptor from django_pgschemas.utils import get_tenant_model TenantModel = get_tenant_model() class TenantFileSystemStorageTestCase(TestCase): """ Tests the tenant file system storage. 
""" @classmethod def setUpClass(cls): cls.temp_dir = tempfile.mkdtemp() cls.storage = TenantFileSystemStorage(location=cls.temp_dir, base_url="/base-url/") @classmethod def tearDownClass(cls): if TenantModel is not None: for tenant in TenantModel.objects.all(): tenant.delete(force_drop=True) shutil.rmtree(cls.temp_dir) def test_path_identifier_basic(self): with SchemaDescriptor.create(schema_name=""): self.assertEquals(self.storage.get_schema_path_identifier(), "") with SchemaDescriptor.create(schema_name="public"): self.assertEquals(self.storage.get_schema_path_identifier(), "public") with SchemaDescriptor.create(schema_name="blog"): self.assertEquals(self.storage.get_schema_path_identifier(), "blog") if TenantModel is not None: with TenantModel(schema_name="tenant"): self.assertEquals(self.storage.get_schema_path_identifier(), "tenant") def test_path_identifier_method_in_tenant(self): if TenantModel is not None: TenantModel.schema_pathname = lambda x: "custom-pathname" with TenantModel(schema_name="tenant"): self.assertEquals(self.storage.get_schema_path_identifier(), "custom-pathname") del TenantModel.schema_pathname def test_path_identifier_function_in_settings(self): if TenantModel is not None: with override_settings( PGSCHEMAS_PATHNAME_FUNCTION=lambda tenant: tenant.schema_name + "-custom-pathname" ): with TenantModel(schema_name="tenant"): self.assertEquals( self.storage.get_schema_path_identifier(), "tenant-custom-pathname" ) def test_base_location(self): with SchemaDescriptor.create(schema_name=""): self.assertEquals(self.storage.base_location, self.temp_dir + "/") with SchemaDescriptor.create(schema_name="public"): self.assertEquals(self.storage.base_location, self.temp_dir + "/public/") with SchemaDescriptor.create(schema_name="blog"): self.assertEquals(self.storage.base_location, self.temp_dir + "/blog/") if TenantModel is not None: with SchemaDescriptor.create(schema_name="tenant", folder="folder"): self.assertEquals(self.storage.base_location, self.temp_dir + "/tenant/") def test_base_url(self): with SchemaDescriptor.create(schema_name=""): self.assertEquals(self.storage.base_url, "/base-url/") with SchemaDescriptor.create(schema_name="public"): self.assertEquals(self.storage.base_url, "/base-url/public/") with SchemaDescriptor.create(schema_name="blog"): self.assertEquals(self.storage.base_url, "/base-url/blog/") if TenantModel is not None: with SchemaDescriptor.create(schema_name="tenant", folder="folder"): self.assertEquals(self.storage.base_url, "/base-url/") def test_file_path(self): if TenantModel is not None: self.assertFalse(self.storage.exists("test.file")) with SchemaDescriptor.create(schema_name="tenant1"): f = ContentFile("random content") f_name = self.storage.save("test.file", f) self.assertEqual( os.path.join(self.temp_dir, "tenant1", f_name), self.storage.path(f_name) ) self.storage.delete(f_name) self.assertFalse(self.storage.exists("test.file")) def test_file_save_with_path(self): if TenantModel is not None: self.assertFalse(self.storage.exists("path/to")) with SchemaDescriptor.create(schema_name="tenant1"): self.storage.save("path/to/test.file", ContentFile("file saved with path")) self.assertTrue(self.storage.exists("path/to")) with self.storage.open("path/to/test.file") as f: self.assertEqual(f.read(), b"file saved with path") self.assertTrue( os.path.exists( os.path.join(self.temp_dir, "tenant1", "path", "to", "test.file") ) ) self.storage.delete("path/to/test.file") self.assertFalse(self.storage.exists("test.file")) def test_file_url_simple(self): with 
SchemaDescriptor.create(schema_name=""): self.assertEqual(self.storage.url("test.file"), "/base-url/test.file") with SchemaDescriptor.create(schema_name="public"): self.assertEqual(self.storage.url("test.file"), "/base-url/public/test.file") if TenantModel is not None: with SchemaDescriptor.create(schema_name="tenant", folder="folder"): self.assertEqual(self.storage.url("test.file"), "/base-url/test.file") def test_file_url_complex(self): if TenantModel is not None: with SchemaDescriptor.create(schema_name="tenant"): self.assertEqual( self.storage.url(r"~!*()'@#$%^&*abc`+ =.file"), "/base-url/tenant/~!*()'%40%23%24%25%5E%26*abc%60%2B%20%3D.file", ) self.assertEqual(self.storage.url("ab\0c"), "/base-url/tenant/ab%00c") self.assertEqual(self.storage.url("a/b\\c.file"), "/base-url/tenant/a/b/c.file") self.assertEqual(self.storage.url(""), "/base-url/tenant/") self.assertEqual(self.storage.url(None), "/base-url/tenant/") django-pgschemas-0.15.2/dpgs_sandbox/tests/test_log.py000066400000000000000000000012251463633566500230410ustar00rootroot00000000000000from django.test import TestCase from django_pgschemas.log import SchemaContextFilter from django_pgschemas.schema import SchemaDescriptor class SchemaContextFilterTestCase(TestCase): """ Tests SchemaContextFilter. """ def test_filter(self): class FakeRecord: pass record = FakeRecord() scf = SchemaContextFilter() with SchemaDescriptor.create( schema_name="some-tenant", domain_url="some-tenant.some-url.com" ): scf.filter(record) self.assertEqual(record.schema_name, "some-tenant") self.assertEqual(record.domain_url, "some-tenant.some-url.com") django-pgschemas-0.15.2/dpgs_sandbox/tests/test_middleware.py000066400000000000000000000152201463633566500243750ustar00rootroot00000000000000import unittest from importlib import import_module from django.http import Http404 from django.test import RequestFactory, TestCase from django_pgschemas.middleware import TenantMiddleware from django_pgschemas.utils import get_domain_model, get_tenant_model TenantModel = get_tenant_model() DomainModel = get_domain_model() class TenantMiddlewareTestCase(TestCase): """ Tests TenantMiddleware. 
""" @classmethod def setUpClass(cls): if TenantModel is None: raise unittest.SkipTest("Dynamic tenants are not being used") super().setUpClass() cls.factory = RequestFactory() tenant1 = TenantModel(schema_name="tenant1") tenant2 = TenantModel(schema_name="tenant2") tenant1.auto_create_schema = tenant2.auto_create_schema = False tenant1.save() tenant2.save() DomainModel(domain="tenant1.localhost", tenant=tenant1).save() DomainModel(domain="everyone.localhost", folder="tenant1", tenant=tenant1).save() DomainModel(domain="tenant2.localhost", tenant=tenant2).save() DomainModel(domain="everyone.localhost", folder="tenant2", tenant=tenant2).save() DomainModel(domain="special.localhost", folder="tenant2", tenant=tenant2).save() def middleware(self, request): def fake_get_response(request): return request return TenantMiddleware(fake_get_response)(request) def test_static_tenants_www(self): request = self.factory.get("/", HTTP_HOST="www.localhost") modified_request = self.middleware(request) self.assertTrue(modified_request.tenant) self.assertEqual(modified_request.tenant.schema_name, "www") self.assertEqual(modified_request.tenant.domain_url, "localhost") self.assertEqual(modified_request.tenant.folder, None) self.assertEqual(modified_request.urlconf, "app_main.urls") def test_static_tenants_blog(self): request = self.factory.get("/some/random/url/", HTTP_HOST="blog.localhost") modified_request = self.middleware(request) self.assertTrue(modified_request.tenant) self.assertEqual(modified_request.tenant.schema_name, "blog") self.assertEqual(modified_request.tenant.domain_url, "blog.localhost") self.assertEqual(modified_request.tenant.folder, None) self.assertEqual(modified_request.urlconf, "app_blog.urls") def test_dynamic_tenants_tenant1_domain(self): request = self.factory.get("/tenant2/", HTTP_HOST="tenant1.localhost") modified_request = self.middleware(request) self.assertTrue(modified_request.tenant) self.assertEqual(modified_request.tenant.schema_name, "tenant1") self.assertEqual(modified_request.tenant.domain_url, "tenant1.localhost") self.assertEqual(modified_request.tenant.folder, None) self.assertEqual(modified_request.urlconf, "app_tenants.urls") def test_dynamic_tenants_tenant2_domain(self): request = self.factory.get("/tenant1/", HTTP_HOST="tenant2.localhost") modified_request = self.middleware(request) self.assertTrue(modified_request.tenant) self.assertEqual(modified_request.tenant.schema_name, "tenant2") self.assertEqual(modified_request.tenant.domain_url, "tenant2.localhost") self.assertEqual(modified_request.tenant.folder, None) self.assertEqual(modified_request.urlconf, "app_tenants.urls") def test_dynamic_tenants_tenant1_folder(self): request = self.factory.get("/tenant1/some/random/url/", HTTP_HOST="everyone.localhost") modified_request = self.middleware(request) self.assertTrue(modified_request.tenant) self.assertEqual(modified_request.tenant.schema_name, "tenant1") self.assertEqual(modified_request.tenant.domain_url, "everyone.localhost") self.assertEqual(modified_request.tenant.folder, "tenant1") self.assertEqual(modified_request.urlconf, "app_tenants.urls_dynamically_tenant_prefixed") def test_dynamic_tenants_tenant2_folder(self): request = self.factory.get("/tenant2/some/random/url/", HTTP_HOST="everyone.localhost") modified_request = self.middleware(request) self.assertTrue(modified_request.tenant) self.assertEqual(modified_request.tenant.schema_name, "tenant2") self.assertEqual(modified_request.tenant.domain_url, "everyone.localhost") 
self.assertEqual(modified_request.tenant.folder, "tenant2") self.assertEqual(modified_request.urlconf, "app_tenants.urls_dynamically_tenant_prefixed") def test_dynamic_tenants_tenant1_folder_short(self): request = self.factory.get("/tenant1/", HTTP_HOST="everyone.localhost") modified_request = self.middleware(request) self.assertTrue(modified_request.tenant) self.assertEqual(modified_request.tenant.schema_name, "tenant1") self.assertEqual(modified_request.tenant.domain_url, "everyone.localhost") self.assertEqual(modified_request.tenant.folder, "tenant1") self.assertEqual(modified_request.urlconf, "app_tenants.urls_dynamically_tenant_prefixed") def test_dynamic_module_can_be_imported(self): request = self.factory.get("/tenant1/", HTTP_HOST="everyone.localhost") modified_request = self.middleware(request) import_module(modified_request.urlconf) def test_wrong_subdomain(self): request = self.factory.get("/some/random/url/", HTTP_HOST="bad-domain.localhost") with self.assertRaises(Http404): self.middleware(request) def test_no_folder(self): request = self.factory.get("/", HTTP_HOST="special.localhost") with self.assertRaises(Http404): self.middleware(request) def test_fallback_domain_root(self): request = self.factory.get("/", HTTP_HOST="everyone.localhost") modified_request = self.middleware(request) self.assertTrue(modified_request.tenant) self.assertEqual(modified_request.tenant.schema_name, "www") self.assertEqual(modified_request.tenant.domain_url, "everyone.localhost") self.assertEqual(modified_request.tenant.folder, None) self.assertEqual(modified_request.urlconf, "app_main.urls") def test_fallback_domain_folder(self): request = self.factory.get("/some/random/url/", HTTP_HOST="everyone.localhost") modified_request = self.middleware(request) self.assertTrue(modified_request.tenant) self.assertEqual(modified_request.tenant.schema_name, "www") self.assertEqual(modified_request.tenant.domain_url, "everyone.localhost") self.assertEqual(modified_request.tenant.folder, None) self.assertEqual(modified_request.urlconf, "app_main.urls") django-pgschemas-0.15.2/dpgs_sandbox/tests/test_middleware_redirection.py000066400000000000000000000072111463633566500267650ustar00rootroot00000000000000import unittest from django.test import RequestFactory, TestCase from django_pgschemas.middleware import TenantMiddleware from django_pgschemas.utils import get_domain_model, get_tenant_model TenantModel = get_tenant_model() DomainModel = get_domain_model() class TenantMiddlewareRedirectionTestCase(TestCase): """ Tests TenantMiddlewareRedirection. 
""" @classmethod def setUpClass(cls): if TenantModel is None: raise unittest.SkipTest("Dynamic tenants are not being used") super().setUpClass() @classmethod def setUpTestData(cls): cls.factory = RequestFactory() tenant1 = TenantModel(schema_name="tenant1") tenant2 = TenantModel(schema_name="tenant2") tenant1.auto_create_schema = tenant2.auto_create_schema = False tenant1.save() tenant2.save() DomainModel(domain="tenant1.localhost", tenant=tenant1).save() DomainModel( domain="tenant1redirect.localhost", tenant=tenant1, is_primary=False, redirect_to_primary=True, ).save() DomainModel( domain="everyone.localhost", folder="tenant1redirect", tenant=tenant1, is_primary=False, redirect_to_primary=True, ).save() DomainModel(domain="everyone.localhost", folder="tenant2", tenant=tenant2).save() DomainModel( domain="tenant2redirect.localhost", tenant=tenant2, is_primary=False, redirect_to_primary=True, ).save() DomainModel( domain="everyone.localhost", folder="tenant2redirect", tenant=tenant2, is_primary=False, redirect_to_primary=True, ).save() def middleware(self, request): def fake_get_response(request): return request return TenantMiddleware(fake_get_response)(request) def test_domain_redirect_to_primary_domain(self): request = self.factory.get("/some/random/url/", HTTP_HOST="tenant1redirect.localhost") response = self.middleware(request) self.assertEqual(response.status_code, 301) self.assertEqual(response.url, "//tenant1.localhost/some/random/url/") self.assertEqual(response["Location"], "//tenant1.localhost/some/random/url/") def test_folder_redirect_to_primary_domain(self): request = self.factory.get( "/tenant1redirect/some/random/url/", HTTP_HOST="everyone.localhost" ) response = self.middleware(request) self.assertEqual(response.status_code, 301) self.assertEqual(response.url, "//tenant1.localhost/some/random/url/") self.assertEqual(response["Location"], "//tenant1.localhost/some/random/url/") def test_domain_redirect_to_primary_folder(self): request = self.factory.get("/some/random/url/", HTTP_HOST="tenant2redirect.localhost") response = self.middleware(request) self.assertEqual(response.status_code, 301) self.assertEqual(response.url, "//everyone.localhost/tenant2/some/random/url/") self.assertEqual(response["Location"], "//everyone.localhost/tenant2/some/random/url/") def test_folder_redirect_to_primary_folder(self): request = self.factory.get( "/tenant2redirect/some/random/url/", HTTP_HOST="everyone.localhost" ) response = self.middleware(request) self.assertEqual(response.status_code, 301) self.assertEqual(response.url, "//everyone.localhost/tenant2/some/random/url/") self.assertEqual(response["Location"], "//everyone.localhost/tenant2/some/random/url/") django-pgschemas-0.15.2/dpgs_sandbox/tests/test_schema_creation_commands.py000066400000000000000000000063671463633566500273010ustar00rootroot00000000000000import unittest from io import StringIO from unittest.mock import patch from django.core.management import call_command from django.core.management.base import CommandError from django.test import TestCase, TransactionTestCase from django_pgschemas import utils TenantModel = utils.get_tenant_model() DomainModel = utils.get_domain_model() class SchemaCreationCommandsTestCase(TestCase): """ Tests that the schema creation commands do what they are expected to do. 
""" def setUp(self): if TenantModel is None: self.skipTest("Dynamic tenants are not being used") def test_cloneschema(self): "Tests 'cloneschema' command" utils._create_clone_schema_function() self.assertFalse(utils.schema_exists("cloned")) call_command("cloneschema", "sample", "cloned", verbosity=0) # All good self.assertTrue(utils.schema_exists("cloned")) with self.assertRaises(CommandError): # Existing destination call_command("cloneschema", "sample", "cloned", verbosity=0) with self.assertRaises(CommandError): # Not existing source call_command("cloneschema", "nonexisting", "newschema", verbosity=0) utils.drop_schema("cloned") def test_createrefschema(self): "Tests 'createrefschema' command" utils.drop_schema("cloned") call_command("createrefschema", verbosity=0) # All good self.assertTrue(utils.schema_exists("sample")) utils.drop_schema("cloned") call_command("createrefschema", recreate=True, verbosity=0) # All good too self.assertTrue(utils.schema_exists("sample")) utils.drop_schema("cloned") call_command("createrefschema", recreate=True, verbosity=0) # All good too self.assertTrue(utils.schema_exists("sample")) class InteractiveCloneSchemaTestCase(TransactionTestCase): """ Tests the interactive behaviod of the cloneschema command. """ @classmethod def setUpClass(cls): if TenantModel is None: raise unittest.SkipTest("Dynamic tenants are not being used") tenant = TenantModel(schema_name="tenant1") tenant.save(verbosity=0) DomainModel.objects.create(tenant=tenant, domain="tenant1.localhost", is_primary=True) @classmethod def tearDownClass(cls): for tenant in TenantModel.objects.all(): tenant.delete(force_drop=True) def test_interactive_cloneschema(self): answer_provider = ( n for n in [ "y", # Would you like to create a database entry? "", # Domain name, simulated wrong answer "tenant2.localhost", # Domain name, good answer ] ) def patched_input(*args, **kwargs): return next(answer_provider) with patch("builtins.input", patched_input): with StringIO() as stdout: with StringIO() as stderr: call_command( "cloneschema", "tenant1", "tenant2", verbosity=1, stdout=stdout, stderr=stderr, ) self.assertTrue(utils.schema_exists("tenant2")) django-pgschemas-0.15.2/dpgs_sandbox/tests/test_signals.py000066400000000000000000000032021463633566500237150ustar00rootroot00000000000000from django.test import TestCase from django_pgschemas.schema import SchemaDescriptor, activate from django_pgschemas.signals import schema_activate from django_pgschemas.utils import get_tenant_model, schema_exists TenantModel = get_tenant_model() class SignalTestCase(TestCase): """ Tests signals. """ def test_schema_activate(self): response = {} params = { "schema_name": "test", "domain_url": "localhost", "folder": "folder", } def receiver(sender, schema, **kwargs): response["value"] = schema schema_activate.connect(receiver) activate(SchemaDescriptor.create(**params)) schema_activate.disconnect(receiver) for key, value in params.items(): self.assertEqual(value, getattr(response["value"], key)) class TenantDeleteCallbackTestCase(TestCase): """ Tests tenant_delete_callback. 
""" def setUp(self): if TenantModel is None: self.skipTest("Dynamic tenants are not being used") def test_tenant_delete_callback(self): backup_create, backup_drop = TenantModel.auto_create_schema, TenantModel.auto_drop_schema TenantModel.auto_create_schema = False TenantModel.auto_drop_schema = True tenant = TenantModel(schema_name="tenant1") tenant.save() tenant.create_schema(sync_schema=False) self.assertTrue(schema_exists("tenant1")) TenantModel.objects.all().delete() self.assertFalse(schema_exists("tenant1")) TenantModel.auto_create_schema, TenantModel.auto_drop_schema = backup_create, backup_drop django-pgschemas-0.15.2/dpgs_sandbox/tests/test_static_tenant_test_case.py000066400000000000000000000051541463633566500271570ustar00rootroot00000000000000from django.apps import apps from django.test import SimpleTestCase from django_pgschemas.test.cases import StaticTenantTestCase from django_pgschemas.utils import get_clone_reference BlogEntry = apps.get_model("app_blog.BlogEntry") User = apps.get_model("shared_common.User") class TestSetUpStaticTenantTestCase(SimpleTestCase): """ Tests the set up behavior of the StaticTenantTestCase. """ def assert_expected_error(self, klass): with self.assertRaises(AssertionError) as ctx: klass.setUpClass() self.assertEqual( str(ctx.exception), f"{klass.__name__}.schema_name must be defined to a valid static tenant", ) def test_set_up_with_empty(self): class DummyStaticTenantTestCase(StaticTenantTestCase): pass self.assert_expected_error(DummyStaticTenantTestCase) def test_set_up_with_public(self): class DummyStaticTenantTestCase(StaticTenantTestCase): schema_name = "public" self.assert_expected_error(DummyStaticTenantTestCase) def test_set_up_with_default(self): class DummyStaticTenantTestCase(StaticTenantTestCase): schema_name = "default" self.assert_expected_error(DummyStaticTenantTestCase) def test_set_up_with_clone_reference(self): class DummyStaticTenantTestCase(StaticTenantTestCase): schema_name = get_clone_reference() self.assert_expected_error(DummyStaticTenantTestCase) def test_set_up_with_non_existing(self): class DummyStaticTenantTestCase(StaticTenantTestCase): schema_name = "nonstatictenant" self.assert_expected_error(DummyStaticTenantTestCase) class TestStaticTenantTestCase(StaticTenantTestCase): """ Tests the behavior of the StaticTenantTestCase. 
""" schema_name = "blog" @classmethod def setUpTestData(cls): cls.user = User.objects.create(email="admin@localhost", display_name="Admin") BlogEntry.objects.create(user=cls.user) def test_correct_set_up(self): self.assertTrue(self.tenant) self.assertEqual(self.tenant.schema_name, "blog") self.assertEqual(self.tenant.domain_url, "blog.localhost") def test_random_operation1(self): BlogEntry.objects.create(user=self.user) BlogEntry.objects.create(user=self.user) self.assertEqual(BlogEntry.objects.count(), 3) def test_random_operation2(self): BlogEntry.objects.all().delete() self.assertEqual(BlogEntry.objects.count(), 0) def test_random_operation3(self): self.assertEqual(BlogEntry.objects.count(), 1) django-pgschemas-0.15.2/dpgs_sandbox/tests/test_tenant_commands.py000066400000000000000000000163641463633566500254440ustar00rootroot00000000000000import unittest from unittest.mock import patch from django.core import management from django.core.management.base import CommandError from django.test import TestCase from django_pgschemas.management.commands import CommandScope from django_pgschemas.management.commands.whowill import Command as WhoWillCommand from django_pgschemas.utils import get_domain_model, get_tenant_model TenantModel = get_tenant_model() DomainModel = get_domain_model() class TenantCommandsTestCase(TestCase): """ Tests the functionality of tenant commands. """ @classmethod def setUpClass(cls): if TenantModel is None: raise unittest.SkipTest("Dynamic tenants are not being used") tenant1 = TenantModel(schema_name="tenant1") tenant1.save(verbosity=0) DomainModel.objects.create(tenant=tenant1, domain="tenant1.localhost", is_primary=True) DomainModel.objects.create( tenant=tenant1, domain="everyone.localhost", folder="tenant1", is_primary=False ) tenant2 = TenantModel(schema_name="tenant2") tenant2.save(verbosity=0) DomainModel.objects.create(tenant=tenant2, domain="tenant2.localhost", is_primary=True) DomainModel.objects.create( tenant=tenant2, domain="everyone.localhost", folder="tenant2", is_primary=False ) @classmethod def tearDownClass(cls): for tenant in TenantModel.objects.all(): tenant.delete(force_drop=True) def test_no_schema_provided(self): command = WhoWillCommand() with self.assertRaises(CommandError) as ctx: management.call_command(command, interactive=False, verbosity=0) self.assertEqual(str(ctx.exception), "No schema provided") def test_no_all_schemas_allowed(self): command = WhoWillCommand() command.allow_wildcards = False with self.assertRaises(TypeError): management.call_command(command, all_schemas=True, verbosity=0) def test_no_static_schemas_allowed(self): command = WhoWillCommand() command.scope = CommandScope.DYNAMIC with self.assertRaises(CommandError) as ctx: management.call_command(command, static_schemas=True, verbosity=0) self.assertEqual(str(ctx.exception), "Including static schemas is NOT allowed") command = WhoWillCommand() command.allow_wildcards = False with self.assertRaises(TypeError): management.call_command(command, static_schemas=True, verbosity=0) def test_no_dynamic_schemas_allowed(self): command = WhoWillCommand() command.scope = CommandScope.STATIC with self.assertRaises(CommandError) as ctx: management.call_command(command, dynamic_schemas=True, verbosity=0) self.assertEqual(str(ctx.exception), "Including dynamic schemas is NOT allowed") command = WhoWillCommand() command.allow_wildcards = False with self.assertRaises(TypeError): management.call_command(command, dynamic_schemas=True, verbosity=0) def 
test_no_tenant_like_schemas_allowed(self): command = WhoWillCommand() command.scope = CommandScope.STATIC with self.assertRaises(CommandError) as ctx: management.call_command(command, tenant_schemas=True, verbosity=0) self.assertEqual(str(ctx.exception), "Including tenant-like schemas is NOT allowed") command = WhoWillCommand() command.allow_wildcards = False with self.assertRaises(TypeError): management.call_command(command, tenant_schemas=True, verbosity=0) def test_nonexisting_schema(self): with self.assertRaises(CommandError) as ctx: management.call_command("whowill", schemas=["unknown"], verbosity=0) self.assertEqual(str(ctx.exception), "No schema found for 'unknown'") def test_ambiguous_schema(self): with self.assertRaises(CommandError) as ctx: management.call_command("whowill", schemas=["tenant"], verbosity=0) self.assertEqual( str(ctx.exception), "More than one tenant found for schema 'tenant' by domain, please, narrow down the filter", ) def test_specific_schemas(self): command = WhoWillCommand() command.specific_schemas = ["blog"] with self.assertRaises(CommandError) as ctx: management.call_command(command, schemas=["www"], verbosity=0) self.assertEqual(str(ctx.exception), "This command can only run in ['blog']") def test_nonexisting_schema_excluded(self): with self.assertRaises(CommandError) as ctx: management.call_command( "whowill", all_schemas=True, excluded_schemas=["unknown"], verbosity=0 ) self.assertEqual(str(ctx.exception), "No schema found for 'unknown' (excluded)") def test_ambiguous_schema_excluded(self): with self.assertRaises(CommandError) as ctx: management.call_command( "whowill", all_schemas=True, excluded_schemas=["tenant"], verbosity=0 ) self.assertEqual( str(ctx.exception), "More than one tenant found for schema 'tenant' by domain (excluded), please, narrow down the filter", ) def test_existing_schema_excluded_ok(self): management.call_command( "whowill", all_schemas=True, excluded_schemas=["tenant1"], verbosity=0 ) def test_interactive_ok(self): def patched_input(*args, **kwargs): return "blog" with patch("builtins.input", patched_input): management.call_command("whowill", schemas=[], verbosity=0) def test_interactive_nonexisting(self): def patched_input(*args, **kwargs): return "unknown" with patch("builtins.input", patched_input): with self.assertRaises(CommandError) as ctx: management.call_command("whowill", schemas=[], verbosity=0) self.assertEqual(str(ctx.exception), "No schema found for 'unknown'") def test_mixed_ok(self): management.call_command("whowill", all_schemas=True, verbosity=0) management.call_command("whowill", static_schemas=True, verbosity=0) management.call_command("whowill", dynamic_schemas=True, verbosity=0) management.call_command("whowill", tenant_schemas=True, verbosity=0) management.call_command("whowill", schemas=["public", "sample"], verbosity=0) management.call_command( "whowill", all_schemas=True, static_schemas=True, dynamic_schemas=True, tenant_schemas=True, schemas=["public", "sample"], verbosity=0, ) management.call_command( "whowill", all_schemas=True, excluded_schemas=["public", "sample"], verbosity=0 ) management.call_command("whowill", schemas=["everyone.localhost/tenant1"], verbosity=0) management.call_command("whowill", schemas=["tenant1"], verbosity=0) management.call_command( "whowill", all_schemas=True, excluded_schemas=["everyone.localhost/tenant1"], verbosity=0, ) management.call_command( "whowill", all_schemas=True, excluded_schemas=["tenant1"], verbosity=0 ) 
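The scope restrictions exercised above apply to any management command built on the package's tenant command machinery. A minimal sketch, assuming the documented TenantCommand base class and its handle_tenant hook; the module path, command name and body below are illustrative only and not part of this repository:

    # myapp/management/commands/greettenants.py (hypothetical module path)
    from django_pgschemas.management.commands import CommandScope, TenantCommand

    class Command(TenantCommand):
        # Restrict schema resolution to dynamic tenants; asking for static
        # schemas would then fail with "Including static schemas is NOT
        # allowed", as asserted in the tests above.
        scope = CommandScope.DYNAMIC

        def handle_tenant(self, tenant, *args, **options):
            # Called once per matched schema, with that schema activated.
            self.stdout.write(f"Greetings from {tenant.schema_name}")

Such a command accepts the same schema-selection options the tests drive through call_command (schemas, all_schemas, dynamic_schemas, excluded_schemas, ...).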
django-pgschemas-0.15.2/dpgs_sandbox/tests/test_tenants.py000066400000000000000000000307731463633566500237440ustar00rootroot00000000000000from contextlib import contextmanager from django.apps import apps from django.conf import settings from django.contrib.auth import authenticate from django.core.management import call_command from django.db import ProgrammingError, transaction from django.test import TestCase from django_pgschemas.schema import SchemaDescriptor, activate_public from django_pgschemas.signals import dynamic_tenant_post_sync from django_pgschemas.utils import drop_schema, get_domain_model, get_tenant_model, schema_exists TenantModel = get_tenant_model() DomainModel = get_domain_model() BlogEntry = apps.get_model("app_blog.BlogEntry") Catalog = apps.get_model("shared_public.Catalog") MainData = apps.get_model("app_main.MainData") User = apps.get_model("shared_common.User") TenantData = apps.get_model("app_tenants.TenantData") if "default" in settings.TENANTS else None class ControlledException(Exception): pass class TenantAutomaticTestCase(TestCase): """ Tests tenant automatic operations. """ def setUp(self): if TenantModel is None: self.skipTest("Dynamic tenants are not being used") def test_new_creation_deletion(self): "Tests automatic creation/deletion for new tenant's save/delete" self.assertFalse(schema_exists("tenant1")) tenant = TenantModel(schema_name="tenant1") tenant.save(verbosity=0) self.assertTrue(schema_exists("tenant1")) # Self-cleanup tenant.delete(force_drop=True) self.assertFalse(schema_exists("tenant1")) def test_existing_creation(self): "Tests automatic creation for existing tenant's save" self.assertFalse(schema_exists("tenant1")) tenant = TenantModel(schema_name="tenant1") tenant.auto_create_schema = False tenant.save(verbosity=0) self.assertFalse(schema_exists("tenant1")) tenant.auto_create_schema = True tenant.save(verbosity=0) self.assertTrue(schema_exists("tenant1")) # Self-cleanup tenant.delete(force_drop=True) self.assertFalse(schema_exists("tenant1")) def test_new_aborted_creation(self): "Tests recovery on automatic creation for new tenant's save" def signal_receiver(*args, **kwargs): raise ControlledException self.assertFalse(schema_exists("tenant1")) tenant = TenantModel(schema_name="tenant1") dynamic_tenant_post_sync.connect(signal_receiver) with self.assertRaises(ControlledException): tenant.save(verbosity=0) self.assertFalse(schema_exists("tenant1")) self.assertEqual(0, TenantModel.objects.count()) dynamic_tenant_post_sync.disconnect(signal_receiver) def test_existing_aborted_creation(self): "Tests recovery on automatic creation for existing tenant's save" def signal_receiver(*args, **kwargs): raise ControlledException self.assertFalse(schema_exists("tenant1")) tenant = TenantModel(schema_name="tenant1") tenant.auto_create_schema = False tenant.save(verbosity=0) tenant.auto_create_schema = True dynamic_tenant_post_sync.connect(signal_receiver) with self.assertRaises(ControlledException): tenant.save(verbosity=0) self.assertFalse(schema_exists("tenant1")) self.assertEqual(1, TenantModel.objects.count()) dynamic_tenant_post_sync.disconnect(signal_receiver) # Self-cleanup tenant.delete(force_drop=True) self.assertEqual(0, TenantModel.objects.count()) class TenantTestCase(TestCase): """ Tests cross-tenant operations. 
""" @classmethod def setUpClass(cls): if TenantModel is not None: tenant = TenantModel(schema_name="tenant") tenant.save(verbosity=0) catalog = Catalog.objects.create() Catalog.objects.create() with SchemaDescriptor.create(schema_name="www"): user = User.objects.create(email="main@localhost", display_name="Main User") user.set_password("weakpassword") user.save() MainData.objects.create() with SchemaDescriptor.create(schema_name="blog"): user = User.objects.create(email="blog@localhost", display_name="Blog User") user.set_password("weakpassword") user.save() BlogEntry.objects.create(user=user) if TenantModel is not None: with TenantModel.objects.first(): user = User.objects.create(email="tenant@localhost", display_name="Tenant User") user.set_password("weakpassword") user.save() TenantData.objects.create(user=user, catalog=catalog) activate_public() super().setUpClass() @classmethod def tearDownClass(cls): super().tearDownClass() for key in settings.TENANTS: if key == "default": continue drop_schema(key) if TenantModel is not None: drop_schema("tenant") call_command("migrateschema", verbosity=0) @contextmanager def assertRaises(self, *args, **kwargs): """ Since we are expecting database errors, we must use savepoints in order to make sure multiple errors can be caught in the same test case. """ sid = transaction.savepoint() with super().assertRaises(*args, **kwargs): yield transaction.savepoint_rollback(sid) def test_synced_public_apps(self): # Expected synced apps self.assertEqual(2, Catalog.objects.count()) # Not expected synced apps with self.assertRaises(ProgrammingError): list(User.objects.all()) with self.assertRaises(ProgrammingError): list(MainData.objects.all()) with self.assertRaises(ProgrammingError): list(BlogEntry.objects.all()) if TenantData is not None: with self.assertRaises(ProgrammingError): list(TenantData.objects.all()) def test_synced_main_apps(self): with SchemaDescriptor.create(schema_name="www"): # Expected synced apps self.assertEqual(2, Catalog.objects.count()) self.assertEqual(1, MainData.objects.count()) self.assertEqual(1, User.objects.count()) # Not expected synced apps with self.assertRaises(ProgrammingError): list(BlogEntry.objects.all()) if TenantData is not None: with self.assertRaises(ProgrammingError): list(TenantData.objects.all()) def test_synced_blog_apps(self): with SchemaDescriptor.create(schema_name="blog"): # Expected synced apps self.assertEqual(2, Catalog.objects.count()) self.assertEqual(1, BlogEntry.objects.count()) self.assertEqual(1, User.objects.count()) # Direct and reverse relations self.assertEqual(User.objects.first(), BlogEntry.objects.first().user) self.assertEqual(User.objects.first().blogs.first(), BlogEntry.objects.first()) # Not expected synced apps with self.assertRaises(ProgrammingError): list(MainData.objects.all()) if TenantData is not None: with self.assertRaises(ProgrammingError): list(TenantData.objects.all()) def test_synced_tenant_apps(self): if TenantModel is None: self.skipTest("Dynamic tenants are not being used") with TenantModel.objects.first(): # Expected synced apps self.assertEqual(2, Catalog.objects.count()) self.assertEqual(1, TenantData.objects.count()) self.assertEqual(1, User.objects.count()) # Direct and reverse relations self.assertEqual(User.objects.first(), TenantData.objects.first().user) self.assertEqual( User.objects.first().tenant_objects.first(), TenantData.objects.first() ) self.assertEqual(Catalog.objects.first(), TenantData.objects.first().catalog) self.assertEqual( 
Catalog.objects.first().tenant_objects.first(), TenantData.objects.first() ) # Not expected synced apps with self.assertRaises(ProgrammingError): list(MainData.objects.all()) with self.assertRaises(ProgrammingError): list(BlogEntry.objects.all()) def test_cross_authentication(self): with SchemaDescriptor.create(schema_name="www"): self.assertTrue(authenticate(email="main@localhost", password="weakpassword")) # good self.assertFalse(authenticate(email="blog@localhost", password="weakpassword")) # bad self.assertFalse(authenticate(email="tenant@localhost", password="weakpassword")) # bad with SchemaDescriptor.create(schema_name="blog"): self.assertTrue(authenticate(email="blog@localhost", password="weakpassword")) # good self.assertFalse(authenticate(email="main@localhost", password="weakpassword")) # bad self.assertFalse(authenticate(email="tenant@localhost", password="weakpassword")) # bad if TenantModel is not None: with TenantModel.objects.first(): self.assertTrue( authenticate(email="tenant@localhost", password="weakpassword") ) # good self.assertFalse( authenticate(email="main@localhost", password="weakpassword") ) # bad self.assertFalse( authenticate(email="blog@localhost", password="weakpassword") ) # bad # Switching to public schema activate_public() with self.assertRaises(ProgrammingError): authenticate(email="unexisting@localhost", password="unexisting") # unexisting, error class DomainTestCase(TestCase): """ Tests domain operations. """ def setUp(self): if TenantModel is None: self.skipTest("Dynamic tenants are not being used") def test_primary_domain(self): tenant1 = TenantModel(schema_name="tenant1") tenant2 = TenantModel(schema_name="tenant2") tenant1.save(verbosity=0) tenant2.save(verbosity=0) domain1 = DomainModel.objects.create(domain="tenant1.localhost", tenant=tenant1) DomainModel.objects.create( domain="tenant1-other.localhost", tenant=tenant1, is_primary=False ) self.assertEqual(tenant1.get_primary_domain(), domain1) self.assertEqual(tenant2.get_primary_domain(), None) for tenant in TenantModel.objects.all(): tenant.delete(force_drop=True) def test_domain_string(self): tenant = TenantModel(schema_name="tenant") tenant.save(verbosity=0) domain1 = DomainModel.objects.create(domain="tenant.localhost", tenant=tenant) domain2 = DomainModel.objects.create( domain="everyone.localhost", folder="tenant", tenant=tenant ) self.assertEqual(str(domain1), "tenant.localhost") self.assertEqual(str(domain2), "everyone.localhost/tenant") tenant.delete(force_drop=True) def test_domain_absolute_url(self): tenant = TenantModel(schema_name="tenant") tenant.save(verbosity=0) subdomain = DomainModel.objects.create(domain="tenant.localhost", tenant=tenant) subfolder = DomainModel.objects.create( domain="everyone.localhost", folder="tenant", tenant=tenant ) self.assertEqual(subdomain.absolute_url(""), "//tenant.localhost/") self.assertEqual(subdomain.absolute_url("/some/path/"), "//tenant.localhost/some/path/") self.assertEqual(subdomain.absolute_url("some/path"), "//tenant.localhost/some/path") self.assertEqual(subfolder.absolute_url(""), "//everyone.localhost/tenant/") self.assertEqual( subfolder.absolute_url("/some/path/"), "//everyone.localhost/tenant/some/path/" ) self.assertEqual( subfolder.absolute_url("some/path"), "//everyone.localhost/tenant/some/path" ) tenant.delete(force_drop=True) def test_domain_redirect_save(self): tenant = TenantModel(schema_name="tenant") tenant.save(verbosity=0) domain = DomainModel.objects.create( domain="tenant.localhost", tenant=tenant, 
redirect_to_primary=True ) self.assertTrue(domain.is_primary) self.assertFalse(domain.redirect_to_primary) tenant.delete(force_drop=True) django-pgschemas-0.15.2/dpgs_sandbox/tests/test_test_client.py000066400000000000000000000103741463633566500246020ustar00rootroot00000000000000import unittest from django.test import TestCase from django_pgschemas.schema import SchemaDescriptor from django_pgschemas.test.client import TenantClient, TenantRequestFactory from django_pgschemas.utils import get_domain_model, get_tenant_model TenantModel = get_tenant_model() DomainModel = get_domain_model() class TenantRequestFactoryTestCase(TestCase): """ Test the behavior of the TenantRequestFactory. """ @classmethod def setUpClass(cls): if TenantModel is None: raise unittest.SkipTest("Dynamic tenants are not being used") tenant = TenantModel(schema_name="tenant1") tenant.save(verbosity=0) DomainModel.objects.create(tenant=tenant, domain="tenant1.localhost", is_primary=True) cls.request = TenantRequestFactory(tenant) @classmethod def tearDownClass(cls): for tenant in TenantModel.objects.all(): tenant.delete(force_drop=True) def test_get(self): request = self.request.get("/not/important/") self.assertEqual( request.build_absolute_uri("/whatever/"), "http://tenant1.localhost/whatever/" ) def test_post(self): request = self.request.post("/not/important/") self.assertEqual( request.build_absolute_uri("/whatever/"), "http://tenant1.localhost/whatever/" ) def test_put(self): request = self.request.put("/not/important/") self.assertEqual( request.build_absolute_uri("/whatever/"), "http://tenant1.localhost/whatever/" ) def test_patch(self): request = self.request.patch("/not/important/") self.assertEqual( request.build_absolute_uri("/whatever/"), "http://tenant1.localhost/whatever/" ) def test_delete(self): request = self.request.delete("/not/important/") self.assertEqual( request.build_absolute_uri("/whatever/"), "http://tenant1.localhost/whatever/" ) class DynamicTenantClientTestCase(TestCase): """ Test the behavior of the TenantClient with a dynamic tenant. """ @classmethod def setUpClass(cls): if TenantModel is None: raise unittest.SkipTest("Dynamic tenants are not being used") tenant = TenantModel(schema_name="tenant1") tenant.save(verbosity=0) DomainModel.objects.create(tenant=tenant, domain="tenant1.localhost", is_primary=True) cls.tenant_client = TenantClient(tenant) @classmethod def tearDownClass(cls): for tenant in TenantModel.objects.all(): tenant.delete(force_drop=True) def test_get(self): response = self.tenant_client.get("/profile/") self.assertEqual(response.status_code, 200) def test_post(self): response = self.tenant_client.post("/profile/") self.assertEqual(response.status_code, 200) def test_put(self): response = self.tenant_client.put("/profile/") self.assertEqual(response.status_code, 200) def test_patch(self): response = self.tenant_client.patch("/profile/") self.assertEqual(response.status_code, 200) def test_delete(self): response = self.tenant_client.delete("/profile/") self.assertEqual(response.status_code, 200) class StaticTenantClientTestCase(TestCase): """ Test the behavior of the TenantClient with a static tenant. 
""" @classmethod def setUpClass(cls): tenant = SchemaDescriptor.create(schema_name="blog", domain_url="blog.localhost") cls.tenant_client = TenantClient(tenant) @classmethod def tearDownClass(cls): pass def test_get(self): response = self.tenant_client.get("/entries/") self.assertEqual(response.status_code, 200) def test_post(self): response = self.tenant_client.post("/entries/") self.assertEqual(response.status_code, 200) def test_put(self): response = self.tenant_client.put("/entries/") self.assertEqual(response.status_code, 200) def test_patch(self): response = self.tenant_client.patch("/entries/") self.assertEqual(response.status_code, 200) def test_delete(self): response = self.tenant_client.delete("/entries/") self.assertEqual(response.status_code, 200) django-pgschemas-0.15.2/dpgs_sandbox/tests/test_urlresolvers.py000066400000000000000000000143241463633566500250330ustar00rootroot00000000000000import sys import unittest from importlib import import_module from django.test import RequestFactory, TestCase from django.urls import reverse from django_pgschemas.middleware import TenantMiddleware from django_pgschemas.schema import SchemaDescriptor, activate_public from django_pgschemas.urlresolvers import TenantPrefixPattern, get_urlconf_from_schema from django_pgschemas.utils import get_domain_model, get_tenant_model TenantModel = get_tenant_model() DomainModel = get_domain_model() class URLResolversTestCase(TestCase): """ Tests TenantPrefixPattern and prefixed reverse. """ @classmethod def setUpClass(cls): if TenantModel is None: raise unittest.SkipTest("Dynamic tenants are not being used") def reverser_func(self, name, domain, path="/"): """ Reverses `name` in the urlconf returned by processing `domain` at `path`. """ def fake_get_response(request): return request factory = RequestFactory() request = factory.get(path, HTTP_HOST=domain) modified_request = TenantMiddleware(fake_get_response)(request) with modified_request.tenant: urlconf = import_module(modified_request.urlconf) reverse_response = reverse(name, urlconf=urlconf) del sys.modules[modified_request.urlconf] # required to simulate new thread return reverse_response cls.reverser = reverser_func # This comes from app_tenants/urls.py cls.paths = { "tenant-home": "/", "profile": "/profile/", "advanced-profile": "/profile/advanced/", } for i in range(1, 4): schema_name = f"tenant{i}" tenant = TenantModel(schema_name=schema_name) tenant.save(verbosity=0) DomainModel.objects.create(tenant=tenant, domain=f"{schema_name}.localhost") DomainModel.objects.create( tenant=tenant, domain="everyone.localhost", folder=schema_name ) # primary activate_public() super().setUpClass() @classmethod def tearDownClass(cls): super().tearDownClass() for tenant in TenantModel.objects.all(): tenant.delete(force_drop=True) def test_tenant_prefix(self): tpp = TenantPrefixPattern() for tenant in TenantModel.objects.all(): # Try with folder tenant.domain_url = "everyone.localhost" # This should be set by middleware tenant.folder = tenant.schema_name # This should be set by middleware with tenant: self.assertEqual(tpp.tenant_prefix, tenant.get_primary_domain().folder + "/") # Try with subdomain tenant.domain_url = ( f"{tenant.schema_name}.localhost" # This should be set by middleware ) tenant.folder = "" # This should be set by middleware with tenant: self.assertEqual(tpp.tenant_prefix, "/") with SchemaDescriptor.create( schema_name="tenant1", domain_url="unexisting-domain.localhost" ): self.assertEqual(tpp.tenant_prefix, "/") def 
test_unprefixed_reverse(self): for tenant in TenantModel.objects.all(): domain = f"{tenant.schema_name}.localhost" for name, path in self.paths.items(): self.assertEqual(self.reverser(name, domain), path) def test_prefixed_reverse(self): for tenant in TenantModel.objects.all(): domain = "everyone.localhost" for name, path in self.paths.items(): self.assertEqual( self.reverser(name, domain, f"/{tenant.schema_name}/"), f"/{tenant.schema_name}{path}", ) class URLConfFactoryTestCase(TestCase): """ Tests get_urlconf_from_schema. """ @classmethod def setUpClass(cls): if TenantModel is None: raise unittest.SkipTest("Dynamic tenants are not being used") schema_name = "tenant1" tenant = TenantModel(schema_name=schema_name) tenant.save(verbosity=0) DomainModel.objects.create(tenant=tenant, domain=f"{schema_name}.localhost") DomainModel.objects.create( tenant=tenant, domain="everyone.localhost", folder=schema_name ) # primary activate_public() super().setUpClass() @classmethod def tearDownClass(cls): super().tearDownClass() for tenant in TenantModel.objects.all(): tenant.delete(force_drop=True) def test_public(self): schema = SchemaDescriptor.create(schema_name="public") urlconf = get_urlconf_from_schema(schema) self.assertEqual(urlconf, None) def test_sample(self): schema = SchemaDescriptor.create(schema_name="sample") urlconf = get_urlconf_from_schema(schema) self.assertEqual(urlconf, None) def test_www(self): schema = SchemaDescriptor.create(schema_name="www", domain_url="localhost") urlconf = get_urlconf_from_schema(schema) self.assertEqual(urlconf, "app_main.urls") def test_blog(self): schema = SchemaDescriptor.create(schema_name="blog", domain_url="blog.localhost") urlconf = get_urlconf_from_schema(schema) self.assertEqual(urlconf, "app_blog.urls") def test_tenant1_unprefixed(self): schema = TenantModel.objects.get(schema_name="tenant1") schema.domain_url = "tenant1.localhost" urlconf = get_urlconf_from_schema(schema) self.assertEqual(urlconf, "app_tenants.urls") def test_tenant1_prefixed(self): schema = TenantModel.objects.get(schema_name="tenant1") schema.domain_url = "everyone.localhost" schema.folder = "tenant1" urlconf = get_urlconf_from_schema(schema) self.assertEqual(urlconf, "app_tenants.urls_dynamically_tenant_prefixed") self.assertTrue(sys.modules.get("app_tenants.urls_dynamically_tenant_prefixed")) def test_tenant1_broken_request(self): schema = TenantModel.objects.get(schema_name="tenant1") urlconf = get_urlconf_from_schema(schema) self.assertEqual(urlconf, None) django-pgschemas-0.15.2/dpgs_sandbox/tests/test_utils.py000066400000000000000000000136211463633566500234230ustar00rootroot00000000000000from django.conf import settings from django.core.exceptions import ValidationError from django.db import connection from django.db.utils import DatabaseError from django.test import TestCase, override_settings from django_pgschemas import schema, utils class UtilsTestCase(TestCase): """ Tests utility functions. 
""" valid_identifiers = ["___", "a_a0", "_a0_", "a" * 63] invalid_identifiers = ["", " ", "^", ".", "&", "{", "(", "@", "!", "a" * 64] valid_schema_names = ["a_pg", "w_pg_a", "_pg_awa", "pgwa"] + valid_identifiers invalid_schema_names = ["pg_a", "pg_"] + invalid_identifiers def test_get_tenant_model(self): if "default" not in settings.TENANTS: self.skipTest("Dynamic tenants are not being used") self.assertEqual(utils.get_tenant_model()._meta.model_name, "tenant") def test_get_domain_model(self): if "default" not in settings.TENANTS: self.skipTest("Dynamic tenants are not being used") self.assertEqual(utils.get_domain_model()._meta.model_name, "domain") def test_get_tenant_database_alias(self): self.assertEqual(utils.get_tenant_database_alias(), "default") with override_settings(PGSCHEMAS_TENANT_DB_ALIAS="something"): self.assertEqual(utils.get_tenant_database_alias(), "something") def test_get_limit_set_calls(self): self.assertFalse(utils.get_limit_set_calls()) with override_settings(PGSCHEMAS_LIMIT_SET_CALLS=True): self.assertTrue(utils.get_limit_set_calls()) def test_get_clone_reference(self): if "default" in settings.TENANTS: self.assertEqual(utils.get_clone_reference(), "sample") else: self.assertEqual(utils.get_clone_reference(), None) def test_is_valid_identifier(self): for identifier in self.valid_identifiers: self.assertTrue(utils.is_valid_identifier(identifier)) for identifier in self.invalid_identifiers: self.assertFalse(utils.is_valid_identifier(identifier)) def test_is_valid_schema_name(self): for schema_name in self.valid_schema_names: self.assertTrue(utils.is_valid_schema_name(schema_name)) for schema_name in self.invalid_schema_names: self.assertFalse(utils.is_valid_schema_name(schema_name)) def test_check_schema_name(self): for schema_name in self.valid_schema_names: utils.check_schema_name(schema_name) for schema_name in self.invalid_schema_names: with self.assertRaises(ValidationError): utils.check_schema_name(schema_name) def test_remove_www(self): self.assertEqual(utils.remove_www("localhost"), "localhost") self.assertEqual(utils.remove_www("www.localhost"), "localhost") self.assertEqual(utils.remove_www("wwwlocalhost"), "wwwlocalhost") self.assertEqual(utils.remove_www("www."), "") def test_run_in_public_schema(self): @utils.run_in_public_schema def inner(): cursor = connection.cursor() cursor.execute("SHOW search_path") self.assertEqual(cursor.fetchone(), ("public",)) cursor.close() with schema.SchemaDescriptor.create(schema_name="test"): inner() cursor = connection.cursor() cursor.execute("SHOW search_path") self.assertEqual(cursor.fetchone(), ("test, public",)) cursor.close() def test_schema_exists(self): self.assertTrue(utils.schema_exists("public")) self.assertTrue(utils.schema_exists("www")) self.assertTrue(utils.schema_exists("blog")) self.assertFalse(utils.schema_exists("default")) if "default" in settings.TENANTS: self.assertTrue(utils.schema_exists("sample")) self.assertFalse(utils.schema_exists("tenant")) else: self.assertFalse(utils.schema_exists("sample")) self.assertFalse(utils.schema_exists("tenant")) def test_dynamic_models_exist(self): if "default" in settings.TENANTS: self.assertTrue(utils.dynamic_models_exist()) else: self.assertFalse(utils.dynamic_models_exist()) utils.drop_schema("public") self.assertFalse(utils.dynamic_models_exist()) def test_create_drop_schema(self): self.assertFalse( utils.create_schema("public", check_if_exists=True) ) # Schema existed already self.assertTrue(utils.schema_exists("public")) # Schema exists 
self.assertTrue(utils.drop_schema("public")) # Schema was dropped self.assertFalse(utils.drop_schema("public")) # Schema no longer exists self.assertFalse(utils.schema_exists("public")) # Schema doesn't exist self.assertTrue(utils.create_schema("public", sync_schema=False)) # Schema was created self.assertTrue(utils.schema_exists("public")) # Schema exists def test_clone_schema(self): if "default" not in settings.TENANTS: self.skipTest("Dynamic tenants are not being used") utils._create_clone_schema_function() self.assertFalse(utils.schema_exists("sample2")) # Schema doesn't exist previously utils.clone_schema("sample", "sample2", dry_run=True) # Dry run self.assertFalse(utils.schema_exists("sample2")) # Schema won't exist, dry run utils.clone_schema("sample", "sample2") # Real run, schema was cloned self.assertTrue(utils.schema_exists("sample2")) # Schema exists with self.assertRaises(DatabaseError): utils.clone_schema("sample", "sample2") # Schema already exists, error self.assertTrue(utils.schema_exists("sample2")) # Schema still exists def test_create_or_clone_schema(self): if "default" not in settings.TENANTS: self.skipTest("Dynamic tenants are not being used") self.assertFalse(utils.create_or_clone_schema("sample")) # Schema existed django-pgschemas-0.15.2/dpgs_sandbox/tests/test_whowill_command.py000066400000000000000000000077301463633566500254520ustar00rootroot00000000000000import unittest from io import StringIO from django.core import management from django.test import TestCase from django_pgschemas.utils import get_domain_model, get_tenant_model TenantModel = get_tenant_model() DomainModel = get_domain_model() class WhoWillCommandTestCase(TestCase): """ Tests the whowill management command. """ @classmethod def setUpClass(cls): if TenantModel is None: raise unittest.SkipTest("Dynamic tenants are not being used") tenant = TenantModel(schema_name="tenant1") tenant.save(verbosity=0) DomainModel.objects.create(tenant=tenant, domain="tenant1.localhost", is_primary=True) @classmethod def tearDownClass(cls): for tenant in TenantModel.objects.all(): tenant.delete(force_drop=True) def split_output(self, buffer): buffer.seek(0) return set(buffer.read().strip().splitlines()) def test_all_schemas(self): with StringIO() as buffer: management.call_command("whowill", all_schemas=True, stdout=buffer) self.assertEqual( self.split_output(buffer), {"public", "sample", "localhost", "blog.localhost", "tenant1.localhost"}, ) def test_static_schemas(self): with StringIO() as buffer: management.call_command("whowill", static_schemas=True, stdout=buffer) self.assertEqual( self.split_output(buffer), {"public", "sample", "localhost", "blog.localhost"} ) def test_tenant_like_schemas(self): with StringIO() as buffer: management.call_command("whowill", tenant_schemas=True, stdout=buffer) self.assertEqual(self.split_output(buffer), {"sample", "tenant1.localhost"}) def test_dynamic_schemas(self): with StringIO() as buffer: management.call_command("whowill", dynamic_schemas=True, stdout=buffer) self.assertEqual(self.split_output(buffer), {"tenant1.localhost"}) def test_specific_schemas(self): with StringIO() as buffer: management.call_command("whowill", schemas=["www", "blog", "tenant1"], stdout=buffer) self.assertEqual( self.split_output(buffer), {"localhost", "blog.localhost", "tenant1.localhost"} ) # Same test cases as before, but excluding one def test_all_schemas_minus_one(self): with StringIO() as buffer: management.call_command( "whowill", all_schemas=True, excluded_schemas=["blog"], stdout=buffer ) 
self.assertEqual( self.split_output(buffer), {"public", "sample", "localhost", "tenant1.localhost"} ) def test_static_schemas_minus_one(self): with StringIO() as buffer: management.call_command( "whowill", static_schemas=True, excluded_schemas=["sample"], stdout=buffer ) self.assertEqual(self.split_output(buffer), {"public", "localhost", "blog.localhost"}) def test_tenant_like_schemas_minus_one(self): with StringIO() as buffer: management.call_command( "whowill", tenant_schemas=True, excluded_schemas=["tenant1"], stdout=buffer ) self.assertEqual(self.split_output(buffer), {"sample"}) def test_dynamic_schemas_minus_one(self): with StringIO() as buffer: management.call_command( "whowill", dynamic_schemas=True, excluded_schemas=["public"], stdout=buffer ) self.assertEqual(self.split_output(buffer), {"tenant1.localhost"}) def test_specific_schemas_minus_one(self): with StringIO() as buffer: management.call_command( "whowill", schemas=["www", "blog", "tenant1"], excluded_schemas=["www"], stdout=buffer, ) self.assertEqual(self.split_output(buffer), {"blog.localhost", "tenant1.localhost"}) django-pgschemas-0.15.2/dpgs_sandbox/views.py000066400000000000000000000007021463633566500212130ustar00rootroot00000000000000from django.http import HttpResponse RESPONSE_TEMPLATE = """ Path: {path} User: {user} Schema: {schema} Domain: {domain} Folder: {folder} """ def generic(request): return HttpResponse( RESPONSE_TEMPLATE.format( path=request.get_full_path(), user=request.user, schema=request.tenant.schema_name, domain=request.tenant.domain_url, folder=request.tenant.folder, ) ) django-pgschemas-0.15.2/dpgs_sandbox/wsgi.py000066400000000000000000000002551463633566500210320ustar00rootroot00000000000000import os from django.core.wsgi import get_wsgi_application os.environ.setdefault("DJANGO_SETTINGS_MODULE", "dpgs_sandbox.settings") application = get_wsgi_application() django-pgschemas-0.15.2/poetry.lock000066400000000000000000002720271463633566500172400ustar00rootroot00000000000000# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. 
[[package]] name = "alabaster" version = "0.7.13" description = "A configurable sidebar-enabled Sphinx theme" optional = false python-versions = ">=3.6" files = [ {file = "alabaster-0.7.13-py3-none-any.whl", hash = "sha256:1ee19aca801bbabb5ba3f5f258e4422dfa86f82f3e9cefb0859b283cdd7f62a3"}, {file = "alabaster-0.7.13.tar.gz", hash = "sha256:a27a4a084d5e690e16e01e03ad2b2e552c61a65469419b907243193de1a84ae2"}, ] [[package]] name = "asgiref" version = "3.7.2" description = "ASGI specs, helper code, and adapters" optional = false python-versions = ">=3.7" files = [ {file = "asgiref-3.7.2-py3-none-any.whl", hash = "sha256:89b2ef2247e3b562a16eef663bc0e2e703ec6468e2fa8a5cd61cd449786d4f6e"}, {file = "asgiref-3.7.2.tar.gz", hash = "sha256:9e0ce3aa93a819ba5b45120216b23878cf6e8525eb3848653452b4192b92afed"}, ] [package.dependencies] typing-extensions = {version = ">=4", markers = "python_version < \"3.11\""} [package.extras] tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"] [[package]] name = "attrs" version = "23.1.0" description = "Classes Without Boilerplate" optional = false python-versions = ">=3.7" files = [ {file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"}, {file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"}, ] [package.extras] cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] dev = ["attrs[docs,tests]", "pre-commit"] docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] tests = ["attrs[tests-no-zope]", "zope-interface"] tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] [[package]] name = "autobahn" version = "23.1.2" description = "WebSocket client & server library, WAMP real-time framework" optional = false python-versions = ">=3.7" files = [ {file = "autobahn-23.1.2.tar.gz", hash = "sha256:c5ef8ca7422015a1af774a883b8aef73d4954c9fcd182c9b5244e08e973f7c3a"}, ] [package.dependencies] cryptography = ">=3.4.6" hyperlink = ">=21.0.0" setuptools = "*" txaio = ">=21.2.1" [package.extras] all = ["PyGObject (>=3.40.0)", "argon2_cffi (>=20.1.0)", "attrs (>=20.3.0)", "base58 (>=2.1.0)", "cbor2 (>=5.2.0)", "cffi (>=1.14.5)", "click (>=8.1.2)", "ecdsa (>=0.16.1)", "eth-abi @ git+https://github.com/ethereum/eth-abi.git@v4.0.0-beta.2", "flatbuffers (>=22.12.6)", "hkdf (>=0.0.3)", "jinja2 (>=2.11.3)", "mnemonic (>=0.19)", "msgpack (>=1.0.2)", "passlib (>=1.7.4)", "py-ecc (>=5.1.0)", "py-eth-sig-utils (>=0.4.0)", "py-multihash (>=2.0.1)", "py-ubjson (>=0.16.1)", "pynacl (>=1.4.0)", "pyopenssl (>=20.0.1)", "python-snappy (>=0.6.0)", "pytrie (>=0.4.0)", "qrcode (>=7.3.1)", "rlp (>=2.0.1)", "service_identity (>=18.1.0)", "spake2 (>=0.8)", "twisted (>=20.3.0)", "ujson (>=4.0.2)", "web3[ipfs] @ git+https://github.com/ethereum/web3.py.git@v6.0.0-beta.9", "xbr (>=21.2.1)", "yapf (==0.29.0)", "zlmdb (>=21.2.1)", "zope.interface (>=5.2.0)"] compress = ["python-snappy (>=0.6.0)"] dev = ["backports.tempfile (>=1.0)", "bumpversion (>=0.5.3)", "codecov (>=2.0.15)", "flake8 (<5)", "humanize (>=0.5.1)", "mypy (>=0.610)", "passlib", "pep8-naming (>=0.3.3)", "pip (>=9.0.1)", "pyenchant (>=1.6.6)", "pyflakes (>=1.0.0)", "pyinstaller (>=4.2)", "pylint (>=1.9.2)", "pytest (>=3.4.2)", "pytest-aiohttp", "pytest-asyncio (>=0.14.0)", "pytest-runner (>=2.11.1)", "pyyaml (>=4.2b4)", "qualname", "sphinx (>=1.7.1)", "sphinx-autoapi (>=1.7.0)", 
"sphinx_rtd_theme (>=0.1.9)", "sphinxcontrib-images (>=0.9.1)", "tox (>=4.2.8)", "tox-gh-actions (>=2.2.0)", "twine (>=3.3.0)", "twisted (>=22.10.0)", "txaio (>=20.4.1)", "watchdog (>=0.8.3)", "wheel (>=0.36.2)", "yapf (==0.29.0)"] encryption = ["pynacl (>=1.4.0)", "pyopenssl (>=20.0.1)", "pytrie (>=0.4.0)", "qrcode (>=7.3.1)", "service_identity (>=18.1.0)"] nvx = ["cffi (>=1.14.5)"] scram = ["argon2_cffi (>=20.1.0)", "cffi (>=1.14.5)", "passlib (>=1.7.4)"] serialization = ["cbor2 (>=5.2.0)", "flatbuffers (>=22.12.6)", "msgpack (>=1.0.2)", "py-ubjson (>=0.16.1)", "ujson (>=4.0.2)"] twisted = ["attrs (>=20.3.0)", "twisted (>=20.3.0)", "zope.interface (>=5.2.0)"] ui = ["PyGObject (>=3.40.0)"] xbr = ["base58 (>=2.1.0)", "cbor2 (>=5.2.0)", "click (>=8.1.2)", "ecdsa (>=0.16.1)", "eth-abi @ git+https://github.com/ethereum/eth-abi.git@v4.0.0-beta.2", "hkdf (>=0.0.3)", "jinja2 (>=2.11.3)", "mnemonic (>=0.19)", "py-ecc (>=5.1.0)", "py-eth-sig-utils (>=0.4.0)", "py-multihash (>=2.0.1)", "rlp (>=2.0.1)", "spake2 (>=0.8)", "twisted (>=20.3.0)", "web3[ipfs] @ git+https://github.com/ethereum/web3.py.git@v6.0.0-beta.9", "xbr (>=21.2.1)", "yapf (==0.29.0)", "zlmdb (>=21.2.1)"] [[package]] name = "automat" version = "22.10.0" description = "Self-service finite-state machines for the programmer on the go." optional = false python-versions = "*" files = [ {file = "Automat-22.10.0-py2.py3-none-any.whl", hash = "sha256:c3164f8742b9dc440f3682482d32aaff7bb53f71740dd018533f9de286b64180"}, {file = "Automat-22.10.0.tar.gz", hash = "sha256:e56beb84edad19dcc11d30e8d9b895f75deeb5ef5e96b84a467066b3b84bb04e"}, ] [package.dependencies] attrs = ">=19.2.0" six = "*" [package.extras] visualize = ["Twisted (>=16.1.1)", "graphviz (>0.5.1)"] [[package]] name = "babel" version = "2.12.1" description = "Internationalization utilities" optional = false python-versions = ">=3.7" files = [ {file = "Babel-2.12.1-py3-none-any.whl", hash = "sha256:b4246fb7677d3b98f501a39d43396d3cafdc8eadb045f4a31be01863f655c610"}, {file = "Babel-2.12.1.tar.gz", hash = "sha256:cc2d99999cd01d44420ae725a21c9e3711b3aadc7976d6147f622d8581963455"}, ] [package.dependencies] pytz = {version = ">=2015.7", markers = "python_version < \"3.9\""} [[package]] name = "backports-zoneinfo" version = "0.2.1" description = "Backport of the standard library zoneinfo module" optional = false python-versions = ">=3.6" files = [ {file = "backports.zoneinfo-0.2.1-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:da6013fd84a690242c310d77ddb8441a559e9cb3d3d59ebac9aca1a57b2e18bc"}, {file = "backports.zoneinfo-0.2.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:89a48c0d158a3cc3f654da4c2de1ceba85263fafb861b98b59040a5086259722"}, {file = "backports.zoneinfo-0.2.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:1c5742112073a563c81f786e77514969acb58649bcdf6cdf0b4ed31a348d4546"}, {file = "backports.zoneinfo-0.2.1-cp36-cp36m-win32.whl", hash = "sha256:e8236383a20872c0cdf5a62b554b27538db7fa1bbec52429d8d106effbaeca08"}, {file = "backports.zoneinfo-0.2.1-cp36-cp36m-win_amd64.whl", hash = "sha256:8439c030a11780786a2002261569bdf362264f605dfa4d65090b64b05c9f79a7"}, {file = "backports.zoneinfo-0.2.1-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:f04e857b59d9d1ccc39ce2da1021d196e47234873820cbeaad210724b1ee28ac"}, {file = "backports.zoneinfo-0.2.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:17746bd546106fa389c51dbea67c8b7c8f0d14b5526a579ca6ccf5ed72c526cf"}, {file = "backports.zoneinfo-0.2.1-cp37-cp37m-manylinux1_x86_64.whl", hash = 
"sha256:5c144945a7752ca544b4b78c8c41544cdfaf9786f25fe5ffb10e838e19a27570"}, {file = "backports.zoneinfo-0.2.1-cp37-cp37m-win32.whl", hash = "sha256:e55b384612d93be96506932a786bbcde5a2db7a9e6a4bb4bffe8b733f5b9036b"}, {file = "backports.zoneinfo-0.2.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a76b38c52400b762e48131494ba26be363491ac4f9a04c1b7e92483d169f6582"}, {file = "backports.zoneinfo-0.2.1-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:8961c0f32cd0336fb8e8ead11a1f8cd99ec07145ec2931122faaac1c8f7fd987"}, {file = "backports.zoneinfo-0.2.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:e81b76cace8eda1fca50e345242ba977f9be6ae3945af8d46326d776b4cf78d1"}, {file = "backports.zoneinfo-0.2.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7b0a64cda4145548fed9efc10322770f929b944ce5cee6c0dfe0c87bf4c0c8c9"}, {file = "backports.zoneinfo-0.2.1-cp38-cp38-win32.whl", hash = "sha256:1b13e654a55cd45672cb54ed12148cd33628f672548f373963b0bff67b217328"}, {file = "backports.zoneinfo-0.2.1-cp38-cp38-win_amd64.whl", hash = "sha256:4a0f800587060bf8880f954dbef70de6c11bbe59c673c3d818921f042f9954a6"}, {file = "backports.zoneinfo-0.2.1.tar.gz", hash = "sha256:fadbfe37f74051d024037f223b8e001611eac868b5c5b06144ef4d8b799862f2"}, ] [package.extras] tzdata = ["tzdata"] [[package]] name = "black" version = "23.3.0" description = "The uncompromising code formatter." optional = false python-versions = ">=3.7" files = [ {file = "black-23.3.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:0945e13506be58bf7db93ee5853243eb368ace1c08a24c65ce108986eac65915"}, {file = "black-23.3.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:67de8d0c209eb5b330cce2469503de11bca4085880d62f1628bd9972cc3366b9"}, {file = "black-23.3.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:7c3eb7cea23904399866c55826b31c1f55bbcd3890ce22ff70466b907b6775c2"}, {file = "black-23.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32daa9783106c28815d05b724238e30718f34155653d4d6e125dc7daec8e260c"}, {file = "black-23.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:35d1381d7a22cc5b2be2f72c7dfdae4072a3336060635718cc7e1ede24221d6c"}, {file = "black-23.3.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:a8a968125d0a6a404842fa1bf0b349a568634f856aa08ffaff40ae0dfa52e7c6"}, {file = "black-23.3.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:c7ab5790333c448903c4b721b59c0d80b11fe5e9803d8703e84dcb8da56fec1b"}, {file = "black-23.3.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:a6f6886c9869d4daae2d1715ce34a19bbc4b95006d20ed785ca00fa03cba312d"}, {file = "black-23.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f3c333ea1dd6771b2d3777482429864f8e258899f6ff05826c3a4fcc5ce3f70"}, {file = "black-23.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:11c410f71b876f961d1de77b9699ad19f939094c3a677323f43d7a29855fe326"}, {file = "black-23.3.0-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:1d06691f1eb8de91cd1b322f21e3bfc9efe0c7ca1f0e1eb1db44ea367dff656b"}, {file = "black-23.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50cb33cac881766a5cd9913e10ff75b1e8eb71babf4c7104f2e9c52da1fb7de2"}, {file = "black-23.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:e114420bf26b90d4b9daa597351337762b63039752bdf72bf361364c1aa05925"}, {file = "black-23.3.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:48f9d345675bb7fbc3dd85821b12487e1b9a75242028adad0333ce36ed2a6d27"}, {file = "black-23.3.0-cp38-cp38-macosx_10_16_universal2.whl", hash = 
"sha256:714290490c18fb0126baa0fca0a54ee795f7502b44177e1ce7624ba1c00f2331"}, {file = "black-23.3.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:064101748afa12ad2291c2b91c960be28b817c0c7eaa35bec09cc63aa56493c5"}, {file = "black-23.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:562bd3a70495facf56814293149e51aa1be9931567474993c7942ff7d3533961"}, {file = "black-23.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:e198cf27888ad6f4ff331ca1c48ffc038848ea9f031a3b40ba36aced7e22f2c8"}, {file = "black-23.3.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:3238f2aacf827d18d26db07524e44741233ae09a584273aa059066d644ca7b30"}, {file = "black-23.3.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:f0bd2f4a58d6666500542b26354978218a9babcdc972722f4bf90779524515f3"}, {file = "black-23.3.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:92c543f6854c28a3c7f39f4d9b7694f9a6eb9d3c5e2ece488c327b6e7ea9b266"}, {file = "black-23.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a150542a204124ed00683f0db1f5cf1c2aaaa9cc3495b7a3b5976fb136090ab"}, {file = "black-23.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:6b39abdfb402002b8a7d030ccc85cf5afff64ee90fa4c5aebc531e3ad0175ddb"}, {file = "black-23.3.0-py3-none-any.whl", hash = "sha256:ec751418022185b0c1bb7d7736e6933d40bbb14c14a0abcf9123d1b159f98dd4"}, {file = "black-23.3.0.tar.gz", hash = "sha256:1c7b8d606e728a41ea1ccbd7264677e494e87cf630e399262ced92d4a8dac940"}, ] [package.dependencies] click = ">=8.0.0" mypy-extensions = ">=0.4.3" packaging = ">=22.0" pathspec = ">=0.9.0" platformdirs = ">=2" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""} [package.extras] colorama = ["colorama (>=0.4.3)"] d = ["aiohttp (>=3.7.4)"] jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "certifi" version = "2023.5.7" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ {file = "certifi-2023.5.7-py3-none-any.whl", hash = "sha256:c6c2e98f5c7869efca1f8916fed228dd91539f9f1b444c314c06eef02980c716"}, {file = "certifi-2023.5.7.tar.gz", hash = "sha256:0f0d56dc5a6ad56fd4ba36484d6cc34451e1c6548c61daad8c320169f91eddc7"}, ] [[package]] name = "cffi" version = "1.15.1" description = "Foreign Function Interface for Python calling C code." 
optional = false python-versions = "*" files = [ {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"}, {file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"}, {file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"}, {file = "cffi-1.15.1-cp27-cp27m-win32.whl", hash = "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3"}, {file = "cffi-1.15.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e"}, {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162"}, {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b"}, {file = "cffi-1.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21"}, {file = "cffi-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185"}, {file = "cffi-1.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd"}, {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc"}, {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f"}, {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e"}, {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4"}, {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01"}, {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e"}, {file = "cffi-1.15.1-cp310-cp310-win32.whl", hash = "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2"}, {file = "cffi-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d"}, {file = "cffi-1.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac"}, {file = "cffi-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83"}, {file = "cffi-1.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9"}, {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c"}, {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325"}, {file = 
"cffi-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c"}, {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef"}, {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8"}, {file = "cffi-1.15.1-cp311-cp311-win32.whl", hash = "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d"}, {file = "cffi-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104"}, {file = "cffi-1.15.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7"}, {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6"}, {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d"}, {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a"}, {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405"}, {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e"}, {file = "cffi-1.15.1-cp36-cp36m-win32.whl", hash = "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf"}, {file = "cffi-1.15.1-cp36-cp36m-win_amd64.whl", hash = "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497"}, {file = "cffi-1.15.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375"}, {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e"}, {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82"}, {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b"}, {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c"}, {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426"}, {file = "cffi-1.15.1-cp37-cp37m-win32.whl", hash = "sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9"}, {file = "cffi-1.15.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045"}, {file = "cffi-1.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3"}, {file = "cffi-1.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a"}, {file = 
"cffi-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5"}, {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca"}, {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02"}, {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192"}, {file = "cffi-1.15.1-cp38-cp38-win32.whl", hash = "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314"}, {file = "cffi-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5"}, {file = "cffi-1.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585"}, {file = "cffi-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"}, {file = "cffi-1.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415"}, {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d"}, {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984"}, {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35"}, {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27"}, {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76"}, {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3"}, {file = "cffi-1.15.1-cp39-cp39-win32.whl", hash = "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee"}, {file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"}, {file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"}, ] [package.dependencies] pycparser = "*" [[package]] name = "channels" version = "2.4.0" description = "Brings async, event-driven capabilities to Django. Django 2.2 and up only." 
optional = false python-versions = ">=3.5" files = [ {file = "channels-2.4.0-py2.py3-none-any.whl", hash = "sha256:80a5ad1962ae039a3dcc0a5cb5212413e66e2f11ad9e9db8004834436daf3400"}, {file = "channels-2.4.0.tar.gz", hash = "sha256:08e756406d7165cb32f6fc3090c0643f41ca9f7e0f7fada0b31194662f20f414"}, ] [package.dependencies] asgiref = ">=3.2,<4.0" daphne = ">=2.3,<3.0" Django = ">=2.2" [package.extras] tests = ["async-generator (>=1.10,<2.0)", "async-timeout (>=3.0,<4.0)", "coverage (>=4.5,<5.0)", "pytest (>=4.4,<5.0)", "pytest-asyncio (>=0.10,<1.0)", "pytest-django (>=3.4,<4.0)"] [[package]] name = "chardet" version = "5.1.0" description = "Universal encoding detector for Python 3" optional = false python-versions = ">=3.7" files = [ {file = "chardet-5.1.0-py3-none-any.whl", hash = "sha256:362777fb014af596ad31334fde1e8c327dfdb076e1960d1694662d46a6917ab9"}, {file = "chardet-5.1.0.tar.gz", hash = "sha256:0d62712b956bc154f85fb0a266e2a3c5913c2967e00348701b32411d6def31e5"}, ] [[package]] name = "charset-normalizer" version = "3.1.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." optional = false python-versions = ">=3.7.0" files = [ {file = "charset-normalizer-3.1.0.tar.gz", hash = "sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5"}, {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b"}, {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60"}, {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1"}, {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0"}, {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f"}, {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0"}, {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795"}, {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c"}, {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203"}, {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1"}, {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137"}, {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce"}, {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a"}, {file = 
"charset_normalizer-3.1.0-cp310-cp310-win32.whl", hash = "sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448"}, {file = "charset_normalizer-3.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8"}, {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19"}, {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017"}, {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df"}, {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a"}, {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41"}, {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1"}, {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62"}, {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6"}, {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5"}, {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be"}, {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb"}, {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac"}, {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324"}, {file = "charset_normalizer-3.1.0-cp311-cp311-win32.whl", hash = "sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909"}, {file = "charset_normalizer-3.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755"}, {file = "charset_normalizer-3.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373"}, {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab"}, {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9"}, {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f"}, {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28"}, {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d"}, {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d"}, {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d"}, {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6"}, {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84"}, {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c"}, {file = "charset_normalizer-3.1.0-cp37-cp37m-win32.whl", hash = "sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974"}, {file = "charset_normalizer-3.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23"}, {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531"}, {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c"}, {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14"}, {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb"}, {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1"}, {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b"}, {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0"}, {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649"}, {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326"}, {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11"}, {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b"}, {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd"}, {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8"}, {file = "charset_normalizer-3.1.0-cp38-cp38-win32.whl", hash = 
"sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0"}, {file = "charset_normalizer-3.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59"}, {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e"}, {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31"}, {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f"}, {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e"}, {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f"}, {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854"}, {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706"}, {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e"}, {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0"}, {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230"}, {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7"}, {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e"}, {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f"}, {file = "charset_normalizer-3.1.0-cp39-cp39-win32.whl", hash = "sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1"}, {file = "charset_normalizer-3.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b"}, {file = "charset_normalizer-3.1.0-py3-none-any.whl", hash = "sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d"}, ] [[package]] name = "click" version = "8.1.3" description = "Composable command line interface toolkit" optional = false python-versions = ">=3.7" files = [ {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, ] [package.dependencies] colorama = {version = "*", markers = "platform_system == \"Windows\""} [[package]] name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." 
optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] [[package]] name = "constantly" version = "15.1.0" description = "Symbolic constants in Python" optional = false python-versions = "*" files = [ {file = "constantly-15.1.0-py2.py3-none-any.whl", hash = "sha256:dd2fa9d6b1a51a83f0d7dd76293d734046aa176e384bf6e33b7e44880eb37c5d"}, {file = "constantly-15.1.0.tar.gz", hash = "sha256:586372eb92059873e29eba4f9dec8381541b4d3834660707faf8ba59146dfc35"}, ] [[package]] name = "coverage" version = "6.5.0" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.7" files = [ {file = "coverage-6.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ef8674b0ee8cc11e2d574e3e2998aea5df5ab242e012286824ea3c6970580e53"}, {file = "coverage-6.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:784f53ebc9f3fd0e2a3f6a78b2be1bd1f5575d7863e10c6e12504f240fd06660"}, {file = "coverage-6.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4a5be1748d538a710f87542f22c2cad22f80545a847ad91ce45e77417293eb4"}, {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83516205e254a0cb77d2d7bb3632ee019d93d9f4005de31dca0a8c3667d5bc04"}, {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af4fffaffc4067232253715065e30c5a7ec6faac36f8fc8d6f64263b15f74db0"}, {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:97117225cdd992a9c2a5515db1f66b59db634f59d0679ca1fa3fe8da32749cae"}, {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a1170fa54185845505fbfa672f1c1ab175446c887cce8212c44149581cf2d466"}, {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:11b990d520ea75e7ee8dcab5bc908072aaada194a794db9f6d7d5cfd19661e5a"}, {file = "coverage-6.5.0-cp310-cp310-win32.whl", hash = "sha256:5dbec3b9095749390c09ab7c89d314727f18800060d8d24e87f01fb9cfb40b32"}, {file = "coverage-6.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:59f53f1dc5b656cafb1badd0feb428c1e7bc19b867479ff72f7a9dd9b479f10e"}, {file = "coverage-6.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4a5375e28c5191ac38cca59b38edd33ef4cc914732c916f2929029b4bfb50795"}, {file = "coverage-6.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4ed2820d919351f4167e52425e096af41bfabacb1857186c1ea32ff9983ed75"}, {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33a7da4376d5977fbf0a8ed91c4dffaaa8dbf0ddbf4c8eea500a2486d8bc4d7b"}, {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8fb6cf131ac4070c9c5a3e21de0f7dc5a0fbe8bc77c9456ced896c12fcdad91"}, {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a6b7d95969b8845250586f269e81e5dfdd8ff828ddeb8567a4a2eaa7313460c4"}, {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1ef221513e6f68b69ee9e159506d583d31aa3567e0ae84eaad9d6ec1107dddaa"}, {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash 
= "sha256:cca4435eebea7962a52bdb216dec27215d0df64cf27fc1dd538415f5d2b9da6b"}, {file = "coverage-6.5.0-cp311-cp311-win32.whl", hash = "sha256:98e8a10b7a314f454d9eff4216a9a94d143a7ee65018dd12442e898ee2310578"}, {file = "coverage-6.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:bc8ef5e043a2af066fa8cbfc6e708d58017024dc4345a1f9757b329a249f041b"}, {file = "coverage-6.5.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4433b90fae13f86fafff0b326453dd42fc9a639a0d9e4eec4d366436d1a41b6d"}, {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4f05d88d9a80ad3cac6244d36dd89a3c00abc16371769f1340101d3cb899fc3"}, {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:94e2565443291bd778421856bc975d351738963071e9b8839ca1fc08b42d4bef"}, {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:027018943386e7b942fa832372ebc120155fd970837489896099f5cfa2890f79"}, {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:255758a1e3b61db372ec2736c8e2a1fdfaf563977eedbdf131de003ca5779b7d"}, {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:851cf4ff24062c6aec510a454b2584f6e998cada52d4cb58c5e233d07172e50c"}, {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:12adf310e4aafddc58afdb04d686795f33f4d7a6fa67a7a9d4ce7d6ae24d949f"}, {file = "coverage-6.5.0-cp37-cp37m-win32.whl", hash = "sha256:b5604380f3415ba69de87a289a2b56687faa4fe04dbee0754bfcae433489316b"}, {file = "coverage-6.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:4a8dbc1f0fbb2ae3de73eb0bdbb914180c7abfbf258e90b311dcd4f585d44bd2"}, {file = "coverage-6.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d900bb429fdfd7f511f868cedd03a6bbb142f3f9118c09b99ef8dc9bf9643c3c"}, {file = "coverage-6.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2198ea6fc548de52adc826f62cb18554caedfb1d26548c1b7c88d8f7faa8f6ba"}, {file = "coverage-6.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c4459b3de97b75e3bd6b7d4b7f0db13f17f504f3d13e2a7c623786289dd670e"}, {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:20c8ac5386253717e5ccc827caad43ed66fea0efe255727b1053a8154d952398"}, {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b07130585d54fe8dff3d97b93b0e20290de974dc8177c320aeaf23459219c0b"}, {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dbdb91cd8c048c2b09eb17713b0c12a54fbd587d79adcebad543bc0cd9a3410b"}, {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:de3001a203182842a4630e7b8d1a2c7c07ec1b45d3084a83d5d227a3806f530f"}, {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e07f4a4a9b41583d6eabec04f8b68076ab3cd44c20bd29332c6572dda36f372e"}, {file = "coverage-6.5.0-cp38-cp38-win32.whl", hash = "sha256:6d4817234349a80dbf03640cec6109cd90cba068330703fa65ddf56b60223a6d"}, {file = "coverage-6.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:7ccf362abd726b0410bf8911c31fbf97f09f8f1061f8c1cf03dfc4b6372848f6"}, {file = "coverage-6.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:633713d70ad6bfc49b34ead4060531658dc6dfc9b3eb7d8a716d5873377ab745"}, {file = "coverage-6.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:95203854f974e07af96358c0b261f1048d8e1083f2de9b1c565e1be4a3a48cfc"}, {file = "coverage-6.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9023e237f4c02ff739581ef35969c3739445fb059b060ca51771e69101efffe"}, {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:265de0fa6778d07de30bcf4d9dc471c3dc4314a23a3c6603d356a3c9abc2dfcf"}, {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f830ed581b45b82451a40faabb89c84e1a998124ee4212d440e9c6cf70083e5"}, {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7b6be138d61e458e18d8e6ddcddd36dd96215edfe5f1168de0b1b32635839b62"}, {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:42eafe6778551cf006a7c43153af1211c3aaab658d4d66fa5fcc021613d02518"}, {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:723e8130d4ecc8f56e9a611e73b31219595baa3bb252d539206f7bbbab6ffc1f"}, {file = "coverage-6.5.0-cp39-cp39-win32.whl", hash = "sha256:d9ecf0829c6a62b9b573c7bb6d4dcd6ba8b6f80be9ba4fc7ed50bf4ac9aecd72"}, {file = "coverage-6.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:fc2af30ed0d5ae0b1abdb4ebdce598eafd5b35397d4d75deb341a614d333d987"}, {file = "coverage-6.5.0-pp36.pp37.pp38-none-any.whl", hash = "sha256:1431986dac3923c5945271f169f59c45b8802a114c8f548d611f2015133df77a"}, {file = "coverage-6.5.0.tar.gz", hash = "sha256:f642e90754ee3e06b0e7e51bce3379590e76b7f76b708e1a71ff043f87025c84"}, ] [package.dependencies] tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} [package.extras] toml = ["tomli"] [[package]] name = "cryptography" version = "41.0.1" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
optional = false python-versions = ">=3.7" files = [ {file = "cryptography-41.0.1-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:f73bff05db2a3e5974a6fd248af2566134d8981fd7ab012e5dd4ddb1d9a70699"}, {file = "cryptography-41.0.1-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:1a5472d40c8f8e91ff7a3d8ac6dfa363d8e3138b961529c996f3e2df0c7a411a"}, {file = "cryptography-41.0.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7fa01527046ca5facdf973eef2535a27fec4cb651e4daec4d043ef63f6ecd4ca"}, {file = "cryptography-41.0.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b46e37db3cc267b4dea1f56da7346c9727e1209aa98487179ee8ebed09d21e43"}, {file = "cryptography-41.0.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d198820aba55660b4d74f7b5fd1f17db3aa5eb3e6893b0a41b75e84e4f9e0e4b"}, {file = "cryptography-41.0.1-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:948224d76c4b6457349d47c0c98657557f429b4e93057cf5a2f71d603e2fc3a3"}, {file = "cryptography-41.0.1-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:059e348f9a3c1950937e1b5d7ba1f8e968508ab181e75fc32b879452f08356db"}, {file = "cryptography-41.0.1-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:b4ceb5324b998ce2003bc17d519080b4ec8d5b7b70794cbd2836101406a9be31"}, {file = "cryptography-41.0.1-cp37-abi3-win32.whl", hash = "sha256:8f4ab7021127a9b4323537300a2acfb450124b2def3756f64dc3a3d2160ee4b5"}, {file = "cryptography-41.0.1-cp37-abi3-win_amd64.whl", hash = "sha256:1fee5aacc7367487b4e22484d3c7e547992ed726d14864ee33c0176ae43b0d7c"}, {file = "cryptography-41.0.1-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9a6c7a3c87d595608a39980ebaa04d5a37f94024c9f24eb7d10262b92f739ddb"}, {file = "cryptography-41.0.1-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5d092fdfedaec4cbbffbf98cddc915ba145313a6fdaab83c6e67f4e6c218e6f3"}, {file = "cryptography-41.0.1-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1a8e6c2de6fbbcc5e14fd27fb24414507cb3333198ea9ab1258d916f00bc3039"}, {file = "cryptography-41.0.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:cb33ccf15e89f7ed89b235cff9d49e2e62c6c981a6061c9c8bb47ed7951190bc"}, {file = "cryptography-41.0.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5f0ff6e18d13a3de56f609dd1fd11470918f770c6bd5d00d632076c727d35485"}, {file = "cryptography-41.0.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:7bfc55a5eae8b86a287747053140ba221afc65eb06207bedf6e019b8934b477c"}, {file = "cryptography-41.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:eb8163f5e549a22888c18b0d53d6bb62a20510060a22fd5a995ec8a05268df8a"}, {file = "cryptography-41.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:8dde71c4169ec5ccc1087bb7521d54251c016f126f922ab2dfe6649170a3b8c5"}, {file = "cryptography-41.0.1.tar.gz", hash = "sha256:d34579085401d3f49762d2f7d6634d6b6c2ae1242202e860f4d26b046e3a1006"}, ] [package.dependencies] cffi = ">=1.12" [package.extras] docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] nox = ["nox"] pep8test = ["black", "check-sdist", "mypy", "ruff"] sdist = ["build"] ssh = ["bcrypt (>=3.1.5)"] test = ["pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] test-randomorder = ["pytest-randomly"] [[package]] name = "daphne" version = "2.5.0" description = "Django ASGI (HTTP/WebSocket) server" optional = false python-versions = "*" files = [ {file = 
"daphne-2.5.0-py2.py3-none-any.whl", hash = "sha256:aa64840015709bbc9daa3c4464a4a4d437937d6cda10a9b51e913eb319272553"}, {file = "daphne-2.5.0.tar.gz", hash = "sha256:1ca46d7419103958bbc9576fb7ba3b25b053006e22058bc97084ee1a7d44f4ba"}, ] [package.dependencies] asgiref = ">=3.2,<4.0" autobahn = ">=0.18" twisted = {version = ">=18.7", extras = ["tls"]} [package.extras] tests = ["hypothesis (==4.23)", "pytest (>=3.10,<4.0)", "pytest-asyncio (>=0.8,<1.0)"] [[package]] name = "django" version = "4.2.3" description = "A high-level Python web framework that encourages rapid development and clean, pragmatic design." optional = false python-versions = ">=3.8" files = [ {file = "Django-4.2.3-py3-none-any.whl", hash = "sha256:f7c7852a5ac5a3da5a8d5b35cc6168f31b605971441798dac845f17ca8028039"}, {file = "Django-4.2.3.tar.gz", hash = "sha256:45a747e1c5b3d6df1b141b1481e193b033fd1fdbda3ff52677dc81afdaacbaed"}, ] [package.dependencies] asgiref = ">=3.6.0,<4" "backports.zoneinfo" = {version = "*", markers = "python_version < \"3.9\""} sqlparse = ">=0.3.1" tzdata = {version = "*", markers = "sys_platform == \"win32\""} [package.extras] argon2 = ["argon2-cffi (>=19.1.0)"] bcrypt = ["bcrypt"] [[package]] name = "doc8" version = "0.8.1" description = "Style checker for Sphinx (or other) RST documentation" optional = false python-versions = "*" files = [ {file = "doc8-0.8.1-py2.py3-none-any.whl", hash = "sha256:4d58a5c8c56cedd2b2c9d6e3153be5d956cf72f6051128f0f2255c66227df721"}, {file = "doc8-0.8.1.tar.gz", hash = "sha256:4d1df12598807cf08ffa9a1d5ef42d229ee0de42519da01b768ff27211082c12"}, ] [package.dependencies] chardet = "*" docutils = "*" Pygments = "*" restructuredtext-lint = ">=0.7" six = "*" stevedore = "*" [[package]] name = "docutils" version = "0.17.1" description = "Docutils -- Python Documentation Utilities" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ {file = "docutils-0.17.1-py2.py3-none-any.whl", hash = "sha256:cf316c8370a737a022b72b56874f6602acf974a37a9fba42ec2876387549fc61"}, {file = "docutils-0.17.1.tar.gz", hash = "sha256:686577d2e4c32380bb50cbb22f575ed742d58168cee37e99117a854bcd88f125"}, ] [[package]] name = "hyperlink" version = "21.0.0" description = "A featureful, immutable, and correct URL for Python." 
optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ {file = "hyperlink-21.0.0-py2.py3-none-any.whl", hash = "sha256:e6b14c37ecb73e89c77d78cdb4c2cc8f3fb59a885c5b3f819ff4ed80f25af1b4"}, {file = "hyperlink-21.0.0.tar.gz", hash = "sha256:427af957daa58bc909471c6c40f74c5450fa123dd093fc53efd2e91d2705a56b"}, ] [package.dependencies] idna = ">=2.5" [[package]] name = "idna" version = "3.4" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.5" files = [ {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, ] [[package]] name = "imagesize" version = "1.4.1" description = "Getting image size from png/jpeg/jpeg2000/gif file" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"}, {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, ] [[package]] name = "incremental" version = "22.10.0" description = "\"A small library that versions your Python projects.\"" optional = false python-versions = "*" files = [ {file = "incremental-22.10.0-py2.py3-none-any.whl", hash = "sha256:b864a1f30885ee72c5ac2835a761b8fe8aa9c28b9395cacf27286602688d3e51"}, {file = "incremental-22.10.0.tar.gz", hash = "sha256:912feeb5e0f7e0188e6f42241d2f450002e11bbc0937c65865045854c24c0bd0"}, ] [package.extras] mypy = ["click (>=6.0)", "mypy (==0.812)", "twisted (>=16.4.0)"] scripts = ["click (>=6.0)", "twisted (>=16.4.0)"] [[package]] name = "jinja2" version = "3.1.2" description = "A very fast and expressive template engine." optional = false python-versions = ">=3.7" files = [ {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, ] [package.dependencies] MarkupSafe = ">=2.0" [package.extras] i18n = ["Babel (>=2.7)"] [[package]] name = "markupsafe" version = "2.1.3" description = "Safely add untrusted strings to HTML/XML markup." 
optional = false python-versions = ">=3.7" files = [ {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa"}, {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57"}, {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f"}, {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52"}, {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00"}, {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6"}, {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779"}, {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7"}, {file = "MarkupSafe-2.1.3-cp310-cp310-win32.whl", hash = "sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431"}, {file = "MarkupSafe-2.1.3-cp310-cp310-win_amd64.whl", hash = "sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559"}, {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c"}, {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575"}, {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee"}, {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2"}, {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9"}, {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc"}, {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9"}, {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, {file = 
"MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-win32.whl", hash = "sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-win_amd64.whl", hash = "sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24"}, {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4"}, {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0"}, {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee"}, {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be"}, {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e"}, {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8"}, {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3"}, {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d"}, {file = "MarkupSafe-2.1.3-cp38-cp38-win32.whl", hash = "sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5"}, {file = "MarkupSafe-2.1.3-cp38-cp38-win_amd64.whl", hash = "sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc"}, {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198"}, {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b"}, {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58"}, {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e"}, {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c"}, {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636"}, {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea"}, {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e"}, {file = "MarkupSafe-2.1.3-cp39-cp39-win32.whl", hash = "sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2"}, {file = "MarkupSafe-2.1.3-cp39-cp39-win_amd64.whl", hash = "sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba"}, {file = "MarkupSafe-2.1.3.tar.gz", hash = "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"}, ] [[package]] name = "mypy-extensions" version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." optional = false python-versions = ">=3.5" files = [ {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, ] [[package]] name = "packaging" version = "23.1" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ {file = "packaging-23.1-py3-none-any.whl", hash = "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61"}, {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"}, ] [[package]] name = "pathspec" version = "0.11.1" description = "Utility library for gitignore style pattern matching of file paths." optional = false python-versions = ">=3.7" files = [ {file = "pathspec-0.11.1-py3-none-any.whl", hash = "sha256:d8af70af76652554bd134c22b3e8a1cc46ed7d91edcdd721ef1a0c51a84a5293"}, {file = "pathspec-0.11.1.tar.gz", hash = "sha256:2798de800fa92780e33acca925945e9a19a133b715067cf165b8866c15a31687"}, ] [[package]] name = "pbr" version = "5.11.1" description = "Python Build Reasonableness" optional = false python-versions = ">=2.6" files = [ {file = "pbr-5.11.1-py2.py3-none-any.whl", hash = "sha256:567f09558bae2b3ab53cb3c1e2e33e726ff3338e7bae3db5dc954b3a44eef12b"}, {file = "pbr-5.11.1.tar.gz", hash = "sha256:aefc51675b0b533d56bb5fd1c8c6c0522fe31896679882e1c4c63d5e4a0fccb3"}, ] [[package]] name = "platformdirs" version = "3.8.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
optional = false python-versions = ">=3.7" files = [ {file = "platformdirs-3.8.0-py3-none-any.whl", hash = "sha256:ca9ed98ce73076ba72e092b23d3c93ea6c4e186b3f1c3dad6edd98ff6ffcca2e"}, {file = "platformdirs-3.8.0.tar.gz", hash = "sha256:b0cabcb11063d21a0b261d557acb0a9d2126350e63b70cdf7db6347baea456dc"}, ] [package.extras] docs = ["furo (>=2023.5.20)", "proselint (>=0.13)", "sphinx (>=7.0.1)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.3.1)", "pytest-cov (>=4.1)", "pytest-mock (>=3.10)"] [[package]] name = "pyasn1" version = "0.5.0" description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ {file = "pyasn1-0.5.0-py2.py3-none-any.whl", hash = "sha256:87a2121042a1ac9358cabcaf1d07680ff97ee6404333bacca15f76aa8ad01a57"}, {file = "pyasn1-0.5.0.tar.gz", hash = "sha256:97b7290ca68e62a832558ec3976f15cbf911bf5d7c7039d8b861c2a0ece69fde"}, ] [[package]] name = "pyasn1-modules" version = "0.3.0" description = "A collection of ASN.1-based protocols modules" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ {file = "pyasn1_modules-0.3.0-py2.py3-none-any.whl", hash = "sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d"}, {file = "pyasn1_modules-0.3.0.tar.gz", hash = "sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c"}, ] [package.dependencies] pyasn1 = ">=0.4.6,<0.6.0" [[package]] name = "pycparser" version = "2.21" description = "C parser in Python" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, ] [[package]] name = "pygments" version = "2.15.1" description = "Pygments is a syntax highlighting package written in Python." 
optional = false python-versions = ">=3.7" files = [ {file = "Pygments-2.15.1-py3-none-any.whl", hash = "sha256:db2db3deb4b4179f399a09054b023b6a586b76499d36965813c71aa8ed7b5fd1"}, {file = "Pygments-2.15.1.tar.gz", hash = "sha256:8ace4d3c1dd481894b2005f560ead0f9f19ee64fe983366be1a21e171d12775c"}, ] [package.extras] plugins = ["importlib-metadata"] [[package]] name = "pyopenssl" version = "23.2.0" description = "Python wrapper module around the OpenSSL library" optional = false python-versions = ">=3.6" files = [ {file = "pyOpenSSL-23.2.0-py3-none-any.whl", hash = "sha256:24f0dc5227396b3e831f4c7f602b950a5e9833d292c8e4a2e06b709292806ae2"}, {file = "pyOpenSSL-23.2.0.tar.gz", hash = "sha256:276f931f55a452e7dea69c7173e984eb2a4407ce413c918aa34b55f82f9b8bac"}, ] [package.dependencies] cryptography = ">=38.0.0,<40.0.0 || >40.0.0,<40.0.1 || >40.0.1,<42" [package.extras] docs = ["sphinx (!=5.2.0,!=5.2.0.post0)", "sphinx-rtd-theme"] test = ["flaky", "pretend", "pytest (>=3.0.1)"] [[package]] name = "pytz" version = "2023.3" description = "World timezone definitions, modern and historical" optional = false python-versions = "*" files = [ {file = "pytz-2023.3-py2.py3-none-any.whl", hash = "sha256:a151b3abb88eda1d4e34a9814df37de2a80e301e68ba0fd856fb9b46bfbbbffb"}, {file = "pytz-2023.3.tar.gz", hash = "sha256:1d8ce29db189191fb55338ee6d0387d82ab59f3d00eac103412d64e0ebd0c588"}, ] [[package]] name = "requests" version = "2.31.0" description = "Python HTTP for Humans." optional = false python-versions = ">=3.7" files = [ {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, ] [package.dependencies] certifi = ">=2017.4.17" charset-normalizer = ">=2,<4" idna = ">=2.5,<4" urllib3 = ">=1.21.1,<3" [package.extras] socks = ["PySocks (>=1.5.6,!=1.5.7)"] use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "restructuredtext-lint" version = "1.4.0" description = "reStructuredText linter" optional = false python-versions = "*" files = [ {file = "restructuredtext_lint-1.4.0.tar.gz", hash = "sha256:1b235c0c922341ab6c530390892eb9e92f90b9b75046063e047cacfb0f050c45"}, ] [package.dependencies] docutils = ">=0.11,<1.0" [[package]] name = "ruff" version = "0.0.275" description = "An extremely fast Python linter, written in Rust." 
optional = false python-versions = ">=3.7" files = [ {file = "ruff-0.0.275-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:5e6554a072e7ce81eb6f0bec1cebd3dcb0e358652c0f4900d7d630d61691e914"}, {file = "ruff-0.0.275-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:1cc599022fe5ffb143a965b8d659eb64161ab8ab4433d208777eab018a1aab67"}, {file = "ruff-0.0.275-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5206fc1cd8c1c1deadd2e6360c0dbcd690f1c845da588ca9d32e4a764a402c60"}, {file = "ruff-0.0.275-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0c4e6468da26f77b90cae35319d310999f471a8c352998e9b39937a23750149e"}, {file = "ruff-0.0.275-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0dbdea02942131dbc15dd45f431d152224f15e1dd1859fcd0c0487b658f60f1a"}, {file = "ruff-0.0.275-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:22efd9f41af27ef8fb9779462c46c35c89134d33e326c889971e10b2eaf50c63"}, {file = "ruff-0.0.275-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2c09662112cfa22d7467a19252a546291fd0eae4f423e52b75a7a2000a1894db"}, {file = "ruff-0.0.275-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80043726662144876a381efaab88841c88e8df8baa69559f96b22d4fa216bef1"}, {file = "ruff-0.0.275-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5859ee543b01b7eb67835dfd505faa8bb7cc1550f0295c92c1401b45b42be399"}, {file = "ruff-0.0.275-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:c8ace4d40a57b5ea3c16555f25a6b16bc5d8b2779ae1912ce2633543d4e9b1da"}, {file = "ruff-0.0.275-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:8347fc16aa185aae275906c4ac5b770e00c896b6a0acd5ba521f158801911998"}, {file = "ruff-0.0.275-py3-none-musllinux_1_2_i686.whl", hash = "sha256:ec43658c64bfda44fd84bbea9da8c7a3b34f65448192d1c4dd63e9f4e7abfdd4"}, {file = "ruff-0.0.275-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:508b13f7ca37274cceaba4fb3ea5da6ca192356323d92acf39462337c33ad14e"}, {file = "ruff-0.0.275-py3-none-win32.whl", hash = "sha256:6afb1c4422f24f361e877937e2a44b3f8176774a476f5e33845ebfe887dd5ec2"}, {file = "ruff-0.0.275-py3-none-win_amd64.whl", hash = "sha256:d9b264d78621bf7b698b6755d4913ab52c19bd28bee1a16001f954d64c1a1220"}, {file = "ruff-0.0.275-py3-none-win_arm64.whl", hash = "sha256:a19ce3bea71023eee5f0f089dde4a4272d088d5ac0b675867e074983238ccc65"}, {file = "ruff-0.0.275.tar.gz", hash = "sha256:a63a0b645da699ae5c758fce19188e901b3033ec54d862d93fcd042addf7f38d"}, ] [[package]] name = "service-identity" version = "23.1.0" description = "Service identity verification for pyOpenSSL & cryptography." 
optional = false python-versions = ">=3.8" files = [ {file = "service_identity-23.1.0-py3-none-any.whl", hash = "sha256:87415a691d52fcad954a500cb81f424d0273f8e7e3ee7d766128f4575080f383"}, {file = "service_identity-23.1.0.tar.gz", hash = "sha256:ecb33cd96307755041e978ab14f8b14e13b40f1fbd525a4dc78f46d2b986431d"}, ] [package.dependencies] attrs = ">=19.1.0" cryptography = "*" pyasn1 = "*" pyasn1-modules = "*" [package.extras] dev = ["pyopenssl", "service-identity[docs,idna,mypy,tests]"] docs = ["furo", "myst-parser", "pyopenssl", "sphinx", "sphinx-notfound-page"] idna = ["idna"] mypy = ["idna", "mypy", "types-pyopenssl"] tests = ["coverage[toml] (>=5.0.2)", "pytest"] [[package]] name = "setuptools" version = "68.0.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.7" files = [ {file = "setuptools-68.0.0-py3-none-any.whl", hash = "sha256:11e52c67415a381d10d6b462ced9cfb97066179f0e871399e006c4ab101fc85f"}, {file = "setuptools-68.0.0.tar.gz", hash = "sha256:baf1fdb41c6da4cd2eae722e135500da913332ab3f2f5c7d33af9b492acb5235"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" version = "1.16.0" description = "Python 2 and 3 compatibility utilities" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" files = [ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] [[package]] name = "snowballstemmer" version = "2.2.0" description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." 
optional = false python-versions = "*" files = [ {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, ] [[package]] name = "sphinx" version = "1.8.6" description = "Python documentation generator" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ {file = "Sphinx-1.8.6-py2.py3-none-any.whl", hash = "sha256:5973adbb19a5de30e15ab394ec8bc05700317fa83f122c349dd01804d983720f"}, {file = "Sphinx-1.8.6.tar.gz", hash = "sha256:e096b1b369dbb0fcb95a31ba8c9e1ae98c588e601f08eada032248e1696de4b1"}, ] [package.dependencies] alabaster = ">=0.7,<0.8" babel = ">=1.3,<2.0 || >2.0" colorama = {version = ">=0.3.5", markers = "sys_platform == \"win32\""} docutils = ">=0.11,<0.18" imagesize = "*" Jinja2 = ">=2.3" packaging = "*" Pygments = ">=2.0" requests = ">=2.0.0" setuptools = "*" six = ">=1.5" snowballstemmer = ">=1.1" sphinxcontrib-websupport = "*" [package.extras] test = ["enum34", "flake8 (>=3.5.0)", "flake8-import-order", "html5lib", "mock", "mypy", "pytest", "pytest-cov", "typed-ast"] websupport = ["sqlalchemy (>=0.9)", "whoosh (>=2.0)"] [[package]] name = "sphinx-django-command" version = "0.1.3" description = "Sphinx plugin for help Django commands documentation." optional = false python-versions = "*" files = [ {file = "sphinx-django-command-0.1.3.tar.gz", hash = "sha256:837adc9146c51c7a35768e9b2f3f544056a39d0616d72550f957282cd8c678fa"}, ] [package.dependencies] Django = "*" docutils = "*" sphinx = "*" [[package]] name = "sphinx-rtd-theme" version = "0.4.3" description = "Read the Docs theme for Sphinx" optional = false python-versions = "*" files = [ {file = "sphinx_rtd_theme-0.4.3-py2.py3-none-any.whl", hash = "sha256:00cf895504a7895ee433807c62094cf1e95f065843bf3acd17037c3e9a2becd4"}, {file = "sphinx_rtd_theme-0.4.3.tar.gz", hash = "sha256:728607e34d60456d736cc7991fd236afb828b21b82f956c5ea75f94c8414040a"}, ] [package.dependencies] sphinx = "*" [[package]] name = "sphinxcontrib-serializinghtml" version = "1.1.5" description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)." optional = false python-versions = ">=3.5" files = [ {file = "sphinxcontrib-serializinghtml-1.1.5.tar.gz", hash = "sha256:aa5f6de5dfdf809ef505c4895e51ef5c9eac17d0f287933eb49ec495280b6952"}, {file = "sphinxcontrib_serializinghtml-1.1.5-py2.py3-none-any.whl", hash = "sha256:352a9a00ae864471d3a7ead8d7d79f5fc0b57e8b3f95e9867eb9eb28999b92fd"}, ] [package.extras] lint = ["docutils-stubs", "flake8", "mypy"] test = ["pytest"] [[package]] name = "sphinxcontrib-websupport" version = "1.2.4" description = "Sphinx API for Web Apps" optional = false python-versions = ">=3.5" files = [ {file = "sphinxcontrib-websupport-1.2.4.tar.gz", hash = "sha256:4edf0223a0685a7c485ae5a156b6f529ba1ee481a1417817935b20bde1956232"}, {file = "sphinxcontrib_websupport-1.2.4-py2.py3-none-any.whl", hash = "sha256:6fc9287dfc823fe9aa432463edd6cea47fa9ebbf488d7f289b322ffcfca075c7"}, ] [package.dependencies] sphinxcontrib-serializinghtml = "*" [package.extras] lint = ["flake8"] test = ["Sphinx", "pytest", "sqlalchemy", "whoosh"] [[package]] name = "sqlparse" version = "0.4.4" description = "A non-validating SQL parser." 
optional = false python-versions = ">=3.5" files = [ {file = "sqlparse-0.4.4-py3-none-any.whl", hash = "sha256:5430a4fe2ac7d0f93e66f1efc6e1338a41884b7ddf2a350cedd20ccc4d9d28f3"}, {file = "sqlparse-0.4.4.tar.gz", hash = "sha256:d446183e84b8349fa3061f0fe7f06ca94ba65b426946ffebe6e3e8295332420c"}, ] [package.extras] dev = ["build", "flake8"] doc = ["sphinx"] test = ["pytest", "pytest-cov"] [[package]] name = "stevedore" version = "5.1.0" description = "Manage dynamic plugins for Python applications" optional = false python-versions = ">=3.8" files = [ {file = "stevedore-5.1.0-py3-none-any.whl", hash = "sha256:8cc040628f3cea5d7128f2e76cf486b2251a4e543c7b938f58d9a377f6694a2d"}, {file = "stevedore-5.1.0.tar.gz", hash = "sha256:a54534acf9b89bc7ed264807013b505bf07f74dbe4bcfa37d32bd063870b087c"}, ] [package.dependencies] pbr = ">=2.0.0,<2.1.0 || >2.1.0" [[package]] name = "tomli" version = "2.0.1" description = "A lil' TOML parser" optional = false python-versions = ">=3.7" files = [ {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] [[package]] name = "twisted" version = "22.10.0" description = "An asynchronous networking framework written in Python" optional = false python-versions = ">=3.7.1" files = [ {file = "Twisted-22.10.0-py3-none-any.whl", hash = "sha256:86c55f712cc5ab6f6d64e02503352464f0400f66d4f079096d744080afcccbd0"}, {file = "Twisted-22.10.0.tar.gz", hash = "sha256:32acbd40a94f5f46e7b42c109bfae2b302250945561783a8b7a059048f2d4d31"}, ] [package.dependencies] attrs = ">=19.2.0" Automat = ">=0.8.0" constantly = ">=15.1" hyperlink = ">=17.1.1" idna = {version = ">=2.4", optional = true, markers = "extra == \"tls\""} incremental = ">=21.3.0" pyopenssl = {version = ">=21.0.0", optional = true, markers = "extra == \"tls\""} service-identity = {version = ">=18.1.0", optional = true, markers = "extra == \"tls\""} twisted-iocpsupport = {version = ">=1.0.2,<2", markers = "platform_system == \"Windows\""} typing-extensions = ">=3.6.5" "zope.interface" = ">=4.4.2" [package.extras] all-non-platform = ["PyHamcrest (>=1.9.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "contextvars (>=2.4,<3)", "cryptography (>=2.6)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.0,<7.0)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "pyasn1", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "service-identity (>=18.1.0)"] conch = ["appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "cryptography (>=2.6)", "pyasn1"] conch-nacl = ["PyNaCl", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "cryptography (>=2.6)", "pyasn1"] contextvars = ["contextvars (>=2.4,<3)"] dev = ["coverage (>=6b1,<7)", "pydoctor (>=22.9.0,<22.10.0)", "pyflakes (>=2.2,<3.0)", "python-subunit (>=1.4,<2.0)", "readthedocs-sphinx-ext (>=2.1,<3.0)", "sphinx (>=5.0,<6)", "sphinx-rtd-theme (>=1.0,<2.0)", "towncrier (>=22.8,<23.0)", "twistedchecker (>=0.7,<1.0)"] dev-release = ["pydoctor (>=22.9.0,<22.10.0)", "readthedocs-sphinx-ext (>=2.1,<3.0)", "sphinx (>=5.0,<6)", "sphinx-rtd-theme (>=1.0,<2.0)", "towncrier (>=22.8,<23.0)"] gtk-platform = ["PyHamcrest (>=1.9.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "contextvars (>=2.4,<3)", "cryptography (>=2.6)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.0,<7.0)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "pyasn1", "pygobject", "pyopenssl (>=21.0.0)", "pyserial 
(>=3.0)", "pywin32 (!=226)", "service-identity (>=18.1.0)"] http2 = ["h2 (>=3.0,<5.0)", "priority (>=1.1.0,<2.0)"] macos-platform = ["PyHamcrest (>=1.9.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "contextvars (>=2.4,<3)", "cryptography (>=2.6)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.0,<7.0)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "pyasn1", "pyobjc-core", "pyobjc-framework-CFNetwork", "pyobjc-framework-Cocoa", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "service-identity (>=18.1.0)"] mypy = ["PyHamcrest (>=1.9.0)", "PyNaCl", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "contextvars (>=2.4,<3)", "coverage (>=6b1,<7)", "cryptography (>=2.6)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.0,<7.0)", "idna (>=2.4)", "mypy (==0.930)", "mypy-zope (==0.3.4)", "priority (>=1.1.0,<2.0)", "pyasn1", "pydoctor (>=22.9.0,<22.10.0)", "pyflakes (>=2.2,<3.0)", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "python-subunit (>=1.4,<2.0)", "pywin32 (!=226)", "readthedocs-sphinx-ext (>=2.1,<3.0)", "service-identity (>=18.1.0)", "sphinx (>=5.0,<6)", "sphinx-rtd-theme (>=1.0,<2.0)", "towncrier (>=22.8,<23.0)", "twistedchecker (>=0.7,<1.0)", "types-pyOpenSSL", "types-setuptools"] osx-platform = ["PyHamcrest (>=1.9.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "contextvars (>=2.4,<3)", "cryptography (>=2.6)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.0,<7.0)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "pyasn1", "pyobjc-core", "pyobjc-framework-CFNetwork", "pyobjc-framework-Cocoa", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "service-identity (>=18.1.0)"] serial = ["pyserial (>=3.0)", "pywin32 (!=226)"] test = ["PyHamcrest (>=1.9.0)", "cython-test-exception-raiser (>=1.0.2,<2)", "hypothesis (>=6.0,<7.0)"] tls = ["idna (>=2.4)", "pyopenssl (>=21.0.0)", "service-identity (>=18.1.0)"] windows-platform = ["PyHamcrest (>=1.9.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "contextvars (>=2.4,<3)", "cryptography (>=2.6)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.0,<7.0)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "pyasn1", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "pywin32 (!=226)", "service-identity (>=18.1.0)"] [[package]] name = "twisted-iocpsupport" version = "1.0.3" description = "An extension for use in the twisted I/O Completion Ports reactor." 
optional = false python-versions = "*" files = [ {file = "twisted-iocpsupport-1.0.3.tar.gz", hash = "sha256:afb00801fdfbaccf0d0173a722626500023d4a19719ac9f129d1347a32e2fc66"}, {file = "twisted_iocpsupport-1.0.3-cp310-cp310-win32.whl", hash = "sha256:a379ef56a576c8090889f74441bc3822ca31ac82253cc61e8d50631bcb0c26d0"}, {file = "twisted_iocpsupport-1.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:1ea2c3fbdb739c95cc8b3355305cd593d2c9ec56d709207aa1a05d4d98671e85"}, {file = "twisted_iocpsupport-1.0.3-cp311-cp311-win32.whl", hash = "sha256:7efcdfafb377f32db90f42bd5fc5bb32cd1e3637ee936cdaf3aff4f4786ab3bf"}, {file = "twisted_iocpsupport-1.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:1dbfac706972bf9ec5ce1ddbc735d2ebba406ad363345df8751ffd5252aa1618"}, {file = "twisted_iocpsupport-1.0.3-cp36-cp36m-win32.whl", hash = "sha256:1ddfc5fa22ec6f913464b736b3f46e642237f17ac41be47eed6fa9bd52f5d0e0"}, {file = "twisted_iocpsupport-1.0.3-cp36-cp36m-win_amd64.whl", hash = "sha256:1bdccbb22199fc69fd7744d6d2dfd22d073c028c8611d994b41d2d2ad0e0f40d"}, {file = "twisted_iocpsupport-1.0.3-cp37-cp37m-win32.whl", hash = "sha256:db11c80054b52dbdea44d63d5474a44c9a6531882f0e2960268b15123088641a"}, {file = "twisted_iocpsupport-1.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:67bec1716eb8f466ef366bbf262e1467ecc9e20940111207663ac24049785bad"}, {file = "twisted_iocpsupport-1.0.3-cp38-cp38-win32.whl", hash = "sha256:98a6f16ab215f8c1446e9fc60aaed0ab7c746d566aa2f3492a23cea334e6bebb"}, {file = "twisted_iocpsupport-1.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:4f249d0baac836bb431d6fa0178be063a310136bc489465a831e3abd2d7acafd"}, {file = "twisted_iocpsupport-1.0.3-cp39-cp39-win32.whl", hash = "sha256:aaca8f30c3b7c80d27a33fe9fe0d0bac42b1b012ddc60f677175c30e1becc1f3"}, {file = "twisted_iocpsupport-1.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:dff43136c33665c2d117a73706aef6f7d6433e5c4560332a118fe066b16b8695"}, {file = "twisted_iocpsupport-1.0.3-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:8faceae553cfadc42ad791b1790e7cdecb7751102608c405217f6a26e877e0c5"}, {file = "twisted_iocpsupport-1.0.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:6f8c433faaad5d53d30d1da6968d5a3730df415e2efb6864847267a9b51290cd"}, {file = "twisted_iocpsupport-1.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:3f39c41c0213a81a9ce0961e30d0d7650f371ad80f8d261007d15a2deb6d5be3"}, ] [[package]] name = "txaio" version = "23.1.1" description = "Compatibility API between asyncio/Twisted/Trollius" optional = false python-versions = ">=3.7" files = [ {file = "txaio-23.1.1-py2.py3-none-any.whl", hash = "sha256:aaea42f8aad50e0ecfb976130ada140797e9dcb85fad2cf72b0f37f8cefcb490"}, {file = "txaio-23.1.1.tar.gz", hash = "sha256:f9a9216e976e5e3246dfd112ad7ad55ca915606b60b84a757ac769bd404ff704"}, ] [package.extras] all = ["twisted (>=20.3.0)", "zope.interface (>=5.2.0)"] dev = ["pep8 (>=1.6.2)", "pyenchant (>=1.6.6)", "pytest (>=2.6.4)", "pytest-cov (>=1.8.1)", "sphinx (>=1.2.3)", "sphinx-rtd-theme (>=0.1.9)", "sphinxcontrib-spelling (>=2.1.2)", "tox (>=2.1.1)", "tox-gh-actions (>=2.2.0)", "twine (>=1.6.5)", "wheel"] twisted = ["twisted (>=20.3.0)", "zope.interface (>=5.2.0)"] [[package]] name = "typing-extensions" version = "4.6.3" description = "Backported and Experimental Type Hints for Python 3.7+" optional = false python-versions = ">=3.7" files = [ {file = "typing_extensions-4.6.3-py3-none-any.whl", hash = "sha256:88a4153d8505aabbb4e13aacb7c486c2b4a33ca3b3f807914a9b4c844c471c26"}, {file = "typing_extensions-4.6.3.tar.gz", hash = 
"sha256:d91d5919357fe7f681a9f2b5b4cb2a5f1ef0a1e9f59c4d8ff0d3491e05c0ffd5"}, ] [[package]] name = "tzdata" version = "2023.3" description = "Provider of IANA time zone data" optional = false python-versions = ">=2" files = [ {file = "tzdata-2023.3-py2.py3-none-any.whl", hash = "sha256:7e65763eef3120314099b6939b5546db7adce1e7d6f2e179e3df563c70511eda"}, {file = "tzdata-2023.3.tar.gz", hash = "sha256:11ef1e08e54acb0d4f95bdb1be05da659673de4acbd21bf9c69e94cc5e907a3a"}, ] [[package]] name = "urllib3" version = "2.0.3" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.7" files = [ {file = "urllib3-2.0.3-py3-none-any.whl", hash = "sha256:48e7fafa40319d358848e1bc6809b208340fafe2096f1725d05d67443d0483d1"}, {file = "urllib3-2.0.3.tar.gz", hash = "sha256:bee28b5e56addb8226c96f7f13ac28cb4c301dd5ea8a6ca179c0b9835e032825"}, ] [package.extras] brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] secure = ["certifi", "cryptography (>=1.9)", "idna (>=2.0.0)", "pyopenssl (>=17.1.0)", "urllib3-secure-extra"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] [[package]] name = "zope-interface" version = "6.0" description = "Interfaces for Python" optional = false python-versions = ">=3.7" files = [ {file = "zope.interface-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f299c020c6679cb389814a3b81200fe55d428012c5e76da7e722491f5d205990"}, {file = "zope.interface-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ee4b43f35f5dc15e1fec55ccb53c130adb1d11e8ad8263d68b1284b66a04190d"}, {file = "zope.interface-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a158846d0fca0a908c1afb281ddba88744d403f2550dc34405c3691769cdd85"}, {file = "zope.interface-6.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f72f23bab1848edb7472309e9898603141644faec9fd57a823ea6b4d1c4c8995"}, {file = "zope.interface-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48f4d38cf4b462e75fac78b6f11ad47b06b1c568eb59896db5b6ec1094eb467f"}, {file = "zope.interface-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:87b690bbee9876163210fd3f500ee59f5803e4a6607d1b1238833b8885ebd410"}, {file = "zope.interface-6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f2363e5fd81afb650085c6686f2ee3706975c54f331b426800b53531191fdf28"}, {file = "zope.interface-6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:af169ba897692e9cd984a81cb0f02e46dacdc07d6cf9fd5c91e81f8efaf93d52"}, {file = "zope.interface-6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fa90bac61c9dc3e1a563e5babb3fd2c0c1c80567e815442ddbe561eadc803b30"}, {file = "zope.interface-6.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:89086c9d3490a0f265a3c4b794037a84541ff5ffa28bb9c24cc9f66566968464"}, {file = "zope.interface-6.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:809fe3bf1a91393abc7e92d607976bbb8586512913a79f2bf7d7ec15bd8ea518"}, {file = "zope.interface-6.0-cp311-cp311-win_amd64.whl", hash = "sha256:0ec9653825f837fbddc4e4b603d90269b501486c11800d7c761eee7ce46d1bbb"}, {file = "zope.interface-6.0-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:790c1d9d8f9c92819c31ea660cd43c3d5451df1df61e2e814a6f99cebb292788"}, {file = 
"zope.interface-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b39b8711578dcfd45fc0140993403b8a81e879ec25d53189f3faa1f006087dca"}, {file = "zope.interface-6.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eba51599370c87088d8882ab74f637de0c4f04a6d08a312dce49368ba9ed5c2a"}, {file = "zope.interface-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ee934f023f875ec2cfd2b05a937bd817efcc6c4c3f55c5778cbf78e58362ddc"}, {file = "zope.interface-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:042f2381118b093714081fd82c98e3b189b68db38ee7d35b63c327c470ef8373"}, {file = "zope.interface-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:dfbbbf0809a3606046a41f8561c3eada9db811be94138f42d9135a5c47e75f6f"}, {file = "zope.interface-6.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:424d23b97fa1542d7be882eae0c0fc3d6827784105264a8169a26ce16db260d8"}, {file = "zope.interface-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e538f2d4a6ffb6edfb303ce70ae7e88629ac6e5581870e66c306d9ad7b564a58"}, {file = "zope.interface-6.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:12175ca6b4db7621aedd7c30aa7cfa0a2d65ea3a0105393e05482d7a2d367446"}, {file = "zope.interface-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c3d7dfd897a588ec27e391edbe3dd320a03684457470415870254e714126b1f"}, {file = "zope.interface-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:b3f543ae9d3408549a9900720f18c0194ac0fe810cecda2a584fd4dca2eb3bb8"}, {file = "zope.interface-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d0583b75f2e70ec93f100931660328965bb9ff65ae54695fb3fa0a1255daa6f2"}, {file = "zope.interface-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:23ac41d52fd15dd8be77e3257bc51bbb82469cf7f5e9a30b75e903e21439d16c"}, {file = "zope.interface-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99856d6c98a326abbcc2363827e16bd6044f70f2ef42f453c0bd5440c4ce24e5"}, {file = "zope.interface-6.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1592f68ae11e557b9ff2bc96ac8fc30b187e77c45a3c9cd876e3368c53dc5ba8"}, {file = "zope.interface-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4407b1435572e3e1610797c9203ad2753666c62883b921318c5403fb7139dec2"}, {file = "zope.interface-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:5171eb073474a5038321409a630904fd61f12dd1856dd7e9d19cd6fe092cbbc5"}, {file = "zope.interface-6.0.tar.gz", hash = "sha256:aab584725afd10c710b8f1e6e208dbee2d0ad009f57d674cb9d1b3964037275d"}, ] [package.dependencies] setuptools = "*" [package.extras] docs = ["Sphinx", "repoze.sphinx.autointerface"] test = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] [metadata] lock-version = "2.0" python-versions = "^3.8.1" content-hash = "eb07451579e35244b85e08ea934d7a6e757ffbdb844a26a0ee5cd455995aa0c4" django-pgschemas-0.15.2/pyproject.toml000066400000000000000000000031561463633566500177530ustar00rootroot00000000000000[tool.poetry] name = "django-pgschemas" version = "0.15.2" description = "Multi-tenancy on Django using PostgreSQL schemas." 
license = "MIT" authors = ["Lorenzo Peña "] readme = "README.rst" repository = "https://github.com/lorinkoz/django-pgschemas" documentation = "https://django-pgschemas.readthedocs.io/" keywords = ["django", "tenants", "schemas", "multi-tenancy", "postgresql"] classifiers = [ "Development Status :: 4 - Beta", "Framework :: Django", "Framework :: Django :: 4.0", "Framework :: Django :: 4.1", "Framework :: Django :: 4.2", ] [tool.poetry.dependencies] python = "^3.8.1" django = "^4.0" [tool.poetry.dev-dependencies] black = "^23.3.0" channels = "^2.1" coverage = {extras = ["toml"], version = "^6.3"} doc8 = "^0.8.0" ruff = "^0.0.275" sphinx = "^1.8" sphinx_rtd_theme = "^0.4.2" sphinx-django-command = "^0.1.3" [tool.black] line-length = 100 [tool.ruff] select = ["I", "E", "F"] line-length = 120 [tool.ruff.isort] combine-as-imports = true known-first-party = ["django_pgschemas"] [tool.mypy] strict_optional = true disallow_untyped_defs = true check_untyped_defs = true warn_unused_ignores = true [[tool.mypy.overrides]] module = [ "channels.*", "psycopg.*", "psycopg2.*", ] ignore_missing_imports = true [[tool.mypy.overrides]] module = [ "docs.*", "django_pgschemas.contrib.*", "django_pgschemas.postgresql_backend.*", "dpgs_sandbox.*", ] ignore_errors = true [tool.coverage.run] source = ["django_pgschemas"] omit = ["django_pgschemas/contrib/channels*"] [build-system] requires = ["poetry>=0.12"] build-backend = "poetry.masonry.api"