pax_global_header00006660000000000000000000000064146042774500014523gustar00rootroot0000000000000052 comment=62f1067fac455044663ef0814f255c3279b5efd0 django-pgtrigger-4.11.1/000077500000000000000000000000001460427745000150415ustar00rootroot00000000000000django-pgtrigger-4.11.1/.circleci/000077500000000000000000000000001460427745000166745ustar00rootroot00000000000000django-pgtrigger-4.11.1/.circleci/config.yml000066400000000000000000000050251460427745000206660ustar00rootroot00000000000000version: 2.1 orbs: opus10: executors: python-pg: parameters: pg_version: type: "string" default: "14.4" working_directory: /code docker: - image: opus10/circleci-public-django-app:2023-10-06 environment: # Ensure makefile commands are not wrapped in "docker-compose run" EXEC_WRAPPER: '' DATABASE_URL: postgres://root@localhost/circle_test?sslmode=disable TOX_PARALLEL_NO_SPINNER: 1 - image: cimg/postgres:<> environment: POSTGRES_USER: root POSTGRES_DB: circle_test POSTGRES_PASSWORD: password commands: test: steps: - checkout - restore_cache: key: v4-{{ checksum "poetry.lock" }} - run: make dependencies - run: make full-test-suite - save_cache: key: v4-{{ checksum "poetry.lock" }} paths: - /home/circleci/.cache/pypoetry/ - /code/.venv - /code/.tox jobs: test_pg_min: executor: name: opus10/python-pg pg_version: "13.11" steps: - opus10/test test_pg_max: executor: name: opus10/python-pg pg_version: "16.0" steps: - opus10/test lint: executor: opus10/python-pg steps: - checkout - restore_cache: key: v4-{{ checksum "poetry.lock" }} - run: make dependencies - run: make lint check_changelog: executor: opus10/python-pg steps: - checkout - restore_cache: key: v4-{{ checksum "poetry.lock" }} - run: make dependencies - run: git tidy-log origin/master.. - run: make tidy-lint deploy: executor: opus10/python-pg steps: - checkout - run: ssh-add -D - run: echo "${GITHUB_DEVOPS_PRIVATE_SSH_KEY_BASE64}" | base64 --decode | ssh-add - > /dev/null - restore_cache: key: v4-{{ checksum "poetry.lock" }} - run: make dependencies - run: poetry run python devops.py deploy workflows: version: 2 on_commit: jobs: - test_pg_min - test_pg_max - lint - check_changelog: filters: branches: ignore: master - deploy: context: public-django-app requires: - test_pg_min - test_pg_max - lint filters: branches: only: master django-pgtrigger-4.11.1/.editorconfig000066400000000000000000000003221460427745000175130ustar00rootroot00000000000000root = true [*] charset = utf-8 end_of_line = lf indent_size = 4 indent_style = space insert_final_newline = true trim_trailing_whitespace = true [*.{yaml,yml}] indent_size = 2 [makefile] indent_style = tab django-pgtrigger-4.11.1/.git-tidy/000077500000000000000000000000001460427745000166515ustar00rootroot00000000000000django-pgtrigger-4.11.1/.git-tidy/commit.tpl000066400000000000000000000007161460427745000206660ustar00rootroot00000000000000# Remember - commit messages are used to generate release notes! # Use the following template when writing a commit message or # use "git tidy-commit" to commit a properly-formatted message. # # ---- Commit Message Format ---- # # {{ schema.summary.help }} # # {{ schema.description.help }} # {% for entry in schema %} {% if entry.label not in ['summary', 'description'] %} # {{ entry.label.replace('_', '-').title() }}: {{ entry.help }} {% endif %} {% endfor %}django-pgtrigger-4.11.1/.git-tidy/commit.yaml000066400000000000000000000006411460427745000210260ustar00rootroot00000000000000- label: type name: Type help: The type of change. 
type: string choices: - api-break - bug - feature - trivial - label: summary name: Summary help: A high-level summary of the changes. type: string - label: description name: Description help: An in-depth description of the changes. type: string condition: ['!=', 'type', 'trivial'] multiline: True required: False django-pgtrigger-4.11.1/.git-tidy/log.tpl000066400000000000000000000015471460427745000201620ustar00rootroot00000000000000{% if output == ':github/pr' %} **Heads up!** This is what the release notes will look like based on the commits. {% endif %} {% if not range %} # Changelog {% endif %} {% for tag, commits_by_tag in commits.exclude('summary', '.*\[skip ci\].*', match=True).group('tag').items() %} ## {{ tag|default('Unreleased', True) }} {% if tag.date %}({{ tag.date.date() }}){% endif %} {% for type, commits_by_type in commits_by_tag.group('type', ascending_keys=True, none_key_last=True).items() %} ### {{ type|default('Other', True)|title }} {% for commit in commits_by_type %} {% if not commit.is_parsed %} - {{ commit.sha[:7] }}: Commit could not be parsed. {% else %} - {{ commit.summary }} [{{ commit.author_name }}, {{ commit.sha[:7] }}] {% if commit.description %} {{ commit.description|indent(4) }} {% endif %} {% endif %} {% endfor %} {% endfor %} {% endfor %}django-pgtrigger-4.11.1/.gitcommit.tpl000066400000000000000000000005261460427745000176370ustar00rootroot00000000000000# Remember - commit messages are used to generate release notes! # Use the following template when writing a commit message or # use "git tidy-commit" to commit a properly-formatted message. # # ---- Commit Message Format ---- # # A high-level summary of the changes. # # An in-depth description of the changes. # # Type: The type of change. django-pgtrigger-4.11.1/.gitignore000066400000000000000000000136101460427745000170320ustar00rootroot00000000000000 # Created by https://www.gitignore.io/api/vim,osx,python,django,pycharm,komodoedit,elasticbeanstalk,visualstudiocode # Edit at https://www.gitignore.io/?templates=vim,osx,python,django,pycharm,komodoedit,elasticbeanstalk,visualstudiocode ### Django ### *.log *.pot *.pyc __pycache__/ local_settings.py db.sqlite3 media # If your build process includes running collectstatic, then you probably don't need or want to include staticfiles/ # in your Git repository. Update and uncomment the following line accordingly. # /staticfiles/ ### Django.Python Stack ### # Byte-compiled / optimized / DLL files *.py[cod] *$py.class # C extensions *.so # Distribution / packaging .Python build/ develop-eggs/ dist/ downloads/ eggs/ .eggs/ lib/ lib64/ parts/ sdist/ var/ wheels/ pip-wheel-metadata/ share/python-wheels/ *.egg-info/ .installed.cfg *.egg MANIFEST # PyInstaller # Usually these files are written by a python script from a template # before PyInstaller builds the exe, so as to inject date/other infos into it. *.manifest *.spec # Installer logs pip-log.txt pip-delete-this-directory.txt # Unit test / coverage reports htmlcov/ .tox/ .nox/ .coverage .coverage.* .cache nosetests.xml coverage.xml *.cover .hypothesis/ .pytest_cache/ # Translations *.mo # Django stuff: db.sqlite3-journal # Flask stuff: instance/ .webassets-cache # Scrapy stuff: .scrapy # PyBuilder target/ # Jupyter Notebook .ipynb_checkpoints # IPython profile_default/ ipython_config.py # pyenv .python-version # pipenv # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 
# However, in case of collaboration, if having platform-specific dependencies or dependencies # having no cross-platform support, pipenv may install dependencies that don't work, or not # install all needed dependencies. #Pipfile.lock # celery beat schedule file celerybeat-schedule # SageMath parsed files *.sage.py # Environments .env .venv env/ venv/ ENV/ env.bak/ venv.bak/ # Spyder project settings .spyderproject .spyproject # Rope project settings .ropeproject # mkdocs documentation /site # mypy .mypy_cache/ .dmypy.json dmypy.json # Pyre type checker .pyre/ ### ElasticBeanstalk ### .elasticbeanstalk/ ### KomodoEdit ### *.komodoproject .komodotools ### OSX ### # General .DS_Store .AppleDouble .LSOverride # Icon must end with two \r Icon # Thumbnails ._* # Files that might appear in the root of a volume .DocumentRevisions-V100 .fseventsd .Spotlight-V100 .TemporaryItems .Trashes .VolumeIcon.icns .com.apple.timemachine.donotpresent # Directories potentially created on remote AFP share .AppleDB .AppleDesktop Network Trash Folder Temporary Items .apdisk ### PyCharm ### # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and WebStorm # Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839 # User-specific stuff .idea/**/workspace.xml .idea/**/tasks.xml .idea/**/usage.statistics.xml .idea/**/dictionaries .idea/**/shelf # Generated files .idea/**/contentModel.xml # Sensitive or high-churn files .idea/**/dataSources/ .idea/**/dataSources.ids .idea/**/dataSources.local.xml .idea/**/sqlDataSources.xml .idea/**/dynamic.xml .idea/**/uiDesigner.xml .idea/**/dbnavigator.xml # Gradle .idea/**/gradle.xml .idea/**/libraries # Gradle and Maven with auto-import # When using Gradle or Maven with auto-import, you should exclude module files, # since they will be recreated, and may cause churn. Uncomment if using # auto-import. # .idea/modules.xml # .idea/*.iml # .idea/modules # *.iml # *.ipr # CMake cmake-build-*/ # Mongo Explorer plugin .idea/**/mongoSettings.xml # File-based project format *.iws # IntelliJ out/ # mpeltonen/sbt-idea plugin .idea_modules/ # JIRA plugin atlassian-ide-plugin.xml # Cursive Clojure plugin .idea/replstate.xml # Crashlytics plugin (for Android Studio and IntelliJ) com_crashlytics_export_strings.xml crashlytics.properties crashlytics-build.properties fabric.properties # Editor-based Rest Client .idea/httpRequests # Android studio 3.1+ serialized cache file .idea/caches/build_file_checksums.ser ### PyCharm Patch ### # Comment Reason: https://github.com/joeblau/gitignore.io/issues/186#issuecomment-215987721 # *.iml # modules.xml # .idea/misc.xml # *.ipr # Sonarlint plugin .idea/sonarlint ### Python ### # Byte-compiled / optimized / DLL files # C extensions # Distribution / packaging # PyInstaller # Usually these files are written by a python script from a template # before PyInstaller builds the exe, so as to inject date/other infos into it. # Installer logs # Unit test / coverage reports # Translations # Django stuff: # Flask stuff: # Scrapy stuff: # Sphinx documentation # PyBuilder # Jupyter Notebook # IPython # pyenv # pipenv # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. # However, in case of collaboration, if having platform-specific dependencies or dependencies # having no cross-platform support, pipenv may install dependencies that don't work, or not # install all needed dependencies. 
# celery beat schedule file # SageMath parsed files # Environments # Spyder project settings # Rope project settings # mkdocs documentation # mypy # Pyre type checker ### Vim ### # Swap [._]*.s[a-v][a-z] [._]*.sw[a-p] [._]s[a-rt-v][a-z] [._]ss[a-gi-z] [._]sw[a-p] # Session Session.vim Sessionx.vim # Temporary .netrwhist *~ # Auto-generated tag files tags # Persistent undo [._]*.un~ ### VisualStudioCode ### .vscode/* !.vscode/settings.json !.vscode/tasks.json !.vscode/launch.json !.vscode/extensions.json ### VisualStudioCode Patch ### # Ignore all local history of files .history # End of https://www.gitignore.io/api/vim,osx,python,django,pycharm,komodoedit,elasticbeanstalk,visualstudiocode # Ignore custom Docker compose DB data .db # Ignore PyCharm .idea/ # Ignore local poetry settings poetry.toml # Ignore PyCharm idea folder .idea django-pgtrigger-4.11.1/.readthedocs.yml000066400000000000000000000002701460427745000201260ustar00rootroot00000000000000version: 2 build: os: ubuntu-22.04 tools: python: "3.8" mkdocs: configuration: mkdocs.yml fail_on_warning: false python: install: - requirements: docs/requirements.txt django-pgtrigger-4.11.1/CHANGELOG.md000066400000000000000000000530711460427745000166600ustar00rootroot00000000000000# Changelog ## 4.11.1 (2024-04-06) ### Trivial - Fix ReadTheDocs builds. [Wesley Kendall, 9c7f377] ## 4.11.0 (2023-11-26) ### Feature - Django 5.0 compatibility [Wesley Kendall, 2e1366e] Support and test against Django 5 with psycopg2 and psycopg3. ## 4.10.1 (2023-11-26) ### Trivial - Change docs so that deferred (AFTER) triggers return NULL [Peter Thomassen, 59be2e9] - Fix typo in cookbook (`exclude` instead of `filter`) [Peter Thomassen, 3714c08] - Fix typo in docs model protection example [Peter Thomassen, 00b86af] - Do additional error checking for arguments supplied to `FSM` trigger. [Wesley Kendall, e203834] ## 4.10.0 (2023-11-23) ### Bug - Fix issues updating triggers in non-atomic migrations [Artem Streltsov, 888c4cf] Fields of trigger conditions can now have their types updated in non-atomic migrations. ## 4.9.0 (2023-10-16) ### Feature - Utilities to build trigger conditions based on field changes. [Wesley Kendall, 1471a35] The following utility classes have been added to help trigger authors construct trigger conditions based on changes to underlying fields: - `pgtrigger.AnyChange`: If any supplied fields change, trigger the condition. - `pgtrigger.AnyDontChange`: If any supplied fields don't change, trigger the condition. - `pgtrigger.AllChange`: If all supplied fields change, trigger the condition. - `pgtrigger.AllDontChange`: If all supplied fields don't change, trigger the condition. A section in the docs was added for more examples on their use. ## 4.8.0 (2023-10-09) ### Bug - Create triggers in unmigrated apps when `--run-syncdb` is specified [Daniel Hillier, b762f9d] Triggers are now properly created when running `manage.py migrate --run-syncdb` for unmigrated apps. - Recreate triggers when a migration that removed triggers is unapplied [Daniel Hillier, c223910] Reverting a migration of dropped models will now successfully recreate triggers. ### Feature - Add Python 3.12 support and use Mkdocs for documentation [Wesley Kendall, c2b389f] Python 3.12 and Postgres 16 are supported now, along with having revamped docs using Mkdocs and the Material theme. Python 3.7 support was dropped.
### Trivial - Fix minor log formatting issues [Wesley Kendall, 76ab946] ## 4.7.0 (2023-06-08) ### Feature - Added Python 3.11, Django 4.2, and Psycopg 3 support [Wesley Kendall, 27dc243] Python 3.11, Django 4.2, and Psycopg 3 are now supported and tested. Django 2.2 support has been dropped. ## 4.6.0 (2022-10-07) ### Feature - Added ``pgtrigger.Func`` for accessing model properties in function declarations. [Wesley Kendall, 4bd6abf] When describing a trigger in ``Meta``, it's not possible to access model meta properties like ``db_table``. ``pgtrigger.Func`` solves this by exposing ``meta``, ``fields``, and ``columns`` variables that can be used in a format string. See the trigger cookbook in the docs for an example. - Added ``ReadOnly`` trigger for uneditable models and fields [Wesley Kendall, 0a3c162] The ``pgtrigger.ReadOnly`` trigger protects updates on models and takes an optional ``fields`` or ``exclude`` argument to specify which fields are read only. If no arguments are provided, the entire model is read only. ### Trivial - Updated with latest Django template [Wesley Kendall, 84b46f1] ## 4.5.3 (2022-09-19) ### Trivial - Fix typo in documentation [Francisco Couzo, def5432] - Fix issues when using Django's dummy database. [Wesley Kendall, cc1cb95] - Fixed minor documentation typos [Wes Kendall, dc473ff] ## 4.5.2 (2022-09-06) ### Trivial - Add Soft-Delete Model Manager example to docs [Jason Oppel, 3a46ae7] ## 4.5.1 (2022-09-01) ### Trivial - Remove unused migration code and restructure docs [Wes Kendall, a8793fc] - Optimize test suite [Wes Kendall, 863fa93] ## 4.5.0 (2022-08-31) ### Bug - Migrations properly serialize dynamic triggers and add better support for reverse migrations [Wes Kendall, 2eb3014] Triggers that override ``get_func`` or otherwise generate dynamic SQL are properly reflected in migrations when the underlying implementation changes. Along with this, migrations now serialize SQL objects instead of trigger classes, making it more robust when reversing migrations or updating underlying implementations of existing triggers. This change updates the hashes of all triggers and thus re-creates all triggers when running ``makemigrations`` or when manually installing them. ## 4.4.0 (2022-08-27) ### Bug - Pruning/installations fixed for Postgres versions 12 and under. [Wes Kendall, 22d60e9] Partitioned table support introduced a bug in using trigger management commands for Postgres 12 and under. This has been fixed. ### Trivial - Local development enhancements [Wes Kendall, a4d3c9c] ## 4.3.4 (2022-08-26) ### Trivial - Test against Django 4.1 and other CI improvements [Wes Kendall, 813f67e] ## 4.3.3 (2022-08-24) ### Trivial - Fix ReadTheDocs builds [Wes Kendall, 3870643] ## 4.3.2 (2022-08-20) ### Trivial - Fix release note rendering and code formatting changes [Wes Kendall, c834606] ## 4.3.1 (2022-08-19) ### Trivial - Fixed ReadTheDocs builds [Wes Kendall, 2cd0c9e] ## 4.3.0 (2022-08-18) ### Feature - Support for partitioned tables [Wes Kendall, 863b8cb] Installation commands and all core trigger functionality work with partitioned tables. Users will need to run ``python manage.py pgtrigger install`` to upgrade existing trigger installations, otherwise they will appear as outdated when running ``python manage.py pgtrigger ls``. Although outdated triggers will still run successfully for non-partitioned tables, this backwards compatibility will be removed in version 5.
## 4.2.1 (2022-08-18) ### Trivial - Do schema editor patching in ``App.ready()`` instead of module load [Wes Kendall, cce99ce] ## 4.2.0 (2022-08-18) ### Bug - Ensure interoperability with other Postgres backends [Wes Kendall, 1c4f480] ``django-pgtrigger`` trigger migrations are interoperable with PostGIS and other Postgres-flavored database backends. ## 4.1.0 (2022-08-17) ### Bug - Allow altering columns from trigger conditions [Wes Kendall, 1178457] Previously if one changed the column type of a field used in a trigger condition, installation would fail because Postgres doesn't allow this. The schema editor was patched to allow for this behavior, dropping and recreating triggers when column types are altered. ## 4.0.1 (2022-08-15) ### Trivial - Fixed minor issue in settings preventing docs from being built [Wes Kendall, 5ad18f8] ## 4.0.0 (2022-08-15) ### Api-Break - Multi-database and registry behavior changed [Wes Kendall, 0663807] There were four key additions around multi-database and multi-schema support: 1. When using a multi-database environment, ``django-pgtrigger`` now uses ``allow_migrate`` of the router rather than ``db_for_write`` to determine if a trigger should be installed for a model. 2. Management commands were changed to operate on one database at a time to be consistent with Django management commands. Install, uninstall, prune, disable, enable, and ls all take an optional ``--database`` argument. 3. ``pgtrigger.ignore``, ``pgtrigger.constraints``, and ``pgtrigger.schema`` were all updated to take a ``databases`` argument, defaulting to working on every postgres database when used for dynamic runtime behavior. 4. The Postgres function used by ``pgtrigger.ignore`` is always installed in the public schema by default. It is referenced using its fully-qualified path. The schema can be changed with ``settings.PGTRIGGER_SCHEMA``. Setting it to ``None`` will use the schema in the search path. Because of this change, the SQL for installed triggers changes, which causes triggers to appear as outdated when listing them. This can be fixed by running ``manage.py pgtrigger install`` to re-install triggers. Along with this, there were a few other breaking changes to the API: 1. ``pgtrigger.get`` was renamed to ``pgtrigger.registered``. 2. ``manage.py pgtrigger ls`` shows the trigger status followed by the URI in each line of output. ### Bug - Reference ``UpdateSearchVector`` trigger columns correctly [Wes Kendall, 7d40894] Columns configured in the ``UpdateSearchVector`` trigger were previously referenced in SQL by their model field name and not their column name. ### Feature - Added multi-schema support [Wes Kendall, 98342f2] ``django-pgtrigger`` didn't handle multiple schemas well, causing some issues for legacy installation commands. Multiple schema support is a first-class citizen. Depending on the database setup, you can now take advantage of the ``--schema`` options for management commands to dynamically set the schema. Docs were added that overview multi-schema support. ### Trivial - Added docs for using triggers in abstract models [Wes Kendall, cd215ac] - Refactored project structure [Wes Kendall, 4d53eef] ## 3.4.0 (2022-08-11) ### Bug - Fixed issues using ``pgtrigger.ignore`` with multiple databases [Wes Kendall, 557f0e1] ``pgtrigger.ignore`` now uses the connection of the database router when ignoring triggers.
### Feature - Add ``pgtrigger.UpdateSearchVector`` to keep search vectors updated [Wes Kendall, 671e8be] When using Django's full-text search, one can keep a ``SearchVectorField`` updated with the relevant document fields by using ``pgtrigger.UpdateSearchVector``. An example was added to the trigger cookbook. - Added ``pgtrigger.constraints`` for runtime configuration of deferrable triggers [Wes Kendall, 4b77b7b] ``pgtrigger.constraints`` mimics Postgres's ``SET CONSTRAINTS`` statement, allowing one to dynamically modify when a deferrable trigger runs. Documentation was also added for deferrable triggers with an example in the cookbook. - Added deferrable triggers [Wes Kendall, fe4f16e] Triggers now have an optional ``timing`` argument. If set, triggers will be created as "CONSTRAINT" triggers that can be deferred. When ``timing`` is set to ``pgtrigger.Immediate``, the trigger will run at the end of a statement. ``pgtrigger.Deferred`` will cause the trigger to run at the end of the transaction. Note that deferrable triggers must have both ``pgtrigger.After`` and ``pgtrigger.Row`` values set for the ``when`` and ``level`` attributes. ## 3.3.0 (2022-08-10) ### Bug - Fixes ignoring triggers with nested transactions [Wes Kendall, d32113d] ``pgtrigger.ignore`` avoids injecting SQL when transactions are in a failed state, allowing for one to use nested transactions while ignoring triggers. - Fixed issue re-installing triggers with different conditions. [Wes Kendall, 68e29d2] Triggers with conditions that change were not successfully re-installed with ``pgtrigger.install``. Note that this only affects legacy installation and not installation with the new migration system. ## 3.2.0 (2022-08-08) ### Feature - Support proxy models on default many-to-many "through" relationships. [Wes Kendall, 4cb0f65] Previously one had to use an unmanaged model to declare triggers on default many-to-many "through" relationships. Users can now define a proxy model on these instead. Support for unmanaged models was dropped. ## 3.1.0 (2022-08-08) ### Api-Break - Integration with Django's migration system. [Wes Kendall, 6916c14] Triggers are fully integrated with Django's migration system, and they are no longer installed at the end of migrations by default. Users instead need to run ``python manage.py makemigrations`` to make trigger migrations for their applications. Triggers for models in third-party apps are declared with proxy models. Triggers for default many-to-many "through" models are declared with unmanaged models. For instructions on upgrading or preserving legacy behavior, see the frequently asked questions of the docs. ### Bug - Fixed issues with proxy models and M2M "through" models. [Wes Kendall, 52aa81f] Proxy models weren't creating migrations, and M2M "through" models are handled by making an unmanaged model that points to the right DB table. ### Feature - Remove dependency on ``django-pgconnection``. [Wes Kendall, af0c908] Users no longer have to wrap ``settings.DATABASES`` with ``django-pgconnection`` in order to use the ``pgtrigger.ignore`` function. ## 2.5.1 (2022-07-31) ### Trivial - Updated with latest Django template, fixing doc builds [Wes Kendall, 4b175a4] ## 2.5.0 (2022-07-30) ### Bug - Ignore non-postgres databases in global operations [Wes Kendall, a1aff5d] Some operations, such as pruning triggers, would iterate over all databases in a project, including non-postgres ones. This fix ignores non-postgres databases. 
- Fixes transaction leak when using ``pgtrigger.ignore()`` [Wes Kendall, 1501d7e] ``pgtrigger.ignore()`` would continue to ignore triggers until the end of the transaction once the context manager exited. This is now fixed. - Fixed more issues related to custom table names [Wes Kendall, a0e1f6d] Fixes and test cases were added for custom table names that collide with reserved words. - Wrap table names to avoid SQL command conflicts [Zac Miller, 86ee983] Prevents models/tables with names like Order from causing syntax errors and adds the PyCharm .idea/ folder to .gitignore ### Feature - Triggers can be specified in model Meta options [Wes Kendall, 5c1cfec] Triggers can now be specified with the ``triggers`` attribute of a model's Meta options. This still works alongside the old method of using ``pgtrigger.register``. ## 2.4.1 (2022-02-24) ### Trivial - Updated with the latest template, dropped 3.6 support, added Docker-based development [Wes Kendall, 25e0f0d] ## 2.4.0 (2021-08-15) ### Bug - Ensure that generated postgres IDs are lowercase [Wes Kendall, 5c12f66] django-pgtrigger now ensures that generated postgres IDs are lowercase. Postgres IDs are case insensitive, and django-pgtrigger had issues dealing with names that had a mix of cases. ### Feature - Add the "declare" portion of a trigger as a top-level attribute [Wes Kendall, cd18512] Previously one had to subclass a trigger and override ``get_declare`` in order to change how the "DECLARE" fragment of a trigger was rendered. Users can now provide ``declare`` to the instantiation of a trigger. The documentation was updated to reflect this change. ### Trivial - Fix broken code examples in docs [Wes Kendall, 372719c] ## 2.3.3 (2021-08-15) ### Trivial - Adjusted max length of trigger names to 47 characters [Wes Kendall, 528140f] - Updated to the latest Django app template [Wes Kendall, d2d5328] - Change "Delete" to "Update" in tutorial docs [Rich Rauenzahn, 2839a78] ## 2.3.2 (2021-05-30) ### Trivial - Fixing tags after organization migration [Wes Kendall, 0ba84d2] ## 2.3.1 (2021-05-29) ### Bug - Throw errors on invalid trigger definitions. [Wes Kendall, 28f1329] Previously triggers were installed with a broad try/except in order to ignore errors when installing duplicate triggers. This caused invalid triggers to not be installed with no errors thrown. The code was updated to catch the specific exception for duplicate triggers and allow other trigger errors to surface. A failing test case was added. - Fix for wrong argument supplied at _get_database fn call [arpit o.O, 2f7cea1] ### Trivial - Updated with the latest django app template [Wes Kendall, 9a71227] - Fix incorrect name in example [Simon Willison, 069e05a] ## 2.2.1 (2021-02-23) ### Trivial - Optionally change "other" DB name if set at all [Tómas Árni Jónasson, 5b24058] ## 2.2.0 (2021-02-09) ### Feature - Multiple database support [Wes Kendall, b09ba73] Supports multiple-database functionality in all core functions and management commands. By default, all functions and management commands operate over all databases in a multi-database setup. This behavior can be overridden with the ``--database`` flag. When calling ``manage.py migrate``, only the database being migrated will have relevant triggers installed. This fits into how Django supports multi-database migrations.
## 2.1.0 (2020-10-20) ### Bug - Fixed possibility of duplicate trigger function names [Wes Kendall, b9b1552] django-pgtrigger previously enforced that no model could have the same trigger name, however, the trigger function being called is a globally unique name that needs to be checked. django-pgtrigger now adds a hash to the trigger function and installed trigger name based on the registered model. This prevents a global collision for trigger functions. Note that this change will make it appear like no triggers are installed. Upgrading to this version will involve dropping and re-creating existing triggers. ## 2.0.0 (2020-10-12) ### Api-Break - Trigger management commands [Wes Kendall, be26d33] Adds the ability to manage triggers by name with the ``manage.py pgtrigger`` management command. This change includes the following subcommands: - ``manage.py pgtrigger ls``: List all triggers, their installation status, and whether they are enabled or disabled. - ``manage.py pgtrigger install``: Install triggers. - ``manage.py pgtrigger uninstall``: Uninstall triggers. - ``manage.py pgtrigger enable``: Enable triggers. - ``manage.py pgtrigger disable``: Disable triggers. - ``manage.py pgtrigger prune``: Prune triggers. Because of this change, names are now enforced for every trigger and must be unique for every model. Users that wish to upgrade to this version must now supply a ``name`` keyword argument to their triggers. Docs were updated with references to the new management commands. ## 1.3.0 (2020-07-23) ### Feature - Extend the ``pgtrigger.SoftDelete`` trigger to support more field types. [Wes Kendall, 4dd8cf8] ``pgtrigger.SoftDelete`` takes an optional "value" argument to assign to the soft-deleted attribute upon deletion. This allows for more flexibility in soft-delete models that might, for example, set a ``CharField`` to "inactive". - ``pgtrigger.FSM`` enforces a finite state machine on a field. [Wes Kendall, bd3980e] The ``pgtrigger.FSM`` trigger allows a user to configure a field and a set of valid transitions for the field. An error will be raised if any transitions happen that are not part of the valid transitions list. The docs were updated with an example of how to use ``pgtrigger.FSM``. ### Trivial - Added trigger cookbook example for how to track history and model changes. [Wes Kendall, 114a70a] - Add "versioning" example to trigger cookbook. [Wes Kendall, 842ad5b] - Added trigger cookbook example of freezing a published model [Wes Kendall, 994e9da] ## 1.2.0 (2020-07-23) ### Feature - Added ``pgtrigger.ignore`` for dynamically ignoring triggers. [Wes Kendall, b3557bb] ``pgtrigger.ignore`` can be used to ignore triggers per thread of execution. Docs were updated with examples of how to use ``pgtrigger.ignore`` and how to utilize it to create "official" interfaces. - Allow custom naming of triggers [Wes Kendall, 864d653] Triggers can be given a "name" attribute that is used when generating the trigger and obtaining it from the registry. This will not only make trigger management in the future easier, but it will also make it possible to dynamically ignore specific triggers registered to models. ## 1.1.0 (2020-07-21) ### Feature - Added "Referencing" construct for statement-level triggers. [Wes Kendall, 20d958e] The ``pgtrigger.Referencing`` construct allows one to reference transition tables in statement-level triggers. - Added statement-level triggers. [Wes Kendall, c0cc365] django-pgtrigger now has a "level" construct for specifying row and statement-level triggers. 
All triggers default to being row-level triggers. ### Trivial - Support the "INSTEAD OF" construct for views on SQL triggers. [Wes Kendall, 79f9d54] - Updated docs and added a quick start section [Wes Kendall, 9ce7b29] ## 1.0.1 (2020-06-29) ### Trivial - Updated README and updated with the latest public django app template. [Wes Kendall, 001ef68] ## 1.0.0 (2020-06-27) ### Api-Break - Initial release of django-pgtrigger. [Wes Kendall, 1f737f0] ``django-pgtrigger`` provides primitives for configuring `Postgres triggers <https://www.postgresql.org/docs/current/sql-createtrigger.html>`__ on Django models. Models can be decorated with `pgtrigger.register` and supplied with `pgtrigger.Trigger` objects. These will automatically be installed after migrations. Users can use Django idioms such as ``Q`` and ``F`` objects to declare trigger conditions, alleviating the need to write raw SQL for a large number of use cases. ``django-pgtrigger`` comes built with some derived triggers for expressing common patterns. For example, `pgtrigger.Protect` can protect operations on a model, such as deletions or updates (e.g. an append-only model). The `pgtrigger.Protect` trigger can even target protecting operations on specific updates of fields (e.g. don't allow updates if ``is_active`` is ``False`` on a model). Another derived trigger, `pgtrigger.SoftDelete`, can soft-delete models by setting a field to ``False`` when a deletion happens on the model. django-pgtrigger-4.11.1/CONTRIBUTING.md000066400000000000000000000052461460427745000173010ustar00rootroot00000000000000# Contributing Guide This project was created using footing. For more information about footing, go to the [footing docs](https://github.com/Opus10/footing). ## Setup Set up your development environment with: git clone git@github.com:Opus10/django-pgtrigger.git cd django-pgtrigger make docker-setup `make docker-setup` will set up a development environment managed by Docker. Install docker [here](https://www.docker.com/get-started) and be sure it is running when executing any of the commands below. If you prefer a native development environment, `make conda-setup` will set up a development environment managed by [Conda](https://conda.io). The database must be run manually. ## Testing and Validation Run the tests on one Python version with: make test Run the full test suite against all supported Python versions with: make full-test-suite Validate the code with: make lint If your code fails the linter checks, fix common errors with: make lint-fix ## Committing This project uses [git-tidy](https://github.com/Opus10/git-tidy) to produce structured commits with git trailers. Information from commit messages is used to generate release notes and bump the version properly. To do a structured commit with `git-tidy`, do: make tidy-commit All commits in a pull request must be tidy commits that encapsulate a change. Ideally entire features or bug fixes are encapsulated in a single commit. Squash all of your commits into a tidy commit with: make tidy-squash To check if your commits pass linting, do: make tidy-lint Note, the above command lints every commit since branching from master. You can also run `make shell` and run `git tidy` commands inside the docker environment to do other flavors of `git tidy` commands. ## Documentation [Mkdocs Material](https://squidfunk.github.io/mkdocs-material/) documentation can be built with: make docs A shortcut for serving them is: make docs-serve ## Releases and Versioning Anything that is merged into the master branch will be automatically deployed to PyPI.
Documentation will be published to a ReadTheDocs at `https://django-pgtrigger.readthedocs.io/`. The following files will be generated and should *not* be edited by a user: - `CHANGELOG.md` - Contains an automatically-generated change log for each release. This project uses [Semantic Versioning](http://semver.org) by analyzing `Type:` trailers on git commit messages (trailers are added when using `git tidy-commit`). In order to bump the minor version, use "feature" or "bug" as the type. In order to bump the major version, use "api-break". The patch version will be updated automatically if none of these tags are present. django-pgtrigger-4.11.1/LICENSE000066400000000000000000000026601460427745000160520ustar00rootroot00000000000000Copyright (c) 2023, Opus 10 All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL OPUS 10 BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. django-pgtrigger-4.11.1/Makefile000066400000000000000000000121731460427745000165050ustar00rootroot00000000000000# Makefile for packaging and testing django-pgtrigger # # This Makefile has the following targets: # # setup - Sets up the development environment # dependencies - Installs dependencies # docs - Build documentation # docs-serve - Serve documentation # lint - Run code linting and static checks # lint-fix - Fix common linting errors # test - Run tests using pytest # full-test-suite - Run full test suite using tox # shell - Run a shell in a virtualenv # docker-teardown - Spin down docker resources OS = $(shell uname -s) PACKAGE_NAME=django-pgtrigger MODULE_NAME=pgtrigger SHELL=bash DATABASE_URL?=postgres://postgres:postgres@db:5432/postgres ifeq (${OS}, Linux) DOCKER_CMD?=sudo docker DOCKER_RUN_ARGS?=-v /home:/home -v $(shell pwd):/code -e EXEC_WRAPPER="" -u "$(shell id -u):$(shell id -g)" -v /etc/passwd:/etc/passwd # The user can be passed to docker exec commands in Linux. 
# For example, "make shell user=root" for access to apt-get commands user?=$(shell id -u) group?=$(shell id ${user} -u) EXEC_WRAPPER?=$(DOCKER_CMD) exec --user="$(user):$(group)" -it $(PACKAGE_NAME) else ifeq (${OS}, Darwin) DOCKER_CMD?=docker DOCKER_RUN_ARGS?=-v ~/:/home/circleci -v $(shell pwd):/code -e EXEC_WRAPPER="" EXEC_WRAPPER?=$(DOCKER_CMD) exec -it $(PACKAGE_NAME) endif # Docker run mounts the local code directory, SSH (for git), and global git config information DOCKER_RUN_CMD?=$(DOCKER_CMD)-compose run --name $(PACKAGE_NAME) $(DOCKER_RUN_ARGS) -d app # Print usage of main targets when user types "make" or "make help" .PHONY: help help: ifndef run @echo "Please choose one of the following targets: \n"\ " docker-setup: Setup Docker development environment\n"\ " conda-setup: Setup Conda development environment\n"\ " lock: Lock dependencies\n"\ " dependencies: Install dependencies\n"\ " shell: Start a shell\n"\ " test: Run tests\n"\ " tox: Run tests against all versions of Python\n"\ " lint: Run code linting and static checks\n"\ " lint-fix: Fix common linting errors\n"\ " docs: Build documentation\n"\ " docs-serve: Serve documentation\n"\ " docker-teardown: Spin down docker resources\n"\ "\n"\ "View the Makefile for more documentation" @exit 2 else $(EXEC_WRAPPER) $(run) endif # Pull the latest container and start a detached run .PHONY: docker-start docker-start: $(DOCKER_CMD)-compose pull $(DOCKER_RUN_CMD) # Lock dependencies .PHONY: lock lock: $(EXEC_WRAPPER) poetry lock --no-update # Install dependencies .PHONY: dependencies dependencies: mkdir -p .venv $(EXEC_WRAPPER) poetry install --no-ansi .PHONY: multi-db-setup multi-db-setup: -$(DOCKER_EXEC_WRAPPER) psql $(DATABASE_URL) -c "CREATE DATABASE ${MODULE_NAME}_local_other WITH TEMPLATE ${MODULE_NAME}_local" $(DOCKER_EXEC_WRAPPER) psql $(DATABASE_URL) -c "CREATE SCHEMA IF NOT EXISTS \"order\"" $(DOCKER_EXEC_WRAPPER) psql $(DATABASE_URL) -c "CREATE SCHEMA IF NOT EXISTS receipt;" # Set up git configuration .PHONY: git-setup git-setup: $(EXEC_WRAPPER) git tidy --template -o .gitcommit.tpl $(EXEC_WRAPPER) git config --local commit.template .gitcommit.tpl # Sets up the local database .PHONY: db-setup db-setup: -psql postgres -c "CREATE USER postgres;" -psql postgres -c "ALTER USER postgres SUPERUSER;" -psql postgres -c "CREATE DATABASE ${MODULE_NAME}_local OWNER postgres;" -psql postgres -c "GRANT ALL PRIVILEGES ON DATABASE ${MODULE_NAME}_local to postgres;" $(EXEC_WRAPPER) python manage.py migrate # Sets up a conda development environment .PHONY: conda-create conda-create: -conda env create -f environment.yml --force $(EXEC_WRAPPER) poetry config virtualenvs.create false --local # Sets up a Conda development environment .PHONY: conda-setup conda-setup: EXEC_WRAPPER=conda run -n ${PACKAGE_NAME} --no-capture-output conda-setup: conda-create lock dependencies git-setup db-setup # Sets up a Docker development environment .PHONY: docker-setup docker-setup: docker-teardown docker-start lock dependencies git-setup # Spin down docker resources .PHONY: docker-teardown docker-teardown: $(DOCKER_CMD)-compose down --remove-orphans # Run a shell .PHONY: shell shell: $(EXEC_WRAPPER) /bin/bash # Run pytest .PHONY: test test: $(EXEC_WRAPPER) pytest # Run full test suite .PHONY: full-test-suite full-test-suite: $(EXEC_WRAPPER) tox # Build documentation .PHONY: docs docs: $(EXEC_WRAPPER) mkdocs build # Serve documentation .PHONY: docs-serve docs-serve: $(EXEC_WRAPPER) mkdocs serve # Run code linting and static analysis. 
# Ensure docs can be built .PHONY: lint lint: $(EXEC_WRAPPER) black . --check $(EXEC_WRAPPER) ruff check ${MODULE_NAME} $(EXEC_WRAPPER) footing update --check $(EXEC_WRAPPER) bash -c 'make docs' # Fix common linting errors .PHONY: lint-fix lint-fix: $(EXEC_WRAPPER) black . $(EXEC_WRAPPER) ruff check ${MODULE_NAME} --fix # Lint commit messages .PHONY: tidy-lint tidy-lint: $(EXEC_WRAPPER) git tidy-lint origin/master.. # Perform a tidy commit .PHONY: tidy-commit tidy-commit: $(EXEC_WRAPPER) git tidy-commit # Perform a tidy squash .PHONY: tidy-squash tidy-squash: $(EXEC_WRAPPER) git tidy-squash origin/master django-pgtrigger-4.11.1/README.md000066400000000000000000000106701460427745000163240ustar00rootroot00000000000000# django-pgtrigger `django-pgtrigger` helps you write [Postgres triggers](https://www.postgresql.org/docs/current/sql-createtrigger.html) for your Django models. ## Why should I use triggers? Triggers can solve a variety of complex problems more reliably, performantly, and succinctly than application code. For example, * Protecting operations on rows or columns (`pgtrigger.Protect`). * Making read-only models or fields (`pgtrigger.ReadOnly`). * Soft-deleting models (`pgtrigger.SoftDelete`). * Snapshotting and tracking model changes ([django-pghistory](https://django-pghistory.readthedocs.io/)). * Enforcing field transitions (`pgtrigger.FSM`). * Keeping a search vector updated for full-text search (`pgtrigger.UpdateSearchVector`). * Building official interfaces (e.g. enforcing use of `User.objects.create_user` and not `User.objects.create`). * Versioning models, mirroring fields, computing unique model hashes, and the list goes on... All of these examples require no overridden methods, no base models, and no signal handling. ## Quick start Install `django-pgtrigger` with `pip3 install django-pgtrigger` and add `pgtrigger` to `settings.INSTALLED_APPS`. `pgtrigger.Trigger` objects are added to `triggers` in model `Meta`. `django-pgtrigger` comes with several trigger classes, such as `pgtrigger.Protect`. In the following, we're protecting the model from being deleted: ```python import pgtrigger class ProtectedModel(models.Model): """This model cannot be deleted!""" class Meta: triggers = [ pgtrigger.Protect(name="protect_deletes", operation=pgtrigger.Delete) ] ``` When migrations are created and executed, `ProtectedModel` will raise an exception anytime a deletion is attempted. Let's extend this example further and only protect deletions on active objects. In this example, the trigger conditionally runs when the row being deleted (the `OLD` row in trigger terminology) is still active: ```python import pgtrigger class ProtectedModel(models.Model): """Active object cannot be deleted!""" is_active = models.BooleanField(default=True) class Meta: triggers = [ pgtrigger.Protect( name="protect_deletes", operation=pgtrigger.Delete, condition=pgtrigger.Q(old__is_active=True) ) ] ``` `django-pgtrigger` uses `pgtrigger.Q` and `pgtrigger.F` objects to conditionally execute triggers based on the `OLD` and `NEW` rows. Combining these Django idioms with `pgtrigger.Trigger` objects can solve a wide variety of problems without ever writing SQL. Users, however, can still use raw SQL for complex cases. Triggers are installed like other database objects. Run `python manage.py makemigrations` and `python manage.py migrate` to install triggers. If triggers are new to you, don't worry. The [pgtrigger docs](https://django-pgtrigger.readthedocs.io/) cover triggers in more detail and provide many examples.
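To get a feel for what this looks like at runtime, here's a sketch assuming the conditional `ProtectedModel` above has been migrated. The exact exception class and message depend on your database driver; with psycopg2, trigger errors surface as `django.db.utils.InternalError`:

```python
from django.db.utils import InternalError

obj = ProtectedModel.objects.create(is_active=True)

try:
    obj.delete()  # Blocked: the OLD row is still active
except InternalError:
    # e.g. "pgtrigger: Cannot delete rows from ... table"
    pass

obj.is_active = False
obj.save()
obj.delete()  # Succeeds: the condition only guards active rows
```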
## Compatibility `django-pgtrigger` is compatible with Python 3.8 - 3.12, Django 3.2 - 5.0, Psycopg 2 - 3, and Postgres 12 - 16. ## Documentation [View the django-pgtrigger docs here](https://django-pgtrigger.readthedocs.io/) to learn more about: * Trigger basics and motivation for using triggers. * How to use the built-in triggers and how to build custom ones. * Installing triggers on third-party models, many-to-many fields, and other advanced scenarios. * Writing conditional triggers. * Ignoring triggers dynamically and deferring trigger execution. * Multiple database, schema, and partitioning support. * Frequently asked questions, common issues, and upgrading. * The commands, settings, and module. ## Installation Install `django-pgtrigger` with: pip3 install django-pgtrigger After this, add `pgtrigger` to the `INSTALLED_APPS` setting of your Django project. ## Other Material After you've read the docs, check out [this tutorial](https://wesleykendall.github.io/django-pgtrigger-tutorial/) with interactive examples from a Django meetup talk. The [DjangoCon 2021 talk](https://www.youtube.com/watch?v=Tte3d4JjxCk) also breaks down triggers and shows several examples. ## Contributing Guide For information on setting up django-pgtrigger for development and contributing changes, view [CONTRIBUTING.md](CONTRIBUTING.md). ## Primary Authors - [Wes Kendall](https://github.com/wesleykendall) ## Other Contributors - @jzmiller1 - @rrauenza - @ralokt - @adamchainz - @danifus - @kekekekule - @peterthomassen - @pfouque django-pgtrigger-4.11.1/devops.py000066400000000000000000000144011460427745000167130ustar00rootroot00000000000000#!/usr/bin/env python3 """ Devops functions for this package. Includes functions for automated package deployment, changelog generation, and changelog checking. This script is generated by the template at https://github.com/Opus10/public-django-app-template Do not change this script! Any fixes or updates to this script should be made to https://github.com/Opus10/public-django-app-template """ import os import subprocess import sys import tempfile from packaging import version CIRCLECI_ENV_VAR = "CIRCLECI" class Error(Exception): """Base exception for this script""" class NotOnCircleCIError(Error): """Thrown when not running on CircleCI""" def _check_git_version(): """Verify git version""" git_version = _shell_stdout("git --version | rev | cut -f 1 -d' ' | rev") if version.parse(git_version) < version.parse("2.22.0"): raise RuntimeError(f"Must have git version >= 2.22.0 (version = {git_version})") def _shell(cmd, check=True, stdin=None, stdout=None, stderr=None): # pragma: no cover """Runs a subprocess shell with check=True by default""" return subprocess.run(cmd, shell=True, check=check, stdin=stdin, stdout=stdout, stderr=stderr) def _shell_stdout(cmd, check=True): """Runs a shell command and returns stdout""" ret = _shell(cmd, stdout=subprocess.PIPE, check=check) return ret.stdout.decode("utf-8").strip() if ret.stdout else "" def _configure_git(): """Configure git name/email and verify git version""" _check_git_version() _shell('git config --local user.email "wesleykendall@protonmail.com"') _shell('git config --local user.name "Opus 10 Devops"') _shell("git config push.default current") def _find_latest_tag(): return _shell_stdout("git describe --tags --abbrev=0", check=False) def _find_sem_ver_update(): """ Find the semantic version string based on the commit log. 
Defaults to returning "patch" """ sem_ver = "patch" latest_tag = _find_latest_tag() log_section = f"{latest_tag}..HEAD" if latest_tag else "" cmd = ( f"git log {log_section} --pretty='%(trailers:key=type,valueonly)'" " | grep -q {sem_ver_type}" ) change_types_found = { change_type: _shell(cmd.format(sem_ver_type=change_type), check=False).returncode == 0 for change_type in ["bug", "feature", "api-break"] } if change_types_found["api-break"]: sem_ver = "major" elif change_types_found["bug"] or change_types_found["feature"]: sem_ver = "minor" return sem_ver def _update_package_version(): """Apply semantic versioning to package based on git commit messages""" # Obtain the current version old_version = _shell_stdout("poetry version | rev | cut -f 1 -d' ' | rev") if old_version == "0.0.0": old_version = "" latest_tag = _find_latest_tag() if old_version and version.parse(old_version) != version.parse(latest_tag): raise RuntimeError( f'The latest tag "{latest_tag}" and the current version' f' "{old_version}" do not match.' ) # Find out the sem-ver tag to apply sem_ver = _find_sem_ver_update() _shell(f"poetry version {sem_ver}") # Get the new version new_version = _shell_stdout("poetry version | rev | cut -f 1 -d' ' | rev") if new_version == old_version: raise RuntimeError(f'Version update could not be applied (version = "{old_version}")') return old_version, new_version def _generate_changelog_and_tag(old_version, new_version): """Generates a change log using git-tidy and tags repo""" # Tag the version temporarily so that changelog generation # renders properly _shell(f'git tag -f -a {new_version} -m "Version {new_version}"') # Generate the full changelog and copy it to docs/release_notes.md _shell("git tidy-log > CHANGELOG.md") _shell("cp CHANGELOG.md docs/release_notes.md") # Generate a requirements.txt for readthedocs.org _shell("poetry export --with dev --without-hashes -f requirements.txt > docs/requirements.txt") _shell('echo "." >> docs/requirements.txt')
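    # Appending "." makes the ReadTheDocs build pip-install this package itself alongside the dependencies exported above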
# Add all updated files _shell("git add pyproject.toml CHANGELOG.md docs/release_notes.md docs/requirements.txt") # Use [skip ci] to ensure CircleCI doesn't recursively deploy _shell( 'git commit --no-verify -m "Release version' f' {new_version} [skip ci]" -m "Type: trivial"' ) # Create release notes just for this release so that we can use them in # the commit message with tempfile.NamedTemporaryFile() as commit_msg_file: _shell(f'echo "{new_version}\n" > {commit_msg_file.name}') tidy_log_args = f"^{old_version} HEAD" if old_version else "HEAD" _shell(f"git tidy-log {tidy_log_args} >> {commit_msg_file.name}") # Update the tag so that it includes the latest release messages and # the automated commit _shell(f"git tag -d {new_version}") _shell(f"git tag -f -a {new_version} -F {commit_msg_file.name} --cleanup=whitespace") def _publish_to_pypi(): """ Uses poetry to publish to pypi """ if "PYPI_USERNAME" not in os.environ or "PYPI_PASSWORD" not in os.environ: raise RuntimeError("Must set PYPI_USERNAME and PYPI_PASSWORD env vars") _shell("poetry config http-basic.pypi ${PYPI_USERNAME} ${PYPI_PASSWORD}") _shell("poetry build") _shell("poetry publish -vvv -n", stdout=subprocess.PIPE) def _build_and_push_distribution(): """ Builds and pushes distribution to PyPI, along with pushing the tags back to the repo """ _publish_to_pypi() # Push the code changes after successful pypi deploy _shell("git push --follow-tags") def deploy(): """Deploys the package and uploads documentation.""" # Ensure proper environment if not os.environ.get(CIRCLECI_ENV_VAR): # pragma: no cover raise NotOnCircleCIError("Must be on CircleCI to run this script") _configure_git() old_version, new_version = _update_package_version() _generate_changelog_and_tag(old_version, new_version) _build_and_push_distribution() print(f"Deployment complete. Latest version is {new_version}") if __name__ == "__main__": if sys.argv[-1] == "deploy": deploy() else: raise RuntimeError(f'Invalid subcommand "{sys.argv[-1]}"') django-pgtrigger-4.11.1/docker-compose.yml000066400000000000000000000006121460427745000204750ustar00rootroot00000000000000version: "3.3" services: db: image: cimg/postgres:14.4 volumes: - ./.db:/var/lib/postgresql/data environment: - POSTGRES_NAME=postgres - POSTGRES_USER=postgres - POSTGRES_PASSWORD=postgres app: image: opus10/circleci-public-django-app environment: - DATABASE_URL=postgres://postgres:postgres@db:5432/postgres depends_on: - db django-pgtrigger-4.11.1/docs/000077500000000000000000000000001460427745000157715ustar00rootroot00000000000000django-pgtrigger-4.11.1/docs/advanced_db.md000066400000000000000000000072171460427745000205340ustar00rootroot00000000000000# Advanced Database Setups Here we cover details about more advanced database setups that might impact how you use triggers. ## Multiple Databases Triggers are migrated for multiple databases just like models. If you define a custom router, triggers will be installed based on `allow_migrate`. See [the Django docs on multiple databases](https://docs.djangoproject.com/en/4.1/topics/db/multi-db/) for more info. !!! warning If you migrate triggers and afterwards change the behavior of the router's `allow_migrate`, you risk having orphaned triggers installed on tables. The management commands and core installation functions work the same way, targeting an individual database like Django's `migrate` command. Each command can be supplied with a `-d` or `--database` option.
For example, `python manage.py pgtrigger install --database other` will install all of the triggers on the `other` database. If `allow_migrate` ignores a particular model for a database, the installation status will show as `UNALLOWED` when using `python manage.py pgtrigger ls`. !!! note If you've configured `settings.PGTRIGGER_INSTALL_ON_MIGRATE`, triggers will be installed for the same database as the `migrate` command. Dynamic runtime functions [pgtrigger.ignore][], [pgtrigger.schema][], and [pgtrigger.constraints][] operate on all postgres databases at once unless the `databases` argument is provided. ## Schemas There are two common ways of using Postgres schemas in Django, both of which work with `django-pgtrigger`: 1. Create a database in `settings.DATABASES` for each schema, configuring the `search_path` in the `OPTIONS`. 2. Use an app like [django-tenants](https://github.com/django-tenants/django-tenants) to dynamically set the `search_path` for a single database. When using the first approach, use the multi-database support detailed in the previous section. For the second approach, `django-pgtrigger` comes with the following functionality to dynamically set the `search_path`: 1. Pass `--schema` (`-s`) arguments for management commands. For example, this sets `search_path` to `myschema,public` and shows trigger installation status relative to those schemas: ``` python manage.py pgtrigger ls -s myschema -s public ``` 2. Programmatically set the search path with [pgtrigger.schema][]. For example, this sets the `search_path` to `myschema,public`: ```python with pgtrigger.schema("myschema", "public"): # search_path is set to "myschema,public". Any nested invocations of # pgtrigger.schema will append to the path if not currently # present ``` !!! note If you find yourself wrapping the `django-pgtrigger` API with [pgtrigger.schema][], open an issue and let us know about your use case. We may consider making it a first-class citizen in the API if it's common. The final thing to keep in mind with multi-schema support is that [pgtrigger.ignore][] uses a special Postgres function for ignoring triggers that's installed under the public schema. The function is always referenced with a fully-qualified name. If you don't use the public schema, configure the schema with `settings.PGTRIGGER_SCHEMA`. Setting this to `None` uses a relative path when installing and calling the function. ## Partitions `django-pgtrigger` supports tables that use [Postgres table partitioning](https://www.postgresql.org/docs/current/ddl-partitioning.html) with no additional configuration. !!! note Row-level triggers are only available for partitioned tables in Postgres 13 and above. Triggers cannot be installed or uninstalled on a per-partition basis. Installing a trigger on a partitioned table installs it for all partitions.django-pgtrigger-4.11.1/docs/advanced_installation.md000066400000000000000000000106451460427745000226470ustar00rootroot00000000000000# Advanced Installation ## Third-party models Install triggers on third-party models by declaring them on a proxy model. For example, here we protect Django's `User` model from being deleted: ```python class UserProxy(User): class Meta: proxy = True triggers = [ pgtrigger.Protect(name='protect_deletes', operation=pgtrigger.Delete) ] ```
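Because the trigger is installed on the underlying `auth_user` table, deletes are blocked regardless of whether they go through `User` or `UserProxy`. Here's a quick sketch of the runtime behavior (hypothetical usage; the exact exception class depends on your driver):

```python
from django.contrib.auth.models import User

user = User.objects.create(username="jane")
user.delete()  # Raises, e.g., django.db.utils.InternalError:
               # pgtrigger: cannot delete rows from the auth_user table
```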
Here we protect Django `User` group relationships from being deleted:

```python
class UserGroupTriggers(User.groups.through):
    class Meta:
        proxy = True
        triggers = [
            pgtrigger.Protect(name='protect_deletes', operation=pgtrigger.Delete)
        ]
```

!!! warning

    Django doesn't fully support making proxy models from default through relationships. Reversing migrations can sometimes throw `InvalidBases` errors. We recommend creating a custom through model when possible. See the [Django docs on making custom "through" models](https://docs.djangoproject.com/en/4.0/topics/db/models/#extra-fields-on-many-to-many-relationships).

## Declaring triggers in base models

Triggers can be declared in an abstract model and inherited. Here is a base model for soft-delete models:

```python
class BaseSoftDelete(models.Model):
    is_active = models.BooleanField(default=True)

    class Meta:
        abstract = True
        triggers = [pgtrigger.SoftDelete(name="soft_delete", field="is_active")]
```

Keep in mind that `Meta` class inheritance follows standard Django convention. If the child model defines a `Meta` class, you will need to inherit the parent's `Meta` class like so:

```python
class ChildModel(BaseSoftDelete):
    class Meta(BaseSoftDelete.Meta):
        ordering = ["is_active"]
```

## Programmatically registering triggers

Triggers can be registered programmatically with [pgtrigger.register][]. It can be used as a decorator on a model or called like so:

```python
# Register a protection trigger for a model
pgtrigger.register(pgtrigger.Protect(...))(MyModel)
```

!!! warning

    Although triggers can be registered programmatically, we don't recommend doing this except for advanced use cases. Registering a trigger to a model of a third-party app will create migrations in that app. This could result in migrations not being added to your codebase, which can result in triggers not being installed.

## Turning off migration integration

`django-pgtrigger` patches Django's migration system so that triggers are installed and updated in migrations. If this is undesirable, you can disable the migration integration by setting `settings.PGTRIGGER_MIGRATIONS` to `False`. After this, you are left with two options:

1. Manually install triggers with the commands detailed in the next section.
2. Run trigger installation after every `python manage.py migrate` by setting `settings.PGTRIGGER_INSTALL_ON_MIGRATE` to `True`. Keep in mind that reversing migrations can cause issues when installing triggers this way.

!!! warning

    There are known issues with installing triggers after migrations that cannot be fixed. For example, reversing migrations can result in trigger installation errors, and race conditions can happen if triggers are installed after the underlying tables have been migrated.

## Manual installation, enabling, and disabling

The following commands allow one to manually manage trigger installation and are detailed more in the [Commands](commands.md) section:

!!! danger

    These commands are global operations. Use them with extreme caution, especially if the triggers are managed by migrations. If you need to temporarily ignore a trigger inside your application, see the [Ignoring Triggers](ignoring_triggers.md) section.
* `python manage.py pgtrigger install`: Install triggers
* `python manage.py pgtrigger uninstall`: Uninstall triggers
* `python manage.py pgtrigger enable`: Enable triggers
* `python manage.py pgtrigger disable`: Disable triggers
* `python manage.py pgtrigger prune`: Uninstall triggers created by `django-pgtrigger` that are no longer in the codebase.

## Showing installation status

Use `python manage.py pgtrigger ls` to see the installation status of individual triggers or all triggers at once. View the [Commands](commands.md) section for descriptions of the different installation states.
django-pgtrigger-4.11.1/docs/basics.md000066400000000000000000000131751460427745000175660ustar00rootroot00000000000000# Basics

## The anatomy of a trigger

Postgres triggers are database functions written in PL/pgSQL that execute based on events and conditions. The [pgtrigger.Trigger][] object is the base class for all triggers in `django-pgtrigger`. Its attributes mirror the syntax required for [making a Postgres trigger](https://www.postgresql.org/docs/current/sql-createtrigger.html).

Here are the most common attributes you'll use:

* **name**

    The identifying name of the trigger. It is unique for every model and must be less than 48 characters.

* **operation**

    The table operation that fires a trigger. Operations are [pgtrigger.Update][], [pgtrigger.Insert][], [pgtrigger.Delete][], [pgtrigger.Truncate][], or [pgtrigger.UpdateOf][]. They can be `OR`ed together (e.g. `pgtrigger.Insert | pgtrigger.Update`) to configure triggers on a combination of operations.

    !!! note

        [pgtrigger.UpdateOf][] fires when columns appear in an `UPDATE` statement. It will not fire if other triggers update the columns. See the notes in the [Postgres docs](https://www.postgresql.org/docs/12/sql-createtrigger.html) for more information.

    !!! note

        Some operations cannot be combined. For example, [pgtrigger.UpdateOf][] cannot be combined with other operations.

* **when**

    When the trigger should run in relation to the operation. [pgtrigger.Before][] executes the trigger before the operation, and vice versa for [pgtrigger.After][]. [pgtrigger.InsteadOf][] is used for SQL views.

    !!! note

        [pgtrigger.Before][] and [pgtrigger.After][] can be used on SQL views under some circumstances. See [the Postgres docs](https://www.postgresql.org/docs/12/sql-createtrigger.html) for a breakdown.

* **condition** *(optional)*

    Conditionally execute the trigger based on the `OLD` or `NEW` rows. Use [pgtrigger.Q][] and [pgtrigger.F][] objects for constructing `WHERE` clauses with the `OLD` and `NEW` rows. See [the conditional triggers section](conditional.md) for more details and other utilities.

    !!! note

        Be sure to familiarize yourself with `OLD` and `NEW` rows when writing conditions by consulting the [Postgres docs](https://www.postgresql.org/docs/current/plpgsql-trigger.html). For example, `OLD` is always `NULL` in [pgtrigger.Insert][] triggers.

Here are attributes you'll need when writing more complex triggers.

* **func**

    The raw PL/pgSQL function that is executed.

    !!! note

        This is *not* the entire declared trigger function, but rather the snippet of PL/pgSQL that is nested in the `DECLARE ... BEGIN ... END` portion of the trigger.

* **declare** *(optional)*

    Define additional variable declarations as a list of `(variable_name, variable_type)` tuples. For example `declare=[('my_var_1', 'BOOLEAN'), ('my_var_2', 'JSONB')]`.
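    For instance, here's a hedged sketch of a trigger that uses a declared variable in its `func`. The `is_active` and `deactivated_at` fields are hypothetical:

    ```python
    pgtrigger.Trigger(
        name="stamp_deactivation",
        when=pgtrigger.Before,
        operation=pgtrigger.Update,
        declare=[("was_active", "BOOLEAN")],
        func="""
            was_active := OLD.is_active;
            IF was_active AND NOT NEW.is_active THEN
                NEW.deactivated_at = NOW();
            END IF;
            RETURN NEW;
        """,
    )
    ```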
* **level** *(optional, default=pgtrigger.Row)*

    Configures the trigger to fire once for every row ([pgtrigger.Row][]) or once for every statement ([pgtrigger.Statement][]).

* **referencing** *(optional)*

    References the `OLD` and `NEW` rows as transition tables in statement-level triggers. For example, `pgtrigger.Referencing(old='old_table_name', new='new_table_name')` will make an `old_table_name` and `new_table_name` table available as transition tables. See [this StackExchange answer](https://dba.stackexchange.com/a/177468) for additional details, and see the [Cookbook](cookbook.md) for an example.

* **timing** *(optional)*

    Create a deferrable `CONSTRAINT` trigger when set. Use [pgtrigger.Immediate][] to execute the trigger at the end of a statement and [pgtrigger.Deferred][] to execute it at the end of a transaction.

    !!! note

        Deferrable triggers must have the `level` set to [pgtrigger.Row][] and `when` set to [pgtrigger.After][].

## Defining and installing triggers

Triggers are defined in the `triggers` attribute of the model `Meta` class. For example, this trigger protects the model from being deleted:

```python
from django.db import models
import pgtrigger


class CannotDelete(models.Model):
    class Meta:
        triggers = [
            pgtrigger.Protect(name='protect_deletes', operation=pgtrigger.Delete)
        ]
```

Triggers are installed by first running `python manage.py makemigrations` and then `python manage.py migrate`.

If you'd like to install a trigger on a model of a third-party app, see the [Advanced Installation](advanced_installation.md) section. This section also covers how you can manually install, enable, and disable triggers globally.

## The advantages over signals and model methods

There are three key advantages to using triggers over implementing the logic in a [Django signal handler](https://docs.djangoproject.com/en/4.1/topics/signals/) or by overriding model methods:

1. **Reliability**: Unlike Python code, triggers run alongside queries in the database, ensuring that nothing falls through the cracks. On the other hand, signals and model methods can provide a false sense of security. For example, signals aren't fired for `bulk_create`, and custom model methods aren't called in data migrations by default. Third party apps that bypass the ORM will also not work reliably.

2. **Complexity**: Complexity can balloon when trying to override models, managers, or querysets to accomplish the same logic a trigger can support. Even simple routines such as conditionally running code based on a changed field are difficult to implement correctly and prone to race conditions.

3. **Performance**: Triggers can perform SQL queries without needing to do expensive round trips to the database to fetch data. This can be a major performance enhancement for routines like history tracking or data denormalization.
django-pgtrigger-4.11.1/docs/commands.md000066400000000000000000000055301460427745000201170ustar00rootroot00000000000000# Commands

`django-pgtrigger` comes with the `python manage.py pgtrigger` command, which has several subcommands that are described below.

## ls

List all triggers managed by `django-pgtrigger`.

**Options**

    [uris ...]      Trigger URIs to list.
    -d, --database  List triggers on this database.
    -s, --schema    Use this schema as the search path. Can be provided multiple times.

**Output**

The following installation status markers are displayed:

- `INSTALLED`: The trigger is installed and up to date
- `OUTDATED`: The trigger is installed, but it has not been migrated to the current version.
- `UNINSTALLED`: The trigger is not installed.
- `PRUNE`: A trigger is no longer in the codebase and still installed.
- `UNALLOWED`: Trigger installation is not allowed for this database. Only applicable in a multi-database environment.

Note that every installed trigger, including ones that will be pruned, will show whether they are enabled or disabled. Disabled triggers are installed but do not run.

## install

Install triggers. If no arguments are provided, all triggers are installed and orphaned triggers are pruned.

**Options**

    [uris ...]      Trigger URIs to install.
    -d, --database  Install triggers on this database.
    -s, --schema    Use this schema as the search path. Can be provided multiple times.

## uninstall

Uninstall triggers. If no arguments are provided, all triggers are uninstalled and orphaned triggers will be pruned.

!!! danger

    Running `uninstall` will globally uninstall triggers. If you need to temporarily ignore a trigger, see the [Ignoring Execution](ignoring_triggers.md) section.

**Options**

    [uris ...]      Trigger URIs to uninstall.
    -d, --database  Uninstall triggers on this database.
    -s, --schema    Use this schema as the search path. Can be provided multiple times.

## enable

Enable triggers.

**Options**

    [uris ...]      Trigger URIs to enable.
    -d, --database  Enable triggers on this database.
    -s, --schema    Use this schema as the search path. Can be provided multiple times.

## disable

Disable triggers.

!!! danger

    Running `disable` will globally disable the execution of triggers. If you need to temporarily ignore a trigger, see the [Ignoring Execution](ignoring_triggers.md) section.

**Options**

    [uris ...]      Trigger URIs to disable.
    -d, --database  Disable triggers on this database.
    -s, --schema    Use this schema as the search path. Can be provided multiple times.

## prune

Uninstall any triggers managed by `django-pgtrigger` that are no longer in the codebase.

!!! note

    Pruning happens automatically when doing `python manage.py pgtrigger install` or `python manage.py pgtrigger uninstall`.

**Options**

    -d, --database  Prune triggers on this database.
    -s, --schema    Use this schema as the search path. Can be provided multiple times.
django-pgtrigger-4.11.1/docs/conditional.md000066400000000000000000000135771460427745000206310ustar00rootroot00000000000000# Conditional Triggers

Here's a brief guide on the many ways one can create conditional row-level triggers using `django-pgtrigger`. We start with the high-level utilities and make our way towards lower-level ones.

Remember, row-level triggers have access to either the `NEW` row being inserted or updated, or the `OLD` row being updated or deleted. These variables are copies of the row and can be used in the conditions of the trigger. Update triggers, for example, can conditionally execute based on both the values of the row before the update (the `OLD` row) and the row after the modification (the `NEW` row).

!!! note

    Consult the [Postgres docs](https://www.postgresql.org/docs/current/plpgsql-trigger.html) for more information on these variables.

We'll first dive into update-based triggers and the utilities `django-pgtrigger` provides for detecting changes on models.

## Field Change Conditions

The following conditions are provided out of the box for conveniently expressing field changes:

- [pgtrigger.AnyChange][]: If any supplied fields change, trigger the condition.
- [pgtrigger.AnyDontChange][]: If any supplied fields don't change, trigger the condition.
- [pgtrigger.AllChange][]: If all supplied fields change, trigger the condition.
- [pgtrigger.AllDontChange][]: If all supplied fields don't change, trigger the condition.

For example, let's use this model:

```python
class MyModel(models.Model):
    int_field = models.IntegerField()
    char_field = models.CharField(null=True)
    dt_field = models.DateTimeField(auto_now=True)
```

The following trigger will raise an exception if an update happens that doesn't change a single field.

```python
pgtrigger.Protect(operation=pgtrigger.Update, condition=~pgtrigger.AnyChange())
```

This is also equivalent to doing:

```python
pgtrigger.Protect(operation=pgtrigger.Update, condition=pgtrigger.AllDontChange())
```

!!! remember

    If no arguments are provided to any of these utilities, they operate over all fields on the model.

Let's say we want to block updates if any changes happen to the int or char fields:

```python
pgtrigger.Protect(
    operation=pgtrigger.Update,
    condition=pgtrigger.AnyChange("int_field", "char_field")
)
```

This is how the [pgtrigger.ReadOnly][] trigger is implemented. Underneath the hood, the condition looks like this:

```sql
OLD.int_field IS DISTINCT FROM NEW.int_field OR OLD.char_field IS DISTINCT FROM NEW.char_field
```

!!! note

    `IS DISTINCT FROM` helps ensure that nullable objects are correctly compared since null never equals null.

One can also exclude fields in the condition. For example, this condition fires only if every field but the excluded ones change:

```python
pgtrigger.AllChange(exclude=["dt_field"])
```

To automatically ignore `auto_now` and `auto_now_add` datetime fields, do:

```python
# Fires on changes to any fields except auto_now and auto_now_add fields
pgtrigger.AnyChange(exclude_auto=True)
```

!!! remember

    Included and excluded fields can both be supplied. Included fields are used as the initial fields before `exclude` and `exclude_auto` remove fields.

## Targeting old and new fields with `pgtrigger.Q` and `pgtrigger.F`

We previously covered various change condition utilities. These only operate over update-based triggers.

One can create fine-grained trigger conditions for all operations by using [pgtrigger.Q][] and [pgtrigger.F][] constructs. For example, let's use our model from above again:

```python
class MyModel(models.Model):
    int_field = models.IntegerField()
    char_field = models.CharField(null=True)
    dt_field = models.DateTimeField(auto_now=True)
```

The following condition will fire whenever the old row has an `int_field` greater than zero:

```python
pgtrigger.Q(old__int_field__gt=0)
```

Similar to Django's syntax, the [pgtrigger.Q][] object can reference the `old__` and `new__` row. The [pgtrigger.F][] object can also be used for doing comparisons. For example, here we only fire when the `int_field` of the old row is greater than the int field of the new row.

```python
pgtrigger.Q(old__int_field__gt=pgtrigger.F("new__int_field"))
```

Remember to use the `__df` operator for `DISTINCT FROM` and `__ndf` for `NOT DISTINCT FROM`. This is generally the behavior one desires when checking for changes of nullable fields. For example, this condition fires only when `char_field` is not distinct from its old version.

```python
pgtrigger.Q(old__char_field__ndf=pgtrigger.F("new__char_field"))
```
!!! note

    The above is equivalent to doing `pgtrigger.AnyDontChange("char_field")`

Finally, [pgtrigger.Q][] objects can be negated, and-ed, and or-ed just like django `Q` objects:

```python
pgtrigger.Q(old__char_field__ndf=pgtrigger.F("new__char_field")) | pgtrigger.Q(new__int_field=0)
```

## Raw SQL conditions

The utilities above should handle the majority of use cases when expressing conditions; however, users can still express raw SQL with [pgtrigger.Condition][]. For example, here's a condition that fires if any field changes:

```python
pgtrigger.Condition("OLD.* IS DISTINCT FROM NEW.*")
```

!!! note

    The above is equivalent to `pgtrigger.AnyChange()`.

## Conditions across multiple models

Remember, trigger conditions can only be expressed based on the rows of the current model. One can't, for example, reference a joined foreign key's value. This isn't a limitation in `django-pgtrigger` but rather a limitation in the database.

Custom conditional logic that spans multiple tables must happen inside the function as an `if/else` type of statement. [See this resource](https://www.postgresqltutorial.com/postgresql-plpgsql/plpgsql-if-else-statements/) for an example of what this looks like.

Currently `django-pgtrigger` doesn't handle this case out of the box; one must write raw SQL to express `if/else` logic. Reach out [to the author](mailto:wesleykendall@protonmail.com) if you have a need for this to be more easily expressed in `django-pgtrigger`.
django-pgtrigger-4.11.1/docs/contributing.md000066400000000000000000000052461460427745000210310ustar00rootroot00000000000000# Contributing Guide

This project was created using footing. For more information about footing, go to the [footing docs](https://github.com/Opus10/footing).

## Setup

Set up your development environment with:

    git clone git@github.com:Opus10/django-pgtrigger.git
    cd django-pgtrigger
    make docker-setup

`make docker-setup` will set up a development environment managed by Docker. Install docker [here](https://www.docker.com/get-started) and be sure it is running when executing any of the commands below.

If you prefer a native development environment, `make conda-setup` will set up a development environment managed by [Conda](https://conda.io). The database must be run manually.

## Testing and Validation

Run the tests on one Python version with:

    make test

Run the full test suite against all supported Python versions with:

    make full-test-suite

Validate the code with:

    make lint

If your code fails the linter checks, fix common errors with:

    make lint-fix

## Committing

This project uses [git-tidy](https://github.com/Opus10/git-tidy) to produce structured commits with git trailers. Information from commit messages is used to generate release notes and bump the version properly.

To do a structured commit with `git-tidy`, do:

    make tidy-commit

All commits in a pull request must be tidy commits that encapsulate a change. Ideally entire features or bug fixes are encapsulated in a single commit. Squash all of your commits into a tidy commit with:

    make tidy-squash

To check if your commits pass linting, do:

    make tidy-lint

Note, the above command lints every commit since branching from master. You can also run `make shell` and run `git tidy` commands inside the docker environment to do other flavors of `git tidy` commands.
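For reference, a tidy commit is an ordinary commit message followed by git trailers. A hypothetical example, with an invented summary and description:

    Add deletion protection to payment models

    Prevents payment rows from being deleted by the application.

    Type: feature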
## Documentation

[Mkdocs Material](https://squidfunk.github.io/mkdocs-material/) documentation can be built with:

    make docs

A shortcut for serving them is:

    make docs-serve

## Releases and Versioning

Anything that is merged into the master branch will be automatically deployed to PyPI. Documentation will be published to ReadTheDocs at `https://django-pgtrigger.readthedocs.io/`.

The following files will be generated and should *not* be edited by a user:

- `CHANGELOG.md` - Contains an automatically-generated change log for each release.

This project uses [Semantic Versioning](http://semver.org) by analyzing `Type:` trailers on git commit messages (trailers are added when using `git tidy-commit`). In order to bump the minor version, use "feature" or "bug" as the type. In order to bump the major version, use "api-break". The patch version will be updated automatically if none of these tags are present.
django-pgtrigger-4.11.1/docs/cookbook.md000066400000000000000000000410271460427745000201250ustar00rootroot00000000000000# Trigger Cookbook

Here we provide examples using the built-in triggers of `django-pgtrigger` and triggers that require raw SQL. While most examples are practical application examples, some exist to illustrate a starting point of how one can use triggers for more complex cases.

## Read-only models and fields

Ensure a set of fields on a model are read-only with [pgtrigger.ReadOnly][]. This trigger takes one of the following optional arguments:

* **fields**: A list of read-only fields.
* **exclude**: Fields to exclude. All other fields will be read-only.

If no arguments are provided, the entire model will be read-only.

For example, here we have a model with a read-only `created_at` timestamp. Any changes to this field will result in an exception:

```python
class TimestampedModel(models.Model):
    """Ensure created_at timestamp is read only"""

    created_at = models.DateTimeField(auto_now_add=True)
    editable_value = models.TextField()

    class Meta:
        triggers = [
            pgtrigger.ReadOnly(
                name="read_only_created_at",
                fields=["created_at"]
            )
        ]
```

!!! note

    A condition is automatically generated and cannot be supplied to [pgtrigger.ReadOnly][].

## Validating field transitions

Similar to how one can configure a finite state machine on a model field with [django-fsm](https://github.com/viewflow/django-fsm), the [pgtrigger.FSM][] trigger ensures that a field can only do configured transitions.

The example below ensures that the `status` field of a model can only transition from "unpublished" to "published" and from "published" to "inactive". Any other updates on the `status` field will result in an exception:

```python
class MyModel(models.Model):
    """Enforce valid transitions of the "status" field"""

    status = models.CharField(max_length=32, default="unpublished")

    class Meta:
        triggers = [
            pgtrigger.FSM(
                name="status_fsm",
                field="status",
                transitions=[
                    ("unpublished", "published"),
                    ("published", "inactive"),
                ]
            )
        ]
```

!!! note

    [pgtrigger.FSM][] can be supplied with a `condition` to only enforce the state transitions when a condition is met.

!!! note

    The [pgtrigger.FSM][] trigger only works for non-null `CharField` fields.
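To illustrate the runtime behavior of the FSM trigger above, here's a hedged sketch. The failure surfaces as a `django.db.utils.InternalError`, as covered in the FAQ:

```python
m = MyModel.objects.create()  # status defaults to "unpublished"

m.status = "published"
m.save()  # OK: "unpublished" -> "published" is a configured transition

m.status = "unpublished"
m.save()  # Fails: "published" -> "unpublished" is not a configured transition
```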
## Mirroring a field

Here we create a [pgtrigger.Trigger][] that runs before an update or insert to keep two fields in sync.

```python
class MyModel(models.Model):
    int_field = models.IntegerField()
    in_sync_int = models.IntegerField(help_text="Stays the same as int_field")

    class Meta:
        triggers = [
            pgtrigger.Trigger(
                name="keep_in_sync",
                operation=pgtrigger.Update | pgtrigger.Insert,
                when=pgtrigger.Before,
                func="NEW.in_sync_int = NEW.int_field; RETURN NEW;",
            )
        ]
```

!!! note

    When writing a [pgtrigger.Before][] trigger, be sure to return the row over which the operation should be applied. Returning no row will prevent the operation from happening. See [the Postgres docs](https://www.postgresql.org/docs/current/plpgsql-trigger.html) for more information.

## Soft-delete models

Rather than fully deleting a model, one can "soft-delete" it by setting a field to an inactive state. The [pgtrigger.SoftDelete][] trigger takes the field as an argument and a value to set on delete, which defaults to `False`. For example:

```python
class SoftDeleteModel(models.Model):
    # This field is set to false when the model is deleted
    is_active = models.BooleanField(default=True)

    class Meta:
        triggers = [
            pgtrigger.SoftDelete(name="soft_delete", field="is_active")
        ]


m = SoftDeleteModel.objects.create()
m.delete()

# The model will still exist, but it is no longer active
assert not SoftDeleteModel.objects.get().is_active
```

[pgtrigger.SoftDelete][] works with nullable `CharField`, `IntegerField`, and `BooleanField` fields.

Let's extend this example with the assumption that we're mostly interested in active objects and don't want to see soft-deleted items when pulling data from QuerySets. The addition of the custom Model Manager below along with changes to `SoftDeleteModel` ensures that QuerySets using `objects` (e.g., `Foo.objects.all()`) will automatically filter out soft-deleted items and only return active objects.

```python
class NotDeletedManager(models.Manager):
    """Automatically filters out soft deleted objects from QuerySets"""

    def get_queryset(self):
        return super().get_queryset().exclude(is_active=False)


class SoftDeleteModel(models.Model):
    # This field is set to false when the model is deleted
    is_active = models.BooleanField(default=True)

    all_objects = models.Manager()  # access deleted objects too
    objects = NotDeletedManager()  # filter out soft deleted objects

    class Meta:
        triggers = [
            pgtrigger.SoftDelete(name="soft_delete", field="is_active")
        ]

        # Return both active/deleted data via Django Admin, dumpdata, etc.
        default_manager_name = "all_objects"
```

We can still get to both the deleted and active items by using the `all_objects` Model Manager like so:

```python
MyModelName.all_objects.all()
```

Please also note the addition of `default_manager_name` to `Meta`. This attribute configures Django to use `all_objects` (i.e. the built-in `models.Manager` in this case) as its default Model Manager internally. This allows access to soft deleted objects via the Django Admin Page, dumpdata, and other Django internals.

!!! note

    When using [pgtrigger.SoftDelete][], keep in mind that Django will still perform cascading operations. For example, a foreign key to `SoftDeleteModel` with `on_delete=models.CASCADE` will be deleted by Django when the parent model is soft deleted.

## Append-only models

Here we create an append-only model using the [pgtrigger.Protect][] trigger for the `UPDATE` and `DELETE` operations:

```python
class AppendOnlyModel(models.Model):
    my_field = models.IntegerField()

    class Meta:
        triggers = [
            pgtrigger.Protect(
                name="append_only",
                operation=(pgtrigger.Update | pgtrigger.Delete)
            )
        ]
```
!!! note

    This table can still be truncated. Although Django doesn't support this database operation, one can still protect against this by adding the [pgtrigger.Truncate][] operation.

## Official interfaces

[pgtrigger.Protect][] triggers can be combined with [pgtrigger.ignore][] to create "official" interfaces for doing database operations in your application. Here we protect inserts on our custom `User` model and force engineers to use `create_user` to create them:

```python
@pgtrigger.ignore("my_app.User:protect_inserts")
def create_user(**kwargs):
    return User.objects.create(**kwargs)


class User(models.Model):
    class Meta:
        triggers = [
            pgtrigger.Protect(name="protect_inserts", operation=pgtrigger.Insert)
        ]
```

We've ignored the protection trigger for the `create_user` function by providing its full path to [pgtrigger.ignore][]. All users must use `create_user` to create `User` objects, otherwise an exception will happen.

!!! note

    Ignoring triggers is covered in the [Ignoring Execution](ignoring_triggers.md) section.

## Conditional deletion protection

Here we only allow models with a `deletable` flag to be deleted:

```python
class DynamicDeletionModel(models.Model):
    is_deletable = models.BooleanField(default=False)

    class Meta:
        triggers = [
            pgtrigger.Protect(
                name="protect_deletes",
                operation=pgtrigger.Delete,
                condition=pgtrigger.Q(old__is_deletable=False)
            )
        ]
```

## Redundant update protection

Here we raise an error when someone makes a redundant update to the database:

```python
class RedundantUpdateModel(models.Model):
    redundant_field1 = models.BooleanField(default=False)
    redundant_field2 = models.BooleanField(default=False)

    class Meta:
        triggers = [
            pgtrigger.Protect(
                name="protect_redundant_updates",
                operation=pgtrigger.Update,
                condition=pgtrigger.AllDontChange()
            )
        ]
```

## Freezing published models

Here we have a `Post` model with a `status` field. We only allow edits to this model when its `status` is not "published".

```python
class Post(models.Model):
    status = models.CharField(default="unpublished")
    content = models.TextField()

    class Meta:
        triggers = [
            pgtrigger.Protect(
                name="freeze_published_model",
                operation=pgtrigger.Update,
                condition=pgtrigger.Q(old__status="published")
            )
        ]
```

We extend this example by allowing a published model to be able to be edited, but only when transitioning it to an "inactive" status.

```python
class Post(models.Model):
    status = models.CharField(default="unpublished")
    content = models.TextField()

    class Meta:
        triggers = [
            pgtrigger.Protect(
                name="freeze_published_model_allow_deactivation",
                operation=pgtrigger.Update,
                condition=(
                    pgtrigger.Q(old__status="published")
                    & ~pgtrigger.Q(new__status="inactive")
                )
            )
        ]
```

## Versioned models

Here we write a [pgtrigger.Trigger][] trigger that dynamically increments a model version before an update is applied. We do this with two triggers:

1. One that protects updating the `version` field of the model. We don't want people tampering with this field.
2. A trigger that increments the `version` of the `NEW` row before an update is applied. We ignore updating the version if there are no changes.
""" version = models.IntegerField(default=0) char_field = models.CharField(max_length=32) class Meta: triggers = [ # Protect anyone editing the version field directly pgtrigger.Protect( name="protect_updates", operation=pgtrigger.Update, condition=pgtrigger.AnyChange("version") ), # Increment the version field on changes pgtrigger.Trigger( name="versioning", when=pgtrigger.Before, operation=pgtrigger.Update, func="NEW.version = NEW.version + 1; RETURN NEW;", # Don't increment version on redundant updates. condition=pgtrigger.AnyChange() ) ] ``` !!! note The return value from [pgtrigger.Before][] triggers is what Postgres uses when executing the operation. `NULL` values tell Postgres to ignore the operation entirely. ## Keeping a search vector updated When using [Postgres full-text search](https://docs.djangoproject.com/en/4.0/ref/contrib/postgres/search/), keep `django.contrib.postgres.search.SearchVectorField` fields updated using [pgtrigger.UpdateSearchVector][] Here we keep a search vector updated based on changes to the `title` and `body` fields of a model: ```python class DocumentModel(models.Model): search_vector = SearchVectorField() title = models.CharField(max_length=128) body = models.TextField() class Meta: triggers = [ pgtrigger.UpdateSearchVector( name="add_title_and_body_to_vector", vector_field="search_vector", document_fields=["title", "body"], ) ] ``` [pgtrigger.UpdateSearchVector][] uses Postgres's `tsvector_update_trigger` to keep the search vector updated. See the [Postgres docs](https://www.postgresql.org/docs/current/textsearch-features.html#TEXTSEARCH-UPDATE-TRIGGERS) for more info. !!! note [pgtrigger.UpdateSearchVector][] triggers are incompatible with [pgtrigger.ignore][] and will raise a `RuntimeError` if used. ## Statement-level triggers and transition tables So far most of the examples have been for triggers that fire once per row. Statement-level triggers are fired once per statement and allow more flexibility and performance tuning for some scenarios. Instead of `OLD` and `NEW` rows, statement-level triggers can use "transition tables" to access temporary tables of old and new rows. One can use the [pgtrigger.Referencing][] construct to configure this. See [this StackExchange example](https://dba.stackexchange.com/a/177468) for more explanations about transition tables. !!! note Transition tables are only available in Postgres 10 and up. Here we have a history model that keeps track of changes to a field in the tracked model. We create a statement-level trigger that logs the old and new fields to the history model: ```python class HistoryModel(models.Model): old_field = models.CharField(max_length=32) new_field = models.CharField(max_length=32) class TrackedModel(models.Model): field = models.CharField(max_length=32) class Meta: triggers = [ pgtrigger.Trigger( name="track_history", level=pgtrigger.Statement, when=pgtrigger.After, operation=pgtrigger.Update, referencing=pgtrigger.Referencing(old="old_values", new="new_values"), func=f""" INSERT INTO {HistoryModel._meta.db_table}(old_field, new_field) SELECT old_values.field AS old_field, new_values.field AS new_field FROM old_values JOIN new_values ON old_values.id = new_values.id; RETURN NULL; """, ) ] ``` With this statement-level trigger, we have the benefit that only one additional query is performed, even on bulk inserts to the tracked model. Here's some example code to illustrate what the results look like. 
```python
TrackedModel.objects.bulk_create([TrackedModel(field='old'), TrackedModel(field='old')])

# Update all fields to "new"
TrackedModel.objects.update(field='new')

# The trigger should have tracked these updates
print(HistoryModel.objects.values('old_field', 'new_field'))

>>> [{
    'old_field': 'old',
    'new_field': 'new'
}, {
    'old_field': 'old',
    'new_field': 'new'
}]
```

!!! note

    When considering use of statement-level triggers for performance reasons, keep in mind that additional queries executed by triggers do not involve expensive round-trips from the application. A less-complex row-level trigger may be worth the performance cost.

## Ensuring child models exist

Consider a `Profile` model that has a `OneToOne` to Django's `User` model:

```python
class Profile(models.Model):
    user = models.OneToOneField(User, on_delete=models.CASCADE)
```

We use a "deferrable" trigger to ensure a `Profile` exists for every `User`. Deferrable triggers can execute at the end of a transaction, allowing us to check for the existence of a `Profile` after creating a `User`.

This example is continued in the [Deferrable Triggers](deferrable.md) section.

## Tracking model history and changes

Check out [django-pghistory](https://django-pghistory.readthedocs.io) to snapshot model changes and attach context from your application (e.g. the authenticated user) to the event.

## Model properties in the func

When writing triggers in the model `Meta`, it's not possible to access properties of the model like the database name or fields. [pgtrigger.Func][] solves this by exposing the following variables you can use in a template string:

* **meta**: The `._meta` of the model.
* **fields**: The fields of the model, accessible as attributes.
* **columns**: The field columns. `columns.field_name` will return the database column of the `field_name` field.

For example, say that we have the following model and trigger:

```python
class MyModel(models.Model):
    text_field = models.TextField()

    class Meta:
        triggers = [
            pgtrigger.Trigger(
                func=pgtrigger.Func(
                    """
                    # This is only pseudocode
                    SELECT {columns.text_field} FROM {meta.db_table};
                    """
                )
            )
        ]
```

Above the [pgtrigger.Func][] references the table name of the model and the column of `text_field`.

!!! note

    Remember to escape curly bracket characters when using [pgtrigger.Func][].
django-pgtrigger-4.11.1/docs/css/000077500000000000000000000000001460427745000165615ustar00rootroot00000000000000django-pgtrigger-4.11.1/docs/css/mkdocs-material.css000066400000000000000000000001441460427745000223460ustar00rootroot00000000000000.md-typeset__table {
    min-width: 100%;
}

.md-typeset table:not([class]) {
    display: table;
}
django-pgtrigger-4.11.1/docs/css/mkdocstrings.css000066400000000000000000000001651460427745000220040ustar00rootroot00000000000000div.doc-contents:not(.first) {
    padding-left: 25px;
    border-left: .05rem solid var(--md-typeset-table-color);
}
django-pgtrigger-4.11.1/docs/deferrable.md000066400000000000000000000072771460427745000204170ustar00rootroot00000000000000# Deferrable Triggers

Triggers are "deferrable" if their execution can be postponed until the end of the transaction. This behavior can be desirable for certain situations.
For example, here we ensure a `Profile` model always exists for every `User`:

```python
class Profile(models.Model):
    user = models.OneToOneField(User, on_delete=models.CASCADE)


class UserProxy(User):
    class Meta:
        proxy = True
        triggers = [
            pgtrigger.Trigger(
                name="profile_for_every_user",
                when=pgtrigger.After,
                operation=pgtrigger.Insert,
                timing=pgtrigger.Deferred,
                func=f"""
                    IF NOT EXISTS (SELECT FROM {Profile._meta.db_table} WHERE user_id = NEW.id) THEN
                        RAISE EXCEPTION 'Profile does not exist for user %', NEW.id;
                    END IF;
                    RETURN NULL;
                """
            )
        ]
```

This trigger ensures that any creation of a `User` will fail if a `Profile` does not exist. Note that we must create them both in a transaction:

```python
# This will succeed since the user has a profile when
# the transaction completes
with transaction.atomic():
    user = User.objects.create()
    Profile.objects.create(user=user)

# This will fail since it is not in a transaction
user = User.objects.create()
Profile.objects.create(user=user)
```

## Ignoring deferrable triggers

Deferrable triggers can be ignored, but remember that they execute at the very end of a transaction. If [pgtrigger.ignore][] does not wrap the transaction, the deferrable trigger will not be ignored.

Here is a correct way of ignoring the deferrable trigger from the initial example:

```python
with pgtrigger.ignore("my_app.UserProxy:profile_for_every_user"):
    # Use durable=True, otherwise we may be wrapped in a parent
    # transaction
    with transaction.atomic(durable=True):
        # We no longer need a profile for a user...
        User.objects.create(...)
```

Here's an example of code that will fail:

```python
with transaction.atomic():
    # This ignore does nothing for this trigger. `pgtrigger.ignore`
    # will no longer be in effect by the time the trigger runs at the
    # end of the transaction.
    with pgtrigger.ignore("my_app.UserProxy:profile_for_every_user"):
        # The trigger will raise an exception
        User.objects.create(...)
```

## Adjusting runtime behavior

When a deferrable trigger is declared, the `timing` attribute can be adjusted at runtime using [pgtrigger.constraints][]. This function mimics Postgres's `SET CONSTRAINTS` statement. Check [the Postgres docs for more info](https://www.postgresql.org/docs/current/sql-set-constraints.html).

[pgtrigger.constraints][] takes the new timing value and a list of trigger URIs over which to apply the value. The value is in effect until the end of the transaction.

Let's take our original example. We can set the trigger to immediately run, causing it to throw an error:

```python
with transaction.atomic():
    user = User.objects.create(...)

    # Make the deferrable trigger fire immediately. This will cause an exception
    # because a profile has not yet been created for the user
    pgtrigger.constraints(pgtrigger.Immediate, "my_app.UserProxy:profile_for_every_user")
```

Keep in mind that the constraint settings stay in effect until the end of the transaction. If a parent transaction wraps our code, timing overrides will persist.

!!! tip

    You can do the opposite of our example, creating triggers with `timing=pgtrigger.Immediate` and deferring their execution dynamically.

!!! note

    In a multi-schema setup, only triggers in the schema search path will be overridden with [pgtrigger.constraints][].
django-pgtrigger-4.11.1/docs/faq.md000066400000000000000000000063021460427745000170630ustar00rootroot00000000000000# Frequently Asked Questions

## Triggers are scary, don't you think?

Enforcing data integrity in application code is what you should be afraid of.
Triggers, like uniqueness constraints, are a blessing that help solidify your data modeling. The best way to ensure triggers are behaving correctly is to:

1. Write tests for them.
2. Run `python manage.py pgtrigger ls` to verify they are installed in production.

## Why not implement logic with signals or model methods?

See [this section from the docs](basics.md#the-advantages-over-signals-and-model-methods).

## Why can't I join foreign keys in conditions?

Postgres only allows columns of the changed rows to be used in trigger conditions, and data cannot be joined. That's why, for example, one cannot write a condition like `Q(old__foreign_key__field="value")`. Conditional logic like this must be performed in the trigger function itself by manually joining data.

## How do I test triggers?

Manipulate your models in your test suite and verify the expected result happens. If you've turned off migrations for your test suite, call [pgtrigger.install][] after the database is set up or set `settings.PGTRIGGER_INSTALL_ON_MIGRATE` to `True` to ensure triggers are installed for your tests.

!!! warning

    Be sure the `settings.PGTRIGGER_INSTALL_ON_MIGRATE` setting is isolated to your test suite, otherwise it could cause unexpected problems in production when reversing migrations.

When a failure-based trigger like [pgtrigger.Protect][] fails, a `django.db.utils.InternalError` is raised and can be caught in your test function. Keep in mind that this will place the current transaction in an errored state.

If you'd like to test triggers like this without needing to use a transaction test case, wrap the assertion in `transaction.atomic`. This is illustrated below with pseudocode using [pytest-django](https://pytest-django.readthedocs.io/en/latest/).

```python
from django.db import transaction
from django.db.utils import InternalError
import pytest


@pytest.mark.django_db
def test_protection_trigger():
    with pytest.raises(InternalError, match="Cannot delete"), transaction.atomic():
        ...  # Try to delete protected model

    # Since the above assertion is wrapped in transaction.atomic, we will still
    # have a valid transaction in our test case here
```

## How do I disable triggers?

Use [pgtrigger.ignore][] if you need to temporarily ignore triggers in your application (see [Ignoring Execution](ignoring_triggers.md)). Only use the core installation commands if you want to disable or uninstall triggers globally across the **entire application** (see the [Commands](commands.md) section).

## How can I reference the table name in a custom function?

When writing a trigger in `Meta`, it's not possible to access other model meta properties like `db_table`. Use [pgtrigger.Func][] to get around this. See [this example from the cookbook](cookbook.md#model-properties-in-the-func).

## How can I contact the author?

The primary author, Wes Kendall, loves to talk to users. Message him at [wesleykendall@protonmail.com](mailto:wesleykendall@protonmail.com) for any feedback. Any questions, feature requests, or bugs should be reported as [issues here](https://github.com/Opus10/django-pgtrigger/issues).
django-pgtrigger-4.11.1/docs/further_reading.md000066400000000000000000000004401460427745000214610ustar00rootroot00000000000000# Talks and Tutorials

Check out [this tutorial](https://wesleykendall.github.io/django-pgtrigger-tutorial/) with interactive examples from a Django meetup talk.
The [DjangoCon 2021 talk](https://www.youtube.com/watch?v=Tte3d4JjxCk/) also breaks down triggers and shows several examples.
django-pgtrigger-4.11.1/docs/ignoring_triggers.md000066400000000000000000000047141460427745000220430ustar00rootroot00000000000000# Ignoring Execution

[pgtrigger.ignore][] is a decorator and context manager that temporarily ignores triggers for a single thread of execution. Here we ignore deletion protection:

```python
class CannotDelete(models.Model):
    class Meta:
        triggers = [
            pgtrigger.Protect(name="protect_deletes", operation=pgtrigger.Delete)
        ]


# Bypass deletion protection
with pgtrigger.ignore("my_app.CannotDelete:protect_deletes"):
    CannotDelete.objects.all().delete()
```

As shown above, [pgtrigger.ignore][] takes a trigger URI that is formatted as `{app_label}.{model_name}:{trigger_name}`. Multiple trigger URIs can be given to [pgtrigger.ignore][], and [pgtrigger.ignore][] can be nested. If no trigger URIs are provided to [pgtrigger.ignore][], all triggers are ignored.

!!! tip

    See all trigger URIs with `python manage.py pgtrigger ls`

By default, [pgtrigger.ignore][] configures ignoring triggers on every postgres database. This can be changed with the `databases` argument.

!!! important

    Remember, [pgtrigger.ignore][] ignores the execution of a trigger on a per-thread basis. This is very different from disabling a trigger or uninstalling a trigger globally. See the [Advanced Installation](advanced_installation.md) section for more details on managing the installation of triggers.

## Transaction notes

[pgtrigger.ignore][] flushes a temporary Postgres variable at the end of the context manager if running in a transaction. This could cause issues for transactions that are in an errored state. Here's an example of when this case happens:

```python
with transaction.atomic():
    with pgtrigger.ignore("app.Model:protect_inserts"):
        try:
            # Create an object that raises an integrity error
            app.Model.objects.create(unique_key="duplicate")
        except IntegrityError:
            # Ignore the integrity error
            pass

    # When we exit the context manager here, it will try to flush
    # a local Postgres variable. This causes an error because the transaction
    # is in an errored state.
```

If you're ignoring triggers and handling database errors, there are two ways to prevent this error from happening:

1. Wrap the outer transaction in `with pgtrigger.ignore.session():` so that the session is completed outside the transaction.
2. Wrap the inner `try/except` in `with transaction.atomic():` so that the errored part of the transaction is rolled back before the [pgtrigger.ignore][] context manager ends.
django-pgtrigger-4.11.1/docs/index.md000066400000000000000000000106321460427745000174240ustar00rootroot00000000000000# django-pgtrigger

`django-pgtrigger` helps you write [Postgres triggers](https://www.postgresql.org/docs/current/sql-createtrigger.html) for your Django models.

## Why should I use triggers?

Triggers can solve a variety of complex problems more reliably, performantly, and succinctly than application code. For example,

* Protecting operations on rows or columns ([pgtrigger.Protect][]).
* Making read-only models or fields ([pgtrigger.ReadOnly][]).
* Soft-deleting models ([pgtrigger.SoftDelete][]).
* Snapshotting and tracking model changes ([django-pghistory](https://django-pghistory.readthedocs.io)).
* Enforcing field transitions ([pgtrigger.FSM][]).
* Keeping a search vector updated for full-text search ([pgtrigger.UpdateSearchVector][]).
* Building official interfaces (e.g. enforcing use of `User.objects.create_user` and not `User.objects.create`).
* Versioning models, mirroring fields, computing unique model hashes, and the list goes on...

All of these examples require no overridden methods, no base models, and no signal handling.

## Quick start

Install `django-pgtrigger` with `pip3 install django-pgtrigger` and add `pgtrigger` to `settings.INSTALLED_APPS`.

[pgtrigger.Trigger][] objects are added to `triggers` in model `Meta`. `django-pgtrigger` comes with several trigger classes, such as [pgtrigger.Protect][]. In the following, we're protecting the model from being deleted:

```python
class ProtectedModel(models.Model):
    """This model cannot be deleted!"""

    class Meta:
        triggers = [
            pgtrigger.Protect(name='protect_deletes', operation=pgtrigger.Delete)
        ]
```

When migrations are created and executed, `ProtectedModel` will raise an exception anytime a deletion is attempted.

Let's extend this example further and only protect deletions on active objects. In this example, the trigger conditionally runs when the row being deleted (the `OLD` row in trigger terminology) is still active:

```python
class ProtectedModel(models.Model):
    """Active objects cannot be deleted!"""

    is_active = models.BooleanField(default=True)

    class Meta:
        triggers = [
            pgtrigger.Protect(
                name='protect_deletes',
                operation=pgtrigger.Delete,
                condition=pgtrigger.Q(old__is_active=True)
            )
        ]
```

`django-pgtrigger` uses [pgtrigger.Q][] and [pgtrigger.F][] objects to conditionally execute triggers based on the `OLD` and `NEW` rows. Combining these Django idioms with [pgtrigger.Trigger][] objects can solve a wide variety of problems without ever writing SQL. Users, however, can still use raw SQL for complex cases.

Triggers are installed like other database objects. Run `python manage.py makemigrations` and `python manage.py migrate` to install triggers.

## Compatibility

`django-pgtrigger` is compatible with Python 3.8 - 3.12, Django 3.2 - 5.0, Psycopg 2 - 3, and Postgres 12 - 16.

## Next steps

We recommend everyone first read:

* [Installation](installation.md) for how to install the library.
* [Basics](basics.md) for an overview and motivation.

After this, there are several usage guides:

* [Cookbook](cookbook.md) for trigger examples.
* [Conditional Triggers](conditional.md) for all the ways one can create conditional triggers.
* [Ignoring Execution](ignoring_triggers.md) for dynamically ignoring triggers.
* [Deferrable Triggers](deferrable.md) for deferring trigger execution.
* [Advanced Installation](advanced_installation.md) for installing triggers on third-party models, many-to-many models, programmatic installation, and more.
* [Advanced Database Setups](advanced_db.md) for notes on how triggers work in multi-database, multi-schema, or partitioned database setups.

There's additional help in these sections:

* [FAQ](faq.md) for common questions like testing and disabling triggers.
* [Troubleshooting](troubleshooting.md) for advice on known issues.
* [Upgrading](upgrading.md) for upgrading to new major versions.
* [Further Reading](further_reading.md) for additional reading and videos.

Finally, core API information exists in these sections:

* [Settings](settings.md) for all available Django settings.
* [Commands](commands.md) for using the `python manage.py pgtrigger` management commands.
* [Module](module.md) for documentation of the `pgtrigger` module.
* [Release Notes](release_notes.md) for information about every release.
* [Contributing Guide](contributing.md) for details on contributing to the codebase.
django-pgtrigger-4.11.1/docs/installation.md000066400000000000000000000002501460427745000210110ustar00rootroot00000000000000# Installation

Install `django-pgtrigger` with:

    pip3 install django-pgtrigger

After this, add `pgtrigger` to the `INSTALLED_APPS` setting of your Django project.
django-pgtrigger-4.11.1/docs/module.md000066400000000000000000000022101460427745000175730ustar00rootroot00000000000000# Module

Below are the core classes and functions of the `pgtrigger` module.

## Level clause

:::pgtrigger.Row

:::pgtrigger.Statement

## When clause

:::pgtrigger.After

:::pgtrigger.Before

:::pgtrigger.InsteadOf

## Operation clause

:::pgtrigger.Insert

:::pgtrigger.Update

:::pgtrigger.Delete

:::pgtrigger.Truncate

:::pgtrigger.UpdateOf

## Referencing clause

:::pgtrigger.Referencing

## Timing clause

:::pgtrigger.Immediate

:::pgtrigger.Deferred

## Func clause

:::pgtrigger.Func

## Conditions

:::pgtrigger.Condition

:::pgtrigger.AnyChange

:::pgtrigger.AnyDontChange

:::pgtrigger.AllChange

:::pgtrigger.AllDontChange

:::pgtrigger.Q

:::pgtrigger.F

:::pgtrigger.IsDistinctFrom

:::pgtrigger.IsNotDistinctFrom

## Triggers

:::pgtrigger.Trigger

:::pgtrigger.Protect

:::pgtrigger.ReadOnly

:::pgtrigger.SoftDelete

:::pgtrigger.FSM

:::pgtrigger.UpdateSearchVector

## Runtime execution

:::pgtrigger.constraints

:::pgtrigger.ignore

:::pgtrigger.schema

## Registry

:::pgtrigger.register

:::pgtrigger.registered

## Installation

:::pgtrigger.install

:::pgtrigger.uninstall

:::pgtrigger.enable

:::pgtrigger.disable

:::pgtrigger.prunable

:::pgtrigger.prune
django-pgtrigger-4.11.1/docs/release_notes.md000066400000000000000000000530711460427745000211510ustar00rootroot00000000000000# Changelog

## 4.11.1 (2024-04-06)

### Trivial

- Fix ReadTheDocs builds. [Wesley Kendall, 9c7f377]

## 4.11.0 (2023-11-26)

### Feature

- Django 5.0 compatibility [Wesley Kendall, 2e1366e]

    Support and test against Django 5 with psycopg2 and psycopg3.

## 4.10.1 (2023-11-26)

### Trivial

- Change docs so that deferred (AFTER) triggers return NULL [Peter Thomassen, 59be2e9]
- Fix typo in cookbook (`exclude` instead of `filter`) [Peter Thomassen, 3714c08]
- Fix typo in docs model protection example [Peter Thomassen, 00b86af]
- Do additional error checking for arguments supplied to `FSM` trigger. [Wesley Kendall, e203834]

## 4.10.0 (2023-11-23)

### Bug

- Fix issues updating triggers in non-atomic migrations [Artem Streltsov, 888c4cf]

    Fields of trigger conditions can now have their types updated in non-atomic migrations.

## 4.9.0 (2023-10-16)

### Feature

- Utilities to build trigger conditions based on field changes. [Wesley Kendall, 1471a35]

    The following utility classes have been added to help trigger authors construct trigger conditions based on changes to underlying fields:

    - `pgtrigger.AnyChange`: If any supplied fields change, trigger the condition.
    - `pgtrigger.AnyDontChange`: If any supplied fields don't change, trigger the condition.
    - `pgtrigger.AllChange`: If all supplied fields change, trigger the condition.
    - `pgtrigger.AllDontChange`: If all supplied fields don't change, trigger the condition.

    A section in the docs was added for more examples on their use.

## 4.8.0 (2023-10-09)

### Bug

- Create triggers in unmigrated apps when `--run-syncdb` is specified [Daniel Hillier, b762f9d]

    Triggers are now properly created when running `manage.py migrate --run-syncdb` for unmigrated apps.
- Recreate triggers when a migration that removed triggers is unapplied [Daniel Hillier, c223910]

    Reverting a migration of dropped models will now successfully recreate triggers.

### Feature

- Add Python 3.12 support and use Mkdocs for documentation [Wesley Kendall, c2b389f]

    Python 3.12 and Postgres 16 are supported now, along with having revamped docs using Mkdocs and the Material theme.

    Python 3.7 support was dropped.

### Trivial

- Fix minor log formatting issues [Wesley Kendall, 76ab946]

## 4.7.0 (2023-06-08)

### Feature

- Added Python 3.11, Django 4.2, and Psycopg 3 support [Wesley Kendall, 27dc243]

    Python 3.11, Django 4.2, and Psycopg 3 are now supported and tested. Django 2.2 support has been dropped.

## 4.6.0 (2022-10-07)

### Feature

- Added ``pgtrigger.Func`` for accessing model properties in function declarations. [Wesley Kendall, 4bd6abf]

    When describing a trigger in ``Meta``, it's not possible to access model meta properties like ``db_table``. ``pgtrigger.Func`` solves this by exposing ``meta``, ``fields``, and ``columns`` variables that can be used in a format string.

    See the trigger cookbook in the docs for an example.

- Added ``ReadOnly`` trigger for uneditable models and fields [Wesley Kendall, 0a3c162]

    The ``pgtrigger.ReadOnly`` trigger protects updates on models and takes an optional ``fields`` or ``exclude`` argument to specify which fields are read only. If no arguments are provided, the entire model is read only.

### Trivial

- Updated with latest Django template [Wesley Kendall, 84b46f1]

## 4.5.3 (2022-09-19)

### Trivial

- Fix typo in documentation [Francisco Couzo, def5432]
- Fix issues when using Django's dummy database. [Wesley Kendall, cc1cb95]
- Fixed minor documentation typos [Wes Kendall, dc473ff]

## 4.5.2 (2022-09-06)

### Trivial

- Add Soft-Delete Model Manager example to docs [Jason Oppel, 3a46ae7]

## 4.5.1 (2022-09-01)

### Trivial

- Remove unused migration code and restructure docs [Wes Kendall, a8793fc]
- Optimize test suite [Wes Kendall, 863fa93]

## 4.5.0 (2022-08-31)

### Bug

- Migrations properly serialize dynamic triggers and add better support for reverse migrations [Wes Kendall, 2eb3014]

    Triggers that override ``get_func`` or otherwise generate dynamic SQL are properly reflected in migrations when the underlying implementation changes.

    Along with this, migrations now serialize SQL objects instead of trigger classes, making it more robust when reversing migrations or updating underlying implementations of existing triggers.

    This change updates the hashes of all triggers and thus re-creates all triggers when running ``makemigrations`` or when manually installing them.

## 4.4.0 (2022-08-27)

### Bug

- Pruning/installations fixed for Postgres versions 12 and under. [Wes Kendall, 22d60e9]

    Partitioned table support introduced a bug in using trigger management commands for Postgres 12 and under. This has been fixed.

### Trivial

- Local development enhancements [Wes Kendall, a4d3c9c]

## 4.3.4 (2022-08-26)

### Trivial

- Test against Django 4.1 and other CI improvements [Wes Kendall, 813f67e]

## 4.3.3 (2022-08-24)

### Trivial

- Fix ReadTheDocs builds [Wes Kendall, 3870643]

## 4.3.2 (2022-08-20)

### Trivial

- Fix release note rendering and code formatting changes [Wes Kendall, c834606]

## 4.3.1 (2022-08-19)

### Trivial

- Fixed ReadTheDocs builds [Wes Kendall, 2cd0c9e]

## 4.3.0 (2022-08-18)

### Feature

- Support for partitioned tables [Wes Kendall, 863b8cb]

    Installation commands and all core trigger functionality work with partitioned tables.
Users will need to run ``python manage.py pgtrigger install`` to upgrade existing trigger installations, otherwise they will appear as outdated when running ``python manage.py pgtrigger ls``. Although outdated triggers will still run successfully for non-partitioned tables, this backwards compatibility will be removed in version 5. ## 4.2.1 (2022-08-18) ### Trivial - Do schema editor patching in ``App.ready()`` instead of module load [Wes Kendall, cce99ce] ## 4.2.0 (2022-08-18) ### Bug - Ensure interoperability with other Postgres backends [Wes Kendall, 1c4f480] ``django-pgtrigger`` trigger migrations are interoperable with PostGIS and other Postgres-flavored database backends. ## 4.1.0 (2022-08-17) ### Bug - Allow altering columns from trigger conditions [Wes Kendall, 1178457] Previously if one changed the column type of a field used in a trigger condition, installation would fail because Postgres doesn't allow this. The schema editor was patched to allow for this behavior, dropping and recreating triggers when column types are altered. ## 4.0.1 (2022-08-15) ### Trivial - Fixed minor issue in settings preventing docs from being built [Wes Kendall, 5ad18f8] ## 4.0.0 (2022-08-15) ### Api-Break - Multi-database and registry behavior changed [Wes Kendall, 0663807] There were four key additions around multi-database and multi-schema support: 1. When using a multi-database environment, ``django-pgtrigger`` now uses ``allow_migrate`` of the router rather than ``db_for_write`` to determine if a trigger should be installed for a model. 2. Management commands were changed to operate on one database at a time to be consistent with Django management commands. Install, uninstall, prune, disable, enable, and ls all take an optional ``--database`` argument. 3. ``pgtrigger.ignore``, ``pgtrigger.constraints``, and ``pgtrigger.schema`` were all updated to take a ``databases`` argument, defaulting to working on every postgres database when used for dynamic runtime behavior. 4. The Postgres function used by ``pgtrigger.ignore`` is always installed in the public schema by default. It is referenced using its fully-qualified path. The schema can be changed with ``settings.PGTRIGGER_SCHEMA``. Setting it to ``None`` will use the schema in the search path. Because of this change, the SQL for installed triggers changes, which causes triggers to appear as outdated when listing them. This can be fixed by running ``manage.py pgtrigger install`` to re-install triggers. Along with this, there were a few other breaking changes to the API: 1. ``pgtrigger.get`` was renamed to ``pgtrigger.registered``. 2. ``manage.py pgtrigger ls`` shows the trigger status followed by the URI in each line of output. ### Bug - Reference ``UpdateSearchVector`` trigger columns correctly [Wes Kendall, 7d40894] Columns configured in the ``UpdateSearchVector`` trigger were previously referenced in SQL by their model field name and not their column name. ### Feature - Added multi-schema support [Wes Kendall, 98342f2] ``django-pgtrigger`` didn't handle multiple schemas well, causing some issues for legacy installation commands. Multiple schema support is a first-class citizen. Depending on the database setup, you can now take advantage of the ``--schema`` options for management commands to dynamically set the schema. Docs were added that overview multi-schema support.
### Trivial - Added docs for using triggers in abstract models [Wes Kendall, cd215ac] - Refactored project structure [Wes Kendall, 4d53eef] ## 3.4.0 (2022-08-11) ### Bug - Fixed issues using ``pgtrigger.ignore`` with multiple databases [Wes Kendall, 557f0e1] ``pgtrigger.ignore`` now uses the connection of the database router when ignoring triggers. ### Feature - Add ``pgtrigger.UpdateSearchVector`` to keep search vectors updated [Wes Kendall, 671e8be] When using Django's full-text search, one can keep a ``SearchVectorField`` updated with the relevant document fields by using ``pgtrigger.UpdateSearchVector``. An example was added to the trigger cookbook. - Added ``pgtrigger.constraints`` for runtime configuration of deferrable triggers [Wes Kendall, 4b77b7b] ``pgtrigger.constraints`` mimics Postgres's ``SET CONSTRAINTS`` statement, allowing one to dynamically modify when a deferrable trigger runs. Documentation was also added for deferrable triggers with an example in the cookbook. - Added deferrable triggers [Wes Kendall, fe4f16e] Triggers now have an optional ``timing`` argument. If set, triggers will be created as "CONSTRAINT" triggers that can be deferred. When ``timing`` is set to ``pgtrigger.Immediate``, the trigger will run at the end of a statement. ``pgtrigger.Deferred`` will cause the trigger to run at the end of the transaction. Note that deferrable triggers must have both ``pgtrigger.After`` and ``pgtrigger.Row`` values set for the ``when`` and ``level`` attributes. ## 3.3.0 (2022-08-10) ### Bug - Fixes ignoring triggers with nested transactions [Wes Kendall, d32113d] ``pgtrigger.ignore`` avoids injecting SQL when transactions are in a failed state, allowing for one to use nested transactions while ignoring triggers. - Fixed issue re-installing triggers with different conditions. [Wes Kendall, 68e29d2] Triggers with conditions that change were not successfully re-installed with ``pgtrigger.install``. Note that this only affects legacy installation and not installation with the new migration system. ## 3.2.0 (2022-08-08) ### Feature - Support proxy models on default many-to-many "through" relationships. [Wes Kendall, 4cb0f65] Previously one had to use an unmanaged model to declare triggers on default many-to-many "through" relationships. Users can now define a proxy model on these instead. Support for unmanaged models was dropped. ## 3.1.0 (2022-08-08) ### Api-Break - Integration with Django's migration system. [Wes Kendall, 6916c14] Triggers are fully integrated with Django's migration system, and they are no longer installed at the end of migrations by default. Users instead need to run ``python manage.py makemigrations`` to make trigger migrations for their applications. Triggers for models in third-party apps are declared with proxy models. Triggers for default many-to-many "through" models are declared with unmanaged models. For instructions on upgrading or preserving legacy behavior, see the frequently asked questions of the docs. ### Bug - Fixed issues with proxy models and M2M "through" models. [Wes Kendall, 52aa81f] Proxy models weren't creating migrations, and M2M "through" models are handled by making an unmanaged model that points to the right DB table. ### Feature - Remove dependency on ``django-pgconnection``. [Wes Kendall, af0c908] Users no longer have to wrap ``settings.DATABASES`` with ``django-pgconnection`` in order to use the ``pgtrigger.ignore`` function. 
## 2.5.1 (2022-07-31) ### Trivial - Updated with latest Django template, fixing doc builds [Wes Kendall, 4b175a4] ## 2.5.0 (2022-07-30) ### Bug - Ignore non-postgres databases in global operations [Wes Kendall, a1aff5d] Some operations, such as pruning triggers, would iterate over all databases in a project, including non-postgres ones. This fix ignores non-postgres databases. - Fixes transaction leak when using ``pgtrigger.ignore()`` [Wes Kendall, 1501d7e] ``pgtrigger.ignore()`` would continue to ignore triggers until the end of the transaction once the context manager exited. This is now fixed. - Fixed more issues related to custom table names [Wes Kendall, a0e1f6d] Fixes and test cases were added for custom table names that collide with reserved words. - Wrap table names to avoid SQL command conflicts [Zac Miller, 86ee983] Prevents models/tables with names like Order from causing syntax errors, and adds the PyCharm .idea/ folder to .gitignore ### Feature - Triggers can be specified in model Meta options [Wes Kendall, 5c1cfec] Triggers can now be specified with the ``triggers`` attribute of a model's Meta options. This still works alongside the old method of using ``pgtrigger.register``. ## 2.4.1 (2022-02-24) ### Trivial - Updated with the latest template, dropped 3.6 support, added Docker-based development [Wes Kendall, 25e0f0d] ## 2.4.0 (2021-08-15) ### Bug - Ensure that generated postgres IDs are lowercase [Wes Kendall, 5c12f66] django-pgtrigger now ensures that generated postgres IDs are lowercase. Postgres IDs are case insensitive, and django-pgtrigger had issues dealing with names that had a mix of cases. ### Feature - Add the "declare" portion of a trigger as a top-level attribute [Wes Kendall, cd18512] Previously one had to subclass a trigger and override ``get_declare`` in order to change how the "DECLARE" fragment of a trigger was rendered. Users can now provide ``declare`` to the instantiation of a trigger. The documentation was updated to reflect this change. ### Trivial - Fix broken code examples in docs [Wes Kendall, 372719c] ## 2.3.3 (2021-08-15) ### Trivial - Adjusted max length of trigger names to 47 characters [Wes Kendall, 528140f] - Updated to the latest Django app template [Wes Kendall, d2d5328] - Change "Delete" to "Update" in tutorial docs [Rich Rauenzahn, 2839a78] ## 2.3.2 (2021-05-30) ### Trivial - Fixing tags after organization migration [Wes Kendall, 0ba84d2] ## 2.3.1 (2021-05-29) ### Bug - Throw errors on invalid trigger definitions. [Wes Kendall, 28f1329] Previously triggers were installed with a broad try/except in order to ignore errors when installing duplicate triggers. This caused invalid triggers to not be installed, with no errors thrown. The code was updated to catch the specific exception for duplicate triggers and allow other trigger errors to surface. A failing test case was added. - Fix for wrong argument supplied at _get_database fn call [arpit o.O, 2f7cea1] ### Trivial - Updated with the latest django app template [Wes Kendall, 9a71227] - Fix incorrect name in example [Simon Willison, 069e05a] ## 2.2.1 (2021-02-23) ### Trivial - Optionally change "other" DB name if set at all [Tómas Árni Jónasson, 5b24058] ## 2.2.0 (2021-02-09) ### Feature - Multiple database support [Wes Kendall, b09ba73] Supports multiple-database functionality in all core functions and management commands. By default, all functions and management commands operate over all databases in a multi-database setup. This behavior can be overridden with the ``--database`` flag.
When calling ``manage.py migrate``, only the database being migrated will have relevant triggers installed. This fits into how Django supports multi-database migrations. ## 2.1.0 (2020-10-20) ### Bug - Fixed possibility of duplicate trigger function names [Wes Kendall, b9b1552] django-pgtrigger previously enforced that trigger names were unique per model; however, the name of the trigger function being called is global and also needs to be checked for uniqueness. django-pgtrigger now adds a hash to the trigger function and installed trigger name based on the registered model. This prevents a global collision for trigger functions. Note that this change will make it appear like no triggers are installed. Upgrading to this version will involve dropping and re-creating existing triggers. ## 2.0.0 (2020-10-12) ### Api-Break - Trigger management commands [Wes Kendall, be26d33] Adds the ability to manage triggers by name with the ``manage.py pgtrigger`` management command. This change includes the following subcommands: - ``manage.py pgtrigger ls``: List all triggers, their installation status, and whether they are enabled or disabled. - ``manage.py pgtrigger install``: Install triggers. - ``manage.py pgtrigger uninstall``: Uninstall triggers. - ``manage.py pgtrigger enable``: Enable triggers. - ``manage.py pgtrigger disable``: Disable triggers. - ``manage.py pgtrigger prune``: Prune triggers. Because of this change, names are now enforced for every trigger and must be unique for every model. Users that wish to upgrade to this version must now supply a ``name`` keyword argument to their triggers. Docs were updated with references to the new management commands. ## 1.3.0 (2020-07-23) ### Feature - Extend the ``pgtrigger.SoftDelete`` trigger to support more field types. [Wes Kendall, 4dd8cf8] ``pgtrigger.SoftDelete`` takes an optional "value" argument to assign to the soft-deleted attribute upon deletion. This allows for more flexibility in soft-delete models that might, for example, set a ``CharField`` to "inactive". - ``pgtrigger.FSM`` enforces a finite state machine on a field. [Wes Kendall, bd3980e] The ``pgtrigger.FSM`` trigger allows a user to configure a field and a set of valid transitions for the field. An error will be raised if any transitions happen that are not part of the valid transitions list. The docs were updated with an example of how to use ``pgtrigger.FSM``. ### Trivial - Added trigger cookbook example for how to track history and model changes. [Wes Kendall, 114a70a] - Add "versioning" example to trigger cookbook. [Wes Kendall, 842ad5b] - Added trigger cookbook example of freezing a published model [Wes Kendall, 994e9da] ## 1.2.0 (2020-07-23) ### Feature - Added ``pgtrigger.ignore`` for dynamically ignoring triggers. [Wes Kendall, b3557bb] ``pgtrigger.ignore`` can be used to ignore triggers per thread of execution. Docs were updated with examples of how to use ``pgtrigger.ignore`` and how to utilize it to create "official" interfaces. - Allow custom naming of triggers [Wes Kendall, 864d653] Triggers can be given a "name" attribute that is used when generating the trigger and obtaining it from the registry. This will not only make trigger management in the future easier, but it will also make it possible to dynamically ignore specific triggers registered to models. ## 1.1.0 (2020-07-21) ### Feature - Added "Referencing" construct for statement-level triggers.
[Wes Kendall, 20d958e] The ``pgtrigger.Referencing`` construct allows one to reference transition tables in statement-level triggers. - Added statement-level triggers. [Wes Kendall, c0cc365] django-pgtrigger now has a "level" construct for specifying row and statement-level triggers. All triggers default to being row-level triggers. ### Trivial - Support the "INSTEAD OF" construct for views on SQL triggers. [Wes Kendall, 79f9d54] - Updated docs and added a quick start section [Wes Kendall, 9ce7b29] ## 1.0.1 (2020-06-29) ### Trivial - Updated README and updated with the latest public django app template. [Wes Kendall, 001ef68] ## 1.0.0 (2020-06-27) ### Api-Break - Initial release of django-pgtrigger. [Wes Kendall, 1f737f0] ``django-pgtrigger`` provides primitives for configuring Postgres triggers on Django models. Models can be decorated with `pgtrigger.register` and supplied with `pgtrigger.Trigger` objects. These will automatically be installed after migrations. Users can use Django idioms such as ``Q`` and ``F`` objects to declare trigger conditions, alleviating the need to write raw SQL for a large amount of use cases. ``django-pgtrigger`` comes built with some derived triggers for expressing common patterns. For example, `pgtrigger.Protect` can protect operations on a model, such as deletions or updates (e.g. an append-only model). The `pgtrigger.Protect` trigger can even target protecting operations on specific updates of fields (e.g. don't allow updates if ``is_active`` is ``False`` on a model). Another derived trigger, `pgtrigger.SoftDelete`, can soft-delete models by setting a field to ``False`` when a deletion happens on the model. django-pgtrigger-4.11.1/docs/requirements.txt000066400000000000000000000144021460427745000212560ustar00rootroot00000000000000ansimarkup==2.0.0 ; python_full_version >= "3.8.0" and python_version < "4" arrow==1.3.0 ; python_version >= "3.8" and python_version < "4" asgiref==3.7.2 ; python_version >= "3.8" and python_version < "4" babel==2.13.0 ; python_version >= "3.8" and python_version < "4" backports-zoneinfo==0.2.1 ; python_version >= "3.8" and python_version < "3.9" binaryornot==0.4.4 ; python_full_version >= "3.8.0" and python_version < "4" black==23.9.1 ; python_version >= "3.8" and python_version < "4" cachetools==5.3.1 ; python_version >= "3.8" and python_version < "4" certifi==2023.7.22 ; python_version >= "3.8" and python_version < "4" chardet==5.2.0 ; python_version >= "3.8" and python_version < "4" charset-normalizer==3.3.0 ; python_version >= "3.8" and python_version < "4" click==8.1.7 ; python_version >= "3.8" and python_version < "4" colorama==0.4.6 ; python_version >= "3.8" and python_version < "4" cookiecutter==1.7.3 ; python_full_version >= "3.8.0" and python_version < "4" coverage[toml]==7.3.2 ; python_version >= "3.8" and python_version < "4" distlib==0.3.7 ; python_version >= "3.8" and python_version < "4" dj-database-url==2.1.0 ; python_full_version >= "3.8.0" and python_version < "4" django-dynamic-fixture==4.0.1 ; python_full_version >= "3.8.0" and python_version < "4" django-postgres-extra==2.0.4 ; python_full_version >= "3.8.0" and python_version < "4" django==4.2.6 ; python_version >= "3.8" and python_version < "4" exceptiongroup==1.1.3 ; python_full_version >= "3.8.0" and python_version < "3.11" filelock==3.12.4 ; python_version >= "3.8" and python_version < "4" footing==0.1.4 ; python_full_version >= "3.8.0" and python_version < "4" formaldict==1.0.5 ; python_full_version >= "3.8.0" and python_version < "4"
ghp-import==2.1.0 ; python_version >= "3.8" and python_version < "4" git-tidy==1.2.0 ; python_full_version >= "3.8.0" and python_version < "4" griffe==0.36.5 ; python_version >= "3.8" and python_version < "4" idna==3.4 ; python_version >= "3.8" and python_version < "4" importlib-metadata==6.8.0 ; python_version >= "3.8" and python_version < "3.10" iniconfig==2.0.0 ; python_full_version >= "3.8.0" and python_version < "4" jinja2-time==0.2.0 ; python_full_version >= "3.8.0" and python_version < "4" jinja2==3.1.2 ; python_version >= "3.8" and python_version < "4" kmatch==0.5.0 ; python_full_version >= "3.8.0" and python_version < "4" markdown==3.5 ; python_version >= "3.8" and python_version < "4" markupsafe==2.1.3 ; python_version >= "3.8" and python_version < "4" mergedeep==1.3.4 ; python_version >= "3.8" and python_version < "4" mkdocs-autorefs==0.5.0 ; python_version >= "3.8" and python_version < "4" mkdocs-material-extensions==1.2 ; python_version >= "3.8" and python_version < "4" mkdocs-material==9.4.4 ; python_version >= "3.8" and python_version < "4" mkdocs==1.5.3 ; python_version >= "3.8" and python_version < "4" mkdocstrings-python==1.7.2 ; python_version >= "3.8" and python_version < "4" mkdocstrings==0.23.0 ; python_version >= "3.8" and python_version < "4" mypy-extensions==1.0.0 ; python_version >= "3.8" and python_version < "4" packaging==23.2 ; python_version >= "3.8" and python_version < "4" paginate==0.5.6 ; python_version >= "3.8" and python_version < "4" pathspec==0.11.2 ; python_version >= "3.8" and python_version < "4" platformdirs==3.11.0 ; python_version >= "3.8" and python_version < "4" pluggy==1.3.0 ; python_version >= "3.8" and python_version < "4" poetry-core==1.7.0 ; python_version >= "3.8" and python_version < "4.0" poyo==0.5.0 ; python_full_version >= "3.8.0" and python_version < "4" prompt-toolkit==3.0.39 ; python_full_version >= "3.8.0" and python_version < "4" psycopg2-binary==2.9.9 ; python_full_version >= "3.8.0" and python_version < "4" pygments==2.16.1 ; python_version >= "3.8" and python_version < "4" pymdown-extensions==10.3 ; python_version >= "3.8" and python_version < "4" pyproject-api==1.6.1 ; python_version >= "3.8" and python_version < "4" pytest-cov==4.1.0 ; python_full_version >= "3.8.0" and python_version < "4" pytest-django==4.5.2 ; python_full_version >= "3.8.0" and python_version < "4" pytest-dotenv==0.5.2 ; python_full_version >= "3.8.0" and python_version < "4" pytest-mock==3.6.1 ; python_full_version >= "3.8.0" and python_version < "4" pytest-order==1.1.0 ; python_full_version >= "3.8.0" and python_version < "4" pytest==7.4.2 ; python_full_version >= "3.8.0" and python_version < "4" python-dateutil==2.8.2 ; python_version >= "3.8" and python_version < "4" python-dotenv==1.0.0 ; python_version >= "3.8" and python_version < "4" python-gitlab==3.15.0 ; python_full_version >= "3.8.0" and python_version < "4" python-slugify==8.0.1 ; python_full_version >= "3.8.0" and python_version < "4" pytz==2023.3.post1 ; python_version >= "3.8" and python_version < "3.9" pyyaml-env-tag==0.1 ; python_version >= "3.8" and python_version < "4" pyyaml==6.0.1 ; python_version >= "3.8" and python_version < "4" regex==2023.10.3 ; python_version >= "3.8" and python_version < "4" requests-file==1.5.1 ; python_full_version >= "3.8.0" and python_version < "4" requests-toolbelt==1.0.0 ; python_full_version >= "3.8.0" and python_version < "4" requests==2.31.0 ; python_version >= "3.8" and python_version < "4" ruff==0.0.292 ; python_full_version >= "3.8.0" and 
python_version < "4" setuptools==68.2.2 ; python_version >= "3.8" and python_version < "4" six==1.16.0 ; python_version >= "3.8" and python_version < "4" sqlparse==0.4.4 ; python_version >= "3.8" and python_version < "4" text-unidecode==1.3 ; python_full_version >= "3.8.0" and python_version < "4" tldextract==3.6.0 ; python_full_version >= "3.8.0" and python_version < "4" tomli==2.0.1 ; python_version >= "3.8" and python_full_version <= "3.11.0a6" tox==4.11.3 ; python_version >= "3.8" and python_version < "4" types-python-dateutil==2.8.19.14 ; python_version >= "3.8" and python_version < "4" typing-extensions==4.8.0 ; python_version >= "3.8" and python_version < "4" tzdata==2023.3 ; python_version >= "3.8" and python_version < "4" and sys_platform == "win32" urllib3==2.0.6 ; python_version >= "3.8" and python_version < "4" virtualenv==20.24.5 ; python_version >= "3.8" and python_version < "4" watchdog==3.0.0 ; python_version >= "3.8" and python_version < "4" wcwidth==0.2.8 ; python_full_version >= "3.8.0" and python_version < "4" zipp==3.17.0 ; python_version >= "3.8" and python_version < "3.10" . django-pgtrigger-4.11.1/docs/settings.md000066400000000000000000000036201460427745000201540ustar00rootroot00000000000000# Settings Below are all settings for `django-pgtrigger`. ## PGTRIGGER_INSTALL_ON_MIGRATE If `True`, `python manage.py pgtrigger install` will run automatically after `python manage.py migrate`. The trigger install command will use the same database as the migrate command. This setting is unnecessary if `PGTRIGGER_MIGRATIONS` is `True`. **Default** `False` !!! warning There are known issues with this approach, such as having trigger installation issues when reversing migrations. This is a secondary way to install triggers if migrations or model meta options aren't desired. ## PGTRIGGER_MIGRATIONS If `False`, triggers will not be added to migrations when running `python manage.py makemigrations`. Triggers will need to be installed manually or with `settings.PGTRIGGER_INSTALL_ON_MIGRATE`. **Default** `True` ## PGTRIGGER_MODEL_META If `False`, triggers cannot be specified in the `triggers` attribute of model `Meta` options. Migrations will also be disabled. Triggers will need to be registered to models with [pgtrigger.register][] and installed manually or with `settings.PGTRIGGER_INSTALL_ON_MIGRATE`. **Default** `True` !!! warning Turning this off will result in an error if a third-party application declares triggers in model `Meta`. ## PGTRIGGER_PRUNE_ON_INSTALL If `True`, running `python manage.py install` or `python manage.py uninstall` with no arguments will run `python manage.py prune` to prune orphaned triggers. **Default** `True` ## PGTRIGGER_SCHEMA The schema under which global database objects are stored, such as the Postgres function used for ignoring triggers. **Default** `public` ## PGTRIGGER_SCHEMA_EDITOR If `False`, the schema editor for migrations will not be patched. Fields that are used in trigger conditions will result in migration failures if their types are changed unless the triggers are manually dropped ahead of time in the migration. 
**Default** `True` django-pgtrigger-4.11.1/docs/static/000077500000000000000000000000001460427745000172605ustar00rootroot00000000000000django-pgtrigger-4.11.1/docs/static/logo.svg000066400000000000000000000022111460427745000207350ustar00rootroot00000000000000 django-pgtrigger-4.11.1/docs/troubleshooting.md000066400000000000000000000100341460427745000215400ustar00rootroot00000000000000# Troubleshooting ## Disabling migration integration See [this section in the docs](advanced_installation.md#turning_off_migrations) for how to disable the integration with the migration system. ## Trigger installation fails when migrating If your triggers access multiple tables across apps, you may encounter installation issues if you haven't declared those apps as `dependencies` in the migration file. [See the Django docs](https://docs.djangoproject.com/en/4.1/topics/migrations/#dependencies) for more information. If you have `settings.PGTRIGGER_INSTALL_ON_MIGRATE` set to `True`, this can also cause trigger installation issues when migrations are reversed. Although database tables are properly reversed, triggers may be in an inconsistent state. You can use `python manage.py pgtrigger ls` to see the status of all triggers. ## Triggers are still outdated after migrating If `python manage.py pgtrigger ls` shows outdated triggers and `makemigrations` isn't showing changes, you are likely affected by a legacy issue that has been addressed as of version 4.5. The issue is normally harmless and can be corrected by upgrading or running `python manage.py pgtrigger install` to ensure triggers are up to date. ## Patches are causing the application to fail `django-pgtrigger` patches the minimum amount of Django functionality necessary to integrate with the migration system and install triggers. If this causes errors in your application, try turning off the relevant settings: * Set `settings.PGTRIGGER_SCHEMA_EDITOR` to `False` to prevent it from overriding the schema editor. Turning this off is mostly harmless, but you will have errors installing triggers if column types of trigger conditions are altered. * Set `settings.PGTRIGGER_MIGRATIONS` to `False` to completely turn off integration with the migration system. You will need to manually install triggers or set `settings.PGTRIGGER_INSTALL_ON_MIGRATE` to `True` to always install triggers after migrations. Note that this approach has limitations, such as bugs when reversing migrations. * Set `settings.PGTRIGGER_MODEL_META` to `False` to disable specifying triggers in model `Meta`. You must explicitly register every trigger with [pgtrigger.register][], and triggers on third-party models may not be discovered. Integration with the migration system will also be turned off as a result. ## All triggers were updated at once A few updates, such as version 4.5, change the underlying SQL of triggers. This in turn causes all of the triggers to be updated when running `python manage.py makemigrations`. Version 4.5 made significant changes to the migration system integration so that this shouldn't need to happen in the future. ## Trigger migrations stall When a trigger is dropped or created, it alters the table, thus taking out the most exclusive lock possible and blocking reads of the table. Migrations run in a transaction by default, meaning locks will be held until the end of the entire migration. If later operations in the migration block on acquiring locks, the previous locks will remain held until the end. This can cause extended downtime for an application.
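For illustration, a migration that re-creates a trigger typically contains a remove/add pair like the minimal sketch below. The app, model, and trigger names are hypothetical, and the compiled trigger SQL is elided:

```python
from django.db import migrations

import pgtrigger.migrations


class Migration(migrations.Migration):
    # atomic = True is the default, so both operations below run in one
    # transaction and the table locks they take are held until commit.

    dependencies = [("myapp", "0005_previous_migration")]

    operations = [
        # Dropping the trigger alters the table, acquiring a restrictive lock.
        pgtrigger.migrations.RemoveTrigger(model_name="mymodel", name="protect_deletes"),
        # Re-adding it alters the table again; the locks persist until commit.
        pgtrigger.migrations.AddTrigger(model_name="mymodel", trigger=...),
    ]
```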
If your migration isn't doing any other table alterations such as adding columns, you can alleviate lock contention as follows: 1. Remove any `RemoveTrigger` operations if the trigger is only being updated in the migration. The `AddTrigger` operations are idempotent, so dropping them before adding them is not necessary. 2. Once all of the `RemoveTrigger` operations are gone, you can set `atomic = False` on the migration ([see the Django docs](https://docs.djangoproject.com/en/4.1/topics/migrations/#transactions)) to avoid unnecessary lock consumption. !!! danger Be sure you understand exactly what is happening when adding `atomic=False` to a migration. If there are other migration operations in the file, such as adding fields, it could create errors that are difficult to fix if the migration fails midway. If you don't remove the `RemoveTrigger` operations, you also might create a scenario where your triggers aren't installed for a period of time.django-pgtrigger-4.11.1/docs/upgrading.md000066400000000000000000000075211460427745000203000ustar00rootroot00000000000000# Upgrading ## Version 3 Version 3 integrates with the migration system and also drops the need for configuring `django-pgconnection` for using [pgtrigger.ignore][]. It also fully supports the `Meta.triggers` syntax for registering triggers. The majority of users can simply run `python manage.py makemigrations` after upgrading if you have no triggers registered to third-party models or many-to-many default "through" models. Read below for more details on the upgrades, and follow the special instructions if any of the former cases apply to you. ### Integration with Django migrations All triggers now appear in migrations when running `python manage.py makemigrations`. Triggers from version 2 will appear as new `AddTrigger` operations. They will succeed when running `migrate` even if previously installed. Remember, however, that triggers will be deleted if the migrations are reversed. Almost all users can simply run `python manage.py makemigrations` after upgrading. If, however, you have triggers on third-party models or many-to-many default "through" models, use these instructions to migrate them: 1. If you already ran `python manage.py makemigrations`, delete any new migrations made for these third-party apps. 2. Declare proxy models for the third-party or many-to-many "through" models, register triggers in the `Meta.triggers`, and call `python manage.py makemigrations`. See code examples in the [Advanced Installation](advanced_installation.md) section. 3. Declaring proxy models will rename old triggers, leaving them in an orphaned state since they weren't previously managed by migrations. Ensure these old triggers are removed by doing any of the following: - Make a `migrations.RunPython` operation at the end of your migration or in a new data migration that does `call_command("pgtrigger", "prune")`. Note that `call_command` is imported from `django.core.management`. - OR run `python manage.py pgtrigger prune` after your deployment is complete - OR set `settings.PGTRIGGER_INSTALL_ON_MIGRATE` to `True` for a short period of time in your settings. This will automatically prune those old triggers after deployment, and you can turn this setting back to `False` later. 
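For the first option, a minimal sketch of such a data migration might look like the following (the app label and migration dependency shown are hypothetical):

```python
from django.core.management import call_command
from django.db import migrations


def prune_triggers(apps, schema_editor):
    # Removes orphaned triggers that are no longer managed by migrations.
    call_command("pgtrigger", "prune")


class Migration(migrations.Migration):
    dependencies = [("myapp", "0002_add_proxy_model_triggers")]

    operations = [
        migrations.RunPython(prune_triggers, migrations.RunPython.noop),
    ]
```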
If you'd like to keep the legacy installation behavior and turn off migrations entirely, set `settings.PGTRIGGER_MIGRATIONS` to `False` to turn off trigger migrations and set `settings.PGTRIGGER_INSTALL_ON_MIGRATE` to `True` so that triggers are always installed at the end of `python manage.py migrate`. ### Dropping of `django-pgconnection` dependency [pgtrigger.ignore][] previously required that `django-pgconnection` was used to configure the `settings.DATABASES` setting. `django-pgconnection` is no longer needed, and `settings.DATABASES` no longer needs to be wrapped in order for [pgtrigger.ignore][] to function properly. ### New `Meta.triggers` syntax Version 2.5 introduced the ability to register triggers on your model's `Meta.triggers` list. Users can still use [pgtrigger.register][] to register triggers programmatically, but it has been deprecated. ## Version 4 Version 4 changes the behavior of multi-database and multi-schema usage. If you don't use multiple databases and multiple schemas, the only breaking API change that might affect you is `pgtrigger.get` being renamed to [pgtrigger.registered][]. For multi-database setups, triggers are now installed on one database at a time using the `--database` argument of management commands. Triggers are only ignored on a database based on the `allow_migrate` method of any installed routers. This mimics Django's behavior of installing tables. If you use `settings.PGTRIGGER_INSTALL_ON_MIGRATE`, triggers will only be installed for the database that was passed to `python manage.py migrate`. Version 4 adds support for multi-schema setups. See the [Advanced Database Setups](advanced_db.md) section for more information. django-pgtrigger-4.11.1/environment.yml000066400000000000000000000003541460427745000201320ustar00rootroot00000000000000name: django-pgtrigger channels: - conda-forge dependencies: - python==3.12.0 - poetry==1.6.1 - pip==23.2.1 - postgresql==16.0 variables: DATABASE_URL: "postgres://postgres@localhost:5432/pgtrigger_local" EXEC_WRAPPER: "" django-pgtrigger-4.11.1/footing.yaml000066400000000000000000000004321460427745000173710ustar00rootroot00000000000000_extensions: - jinja2_time.TimeExtension _template: git@github.com:Opus10/public-django-app-template.git _version: 3e583c745b210d08970a7be198e5b8e55dd8cd7a module_name: pgtrigger repo_name: django-pgtrigger short_description: Postgres trigger support integrated with Django models.
django-pgtrigger-4.11.1/manage.py000066400000000000000000000003621460427745000166440ustar00rootroot00000000000000#!/usr/bin/env python import os import sys if __name__ == "__main__": os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings") from django.core.management import execute_from_command_line execute_from_command_line(sys.argv) django-pgtrigger-4.11.1/mkdocs.yml000066400000000000000000000053031460427745000170450ustar00rootroot00000000000000site_name: django-pgtrigger docs_dir: docs repo_name: Opus10/django-pgtrigger repo_url: https://github.com/Opus10/django-pgtrigger plugins: - search - mkdocstrings: handlers: python: import: - https://docs.python.org/3/objects.inv - https://installer.readthedocs.io/en/stable/objects.inv - https://mkdocstrings.github.io/autorefs/objects.inv options: docstring_options: ignore_init_summary: true line_length: 80 heading_level: 3 merge_init_into_class: true separate_signature: true show_root_heading: true show_root_full_path: true show_root_members_full_path: true show_signature_annotations: true show_symbol_type_heading: true show_symbol_type_toc: true signature_crossrefs: true markdown_extensions: # For admonitions - admonition - pymdownx.details - pymdownx.superfences - pymdownx.highlight: anchor_linenums: true line_spans: __span pygments_lang_class: true - pymdownx.inlinehilite - pymdownx.snippets - pymdownx.superfences - tables - pymdownx.superfences: custom_fences: - name: mermaid class: mermaid format: !!python/name:pymdownx.superfences.fence_code_format theme: name: material logo: static/logo.svg features: - navigation.footer - navigation.path - navigation.sections - navigation.tracking - search.suggest - search.highlight - toc.follow palette: - media: "(prefers-color-scheme: light)" scheme: default primary: #6658ea toggle: icon: material/brightness-7 name: Switch to dark mode - media: "(prefers-color-scheme: dark)" scheme: slate primary: #6658ea toggle: icon: material/brightness-4 name: Switch to light mode extra_css: - css/mkdocstrings.css - css/mkdocs-material.css nav: - Overview: index.md - Installation: installation.md - Usage Guides: - Basics: basics.md - Trigger Cookbook: cookbook.md - Conditional Triggers: conditional.md - Ignoring Execution: ignoring_triggers.md - Deferrable Triggers: deferrable.md - Advanced Installation: advanced_installation.md - Advanced Database Setups: advanced_db.md - Help: - Frequently Asked Questions: faq.md - Troubleshooting: troubleshooting.md - Upgrading: upgrading.md - Further Reading: further_reading.md - API: - Settings: settings.md - Commands: commands.md - Module: module.md - Release Notes: release_notes.md - Contributing Guide: contributing.md django-pgtrigger-4.11.1/pgtrigger/000077500000000000000000000000001460427745000170335ustar00rootroot00000000000000django-pgtrigger-4.11.1/pgtrigger/__init__.py000066400000000000000000000032721460427745000211500ustar00rootroot00000000000000import django from pgtrigger.contrib import ( FSM, Protect, ReadOnly, SoftDelete, UpdateSearchVector, ) from pgtrigger.core import ( After, AllChange, AllDontChange, AnyChange, AnyDontChange, Before, Condition, Deferred, Delete, F, Func, Immediate, Insert, InsteadOf, IsDistinctFrom, IsNotDistinctFrom, Level, Operation, Operations, Q, Referencing, Row, Statement, Timing, Trigger, Truncate, Update, UpdateOf, When, ) from pgtrigger.installation import ( disable, enable, install, prunable, prune, uninstall, ) from pgtrigger.registry import ( register, registered, ) from pgtrigger.runtime import ( constraints, ignore, schema, ) 
from pgtrigger.version import __version__ if django.VERSION < (3, 2): # pragma: no cover default_app_config = "pgtrigger.apps.PGTriggerConfig" del django __all__ = [ "After", "AllChange", "AllDontChange", "AnyChange", "AnyDontChange", "Before", "Condition", "constraints", "Deferred", "Delete", "disable", "enable", "F", "FSM", "Func", "ignore", "Immediate", "Insert", "install", "InsteadOf", "IsDistinctFrom", "IsNotDistinctFrom", "Level", "Operation", "Operations", "Protect", "prunable", "prune", "Q", "ReadOnly", "Referencing", "register", "registered", "Row", "schema", "SoftDelete", "Statement", "Timing", "Trigger", "Truncate", "uninstall", "Update", "UpdateOf", "UpdateSearchVector", "When", "__version__", ] django-pgtrigger-4.11.1/pgtrigger/apps.py000066400000000000000000000073631460427745000203610ustar00rootroot00000000000000import django.apps import django.db.backends.postgresql.schema as postgresql_schema from django.conf import settings from django.core.management.commands import makemigrations, migrate from django.db.migrations import state from django.db.models import options from django.db.models.signals import post_migrate from django.db.utils import load_backend from pgtrigger import core, features, installation, migrations # Allow triggers to be specified in model Meta. Users can turn this # off via settings if it causes issues. If turned off, migrations # are also disabled if features.model_meta(): # pragma: no branch if "triggers" not in options.DEFAULT_NAMES: # pragma: no branch options.DEFAULT_NAMES = tuple(options.DEFAULT_NAMES) + ("triggers",) def patch_migrations(): """ Patch the autodetector and model state detection if migrations are turned on """ if features.migrations(): # pragma: no branch if "triggers" not in state.DEFAULT_NAMES: # pragma: no branch state.DEFAULT_NAMES = tuple(state.DEFAULT_NAMES) + ("triggers",) if not issubclass( # pragma: no branch makemigrations.MigrationAutodetector, migrations.MigrationAutodetectorMixin ): makemigrations.MigrationAutodetector = type( "MigrationAutodetector", (migrations.MigrationAutodetectorMixin, makemigrations.MigrationAutodetector), {}, ) if not issubclass( # pragma: no branch migrate.MigrationAutodetector, migrations.MigrationAutodetectorMixin ): migrate.MigrationAutodetector = type( "MigrationAutodetector", (migrations.MigrationAutodetectorMixin, migrate.MigrationAutodetector), {}, ) def patch_schema_editor(): """ Patch the schema editor to allow for column types to be altered on trigger conditions """ if features.schema_editor(): # pragma: no branch for config in settings.DATABASES.values(): backend = load_backend(config["ENGINE"]) schema_editor_class = backend.DatabaseWrapper.SchemaEditorClass if ( schema_editor_class and issubclass( schema_editor_class, postgresql_schema.DatabaseSchemaEditor, ) and not issubclass(schema_editor_class, migrations.DatabaseSchemaEditorMixin) ): backend.DatabaseWrapper.SchemaEditorClass = type( "DatabaseSchemaEditor", (migrations.DatabaseSchemaEditorMixin, schema_editor_class), {}, ) def register_triggers_from_meta(): """ Populate the trigger registry from model `Meta.triggers` """ if features.model_meta(): # pragma: no branch for model in django.apps.apps.get_models(): triggers = getattr(model._meta, "triggers", []) for trigger in triggers: if not isinstance(trigger, core.Trigger): # pragma: no cover raise TypeError(f"Triggers in {model} Meta must be pgtrigger.Trigger classes") trigger.register(model) def install_on_migrate(using, **kwargs): if features.install_on_migrate(): 
installation.install(database=using) class PGTriggerConfig(django.apps.AppConfig): name = "pgtrigger" def ready(self): """ Do all necessary patching, trigger setup, and signal handler configuration """ patch_migrations() patch_schema_editor() register_triggers_from_meta() # Configure triggers to automatically be installed after migrations post_migrate.connect(install_on_migrate, sender=self) django-pgtrigger-4.11.1/pgtrigger/compiler.py000066400000000000000000000164461460427745000212320ustar00rootroot00000000000000import collections import hashlib from pgtrigger import utils _unset = object() class UpsertTriggerSql(collections.UserString): """SQL for inserting or updating a trigger This class is intended to be versionable since migrations reference it. Older migrations need to be able to point to earlier versions of the installation template used for triggers. """ def get_template(self): """ This is v1 of the installation template. Do NOT edit this template unless you are absolutely sure it is backwards compatible, otherwise it may affect migrations that reference it. If it does need to be changed, we will need to introduce a version variable to be backwards compatible. Note: Postgres 14 has CREATE OR REPLACE syntax that we might consider using. This SQL is executed in a transaction, so dropping and recreating shouldn't be a problem. """ return """ CREATE OR REPLACE FUNCTION {ignore_func_name}( trigger_name NAME ) RETURNS BOOLEAN AS $$ DECLARE _pgtrigger_ignore TEXT[]; _result BOOLEAN; BEGIN BEGIN SELECT INTO _pgtrigger_ignore CURRENT_SETTING('pgtrigger.ignore'); EXCEPTION WHEN OTHERS THEN END; IF _pgtrigger_ignore IS NOT NULL THEN SELECT trigger_name = ANY(_pgtrigger_ignore) INTO _result; RETURN _result; ELSE RETURN FALSE; END IF; END; $$ LANGUAGE plpgsql; CREATE OR REPLACE FUNCTION {pgid}() RETURNS TRIGGER AS $$ {declare} BEGIN IF ({ignore_func_name}(TG_NAME) IS TRUE) THEN IF (TG_OP = 'DELETE') THEN RETURN OLD; ELSE RETURN NEW; END IF; END IF; {func} END; $$ LANGUAGE plpgsql; DROP TRIGGER IF EXISTS {pgid} ON {table}; CREATE {constraint} TRIGGER {pgid} {when} {operation} ON {table} {timing} {referencing} FOR EACH {level} {condition} EXECUTE PROCEDURE {execute}; COMMENT ON TRIGGER {pgid} ON {table} IS '{hash}'; """ def get_defaults(self, pgid): """ These are the default values for the installation template. Do NOT edit these default values. Keys may be added, but existing keys should never be updated, otherwise existing migrations may no longer be correct. If it does need to be changed, we will need to introduce a version variable to be backwards compatible. 
""" return { "ignore_func_name": '"public"._pgtrigger_should_ignore', "declare": "", "constraint": "", "timing": "", "referencing": "", "level": "ROW", "condition": "", "execute": f"{pgid}()", } def __init__( self, *, ignore_func_name=_unset, pgid, declare=_unset, func, table, constraint=_unset, when, operation, timing=_unset, referencing=_unset, level=_unset, condition=_unset, execute=_unset, hash=None, ): """Initialize the SQL and store it in the `.data` attribute.""" self.kwargs = { key: str(val) for key, val in locals().items() if key not in ("self", "hash") and val is not _unset } self.defaults = self.get_defaults(pgid) sql_args = {**self.defaults, **self.kwargs, **{"table": utils.quote(table)}} self.hash = ( hash or hashlib.sha1( self.get_template().format(**{**sql_args, **{"hash": ""}}).encode() ).hexdigest() ) self.data = self.get_template().format(**{**sql_args, **{"hash": self.hash}}) self.pgid = pgid self.table = table def deconstruct(self): """ Serialize the construction of this class so that it can be used in migrations. """ kwargs = { key: val for key, val in self.kwargs.items() if self.defaults.get(key, _unset) != val } path = f"{self.__class__.__module__}.{self.__class__.__name__}" return path, [], {**kwargs, **{"hash": self.hash}} class _TriggerDdlSql(collections.UserString): def get_template(self): raise NotImplementedError def __init__(self, *, pgid, table): """Initialize the SQL and store it in the `.data` attribute.""" sql_args = {**locals(), **{"table": utils.quote(table)}} self.data = self.get_template().format(**sql_args) class DropTriggerSql(_TriggerDdlSql): """SQL for dropping a trigger Triggers are dropped in migrations, so any edits to the drop trigger template should be backwards compatible or versioned. I.e. older migrations need to always point to the SQL here """ def get_template(self): return "DROP TRIGGER IF EXISTS {pgid} ON {table};" class EnableTriggerSql(_TriggerDdlSql): """SQL for enabling a trigger We don't currently perform enabling/disabling in migrations, so this SQL can be changed without consequences to past migrations. """ def get_template(self): return "ALTER TABLE {table} ENABLE TRIGGER {pgid};" class DisableTriggerSql(_TriggerDdlSql): """SQL for disabling a trigger We don't currently perform enabling/disabling in migrations, so this SQL can be changed without consequences to past migrations. """ def get_template(self): return "ALTER TABLE {table} DISABLE TRIGGER {pgid};" class Trigger: """ A compiled trigger that's added to internal model state of migrations. It consists of a name and the trigger SQL for migrations. """ def __init__(self, *, name, sql): self.name = name self.sql = sql assert isinstance(sql, UpsertTriggerSql) def __eq__(self, other): return ( self.__class__ == other.__class__ and self.name == other.name and self.sql == other.sql ) @property def install_sql(self): return str(self.sql) @property def uninstall_sql(self): return str(DropTriggerSql(pgid=self.sql.pgid, table=self.sql.table)) @property def enable_sql(self): return str(EnableTriggerSql(pgid=self.sql.pgid, table=self.sql.table)) @property def disable_sql(self): return str(DisableTriggerSql(pgid=self.sql.pgid, table=self.sql.table)) @property def hash(self): return self.sql.hash def deconstruct(self): """ Serialize the construction of this class so that it can be used in migrations. 
""" path = f"{self.__class__.__module__}.{self.__class__.__name__}" return path, [], {"name": self.name, "sql": self.sql} django-pgtrigger-4.11.1/pgtrigger/contrib.py000066400000000000000000000223261460427745000210520ustar00rootroot00000000000000"""Additional goodies""" import functools import itertools import operator from typing import Any, List, Tuple, Union from pgtrigger import core, utils # A sentinel value to determine if a kwarg is unset _unset = object() class Protect(core.Trigger): """A trigger that raises an exception.""" when: core.When = core.Before def get_func(self, model): sql = f""" RAISE EXCEPTION 'pgtrigger: Cannot {str(self.operation).lower()} rows from % table', TG_TABLE_NAME; """ return self.format_sql(sql) class ReadOnly(Protect): """A trigger that prevents edits to fields. If `fields` are provided, will protect edits to only those fields. If `exclude` is provided, will protect all fields except the ones excluded. If none of these arguments are provided, all fields cannot be edited. """ fields: Union[List[str], None] = None exclude: Union[List[str], None] = None operation: core.Operation = core.Update def __init__( self, *, fields: Union[List[str], None] = None, exclude: Union[List[str], None] = None, **kwargs: Any, ): self.fields = fields or self.fields self.exclude = exclude or self.exclude if self.fields and self.exclude: raise ValueError('Must provide only one of "fields" or "exclude" to ReadOnly trigger') super().__init__(**kwargs) def get_condition(self, model): if not self.fields and not self.exclude: return core.Condition("OLD.* IS DISTINCT FROM NEW.*") else: if self.exclude: # Sanity check that the exclude list contains valid fields for field in self.exclude: model._meta.get_field(field) fields = [f.name for f in model._meta.fields if f.name not in self.exclude] else: fields = [model._meta.get_field(field).name for field in self.fields] return functools.reduce( operator.or_, [core.Q(**{f"old__{field}__df": core.F(f"new__{field}")}) for field in fields], ) class FSM(core.Trigger): """Enforces a finite state machine on a field. Supply the trigger with the `field` that transitions and then a list of tuples of valid transitions to the `transitions` argument. !!! note Only non-null `CharField` fields without quotes are currently supported. If your strings have a colon symbol in them, you must override the "separator" argument to be a value other than a colon. """ when: core.When = core.Before operation: core.Operation = core.Update field: str = None transitions: List[Tuple[str, str]] = None separator: str = ":" def __init__( self, *, name: str = None, condition: Union[core.Condition, None] = None, field: str = None, transitions: List[Tuple[str, str]] = None, separator: str = None, ): self.field = field or self.field self.transitions = transitions or self.transitions self.separator = separator or self.separator if not self.field: # pragma: no cover raise ValueError('Must provide "field" for FSM') if not self.transitions: # pragma: no cover raise ValueError('Must provide "transitions" for FSM') # This trigger doesn't accept quoted values or values that # contain the configured separator for value in itertools.chain(*self.transitions): if "'" in value or '"' in value: raise ValueError(f'FSM transition value "{value}" contains quotes') elif self.separator in value: raise ValueError( f'FSM value "{value}" contains separator "{self.separator}".' 
' Configure your trigger with a different "separator" attribute' ) # The separator must be a single character that isn't a quote if len(self.separator) != 1: raise ValueError(f'Separator "{self.separator}" must be a single character') elif self.separator in ('"', "'"): raise ValueError("Separator must not have quotes") super().__init__(name=name, condition=condition) def get_declare(self, model): return [("_is_valid_transition", "BOOLEAN")] def get_func(self, model): col = model._meta.get_field(self.field).column transition_uris = ( "{" + ",".join([f"{old}{self.separator}{new}" for old, new in self.transitions]) + "}" ) sql = f""" SELECT CONCAT(OLD.{utils.quote(col)}, '{self.separator}', NEW.{utils.quote(col)}) = ANY('{transition_uris}'::text[]) INTO _is_valid_transition; IF (_is_valid_transition IS FALSE AND OLD.{utils.quote(col)} IS DISTINCT FROM NEW.{utils.quote(col)}) THEN RAISE EXCEPTION 'pgtrigger: Invalid transition of field "{self.field}" from "%" to "%" on table %', OLD.{utils.quote(col)}, NEW.{utils.quote(col)}, TG_TABLE_NAME; ELSE RETURN NEW; END IF; """ # noqa return self.format_sql(sql) class SoftDelete(core.Trigger): """Sets a field to a value when a delete happens. Supply the trigger with the "field" that will be set upon deletion and the "value" to which it should be set. The "value" defaults to `False`. !!! note This trigger currently only supports nullable `BooleanField`, `CharField`, and `IntegerField` fields. """ when: core.When = core.Before operation: core.Operation = core.Delete field: str = None value: Union[bool, str, int, None] = False def __init__( self, *, name: str = None, condition: Union[core.Condition, None] = None, field: str = None, value: Union[bool, str, int, None] = _unset, ): self.field = field or self.field self.value = value if value is not _unset else self.value if not self.field: # pragma: no cover raise ValueError('Must provide "field" for soft delete') super().__init__(name=name, condition=condition) def get_func(self, model): soft_field = model._meta.get_field(self.field).column pk_col = model._meta.pk.column def _render_value(): if self.value is None: return "NULL" elif isinstance(self.value, str): return f"'{self.value}'" else: return str(self.value) sql = f""" UPDATE {utils.quote(model._meta.db_table)} SET {soft_field} = {_render_value()} WHERE {utils.quote(pk_col)} = OLD.{utils.quote(pk_col)}; RETURN NULL; """ return self.format_sql(sql) class UpdateSearchVector(core.Trigger): """Updates a `django.contrib.postgres.search.SearchVectorField` from document fields. Supply the trigger with the `vector_field` that will be updated with changes to the `document_fields`. Optionally provide a `config_name`, which defaults to `pg_catalog.english`. This trigger uses `tsvector_update_trigger` to update the vector field. See [the Postgres docs](https://www.postgresql.org/docs/current/textsearch-features.html#TEXTSEARCH-UPDATE-TRIGGERS) for more information. !!! note `UpdateSearchVector` triggers are not compatible with [pgtrigger.ignore][] since it references a built-in trigger. Trying to ignore this trigger results in a `RuntimeError`.
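Example: A minimal sketch of a hypothetical model that keeps `search_vector` in sync with its `title` and `body` fields (assumes `models`, `SearchVectorField` from `django.contrib.postgres.search`, and `pgtrigger` are imported):

```python
class Post(models.Model):
    title = models.CharField(max_length=256)
    body = models.TextField()
    search_vector = SearchVectorField(null=True)

    class Meta:
        triggers = [
            pgtrigger.UpdateSearchVector(
                name="add_title_body_to_vector",
                vector_field="search_vector",
                document_fields=["title", "body"],
            )
        ]
```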
""" # noqa when: core.When = core.Before vector_field: str = None document_fields: List[str] = None config_name: str = "pg_catalog.english" def __init__( self, *, name: str = None, vector_field: str = None, document_fields: List[str] = None, config_name: str = None, ): self.vector_field = vector_field or self.vector_field self.document_fields = document_fields or self.document_fields self.config_name = config_name or self.config_name if not self.vector_field: raise ValueError('Must provide "vector_field" to update search vector') if not self.document_fields: raise ValueError('Must provide "document_fields" to update search vector') if not self.config_name: # pragma: no cover raise ValueError('Must provide "config_name" to update search vector') super().__init__(name=name, operation=core.Insert | core.UpdateOf(*document_fields)) def ignore(self, model): raise RuntimeError(f"Cannot ignore {self.__class__.__name__} triggers") def get_func(self, model): return "" def render_execute(self, model): document_cols = [model._meta.get_field(field).column for field in self.document_fields] rendered_document_cols = ", ".join(utils.quote(col) for col in document_cols) vector_col = model._meta.get_field(self.vector_field).column return ( f"tsvector_update_trigger({utils.quote(vector_col)}," f" {utils.quote(self.config_name)}, {rendered_document_cols})" ) django-pgtrigger-4.11.1/pgtrigger/core.py000066400000000000000000000727651460427745000203560ustar00rootroot00000000000000import contextlib import copy import functools import hashlib import operator import re from typing import Any, List, Tuple, Union from django.db import DEFAULT_DB_ALIAS, models, router, transaction from django.db.models.expressions import Col from django.db.models.fields.related import RelatedField from django.db.models.sql import Query from django.db.models.sql.datastructures import BaseTable from django.db.utils import ProgrammingError from pgtrigger import compiler, features, registry, utils if utils.psycopg_maj_version == 2: import psycopg2.extensions elif utils.psycopg_maj_version == 3: import psycopg.adapt else: raise AssertionError # Postgres only allows identifiers to be 63 chars max. Since "pgtrigger_" # is the prefix for trigger names, and since an additional "_" and # 5 character hash is added, the user-defined name of the trigger can only # be 47 chars. # NOTE: We can do something more sophisticated later by allowing users # to name their triggers and then hashing the names when actually creating # the triggers. 
MAX_NAME_LENGTH = 47 # Installation states for triggers INSTALLED = "INSTALLED" UNINSTALLED = "UNINSTALLED" OUTDATED = "OUTDATED" PRUNE = "PRUNE" UNALLOWED = "UNALLOWED" class _Primitive: """Boilerplate for some of the primitive operations""" def __init__(self, name): assert name in self.values self.name = name def __str__(self): return self.name class Level(_Primitive): values = ("ROW", "STATEMENT") Row = Level("ROW") """ For specifying row-level triggers (the default) """ Statement = Level("STATEMENT") """ For specifying statement-level triggers """ class Referencing: """For specifying the `REFERENCING` clause of a statement-level trigger""" def __init__(self, *, old=None, new=None): if not old and not new: raise ValueError( 'Must provide either "old" and/or "new" to the referencing' " construct of a trigger" ) self.old = old self.new = new def __str__(self): ref = "REFERENCING" if self.old: ref += f" OLD TABLE AS {self.old} " if self.new: ref += f" NEW TABLE AS {self.new} " return ref class When(_Primitive): values = ("BEFORE", "AFTER", "INSTEAD OF") Before = When("BEFORE") """ For specifying `BEFORE` in the when clause of a trigger. """ After = When("AFTER") """ For specifying `AFTER` in the when clause of a trigger. """ InsteadOf = When("INSTEAD OF") """ For specifying `INSTEAD OF` in the when clause of a trigger. """ class Operation(_Primitive): values = ("UPDATE", "DELETE", "TRUNCATE", "INSERT") def __or__(self, other): assert isinstance(other, Operation) return Operations(self, other) class Operations(Operation): """For providing multiple operations `OR`ed together. Note that using the `|` operator is preferred syntax. """ def __init__(self, *operations): for operation in operations: assert isinstance(operation, Operation) self.operations = operations def __str__(self): return " OR ".join(str(operation) for operation in self.operations) Update = Operation("UPDATE") """ For specifying `UPDATE` as the trigger operation. """ Delete = Operation("DELETE") """ For specifying `DELETE` as the trigger operation. """ Truncate = Operation("TRUNCATE") """ For specifying `TRUNCATE` as the trigger operation. """ Insert = Operation("INSERT") """ For specifying `INSERT` as the trigger operation. """ class UpdateOf(Operation): """For specifying `UPDATE OF` as the trigger operation.""" def __init__(self, *columns): if not columns: raise ValueError("Must provide at least one column") self.columns = columns def __str__(self): columns = ", ".join(f"{utils.quote(col)}" for col in self.columns) return f"UPDATE OF {columns}" class Timing(_Primitive): values = ("IMMEDIATE", "DEFERRED") Immediate = Timing("IMMEDIATE") """ For deferrable triggers that run immediately by default """ Deferred = Timing("DEFERRED") """ For deferrable triggers that run at the end of the transaction by default """ class Condition: """For specifying free-form SQL in the condition of a trigger.""" sql: str = None def __init__(self, sql: str = None): self.sql = sql or self.sql if not self.sql: raise ValueError("Must provide SQL to condition") def resolve(self, model): return self.sql class _OldNewQuery(Query): """ A special Query object for referencing the `OLD` and `NEW` variables in a trigger. Only used by the [pgtrigger.Q][] object. """ def build_lookup(self, lookups, lhs, rhs): # Django does not allow custom lookups on foreign keys, even though # DISTINCT FROM is a completely valid lookup. Trick Django into # being able to apply this lookup to related fields.
if lookups == ["df"] and isinstance(lhs.output_field, RelatedField): lhs = copy.deepcopy(lhs) lhs.output_field = models.IntegerField(null=lhs.output_field.null) return super().build_lookup(lookups, lhs, rhs) def build_filter(self, filter_expr, *args, **kwargs): if isinstance(filter_expr, Q): return super().build_filter(filter_expr, *args, **kwargs) if filter_expr[0].startswith("old__"): alias = "OLD" elif filter_expr[0].startswith("new__"): alias = "NEW" else: # pragma: no cover raise ValueError("Filter expression on trigger.Q object must reference old__ or new__") filter_expr = (filter_expr[0][5:], filter_expr[1]) node, _ = super().build_filter(filter_expr, *args, **kwargs) self.alias_map[alias] = BaseTable(alias, alias) for child in node.children: child.lhs = Col( alias=alias, target=child.lhs.target, output_field=child.lhs.output_field, ) return node, {alias} class F(models.F): """ Similar to Django's `F` object, allows referencing the old and new rows in a trigger condition. """ def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) if self.name.startswith("old__"): self.row_alias = "OLD" elif self.name.startswith("new__"): self.row_alias = "NEW" else: raise ValueError("F() values must reference old__ or new__") self.col_name = self.name[5:] @property def resolved_name(self): return f"{self.row_alias}.{utils.quote(self.col_name)}" def resolve_expression(self, query=None, *args, **kwargs): return Col( alias=self.row_alias, target=query.model._meta.get_field(self.col_name), ) @models.ForeignKey.register_lookup @models.fields.Field.register_lookup class IsDistinctFrom(models.Lookup): """ A custom `IS DISTINCT FROM` field lookup for common trigger conditions. For example, `pgtrigger.Q(old__field__df=pgtrigger.F("new__field"))`. """ lookup_name = "df" def as_sql(self, compiler, connection): lhs, lhs_params = self.process_lhs(compiler, connection) rhs, rhs_params = self.process_rhs(compiler, connection) params = lhs_params + rhs_params return "%s IS DISTINCT FROM %s" % (lhs, rhs), params @models.ForeignKey.register_lookup @models.fields.Field.register_lookup class IsNotDistinctFrom(models.Lookup): """ A custom `IS NOT DISTINCT FROM` field lookup for common trigger conditions. For example, `pgtrigger.Q(old__field__ndf=pgtrigger.F("new__field"))`. """ lookup_name = "ndf" def as_sql(self, compiler, connection): lhs, lhs_params = self.process_lhs(compiler, connection) rhs, rhs_params = self.process_rhs(compiler, connection) params = lhs_params + rhs_params return "%s IS NOT DISTINCT FROM %s" % (lhs, rhs), params class Q(models.Q, Condition): """ Similar to Django's `Q` object, allows building filter clauses based on the old and new rows in a trigger condition. """ def resolve(self, model: models.Model) -> str: query = _OldNewQuery(model) connection = utils.connection() sql, args = self.resolve_expression(query).as_sql( compiler=query.get_compiler("default"), connection=connection, ) sql = sql.replace('"OLD"', "OLD").replace('"NEW"', "NEW") def _quote(val): """Given a value, quote it and handle psycopg2/3 differences""" if utils.psycopg_maj_version == 2: return psycopg2.extensions.adapt(val).getquoted() elif utils.psycopg_maj_version == 3: transformer = psycopg.adapt.Transformer() return transformer.as_literal(val) if val is not None else b"NULL" else: raise AssertionError args = tuple(_quote(arg).decode() for arg in args) return sql % args class _Change(Condition): """For specifying a condition based on changes to fields. See child classes for more documentation on arguments. 
""" fields: Union[List[str], None] = None exclude: Union[List[str], None] = None exclude_auto: bool = False def __init__( self, *fields: str, exclude: Union[List[str], None] = None, exclude_auto: Union[bool, None] = None, all: bool = False, comparison: str = "df", ): self.fields = list(fields) or self.fields or [] self.exclude = exclude or self.exclude or [] self.exclude_auto = self.exclude_auto if exclude_auto is None else exclude_auto self._negated = False self.all = all self.comparison = comparison def __invert__(self): inverted = copy.copy(self) inverted._negated = not inverted._negated return inverted def resolve(self, model): model_fields = {f.name for f in model._meta.fields} for field in self.fields + self.exclude: if field not in model_fields: raise ValueError(f'Field "{field}" not found on model "{model}"') exclude = set(self.exclude) if self.exclude_auto: for f in model._meta.fields: if getattr(f, "auto_now", False) or getattr(f, "auto_now_add", False): exclude.add(f.name) fields = sorted(f for f in (self.fields or model_fields) if f not in exclude) if set(fields) == model_fields and not self.all: if self.comparison == "df": expr = "OLD.* IS DISTINCT FROM NEW.*" elif self.comparison == "ndf": expr = "OLD.* IS NOT DISTINCT FROM NEW.*" else: # pragma: no cover raise ValueError(f'Invalid comparison "{self.comparison}"') else: reduce_op = operator.and_ if self.all else operator.or_ q = functools.reduce( reduce_op, [ Q(**{f"old__{field}__{self.comparison}": F(f"new__{field}")}) for field in fields ], ) expr = q.resolve(model) return f"NOT ({expr})" if self._negated else expr class AnyChange(_Change): """If any supplied fields change, trigger the condition.""" def __init__( self, *fields: str, exclude: Union[List[str], None] = None, exclude_auto: Union[bool, None] = None, ): """ If any supplied fields change, trigger the condition. Args: *fields: If any supplied fields change, trigger the condition. If no fields are supplied, defaults to all fields on the model. exclude: Fields to exclude. exclude_auto: Exclude all `auto_now` and `auto_now_add` fields automatically. """ super().__init__( *fields, exclude=exclude, exclude_auto=exclude_auto, all=False, comparison="df" ) class AnyDontChange(_Change): """If any supplied fields don't change, trigger the condition.""" def __init__( self, *fields: str, exclude: Union[List[str], None] = None, exclude_auto: Union[bool, None] = None, ): """ If any supplied fields don't change, trigger the condition. Args: *fields: If any supplied fields don't change, trigger the condition. If no fields are supplied, defaults to all fields on the model. exclude: Fields to exclude. exclude_auto: Exclude all `auto_now` and `auto_now_add` fields automatically. """ super().__init__( *fields, exclude=exclude, exclude_auto=exclude_auto, all=False, comparison="ndf" ) class AllChange(_Change): """If all supplied fields change, trigger the condition.""" def __init__( self, *fields: str, exclude: Union[List[str], None] = None, exclude_auto: Union[bool, None] = None, ): """ If all supplied fields change, trigger the condition. Args: *fields: If all supplied fields change, trigger the condition. If no fields are supplied, defaults to all fields on the model. exclude: Fields to exclude. exclude_auto: Exclude all `auto_now` and `auto_now_add` fields automatically. 
""" super().__init__( *fields, exclude=exclude, exclude_auto=exclude_auto, all=True, comparison="df" ) class AllDontChange(_Change): """If all supplied don't fields change, trigger the condition.""" def __init__( self, *fields: str, exclude: Union[List[str], None] = None, exclude_auto: Union[bool, None] = None, ): """ If all supplied fields don't change, trigger the condition. Args: *fields: If all supplied fields don't change, trigger the condition. If no fields are supplied, defaults to all fields on the model. exclude: Fields to exclude. exclude_auto: Exclude all `auto_now` and `auto_now_add` fields automatically. """ super().__init__( *fields, exclude=exclude, exclude_auto=exclude_auto, all=True, comparison="ndf" ) class Func: """ Allows for rendering a function with access to the "meta", "fields", and "columns" variables of the current model. For example, `func=Func("SELECT {columns.id} FROM {meta.db_table};")` makes it possible to do inline SQL in the `Meta` of a model and reference its properties. """ def __init__(self, func): self.func = func def render(self, model: models.Model) -> str: """ Render the SQL of the function. Args: model: The model. Returns: The rendered SQL. """ fields = utils.AttrDict({field.name: field for field in model._meta.fields}) columns = utils.AttrDict({field.name: field.column for field in model._meta.fields}) return self.func.format(meta=model._meta, fields=fields, columns=columns) # Allows Trigger methods to be used as context managers, mostly for # testing purposes @contextlib.contextmanager def _cleanup_on_exit(cleanup): yield cleanup() def _ignore_func_name(): ignore_func = "_pgtrigger_should_ignore" if features.schema(): # pragma: no branch ignore_func = f"{utils.quote(features.schema())}.{ignore_func}" return ignore_func class Trigger: """ For specifying a free-form PL/pgSQL trigger function or for creating derived trigger classes. 
""" name: str = None level: Level = Row when: When = None operation: Operation = None condition: Union[Condition, None] = None referencing: Union[Referencing, None] = None func: Union[Func, str] = None declare: Union[List[Tuple[str, str]], None] = None timing: Union[Timing, None] = None def __init__( self, *, name: str = None, level: Level = None, when: When = None, operation: Operation = None, condition: Union[Condition, None] = None, referencing: Union[Referencing, None] = None, func: Union[Func, str] = None, declare: Union[List[Tuple[str, str]], None] = None, timing: Union[Timing, None] = None, ): self.name = name or self.name self.level = level or self.level self.when = when or self.when self.operation = operation or self.operation self.condition = condition or self.condition self.referencing = referencing or self.referencing self.func = func or self.func self.declare = declare or self.declare self.timing = timing or self.timing if not self.level or not isinstance(self.level, Level): raise ValueError(f'Invalid "level" attribute: {self.level}') if not self.when or not isinstance(self.when, When): raise ValueError(f'Invalid "when" attribute: {self.when}') if not self.operation or not isinstance(self.operation, Operation): raise ValueError(f'Invalid "operation" attribute: {self.operation}') if self.timing and not isinstance(self.timing, Timing): raise ValueError(f'Invalid "timing" attribute: {self.timing}') if self.level == Row and self.referencing: raise ValueError('Row-level triggers cannot have a "referencing" attribute') if self.timing and self.level != Row: raise ValueError('Deferrable triggers must have "level" attribute as "pgtrigger.Row"') if self.timing and self.when != After: raise ValueError('Deferrable triggers must have "when" attribute as "pgtrigger.After"') if not self.name: raise ValueError('Trigger must have "name" attribute') self.validate_name() def __str__(self) -> str: # pragma: no cover return self.name def validate_name(self) -> None: """Verifies the name is under the maximum length and has valid characters. Raises: ValueError: If the name is invalid """ if len(self.name) > MAX_NAME_LENGTH: raise ValueError(f'Trigger name "{self.name}" > {MAX_NAME_LENGTH} characters.') if not re.match(r"^[a-zA-Z0-9-_]+$", self.name): raise ValueError( f'Trigger name "{self.name}" has invalid characters.' " Only alphanumeric characters, hyphens, and underscores are allowed." ) def get_pgid(self, model: models.Model) -> str: """The ID of the trigger and function object in postgres All objects are prefixed with "pgtrigger_" in order to be discovered/managed by django-pgtrigger. Args: model: The model. Returns: The Postgres ID. """ model_hash = hashlib.sha1(self.get_uri(model).encode()).hexdigest()[:5] pgid = f"pgtrigger_{self.name}_{model_hash}" if len(pgid) > 63: raise ValueError(f'Trigger identifier "{pgid}" is greater than 63 chars') # NOTE - Postgres always stores names in lowercase. Ensure that all # generated IDs are lowercase so that we can properly do installation # and pruning tasks. return pgid.lower() def get_condition(self, model: models.Model) -> Condition: """Get the condition of the trigger. Args: model: The model. Returns: The condition. """ return self.condition def get_declare(self, model: models.Model) -> List[Tuple[str, str]]: """ Gets the DECLARE part of the trigger function if any variables are used. Args: model: The model Returns: A list of variable name / type tuples that will be shown in the DECLARE. 
            For example [('row_data', 'JSONB')]
        """
        return self.declare or []

    def get_func(self, model: models.Model) -> Union[str, Func]:
        """
        Returns the trigger function that comes between the BEGIN and END
        clauses.

        Args:
            model: The model

        Returns:
            The trigger function as a SQL string or [pgtrigger.Func][] object.
        """
        if not self.func:
            raise ValueError("Must define func attribute or implement get_func")

        return self.func

    def get_uri(self, model: models.Model) -> str:
        """The URI for the trigger.

        Args:
            model: The model

        Returns:
            The URI in the format of "<app_label>.<model_name>:<trigger_name>"
        """
        return f"{model._meta.app_label}.{model._meta.object_name}:{self.name}"

    def render_condition(self, model: models.Model) -> str:
        """Renders the condition SQL in the trigger declaration.

        Args:
            model: The model.

        Returns:
            The rendered condition SQL
        """
        condition = self.get_condition(model)
        resolved = condition.resolve(model).strip() if condition else ""

        if resolved:
            if not resolved.startswith("("):
                resolved = f"({resolved})"
            resolved = f"WHEN {resolved}"

        return resolved

    def render_declare(self, model: models.Model) -> str:
        """Renders the DECLARE of the trigger function, if any.

        Args:
            model: The model.

        Returns:
            The rendered declare SQL.
        """
        declare = self.get_declare(model)
        if declare:
            rendered_declare = "DECLARE " + " ".join(
                f"{var_name} {var_type};" for var_name, var_type in declare
            )
        else:
            rendered_declare = ""

        return rendered_declare

    def render_execute(self, model: models.Model) -> str:
        """
        Renders what should be executed by the trigger. This defaults
        to the trigger function.

        Args:
            model: The model

        Returns:
            The SQL for the execution of the trigger function.
        """
        return f"{self.get_pgid(model)}()"

    def render_func(self, model: models.Model) -> str:
        """
        Renders the func.

        Args:
            model: The model

        Returns:
            The rendered SQL of the trigger function
        """
        func = self.get_func(model)

        if isinstance(func, Func):
            return func.render(model)
        else:
            return func

    def compile(self, model: models.Model) -> compiler.Trigger:
        """
        Create a compiled representation of the trigger, useful for migrations.

        Args:
            model: The model

        Returns:
            The compiled trigger object.
        """
        return compiler.Trigger(
            name=self.name,
            sql=compiler.UpsertTriggerSql(
                ignore_func_name=_ignore_func_name(),
                pgid=self.get_pgid(model),
                declare=self.render_declare(model),
                func=self.render_func(model),
                table=model._meta.db_table,
                constraint="CONSTRAINT" if self.timing else "",
                when=self.when,
                operation=self.operation,
                timing=f"DEFERRABLE INITIALLY {self.timing}" if self.timing else "",
                referencing=self.referencing or "",
                level=self.level,
                condition=self.render_condition(model),
                execute=self.render_execute(model),
            ),
        )

    def allow_migrate(self, model: models.Model, database: Union[str, None] = None) -> bool:
        """True if the trigger for this model can be migrated.

        Defaults to using the router's allow_migrate.

        Args:
            model: The model.
            database: The name of the database configuration.

        Returns:
            `True` if the trigger for the model can be migrated.
        """
        model = model._meta.concrete_model
        return utils.is_postgres(database) and router.allow_migrate(
            database or DEFAULT_DB_ALIAS, model._meta.app_label, model_name=model._meta.model_name
        )

    def format_sql(self, sql: str) -> str:
        """Returns SQL as one line that has trailing whitespace removed from each line.
        Args:
            sql: The unformatted SQL

        Returns:
            The formatted SQL
        """
        return " ".join(line.strip() for line in sql.split("\n") if line.strip()).strip()

    def exec_sql(
        self,
        sql: str,
        model: models.Model,
        database: Union[str, None] = None,
        fetchall: bool = False,
    ) -> Any:
        """Conditionally execute SQL if migrations are allowed.

        Args:
            sql: The SQL string.
            model: The model.
            database: The name of the database configuration.
            fetchall: True if all results should be fetched

        Returns:
            A psycopg cursor result
        """
        if self.allow_migrate(model, database=database):
            return utils.exec_sql(str(sql), database=database, fetchall=fetchall)

    def get_installation_status(
        self, model: models.Model, database: Union[str, None] = None
    ) -> Tuple[str, Union[bool, None]]:
        """Returns the installation status of a trigger.

        The return type is (status, enabled), where status is one of:

        1. `INSTALLED`: If the trigger is installed
        2. `UNINSTALLED`: If the trigger is not installed
        3. `OUTDATED`: If the trigger is installed but has been modified
        4. `UNALLOWED`: If migrations are not allowed

        "enabled" is True if the trigger is installed and enabled or False
        if installed and disabled (or uninstalled).

        Args:
            model: The model.
            database: The name of the database configuration.

        Returns:
            A tuple with the installation and enablement status.
        """
        if not self.allow_migrate(model, database=database):
            return (UNALLOWED, None)

        trigger_exists_sql = f"""
            SELECT oid, obj_description(oid) AS hash, tgenabled AS enabled
            FROM pg_trigger
            WHERE tgname='{self.get_pgid(model)}'
                AND tgrelid='{utils.quote(model._meta.db_table)}'::regclass;
        """
        try:
            with transaction.atomic(using=database):
                results = self.exec_sql(
                    trigger_exists_sql, model, database=database, fetchall=True
                )
        except ProgrammingError:  # pragma: no cover
            # When the table doesn't exist yet, possibly because migrations
            # haven't been executed, a ProgrammingError will happen because
            # of an invalid regclass cast. Return 'UNINSTALLED' for this
            # case
            return (UNINSTALLED, None)

        if not results:
            return (UNINSTALLED, None)
        else:
            hash = self.compile(model).hash
            if hash != results[0][1]:
                return (OUTDATED, results[0][2] == "O")
            else:
                return (INSTALLED, results[0][2] == "O")

    def register(self, *models: models.Model):
        """Register model classes with the trigger.

        Args:
            *models: Models to register to this trigger.
        """
        for model in models:
            registry.set(self.get_uri(model), model=model, trigger=self)

        return _cleanup_on_exit(lambda: self.unregister(*models))

    def unregister(self, *models: models.Model):
        """Unregister model classes from the trigger.

        Args:
            *models: Models to unregister from this trigger.
        """
        for model in models:
            registry.delete(self.get_uri(model))

        return _cleanup_on_exit(lambda: self.register(*models))

    def install(self, model: models.Model, database: Union[str, None] = None):
        """Installs the trigger for a model.

        Args:
            model: The model.
            database: The name of the database configuration.
        """
        install_sql = self.compile(model).install_sql
        with transaction.atomic(using=database):
            self.exec_sql(install_sql, model, database=database)

        return _cleanup_on_exit(lambda: self.uninstall(model, database=database))

    def uninstall(self, model: models.Model, database: Union[str, None] = None):
        """Uninstalls the trigger for a model.

        Args:
            model: The model.
            database: The name of the database configuration.
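        Example:
            Since a context manager that re-installs the trigger is returned,
            this can be used in tests. The model name is illustrative:

                with trigger.uninstall(MyModel):
                    ...  # runs with the trigger uninstalled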
""" uninstall_sql = self.compile(model).uninstall_sql self.exec_sql(uninstall_sql, model, database=database) return _cleanup_on_exit( # pragma: no branch lambda: self.install(model, database=database) ) def enable(self, model: models.Model, database: Union[str, None] = None): """Enables the trigger for a model. Args: model: The model. database: The name of the database configuration. """ enable_sql = self.compile(model).enable_sql self.exec_sql(enable_sql, model, database=database) return _cleanup_on_exit( # pragma: no branch lambda: self.disable(model, database=database) ) def disable(self, model: models.Model, database: Union[str, None] = None): """Disables the trigger for a model. Args: model: The model. database: The name of the database configuration. """ disable_sql = self.compile(model).disable_sql self.exec_sql(disable_sql, model, database=database) return _cleanup_on_exit(lambda: self.enable(model, database=database)) # pragma: no branch django-pgtrigger-4.11.1/pgtrigger/features.py000066400000000000000000000021341460427745000212230ustar00rootroot00000000000000from django.conf import settings def model_meta(): """ True if model meta support is enabled """ return getattr(settings, "PGTRIGGER_MODEL_META", True) def schema_editor(): """ True if we are using the patched Postgres schema editor. Note that setting this to False means that we cannot easily alter columns of models that are associated with trigger conditions """ return getattr(settings, "PGTRIGGER_SCHEMA_EDITOR", True) def migrations(): """ True if migrations are enabled """ return model_meta() and getattr(settings, "PGTRIGGER_MIGRATIONS", True) def install_on_migrate(): """ True if triggers should be installed after migrations """ return getattr(settings, "PGTRIGGER_INSTALL_ON_MIGRATE", False) def schema(): """ The default schema where special objects are installed """ return getattr(settings, "PGTRIGGER_SCHEMA", "public") def prune_on_install(): """ True if triggers should be pruned on a full install or uninstall """ return getattr(settings, "PGTRIGGER_PRUNE_ON_INSTALL", True) django-pgtrigger-4.11.1/pgtrigger/installation.py000066400000000000000000000121101460427745000221010ustar00rootroot00000000000000""" The primary functional API for pgtrigger """ import logging from typing import List, Tuple, Union from django.db import DEFAULT_DB_ALIAS, connections from pgtrigger import features, registry, utils # The core pgtrigger logger LOGGER = logging.getLogger("pgtrigger") def install(*uris: str, database: Union[str, None] = None) -> None: """ Install triggers. Args: *uris: URIs of triggers to install. If none are provided, all triggers are installed and orphaned triggers are pruned. database: The database. Defaults to the "default" database. """ for model, trigger in registry.registered(*uris): LOGGER.info( "pgtrigger: Installing %s trigger for %s table on %s database.", trigger, model._meta.db_table, database or DEFAULT_DB_ALIAS, ) trigger.install(model, database=database) if not uris and features.prune_on_install(): # pragma: no branch prune(database=database) def prunable(database: Union[str, None] = None) -> List[Tuple[str, str, bool, str]]: """Return triggers that are candidates for pruning Args: database: The database. Defaults to the "default" database. 
Returns: A list of tuples consisting of the table, trigger ID, enablement, and database """ if not utils.is_postgres(database): return [] registered = { (utils.quote(model._meta.db_table), trigger.get_pgid(model)) for model, trigger in registry.registered() } with utils.connection(database).cursor() as cursor: parent_trigger_clause = "tgparentid = 0 AND" if utils.pg_maj_version(cursor) >= 13 else "" # Only select triggers that are in the current search path. We accomplish # this by parsing the tgrelid and only selecting triggers that don't have # a schema name in their path cursor.execute( f""" SELECT tgrelid::regclass, tgname, tgenabled FROM pg_trigger WHERE tgname LIKE 'pgtrigger_%%' AND {parent_trigger_clause} array_length(parse_ident(tgrelid::regclass::varchar), 1) = 1 """ ) triggers = set(cursor.fetchall()) return [ (trigger[0], trigger[1], trigger[2] == "O", database or DEFAULT_DB_ALIAS) for trigger in triggers if (utils.quote(trigger[0]), trigger[1]) not in registered ] def prune(database: Union[str, None] = None) -> None: """ Remove any pgtrigger triggers in the database that are not used by models. I.e. if a model or trigger definition is deleted from a model, ensure it is removed from the database Args: database: The database. Defaults to the "default" database. """ for trigger in prunable(database=database): LOGGER.info( "pgtrigger: Pruning trigger %s for table %s on %s database.", trigger[1], trigger[0], trigger[3], ) connection = connections[trigger[3]] uninstall_sql = utils.render_uninstall(trigger[0], trigger[1]) with connection.cursor() as cursor: cursor.execute(uninstall_sql) def enable(*uris: str, database: Union[str, None] = None) -> None: """ Enables registered triggers. Args: *uris: URIs of triggers to enable. If none are provided, all triggers are enabled. database: The database. Defaults to the "default" database. """ for model, trigger in registry.registered(*uris): LOGGER.info( "pgtrigger: Enabling %s trigger for %s table on %s database.", trigger, model._meta.db_table, database or DEFAULT_DB_ALIAS, ) trigger.enable(model, database=database) def uninstall(*uris: str, database: Union[str, None] = None) -> None: """ Uninstalls triggers. Args: *uris: URIs of triggers to uninstall. If none are provided, all triggers are uninstalled and orphaned triggers are pruned. database: The database. Defaults to the "default" database. """ for model, trigger in registry.registered(*uris): LOGGER.info( "pgtrigger: Uninstalling %s trigger for %s table on %s database.", trigger, model._meta.db_table, database or DEFAULT_DB_ALIAS, ) trigger.uninstall(model, database=database) if not uris and features.prune_on_install(): prune(database=database) def disable(*uris: str, database: Union[str, None] = None) -> None: """ Disables triggers. Args: *uris: URIs of triggers to disable. If none are provided, all triggers are disabled. database: The database. Defaults to the "default" database. 
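    Example:
        Disable a single trigger by URI (the URI is illustrative):

            pgtrigger.disable("app_label.ModelName:trigger_name")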
""" for model, trigger in registry.registered(*uris): LOGGER.info( "pgtrigger: Disabling %s trigger for %s table on %s database.", trigger, model._meta.db_table, database or DEFAULT_DB_ALIAS, ) trigger.disable(model, database=database) django-pgtrigger-4.11.1/pgtrigger/management/000077500000000000000000000000001460427745000211475ustar00rootroot00000000000000django-pgtrigger-4.11.1/pgtrigger/management/__init__.py000066400000000000000000000000001460427745000232460ustar00rootroot00000000000000django-pgtrigger-4.11.1/pgtrigger/management/commands/000077500000000000000000000000001460427745000227505ustar00rootroot00000000000000django-pgtrigger-4.11.1/pgtrigger/management/commands/__init__.py000066400000000000000000000000001460427745000250470ustar00rootroot00000000000000django-pgtrigger-4.11.1/pgtrigger/management/commands/pgtrigger.py000066400000000000000000000163101460427745000253150ustar00rootroot00000000000000import contextlib import logging from django.core.management.base import BaseCommand from django.db import DEFAULT_DB_ALIAS from pgtrigger import core, installation, registry, runtime def _setup_logging(): # pragma: no cover installation.LOGGER.addHandler(logging.StreamHandler()) installation.LOGGER.setLevel(logging.INFO) class SubCommands(BaseCommand): # pragma: no cover """ Subcommand class vendored in from https://github.com/andrewp-as-is/django-subcommands.py because of installation issues """ argv = [] subcommands = {} def add_arguments(self, parser): subparsers = parser.add_subparsers(dest="subcommand", title="subcommands", description="") subparsers.required = True for command_name, command_class in self.subcommands.items(): command = command_class() subparser = subparsers.add_parser(command_name, help=command_class.help) command.add_arguments(subparser) prog_name = subcommand = "" if self.argv: prog_name = self.argv[0] subcommand = self.argv[1] command_parser = command.create_parser(prog_name, subcommand) subparser._actions = command_parser._actions def run_from_argv(self, argv): self.argv = argv return super().run_from_argv(argv) def handle(self, *args, **options): command_name = options["subcommand"] self.subcommands.get(command_name) command_class = self.subcommands[command_name] if self.argv: args = [self.argv[0]] + self.argv[2:] return command_class().run_from_argv(args) else: return command_class().execute(*args, **options) class BaseSchemaCommand(BaseCommand): """Sets the search path based on any "schema" option that's found""" def handle(self, *args, **options): database = options["database"] or DEFAULT_DB_ALIAS schemas = options["schema"] or [] if schemas: context = runtime.schema(*schemas, databases=[database]) else: context = contextlib.nullcontext() with context: return self.handle_with_schema(*args, **options) class LsCommand(BaseSchemaCommand): help = "List triggers and their installation state." 
def add_arguments(self, parser): parser.add_argument("uris", nargs="*", type=str) parser.add_argument("-d", "--database", help="The database") parser.add_argument( "-s", "--schema", action="append", help="Set the search path to this schema", ) def handle_with_schema(self, *args, **options): uris = options["uris"] status_formatted = { core.UNINSTALLED: "\033[91mUNINSTALLED\033[0m", core.INSTALLED: "\033[92mINSTALLED\033[0m", core.OUTDATED: "\033[93mOUTDATED\033[0m", core.PRUNE: "\033[96mPRUNE\033[0m", core.UNALLOWED: "\033[94mUNALLOWED\033[0m", } enabled_formatted = { True: "\033[92mENABLED\033[0m", False: "\033[91mDISABLED\033[0m", None: "\033[94mN/A\033[0m", } def _format_status(status, enabled, uri): if status in (core.UNINSTALLED, core.UNALLOWED): enabled = None return status_formatted[status], enabled_formatted[enabled], uri formatted = [] for model, trigger in registry.registered(*uris): uri = trigger.get_uri(model) status, enabled = trigger.get_installation_status(model, database=options["database"]) formatted.append(_format_status(status, enabled, uri)) if not uris: for trigger in installation.prunable(database=options["database"]): formatted.append(_format_status("PRUNE", trigger[2], f"{trigger[0]}:{trigger[1]}")) max_status_len = max(len(val) for val, _, _ in formatted) max_enabled_len = max(len(val) for _, val, _ in formatted) for status, enabled, uri in formatted: print( f"{{: <{max_status_len}}} {{: <{max_enabled_len}}} {{}}".format( status, enabled, uri ) ) class InstallCommand(BaseSchemaCommand): help = "Install triggers." def add_arguments(self, parser): parser.add_argument("uris", nargs="*", type=str) parser.add_argument("-d", "--database", help="The database") parser.add_argument( "-s", "--schema", action="append", help="Set the search path to this schema", ) def handle_with_schema(self, *args, **options): _setup_logging() installation.install(*options["uris"], database=options["database"]) class UninstallCommand(BaseSchemaCommand): help = "Uninstall triggers." def add_arguments(self, parser): parser.add_argument("uris", nargs="*", type=str) parser.add_argument("-d", "--database", help="The database") parser.add_argument( "-s", "--schema", action="append", help="Set the search path to this schema", ) def handle_with_schema(self, *args, **options): _setup_logging() installation.uninstall(*options["uris"], database=options["database"]) class EnableCommand(BaseSchemaCommand): help = "Enable triggers." def add_arguments(self, parser): parser.add_argument("uris", nargs="*", type=str) parser.add_argument("-d", "--database", help="The database") parser.add_argument( "-s", "--schema", action="append", help="Set the search path to this schema", ) def handle_with_schema(self, *args, **options): _setup_logging() installation.enable(*options["uris"], database=options["database"]) class DisableCommand(BaseSchemaCommand): help = "Disable triggers." def add_arguments(self, parser): parser.add_argument("uris", nargs="*", type=str) parser.add_argument("-d", "--database", help="The database") parser.add_argument( "-s", "--schema", action="append", help="Set the search path to this schema", ) def handle_with_schema(self, *args, **options): _setup_logging() installation.disable(*options["uris"], database=options["database"]) class PruneCommand(BaseSchemaCommand): help = "Prune installed triggers that are no longer in the codebase." 
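    # Example invocations (a sketch; the database alias is illustrative):
    #
    #   python manage.py pgtrigger prune
    #   python manage.py pgtrigger prune --database other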
    def add_arguments(self, parser):
        parser.add_argument("-d", "--database", help="The database")
        parser.add_argument(
            "-s",
            "--schema",
            action="append",
            help="Set the search path to this schema",
        )

    def handle_with_schema(self, *args, **options):
        _setup_logging()
        installation.prune(database=options["database"])


class Command(SubCommands):
    help = "Core django-pgtrigger subcommands."

    subcommands = {
        "ls": LsCommand,
        "install": InstallCommand,
        "uninstall": UninstallCommand,
        "enable": EnableCommand,
        "disable": DisableCommand,
        "prune": PruneCommand,
    }
django-pgtrigger-4.11.1/pgtrigger/migrations.py000066400000000000000000000404651460427745000215660ustar00rootroot00000000000000import contextlib
import re

from django.apps import apps
from django.db import transaction
from django.db.migrations.operations.fields import AddField
from django.db.migrations.operations.models import CreateModel, IndexOperation

from pgtrigger import compiler, utils


def _add_trigger(schema_editor, model, trigger):
    """Add a trigger to a model."""
    if not isinstance(trigger, compiler.Trigger):  # pragma: no cover
        trigger = trigger.compile(model)

    with transaction.atomic(using=schema_editor.connection.alias):
        # Trigger install SQL returns interpolated SQL which makes
        # params=None a necessity to avoid escaping attempts on execution.
        schema_editor.execute(trigger.install_sql, params=None)


def _remove_trigger(schema_editor, model, trigger):
    """Remove a trigger from a model."""
    if not isinstance(trigger, compiler.Trigger):  # pragma: no cover
        trigger = trigger.compile(model)

    # Trigger uninstall SQL returns interpolated SQL which makes
    # params=None a necessity to avoid escaping attempts on execution.
    schema_editor.execute(trigger.uninstall_sql, params=None)


class TriggerOperationMixin:
    def allow_migrate_model_trigger(self, schema_editor, model):
        """
        The check for determining if a trigger is migrated
        """
        return schema_editor.connection.vendor == "postgresql" and self.allow_migrate_model(
            schema_editor.connection.alias, model._meta.concrete_model
        )


class AddTrigger(TriggerOperationMixin, IndexOperation):
    option_name = "triggers"

    def __init__(self, model_name, trigger):
        self.model_name = model_name
        self.trigger = trigger

    def state_forwards(self, app_label, state):
        model_state = state.models[app_label, self.model_name]
        model_state.options["triggers"] = model_state.options.get("triggers", []) + [self.trigger]
        state.reload_model(app_label, self.model_name, delay=True)

    def database_forwards(self, app_label, schema_editor, from_state, to_state):
        model = to_state.apps.get_model(app_label, self.model_name)
        if self.allow_migrate_model_trigger(schema_editor, model):  # pragma: no branch
            _add_trigger(schema_editor, model, self.trigger)

    def database_backwards(self, app_label, schema_editor, from_state, to_state):
        model = to_state.apps.get_model(app_label, self.model_name)
        if self.allow_migrate_model_trigger(schema_editor, model):  # pragma: no branch
            _remove_trigger(schema_editor, model, self.trigger)

    def describe(self):
        return f"Create trigger {self.trigger.name} on model {self.model_name}"

    def deconstruct(self):
        return (
            self.__class__.__name__,
            [],
            {
                "model_name": self.model_name,
                "trigger": self.trigger,
            },
        )

    @property
    def migration_name_fragment(self):
        return f"{self.model_name_lower}_{self.trigger.name.lower()}"


def _get_trigger_by_name(model_state, name):
    for trigger in model_state.options.get("triggers", []):  # pragma: no branch
        if trigger.name == name:
            return trigger

    raise ValueError(f"No trigger named {name} on model {model_state.name}")  #
pragma: no cover class RemoveTrigger(TriggerOperationMixin, IndexOperation): option_name = "triggers" def __init__(self, model_name, name): self.model_name = model_name self.name = name def state_forwards(self, app_label, state): model_state = state.models[app_label, self.model_name] objs = model_state.options.get("triggers", []) model_state.options["triggers"] = [obj for obj in objs if obj.name != self.name] state.reload_model(app_label, self.model_name, delay=True) def database_forwards(self, app_label, schema_editor, from_state, to_state): model = to_state.apps.get_model(app_label, self.model_name) if self.allow_migrate_model_trigger(schema_editor, model): # pragma: no branch from_model_state = from_state.models[app_label, self.model_name_lower] trigger = _get_trigger_by_name(from_model_state, self.name) _remove_trigger(schema_editor, model, trigger) def database_backwards(self, app_label, schema_editor, from_state, to_state): model = to_state.apps.get_model(app_label, self.model_name) if self.allow_migrate_model_trigger(schema_editor, model): # pragma: no branch to_model_state = to_state.models[app_label, self.model_name_lower] trigger = _get_trigger_by_name(to_model_state, self.name) _add_trigger(schema_editor, model, trigger) def describe(self): return f"Remove trigger {self.name} from model {self.model_name}" def deconstruct(self): return ( self.__class__.__name__, [], { "model_name": self.model_name, "name": self.name, }, ) @property def migration_name_fragment(self): return f"remove_{self.model_name_lower}_{self.name.lower()}" def _inject_m2m_dependency_in_proxy(proxy_op): """ Django does not properly add dependencies to m2m fields that are base classes for proxy models. Inject the dependency here """ for base in proxy_op.bases: model = apps.get_model(base) creator = model._meta.auto_created if creator: for field in creator._meta.many_to_many: if field.remote_field.through == model: app_label, model_name = creator._meta.label_lower.split(".") proxy_op._auto_deps.append((app_label, model_name, field.name, True)) class MigrationAutodetectorMixin: """A mixin that can be subclassed with MigrationAutodetector and detects triggers""" def _detect_changes(self, *args, **kwargs): self.altered_triggers = {} return super()._detect_changes(*args, **kwargs) def _get_add_trigger_op(self, model, trigger): if not isinstance(trigger, compiler.Trigger): trigger = trigger.compile(model) return AddTrigger(model_name=model._meta.model_name, trigger=trigger) def create_altered_constraints(self): """ Piggyback off of constraint generation hooks to generate trigger migration operations """ for app_label, model_name in sorted(self.kept_model_keys | self.kept_proxy_keys): old_model_name = self.renamed_models.get((app_label, model_name), model_name) old_model_state = self.from_state.models[app_label, old_model_name] new_model_state = self.to_state.models[app_label, model_name] new_model = self.to_state.apps.get_model(app_label, model_name) old_triggers = old_model_state.options.get("triggers", []) new_triggers = [ trigger.compile(new_model) for trigger in new_model_state.options.get("triggers", []) ] add_triggers = [c for c in new_triggers if c not in old_triggers] rem_triggers = [c for c in old_triggers if c not in new_triggers] self.altered_triggers.update( { (app_label, model_name): { "added_triggers": add_triggers, "removed_triggers": rem_triggers, } } ) return super().create_altered_constraints() def generate_added_constraints(self): for (app_label, model_name), alt_triggers in 
self.altered_triggers.items(): model = self.to_state.apps.get_model(app_label, model_name) for trigger in alt_triggers["added_triggers"]: self.add_operation( app_label, self._get_add_trigger_op(model=model, trigger=trigger) ) return super().generate_added_constraints() def generate_removed_constraints(self): for (app_label, model_name), alt_triggers in self.altered_triggers.items(): for trigger in alt_triggers["removed_triggers"]: self.add_operation( app_label, RemoveTrigger(model_name=model_name, name=trigger.name) ) return super().generate_removed_constraints() def generate_created_models(self): super().generate_created_models() added_models = self.new_model_keys - self.old_model_keys added_models = sorted(added_models, key=self.swappable_first_key, reverse=True) for app_label, model_name in added_models: model = self.to_state.apps.get_model(app_label, model_name) model_state = self.to_state.models[app_label, model_name] if not model_state.options.get("managed", True): continue # pragma: no cover related_fields = { op.name: op.field for op in self.generated_operations.get(app_label, []) if isinstance(op, AddField) and model_name == op.model_name } related_dependencies = [ (app_label, model_name, name, True) for name in sorted(related_fields) ] # Depend on the model being created related_dependencies.append((app_label, model_name, None, True)) for trigger in model_state.options.pop("triggers", []): self.add_operation( app_label, self._get_add_trigger_op(model=model, trigger=trigger), dependencies=related_dependencies, ) def generate_created_proxies(self): super().generate_created_proxies() added = self.new_proxy_keys - self.old_proxy_keys for app_label, model_name in sorted(added): # Django has a bug that prevents it from injecting a dependency # to an M2M through model when a proxy model inherits it. # Inject an additional dependency for created proxies to # avoid this for op in self.generated_operations.get(app_label, []): if isinstance(op, CreateModel) and op.options.get( # pragma: no branch "proxy", False ): _inject_m2m_dependency_in_proxy(op) model = self.to_state.apps.get_model(app_label, model_name) model_state = self.to_state.models[app_label, model_name] assert model_state.options.get("proxy") for trigger in model_state.options.pop("triggers", []): self.add_operation( app_label, self._get_add_trigger_op(model=model, trigger=trigger), dependencies=[(app_label, model_name, None, True)], ) def generate_deleted_proxies(self): deleted = self.old_proxy_keys - self.new_proxy_keys for app_label, model_name in sorted(deleted): model_state = self.from_state.models[app_label, model_name] assert model_state.options.get("proxy") for trigger in model_state.options.pop("triggers", []): self.add_operation( app_label, RemoveTrigger(model_name=model_name, name=trigger.name), dependencies=[(app_label, model_name, None, True)], ) super().generate_deleted_proxies() class DatabaseSchemaEditorMixin: """ A schema editor mixin that can subclass a DatabaseSchemaEditor and handle altering column types of triggers. Postgres does not allow altering column types of columns used in trigger conditions. Here we fix this with the following approach: 1. Detect that a column type is being changed and set a flag so that we can alter behavior of the schema editor. 2. In execute(), check for the special error message that's raised when trying to alter a column of a trigger. Temporarily drop triggers during the alter statement and reinstall them. 
Ensure this is all wrapped in a transaction """ def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.temporarily_dropped_triggers = set() self.is_altering_field_type = False @contextlib.contextmanager def alter_field_type(self): """ Temporarily sets state so that execute() knows we are trying to alter a column type """ self.is_altering_field_type = True try: yield finally: self.is_altering_field_type = False def _alter_field( self, model, old_field, new_field, old_type, new_type, old_db_params, new_db_params, strict=False, ): """ Detects that a field type is being altered and sets the appropriate state """ context = self.alter_field_type() if old_type != new_type else contextlib.nullcontext() with context: return super()._alter_field( model, old_field, new_field, old_type, new_type, old_db_params, new_db_params, strict=strict, ) @contextlib.contextmanager def temporarily_drop_trigger(self, trigger, table): """ Given a table and trigger, temporarily drop the trigger and recreate it after the context manager yields. """ self.temporarily_dropped_triggers.add((trigger, table)) try: with transaction.atomic(self.connection.alias), self.connection.cursor() as cursor: cursor.execute( f""" SELECT pg_get_triggerdef(oid) FROM pg_trigger WHERE tgname = '{trigger}' AND tgrelid = '{table}'::regclass; """ ) trigger_create_sql = cursor.fetchall()[0][0] cursor.execute(f"DROP TRIGGER {trigger} on {utils.quote(table)};") yield cursor.execute(trigger_create_sql) finally: self.temporarily_dropped_triggers.remove((trigger, table)) def execute(self, *args, **kwargs): """ If we are altering a field type, catch the special error psycopg raises when a column on a trigger is altered. Temporarily drop and recreate triggers to ensure the alter operation is successful. """ if self.is_altering_field_type: try: with transaction.atomic(self.connection.alias): return super().execute(*args, **kwargs) except Exception as exc: match = re.search( r"cannot alter type of a column used in a trigger definition\n" r'DETAIL:\s+trigger (?P\w+).+on table "?(?P\w+)"?', str(exc), ) if match: trigger = match.groupdict()["trigger"] table = match.groupdict()["table"] # In practice we should never receive the same error message for # the same trigger/table, but check anyways to avoid infinite # recursion if ( trigger.startswith("pgtrigger_") and (table, trigger) not in self.temporarily_dropped_triggers ): with self.temporarily_drop_trigger(trigger, table): return self.execute(*args, **kwargs) raise # pragma: no cover else: return super().execute(*args, **kwargs) def create_model(self, model): """ Create the model `Meta.triggers` isn't populated on the forwards `CreateTable` migration (as Triggers are only added to the migration state via `AddTrigger` operations). `Meta.triggers` may be populated when: - The backwards operation of a `RemoveTable` operation where there was still triggers defined in the model state when the table was removed. - Creating the tables of an unmigrated app when `run_syncdb` is supplied to the migrate command (or when running tests). 
""" super().create_model(model) for trigger in getattr(model._meta, "triggers", []): _add_trigger(self, model, trigger) django-pgtrigger-4.11.1/pgtrigger/models.py000066400000000000000000000001511460427745000206650ustar00rootroot00000000000000""" pgtrigger needs an empty models.py in order to get post_migrate signals needed in its app config """ django-pgtrigger-4.11.1/pgtrigger/registry.py000066400000000000000000000115611460427745000212610ustar00rootroot00000000000000import collections from typing import TYPE_CHECKING, Callable, List, Tuple from pgtrigger import features _unset = object() if TYPE_CHECKING: from django.db.models import Model from pgtrigger.core import Trigger # All registered triggers for each model class _Registry(collections.UserDict): @property def pg_function_names(self): """ The postgres function names of all registered triggers """ return {trigger.get_pgid(model) for model, trigger in self.values()} @property def by_db_table(self): """ Return the registry keys by db_table, name """ return {(model._meta.db_table, trigger.name): trigger for model, trigger in self.values()} def __getitem__(self, key): assert isinstance(key, str) if len(key.split(":")) == 1: raise ValueError( 'Trigger URI must be in the format of "app_label.model_name:trigger_name"' ) elif key not in _registry: raise KeyError(f'URI "{key}" not found in pgtrigger registry') return super().__getitem__(key) def __setitem__(self, key, value): assert isinstance(key, str) model, trigger = value assert f"{model._meta.label}:{trigger.name}" == key found_trigger = self.by_db_table.get((model._meta.db_table, trigger.name)) if not found_trigger or found_trigger != trigger: if found_trigger: raise KeyError( f'Trigger name "{trigger.name}" already' f' used for model "{model._meta.label}"' f' table "{model._meta.db_table}".' ) if trigger.get_pgid(model) in self.pg_function_names: raise KeyError( f'Trigger "{trigger.name}" on model "{model._meta.label}"' " has Postgres function name that's already in use." " Use a different name for the trigger." ) # Add the trigger to Meta.triggers. # Note, pgtrigger's App.ready() method auto-registers any # triggers in Meta already, meaning the trigger may already exist. If so, ignore it if features.migrations(): # pragma: no branch if trigger not in getattr(model._meta, "triggers", []): model._meta.triggers = list(getattr(model._meta, "triggers", [])) + [trigger] if trigger not in model._meta.original_attrs.get("triggers", []): model._meta.original_attrs["triggers"] = list( model._meta.original_attrs.get("triggers", []) ) + [trigger] return super().__setitem__(key, value) def __delitem__(self, key): model, trigger = self[key] super().__delitem__(key) # If we support migration integration, remove from Meta triggers if features.migrations(): # pragma: no branch model._meta.triggers.remove(trigger) # If model._meta.triggers and the original_attrs triggers are the same, # we don't need to remove it from the original_attrs if trigger in model._meta.original_attrs["triggers"]: # pragma: no branch model._meta.original_attrs["triggers"].remove(trigger) _registry = _Registry() def set(uri: str, *, model: "Model", trigger: "Trigger") -> None: """Set a trigger in the registry Args: uri: The trigger URI model: The trigger model trigger: The trigger object """ _registry[uri] = (model, trigger) def delete(uri: str) -> None: """Delete a trigger from the registry. 
    Args:
        uri: The trigger URI
    """
    del _registry[uri]


def registered(*uris: str) -> List[Tuple["Model", "Trigger"]]:
    """
    Get registered trigger objects.

    Args:
        *uris: URIs of triggers to get. If none are provided, all triggers
            are returned. URIs are in the format of
            `{app_label}.{model_name}:{trigger_name}`.

    Returns:
        Matching trigger objects.
    """
    uris = uris or _registry.keys()

    return [_registry[uri] for uri in uris]


def register(*triggers: "Trigger") -> Callable:
    """
    Register the given triggers with wrapped Model class.

    Args:
        *triggers: Trigger classes to register.

    Example:
        Register by decorating a model:

            @pgtrigger.register(
                pgtrigger.Protect(
                    name="append_only",
                    operation=(pgtrigger.Update | pgtrigger.Delete)
                )
            )
            class MyModel(models.Model):
                pass

    Example:
        Register by calling functionally:

            pgtrigger.register(trigger_object)(MyModel)
    """

    def _model_wrapper(model_class):
        for trigger in triggers:
            trigger.register(model_class)

        return model_class

    return _model_wrapper
django-pgtrigger-4.11.1/pgtrigger/runtime.py000066400000000000000000000246231460427745000210770ustar00rootroot00000000000000"""
Functions for runtime configuration of triggers, such as ignoring
them or dynamically setting the search path.
"""
import contextlib
import threading
from typing import TYPE_CHECKING, List, Union

from django.db import connections

from pgtrigger import registry, utils

if utils.psycopg_maj_version == 2:
    import psycopg2.extensions
elif utils.psycopg_maj_version == 3:
    import psycopg.pq
else:
    raise AssertionError

if TYPE_CHECKING:
    from pgtrigger import Timing

# All triggers currently being ignored
_ignore = threading.local()

# All schemas in the search path
_schema = threading.local()


def _is_concurrent_statement(sql):
    """
    True if the sql statement is concurrent and cannot be run in a transaction
    """
    sql = sql.strip().lower() if sql else ""
    return sql.startswith("create") and "concurrently" in sql


def _is_transaction_errored(cursor):
    """
    True if the current transaction is in an errored state
    """
    if utils.psycopg_maj_version == 2:
        return (
            cursor.connection.get_transaction_status()
            == psycopg2.extensions.TRANSACTION_STATUS_INERROR
        )
    elif utils.psycopg_maj_version == 3:
        return cursor.connection.info.transaction_status == psycopg.pq.TransactionStatus.INERROR
    else:
        raise AssertionError


def _can_inject_variable(cursor, sql):
    """True if we can inject a SQL variable into a statement.

    A named cursor automatically prepends
    "NO SCROLL CURSOR WITHOUT HOLD FOR" to the query, which
    causes invalid SQL to be generated. There is no way
    to override this behavior in psycopg, so ignoring triggers
    cannot happen for named cursors. Django only names cursors
    for iterators and other statements that read the database,
    so it seems to be safe to ignore named cursors.

    Concurrent index creation is also incompatible with local variable
    setting. Ignore these cases for now.
    """
    return (
        not getattr(cursor, "name", None)
        and not _is_concurrent_statement(sql)
        and not _is_transaction_errored(cursor)
    )


def _execute_wrapper(execute_result):
    if utils.psycopg_maj_version == 3:
        while execute_result.nextset():
            pass

    return execute_result


def _inject_pgtrigger_ignore(execute, sql, params, many, context):
    """
    A connection execution wrapper that sets a pgtrigger.ignore
    variable in the executed SQL.
    This lets other triggers know when they should ignore execution
    """
    if _can_inject_variable(context["cursor"], sql):
        serialized_ignore = "{" + ",".join(_ignore.value) + "}"
        sql = f"SELECT set_config('pgtrigger.ignore', %s, true); {sql}"
        params = [serialized_ignore, *(params or ())]

    return _execute_wrapper(execute(sql, params, many, context))


@contextlib.contextmanager
def _set_ignore_session_state(database=None):
    """Starts a session where triggers can be ignored"""
    connection = utils.connection(database)

    if _inject_pgtrigger_ignore not in connection.execute_wrappers:
        with connection.execute_wrapper(_inject_pgtrigger_ignore):
            try:
                yield
            finally:
                if connection.in_atomic_block:
                    # We've finished ignoring triggers and are in a transaction,
                    # so flush the local variable.
                    with connection.cursor() as cursor:
                        cursor.execute("SELECT set_config('pgtrigger.ignore', NULL, false);")
    else:
        yield


@contextlib.contextmanager
def _ignore_session(databases=None):
    """Starts a session where triggers can be ignored"""
    with contextlib.ExitStack() as stack:
        for database in utils.postgres_databases(databases):
            stack.enter_context(_set_ignore_session_state(database=database))

        yield


@contextlib.contextmanager
def _set_ignore_state(model, trigger):
    """
    Manage state to ignore a single URI
    """
    if not hasattr(_ignore, "value"):
        _ignore.value = set()

    pgid = trigger.get_pgid(model)
    if pgid not in _ignore.value:
        # In order to preserve backwards compatibility with older installations
        # of the _pgtrigger_ignore func, we must set a full URI (old version)
        # and trigger ID (new version).
        # This will be removed in version 5
        uri = f"{model._meta.db_table}:{pgid}"

        try:
            _ignore.value.add(uri)
            _ignore.value.add(pgid)
            yield
        finally:
            _ignore.value.remove(uri)
            _ignore.value.remove(pgid)
    else:
        # The trigger is already being ignored
        yield


@contextlib.contextmanager
def ignore(*uris: str, databases: Union[List[str], None] = None):
    """
    Dynamically ignore registered triggers matching URIs from executing in
    an individual thread.
    If no URIs are provided, ignore all pgtriggers from executing in an
    individual thread.

    Args:
        *uris: Trigger URIs to ignore. If none are provided, all
            triggers will be ignored.
        databases: The databases to use. If none, all postgres databases
            will be used.

    Example:
        Ignore triggers in a context manager:

            with pgtrigger.ignore("my_app.Model:trigger_name"):
                # Do stuff while ignoring trigger

    Example:
        Ignore multiple triggers as a decorator:

            @pgtrigger.ignore("my_app.Model:trigger_name", "my_app.Model:other_trigger")
            def my_func():
                # Do stuff while ignoring trigger
    """
    with contextlib.ExitStack() as stack:
        stack.enter_context(_ignore_session(databases=databases))

        for model, trigger in registry.registered(*uris):
            stack.enter_context(_set_ignore_state(model, trigger))

        yield


ignore.session = _ignore_session


def _inject_schema(execute, sql, params, many, context):
    """
    A connection execution wrapper that sets the schema
    variable in the executed SQL.
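    For example, an executed statement is roughly rewritten as (the schema
    name is illustrative):

        SELECT set_config('search_path', 'myschema', true); <original sql>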
""" if _can_inject_variable(context["cursor"], sql) and _schema.value: path = ", ".join(val if not val.startswith("$") else f'"{val}"' for val in _schema.value) sql = f"SELECT set_config('search_path', %s, true); {sql}" params = [path, *(params or ())] return _execute_wrapper(execute(sql, params, many, context)) @contextlib.contextmanager def _set_schema_session_state(database=None): connection = utils.connection(database) if _inject_schema not in connection.execute_wrappers: if connection.in_atomic_block: # If this is the first time we are setting the search path, # register the pre_execute_hook and store a reference to the original # search path. Note that we must use this approach because we cannot # simply reset the search_path at the end. A user may have previously # set it with connection.cursor() as cursor: cursor.execute("SELECT current_setting('search_path')") initial_search_path = cursor.fetchall()[0][0] with connection.execute_wrapper(_inject_schema): try: yield finally: if connection.in_atomic_block: # We've finished modifying the search path and are in a transaction, # so flush the local variable with connection.cursor() as cursor: cursor.execute( "SELECT set_config('search_path', %s, false)", [initial_search_path] ) else: yield @contextlib.contextmanager def _schema_session(databases=None): """Starts a session where the search path can be modified""" with contextlib.ExitStack() as stack: for database in utils.postgres_databases(databases): stack.enter_context(_set_schema_session_state(database=database)) yield @contextlib.contextmanager def _set_schema_state(*schemas): if not hasattr(_schema, "value"): # Use a list instead of a set because ordering is important to the search path _schema.value = [] schemas = [s for s in schemas if s not in _schema.value] try: _schema.value.extend(schemas) yield finally: for s in schemas: _schema.value.remove(s) @contextlib.contextmanager def schema(*schemas: str, databases: Union[List[str], None] = None): """ Sets the search path to the provided schemas. If nested, appends the schemas to the search path if not already in it. Args: *schemas: Schemas that should be appended to the search path. Schemas already in the search path from nested calls will not be appended. databases: The databases to set the search path. If none, all postgres databases will be used. """ with contextlib.ExitStack() as stack: stack.enter_context(_schema_session(databases=databases)) stack.enter_context(_set_schema_state(*schemas)) yield schema.session = _schema_session def constraints(timing: "Timing", *uris: str, databases: Union[List[str], None] = None) -> None: """ Set deferrable constraint timing for the given triggers, which will persist until overridden or until end of transaction. Must be in a transaction to run this. Args: timing: The timing value that overrides the default trigger timing. *uris: Trigger URIs over which to set constraint timing. If none are provided, all trigger constraint timing will be set. All triggers must be deferrable. databases: The databases on which to set constraints. If none, all postgres databases will be used. Raises: RuntimeError: If the database of any triggers is not in a transaction. ValueError: If any triggers are not deferrable. """ for model, trigger in registry.registered(*uris): if not trigger.timing: raise ValueError( f"Trigger {trigger.name} on model {model._meta.label_lower} is not deferrable." 
            )

    for database in utils.postgres_databases(databases):
        if not connections[database].in_atomic_block:
            raise RuntimeError(f'Database "{database}" is not in a transaction.')

        names = ", ".join(trigger.get_pgid(model) for model, trigger in registry.registered(*uris))

        with connections[database].cursor() as cursor:
            cursor.execute(f"SET CONSTRAINTS {names} {timing}")
django-pgtrigger-4.11.1/pgtrigger/tests/000077500000000000000000000000001460427745000201755ustar00rootroot00000000000000django-pgtrigger-4.11.1/pgtrigger/tests/__init__.py000066400000000000000000000002201460427745000223010ustar00rootroot00000000000000import django

if django.VERSION < (3, 2):  # pragma: no cover
    default_app_config = "pgtrigger.tests.apps.PGTriggerTestsConfig"

del django
django-pgtrigger-4.11.1/pgtrigger/tests/apps.py000066400000000000000000000011741460427745000215150ustar00rootroot00000000000000import django.apps
from django.db import connections
from django.db.models.signals import pre_migrate


def install_schemas(using, **kwargs):
    if connections[using].vendor == "postgresql":
        with connections[using].cursor() as cursor:
            cursor.execute('CREATE SCHEMA IF NOT EXISTS "order";')
            cursor.execute("CREATE SCHEMA IF NOT EXISTS receipt;")


class PGTriggerTestsConfig(django.apps.AppConfig):
    name = "pgtrigger.tests"

    def ready(self):
        """
        Ensure schemas are created for test databases before migrations
        """
        pre_migrate.connect(install_schemas, sender=self)
django-pgtrigger-4.11.1/pgtrigger/tests/conftest.py000066400000000000000000000015421460427745000223760ustar00rootroot00000000000000import pytest
from django.core.management import call_command


@pytest.fixture(scope="session")
def django_db_setup(django_db_setup, django_db_blocker, request):
    with django_db_blocker.unblock():
        # Note - schemas for databases are made in the pre-migrate hook
        # The django test runner only runs migrations once per unique connection string.
# Ensure that we've migrated all of our schema-based databases here call_command("migrate", database="default", verbosity=request.config.option.verbose) call_command("migrate", database="order", verbosity=request.config.option.verbose) call_command("migrate", database="receipt", verbosity=request.config.option.verbose) @pytest.fixture(autouse=True) def disable_logging(mocker): mocker.patch("pgtrigger.management.commands.pgtrigger._setup_logging", autospec=True) django-pgtrigger-4.11.1/pgtrigger/tests/migrations/000077500000000000000000000000001460427745000223515ustar00rootroot00000000000000django-pgtrigger-4.11.1/pgtrigger/tests/migrations/0001_initial.py000066400000000000000000000074351460427745000250250ustar00rootroot00000000000000# Generated by Django 3.0.7 on 2020-06-26 22:02 import django.db.models.deletion import django.utils.timezone from django.conf import settings from django.db import migrations, models class Migration(migrations.Migration): initial = True dependencies = [migrations.swappable_dependency(settings.AUTH_USER_MODEL)] operations = [ migrations.CreateModel( name="CharPk", fields=[ ( "custom_pk", models.CharField(max_length=32, primary_key=True, serialize=False), ) ], ), migrations.CreateModel( name="SoftDelete", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ("is_active", models.BooleanField(default=True)), ("other_field", models.TextField()), ], ), migrations.CreateModel( name="TestTrigger", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ("field", models.CharField(max_length=16)), ("int_field", models.IntegerField(default=0)), ( "dt_field", models.DateTimeField(default=django.utils.timezone.now), ), ( "nullable", models.CharField(default=None, max_length=16, null=True), ), ( "char_pk_fk_field", models.ForeignKey( null=True, on_delete=django.db.models.deletion.CASCADE, to="tests.CharPk", ), ), ( "fk_field", models.ForeignKey( null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, ), ), ], ), migrations.CreateModel( name="TestModel", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ("int_field", models.IntegerField(null=True, unique=True)), ("char_field", models.CharField(max_length=128, null=True)), ("float_field", models.FloatField(null=True)), ], options={"unique_together": {("int_field", "char_field")}}, ), migrations.CreateModel( name="FkToSoftDelete", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ( "ref", models.ForeignKey( on_delete=django.db.models.deletion.CASCADE, to="tests.SoftDelete", ), ), ], ), ] django-pgtrigger-4.11.1/pgtrigger/tests/migrations/0002_logentry_tologmodel.py000066400000000000000000000021671460427745000274620ustar00rootroot00000000000000# Generated by Django 3.0.7 on 2020-07-18 07:59 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ("tests", "0001_initial"), ] operations = [ migrations.CreateModel( name="LogEntry", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ("level", models.CharField(max_length=16)), ], ), migrations.CreateModel( name="ToLogModel", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ("field", models.CharField(max_length=16)), ], ), ] 
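# --- Illustrative sketch, not part of the generated migration above ---
# The `pgtrigger.ignore` context manager documented in core pairs naturally
# with data migrations: a RunPython step can bypass a protection trigger for
# a one-off backfill. The trigger URI below is hypothetical and assumes a
# protect trigger named "protect_delete" is registered on LogEntry.
def _example_backfill(apps, schema_editor):
    import pgtrigger

    LogEntry = apps.get_model("tests", "LogEntry")
    with pgtrigger.ignore("tests.LogEntry:protect_delete"):
        # Deletes inside this block skip the (hypothetical) protect trigger
        LogEntry.objects.filter(level="DEBUG").delete()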
django-pgtrigger-4.11.1/pgtrigger/tests/migrations/0003_auto_20200718_0938.py000066400000000000000000000010651460427745000260050ustar00rootroot00000000000000# Generated by Django 3.0.7 on 2020-07-18 09:38 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ("tests", "0002_logentry_tologmodel"), ] operations = [ migrations.AddField( model_name="logentry", name="new_field", field=models.CharField(max_length=16, null=True), ), migrations.AddField( model_name="logentry", name="old_field", field=models.CharField(max_length=16, null=True), ), ] django-pgtrigger-4.11.1/pgtrigger/tests/migrations/0004_fsm.py000066400000000000000000000012631460427745000241550ustar00rootroot00000000000000# Generated by Django 3.0.7 on 2020-07-21 19:46 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ("tests", "0003_auto_20200718_0938"), ] operations = [ migrations.CreateModel( name="FSM", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ("transition", models.CharField(max_length=32)), ], ), ] django-pgtrigger-4.11.1/pgtrigger/tests/migrations/0005_customsoftdelete.py000066400000000000000000000013531460427745000267620ustar00rootroot00000000000000# Generated by Django 3.0.7 on 2020-10-13 11:26 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ("tests", "0004_fsm"), ] operations = [ migrations.CreateModel( name="CustomSoftDelete", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ("custom_active", models.BooleanField(default=True)), ("other_field", models.TextField()), ], ), ] django-pgtrigger-4.11.1/pgtrigger/tests/migrations/0006_customtablename.py000066400000000000000000000013111460427745000265470ustar00rootroot00000000000000# Generated by Django 3.2.3 on 2022-07-30 09:50 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ("tests", "0005_customsoftdelete"), ] operations = [ migrations.CreateModel( name="CustomTableName", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID" ), ), ("int_field", models.IntegerField(null=True, unique=True)), ], options={ "db_table": "order", }, ), ] django-pgtrigger-4.11.1/pgtrigger/tests/migrations/0007_auto_20220808_1055.py000066400000000000000000000022021460427745000257740ustar00rootroot00000000000000# Generated by Django 3.2.15 on 2022-08-08 10:55 from django.conf import settings from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ("tests", "0006_customtablename"), ] operations = [ migrations.CreateModel( name="TestTriggerProxy", fields=[], options={ "proxy": True, "indexes": [], "constraints": [], }, bases=("tests.testtrigger",), ), migrations.AddField( model_name="testtrigger", name="m2m_field", field=models.ManyToManyField( related_name="_tests_testtrigger_m2m_field_+", to=settings.AUTH_USER_MODEL ), ), migrations.CreateModel( name="TestDefaultThrough", fields=[], options={ "proxy": True, "indexes": [], "constraints": [], }, bases=("tests.testtrigger_m2m_field",), ), ] django-pgtrigger-4.11.1/pgtrigger/tests/migrations/0008_searchmodel.py000066400000000000000000000015601460427745000256620ustar00rootroot00000000000000# Generated by Django 2.2 on 2022-08-10 19:13 import django.contrib.postgres.search from django.db 
import migrations, models class Migration(migrations.Migration): dependencies = [ ("tests", "0007_auto_20220808_1055"), ] operations = [ migrations.CreateModel( name="SearchModel", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID" ), ), ("body_vector", django.contrib.postgres.search.SearchVectorField()), ("title_body_vector", django.contrib.postgres.search.SearchVectorField()), ("title", models.CharField(max_length=128)), ("body", models.TextField()), ], ), ] django-pgtrigger-4.11.1/pgtrigger/tests/migrations/0009_orderschema_receiptschema.py000066400000000000000000000021041460427745000305600ustar00rootroot00000000000000# Generated by Django 3.2.15 on 2022-08-11 11:37 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ("tests", "0008_searchmodel"), ] operations = [ migrations.CreateModel( name="OrderSchema", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID" ), ), ("int_field", models.IntegerField()), ], ), migrations.CreateModel( name="ReceiptSchema", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID" ), ), ("char_field", models.CharField(max_length=128)), ], options={ "db_table": "table.with.dots", }, ), ] django-pgtrigger-4.11.1/pgtrigger/tests/migrations/0010_auto_20220817_2211.py000066400000000000000000000031511460427745000257650ustar00rootroot00000000000000# Generated by Django 3.2.15 on 2022-08-17 22:11 import psqlextra.backend.migrations.operations.add_default_partition import psqlextra.backend.migrations.operations.create_partitioned_model import psqlextra.manager.manager import psqlextra.models.partitioned import psqlextra.types from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ("tests", "0009_orderschema_receiptschema"), ] operations = [ psqlextra.backend.migrations.operations.create_partitioned_model.PostgresCreatePartitionedModel( # noqa name="PartitionModel", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID" ), ), ("name", models.TextField()), ("timestamp", models.DateTimeField()), ], options={ "abstract": False, "base_manager_name": "objects", }, partitioning_options={ "method": psqlextra.types.PostgresPartitioningMethod["RANGE"], "key": ["timestamp"], }, bases=(psqlextra.models.partitioned.PostgresPartitionedModel,), managers=[ ("objects", psqlextra.manager.manager.PostgresManager()), ], ), psqlextra.backend.migrations.operations.add_default_partition.PostgresAddDefaultPartition( model_name="PartitionModel", name="default", ), ] django-pgtrigger-4.11.1/pgtrigger/tests/migrations/0011_auto_20220817_2211.py000066400000000000000000000015301460427745000257650ustar00rootroot00000000000000# Generated by Django 3.2.15 on 2022-08-17 22:11 from django.db import migrations from psqlextra.backend.migrations.operations import PostgresAddRangePartition class Migration(migrations.Migration): dependencies = [ ("tests", "0010_auto_20220817_2211"), ] operations = [ PostgresAddRangePartition( model_name="partitionmodel", name="pt1", from_values="2019-01-01", to_values="2019-02-01", ), PostgresAddRangePartition( model_name="partitionmodel", name="pt2", from_values="2019-02-01", to_values="2019-03-01", ), PostgresAddRangePartition( model_name="partitionmodel", name="pt3", from_values="2019-03-01", to_values="2019-04-01", ), ] 
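# --- Illustrative sketch, not part of the migration above ---
# Extending the partitioned table follows the same pattern; a hypothetical
# fourth monthly partition would be one more operation in the list:
#
#     PostgresAddRangePartition(
#         model_name="partitionmodel",
#         name="pt4",
#         from_values="2019-04-01",
#         to_values="2019-05-01",
#     ),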
django-pgtrigger-4.11.1/pgtrigger/tests/migrations/0012_alter_partitionmodel_options.py000066400000000000000000000005131460427745000313600ustar00rootroot00000000000000# Generated by Django 3.2.15 on 2022-08-18 10:48

from django.db import migrations


class Migration(migrations.Migration):
    dependencies = [
        ("tests", "0011_auto_20220817_2211"),
    ]

    operations = [
        migrations.AlterModelOptions(
            name="partitionmodel",
            options={},
        ),
    ]
0013_alter_testtrigger_m2m_field_changedcondition.py000066400000000000000000000035201460427745000343370ustar00rootroot00000000000000django-pgtrigger-4.11.1/pgtrigger/tests/migrations# Generated by Django 4.2.6 on 2023-10-11 20:50

import django.db.models.deletion
from django.conf import settings
from django.db import migrations, models


class Migration(migrations.Migration):
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ("tests", "0012_alter_partitionmodel_options"),
    ]

    operations = [
        migrations.AlterField(
            model_name="testtrigger",
            name="m2m_field",
            field=models.ManyToManyField(related_name="+", to=settings.AUTH_USER_MODEL),
        ),
        migrations.CreateModel(
            name="ChangedCondition",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
                    ),
                ),
                ("field", models.CharField(max_length=16)),
                ("int_field", models.IntegerField(default=0)),
                ("dt_field", models.DateTimeField(auto_now=True)),
                ("nullable", models.CharField(default=None, max_length=16, null=True)),
                (
                    "char_pk_fk_field",
                    models.ForeignKey(
                        null=True, on_delete=django.db.models.deletion.CASCADE, to="tests.charpk"
                    ),
                ),
                (
                    "fk_field",
                    models.ForeignKey(
                        null=True,
                        on_delete=django.db.models.deletion.CASCADE,
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
                (
                    "m2m_field",
                    models.ManyToManyField(related_name="+", to=settings.AUTH_USER_MODEL),
                ),
            ],
        ),
    ]
django-pgtrigger-4.11.1/pgtrigger/tests/migrations/__init__.py000066400000000000000000000000001460427745000244500ustar00rootroot00000000000000django-pgtrigger-4.11.1/pgtrigger/tests/models.py000066400000000000000000000176301460427745000220410ustar00rootroot00000000000000from django.contrib.auth.models import User
from django.contrib.postgres.search import SearchVectorField
from django.db import connections, models
from django.utils import timezone
from psqlextra.models import PostgresPartitionedModel
from psqlextra.types import PostgresPartitioningMethod

import pgtrigger
import pgtrigger.utils


def _get_pg_maj_version(db):  # pragma: no cover
    connection = connections[db]
    if connection.vendor == "postgresql":
        with connection.cursor() as cursor:
            return pgtrigger.utils.pg_maj_version(cursor)


class Router:
    route_app_labels = ["tests"]

    def allow_migrate(self, db, app_label, model_name=None, **hints):
        """
        Ignore the partition model for the "other" DB, for non-Postgres DBs,
        and for Postgres DBs that are less than version 13
        """
        pg_maj_version = _get_pg_maj_version(db)

        if model_name == "partitionmodel" and (
            db in ("sqlite", "other") or not pg_maj_version or pg_maj_version < 13
        ):
            return False


class PartitionModel(PostgresPartitionedModel):
    class PartitioningMeta:
        method = PostgresPartitioningMethod.RANGE
        key = ["timestamp"]

    name = models.TextField()
    timestamp = models.DateTimeField()

    class Meta:
        triggers = [pgtrigger.Protect(name="protect_delete", operation=pgtrigger.Delete)]


class OrderSchema(models.Model):
    """A model that only appears in the "order" schema"""

    int_field = models.IntegerField()


class ReceiptSchema(models.Model):
    """A model that only appears in the "receipt" schema"""

    char_field =
models.CharField(max_length=128) class Meta: db_table = "table.with.dots" class SearchModel(models.Model): body_vector = SearchVectorField() title_body_vector = SearchVectorField() title = models.CharField(max_length=128) body = models.TextField() class Meta: triggers = [ pgtrigger.UpdateSearchVector( name="add_body_to_vector", vector_field="body_vector", document_fields=["body"] ), pgtrigger.UpdateSearchVector( name="add_body_title_to_vector", vector_field="title_body_vector", document_fields=["body", "title"], ), ] @pgtrigger.register( pgtrigger.Protect(name="protect_delete", operation=pgtrigger.Delete), ) class CustomTableName(models.Model): int_field = models.IntegerField(null=True, unique=True) class Meta: db_table = "order" class TestModel(models.Model): int_field = models.IntegerField(null=True, unique=True) char_field = models.CharField(max_length=128, null=True) float_field = models.FloatField(null=True) class Meta: unique_together = ("int_field", "char_field") class LogEntry(models.Model): """Created when ToLogModel is updated""" level = models.CharField(max_length=16) old_field = models.CharField(max_length=16, null=True) new_field = models.CharField(max_length=16, null=True) class ToLogModel(models.Model): """For testing triggers that log records at statement and row level""" field = models.CharField(max_length=16) class Meta: triggers = [ pgtrigger.Trigger( name="update_of_statement_test", level=pgtrigger.Statement, operation=pgtrigger.UpdateOf("field"), when=pgtrigger.After, func=pgtrigger.Func( f""" INSERT INTO {LogEntry._meta.db_table}(level) VALUES ('STATEMENT'); RETURN NULL; """ ), ), pgtrigger.Trigger( name="after_update_statement_test", level=pgtrigger.Statement, operation=pgtrigger.Update, when=pgtrigger.After, referencing=pgtrigger.Referencing(old="old_values", new="new_values"), func=f""" INSERT INTO {LogEntry._meta.db_table}(level, old_field, new_field) SELECT 'STATEMENT' AS level, old_values.field AS old_field, new_values.field AS new_field FROM old_values JOIN new_values ON old_values.id = new_values.id; RETURN NULL; """, ), pgtrigger.Trigger( name="after_update_row_test", level=pgtrigger.Row, operation=pgtrigger.Update, when=pgtrigger.After, condition=pgtrigger.Q(old__field__df=pgtrigger.F("new__field")), func=( f"INSERT INTO {LogEntry._meta.db_table}(level) VALUES ('ROW'); RETURN NULL;" ), ), ] class CharPk(models.Model): custom_pk = models.CharField(primary_key=True, max_length=32) class TestTrigger(models.Model): """ For testing triggers """ field = models.CharField(max_length=16) int_field = models.IntegerField(default=0) dt_field = models.DateTimeField(default=timezone.now) nullable = models.CharField(null=True, default=None, max_length=16) fk_field = models.ForeignKey("auth.User", null=True, on_delete=models.CASCADE) char_pk_fk_field = models.ForeignKey(CharPk, null=True, on_delete=models.CASCADE) m2m_field = models.ManyToManyField(User, related_name="+") class Meta: triggers = [ pgtrigger.Trigger( name="protect_misc_insert", when=pgtrigger.Before, operation=pgtrigger.Insert, func="RAISE EXCEPTION 'no no no!';", condition=pgtrigger.Q(new__field="misc_insert"), ), ] class TestTriggerProxy(TestTrigger): """ For testing triggers on proxy models """ class Meta: proxy = True triggers = [ pgtrigger.Protect(name="protect_delete", operation=pgtrigger.Delete), ] class TestDefaultThrough(TestTrigger.m2m_field.through): class Meta: proxy = True triggers = [ pgtrigger.Protect(name="protect_it", operation=pgtrigger.Delete), ] 
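# --- Illustrative sketch, not part of the models above ---
# Triggers declared on proxy and through models (TestTriggerProxy and
# TestDefaultThrough above) install against the underlying concrete tables,
# so ORM operations on the relation hit them. A minimal, hypothetical usage
# mirroring the through-model test in this suite:
def _example_through_model_trigger():
    import ddf

    from pgtrigger.tests import utils

    obj = ddf.G(TestTrigger)
    obj.m2m_field.add(ddf.G("auth.User"))

    # Clearing the M2M issues a DELETE on the through table, which the
    # `protect_it` trigger on TestDefaultThrough blocks.
    with utils.raises_trigger_error(match="Cannot delete"):
        obj.m2m_field.clear()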
@pgtrigger.register(pgtrigger.SoftDelete(name="soft_delete", field="is_active")) class SoftDelete(models.Model): """ For testing soft deletion. Deletions on this model will set is_active = False without deleting the model """ is_active = models.BooleanField(default=True) other_field = models.TextField() class FkToSoftDelete(models.Model): """Ensures foreign keys to a soft delete model are deleted""" ref = models.ForeignKey(SoftDelete, on_delete=models.CASCADE) @pgtrigger.register(pgtrigger.SoftDelete(name="soft_delete", field="custom_active")) class CustomSoftDelete(models.Model): """ For testing soft deletion with a custom active field. This trigger also helps ensure that triggers can have the same names across multiple models. """ custom_active = models.BooleanField(default=True) other_field = models.TextField() @pgtrigger.register( pgtrigger.FSM( name="fsm", field="transition", transitions=[("unpublished", "published"), ("published", "inactive")], ) ) class FSM(models.Model): """Tests valid transitions of a field""" transition = models.CharField(max_length=32) class ChangedCondition(models.Model): """ For testing changed conditions """ field = models.CharField(max_length=16) int_field = models.IntegerField(default=0) dt_field = models.DateTimeField(auto_now=True) nullable = models.CharField(null=True, default=None, max_length=16) fk_field = models.ForeignKey("auth.User", null=True, on_delete=models.CASCADE) char_pk_fk_field = models.ForeignKey(CharPk, null=True, on_delete=models.CASCADE) m2m_field = models.ManyToManyField(User, related_name="+") django-pgtrigger-4.11.1/pgtrigger/tests/syncdb_app/000077500000000000000000000000001460427745000223175ustar00rootroot00000000000000django-pgtrigger-4.11.1/pgtrigger/tests/syncdb_app/__init__.py000066400000000000000000000000001460427745000244160ustar00rootroot00000000000000django-pgtrigger-4.11.1/pgtrigger/tests/syncdb_app/apps.py000066400000000000000000000001711460427745000236330ustar00rootroot00000000000000import django.apps class PGTriggerTestsSyncdbAppConfig(django.apps.AppConfig): name = "pgtrigger.tests.syncdb_app" django-pgtrigger-4.11.1/pgtrigger/tests/syncdb_app/models.py000066400000000000000000000015001460427745000241500ustar00rootroot00000000000000from django.apps.registry import Apps from django.db import models from django.utils import timezone import pgtrigger syncdb_apps = Apps() class NoMigrationModel(models.Model): """ For testing triggers installed with syncdb """ field = models.CharField(max_length=16) int_field = models.IntegerField(default=0) dt_field = models.DateTimeField(default=timezone.now) nullable = models.CharField(null=True, default=None, max_length=16) class Meta: apps = syncdb_apps triggers = [ pgtrigger.Trigger( name="protect_misc_insert", when=pgtrigger.Before, operation=pgtrigger.Insert, func="RAISE EXCEPTION 'no no no!';", condition=pgtrigger.Q(new__field="misc_insert"), ), ] django-pgtrigger-4.11.1/pgtrigger/tests/test_commands.py000066400000000000000000000177741460427745000234270ustar00rootroot00000000000000# flake8: noqa from unittest import mock from django.core.management import call_command import pytest import pgtrigger from pgtrigger import registry @pytest.mark.django_db def test_full_ls(capsys): """Tests listing all triggers""" call_command("pgtrigger", "ls") lines = capsys.readouterr().out.split("\n") expected_lines = [ "", "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.CustomSoftDelete:soft_delete", "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.CustomTableName:protect_delete", 
"\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.FSM:fsm", "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.SearchModel:add_body_title_to_vector", "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.SearchModel:add_body_to_vector", "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.SoftDelete:soft_delete", "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.TestDefaultThrough:protect_it", "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.TestTrigger:protect_misc_insert", "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.TestTriggerProxy:protect_delete", "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.ToLogModel:after_update_row_test", "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.ToLogModel:after_update_statement_test", "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.ToLogModel:update_of_statement_test", ] assert set(expected_lines).issubset(set(lines)) @pytest.mark.django_db def test_subset_ls(capsys): """Tests listing some triggers""" call_command( "pgtrigger", "ls", "tests.SoftDelete:soft_delete", "tests.TestTriggerProxy:protect_delete", ) lines = capsys.readouterr().out.split("\n") assert set(lines) == set( [ "", "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.SoftDelete:soft_delete", "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.TestTriggerProxy:protect_delete", ] ) @pytest.mark.django_db def test_main_commands(capsys): """ Tests running main commands """ call_command("pgtrigger", "uninstall") call_command( "pgtrigger", "ls", "tests.SoftDelete:soft_delete", "tests.TestTriggerProxy:protect_delete", ) lines = capsys.readouterr().out.split("\n") assert set(lines) == set( [ "", "\x1b[91mUNINSTALLED\x1b[0m \x1b[94mN/A\x1b[0m tests.SoftDelete:soft_delete", "\x1b[91mUNINSTALLED\x1b[0m \x1b[94mN/A\x1b[0m tests.TestTriggerProxy:protect_delete", ] ) call_command("pgtrigger", "install") call_command( "pgtrigger", "ls", "tests.SoftDelete:soft_delete", "tests.TestTriggerProxy:protect_delete", ) lines = capsys.readouterr().out.split("\n") assert set(lines) == set( [ "", "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.SoftDelete:soft_delete", "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.TestTriggerProxy:protect_delete", ] ) call_command("pgtrigger", "disable") call_command( "pgtrigger", "ls", "tests.SoftDelete:soft_delete", "tests.TestTriggerProxy:protect_delete", ) lines = capsys.readouterr().out.split("\n") assert set(lines) == set( [ "", "\x1b[92mINSTALLED\x1b[0m \x1b[91mDISABLED\x1b[0m tests.SoftDelete:soft_delete", "\x1b[92mINSTALLED\x1b[0m \x1b[91mDISABLED\x1b[0m tests.TestTriggerProxy:protect_delete", ] ) call_command("pgtrigger", "enable") call_command( "pgtrigger", "ls", "tests.SoftDelete:soft_delete", "tests.TestTriggerProxy:protect_delete", ) lines = capsys.readouterr().out.split("\n") assert set(lines) == set( [ "", "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.SoftDelete:soft_delete", "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.TestTriggerProxy:protect_delete", ] ) @pytest.mark.django_db def test_prune(capsys): """Test pruning a trigger""" # Make it appear as though the trigger has been renamed and is no # longer installed soft_delete_model, soft_delete_trigger = pgtrigger.registered("tests.SoftDelete:soft_delete")[ 0 ] with soft_delete_trigger.unregister(soft_delete_model): call_command("pgtrigger", "ls") captured = capsys.readouterr() lines = sorted(captured.out.split("\n")) assert ( "\x1b[96mPRUNE\x1b[0m \x1b[92mENABLED\x1b[0m tests_softdelete:pgtrigger_soft_delete_f41be" ) in 
lines call_command("pgtrigger", "prune") call_command("pgtrigger", "ls") lines = capsys.readouterr().out.split("\n") assert ( "\x1b[91mUNINSTALLED\x1b[0m \x1b[94mN/A\x1b[0m tests.SoftDelete:soft_delete" ) in lines call_command("pgtrigger", "install") call_command("pgtrigger", "ls") lines = capsys.readouterr().out.split("\n") assert ( "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.SoftDelete:soft_delete" ) in lines @pytest.mark.django_db(databases=["default", "other"]) def test_outdated(capsys, mocker): """Test an outdated trigger""" # Make it appear like the trigger is out of date by changing # its hash mocker.patch.object( registry._registry["tests.SoftDelete:soft_delete"][1], "compile", return_value=mocker.Mock(hash="hash"), ) call_command("pgtrigger", "ls") lines = capsys.readouterr().out.split("\n") assert ( "\x1b[93mOUTDATED\x1b[0m \x1b[92mENABLED\x1b[0m tests.SoftDelete:soft_delete" ) in lines @pytest.mark.django_db def test_main_commands_w_args(capsys): """ Tests running main commands with arguments """ call_command("pgtrigger", "uninstall", "tests.SoftDelete:soft_delete") call_command( "pgtrigger", "ls", "tests.SoftDelete:soft_delete", "tests.TestTriggerProxy:protect_delete", ) lines = capsys.readouterr().out.split("\n") assert set(lines) == set( [ "\x1b[91mUNINSTALLED\x1b[0m \x1b[94mN/A\x1b[0m tests.SoftDelete:soft_delete", "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.TestTriggerProxy:protect_delete", "", ] ) call_command("pgtrigger", "install", "tests.SoftDelete:soft_delete") call_command( "pgtrigger", "ls", "tests.SoftDelete:soft_delete", "tests.TestTriggerProxy:protect_delete", ) lines = capsys.readouterr().out.split("\n") assert set(lines) == set( [ "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.SoftDelete:soft_delete", "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.TestTriggerProxy:protect_delete", "", ] ) call_command("pgtrigger", "disable", "tests.SoftDelete:soft_delete") call_command( "pgtrigger", "ls", "tests.SoftDelete:soft_delete", "tests.TestTriggerProxy:protect_delete", ) lines = capsys.readouterr().out.split("\n") assert set(lines) == set( [ "\x1b[92mINSTALLED\x1b[0m \x1b[91mDISABLED\x1b[0m tests.SoftDelete:soft_delete", "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.TestTriggerProxy:protect_delete", "", ] ) call_command("pgtrigger", "enable", "tests.SoftDelete:soft_delete") call_command( "pgtrigger", "ls", "tests.SoftDelete:soft_delete", "tests.TestTriggerProxy:protect_delete", ) lines = capsys.readouterr().out.split("\n") assert set(lines) == set( [ "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.SoftDelete:soft_delete", "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.TestTriggerProxy:protect_delete", "", ] ) django-pgtrigger-4.11.1/pgtrigger/tests/test_contrib.py000066400000000000000000000166241460427745000232570ustar00rootroot00000000000000import ddf import pytest from django.core.exceptions import FieldDoesNotExist import pgtrigger from pgtrigger.tests import models, utils def test_registered_invalid_args(): with pytest.raises(ValueError): pgtrigger.registered("uri") @pytest.mark.django_db def test_read_only(): """Tests the ReadOnly trigger""" with pytest.raises(ValueError, match="only one of"): pgtrigger.ReadOnly(name="uneditable", fields=["level"], exclude=["hello"]) trigger = pgtrigger.ReadOnly(name="uneditable", fields=["level"]) with pytest.raises(FieldDoesNotExist): trigger.install(models.TestModel) trigger = pgtrigger.ReadOnly(name="uneditable", exclude=["level"]) with 
pytest.raises(FieldDoesNotExist): trigger.install(models.TestModel) trigger = pgtrigger.ReadOnly(name="uneditable") with trigger.install(models.TestModel): m = ddf.G(models.TestModel, int_field=1) m.save() with utils.raises_trigger_error(match="Cannot update rows"): m.int_field = 2 m.save() trigger = pgtrigger.ReadOnly(name="uneditable", fields=["char_field", "float_field"]) with trigger.install(models.TestModel): m = ddf.G(models.TestModel, int_field=2, char_field="a") m.int_field = 3 m.save() with utils.raises_trigger_error(match="Cannot update rows"): m.char_field = "b" m.save() trigger = pgtrigger.ReadOnly(name="uneditable", exclude=["int_field"]) with trigger.install(models.TestModel): m = ddf.G(models.TestModel, int_field=4, char_field="a") m.int_field = 5 m.save() with utils.raises_trigger_error(match="Cannot update rows"): m.char_field = "b" m.save() @pytest.mark.django_db def test_search_model(): """Verifies search model fields are kept up to date""" obj = models.SearchModel.objects.create( title="This is a message", body="Hello World. What a great body." ) models.SearchModel.objects.create(title="Hi guys", body="Random Word. This is a good idea.") models.SearchModel.objects.create( title="Hello", body="Other words. Many great ideas come from stuff." ) models.SearchModel.objects.create(title="The title", body="A short message.") assert models.SearchModel.objects.filter(body_vector="hello").count() == 1 assert models.SearchModel.objects.filter(body_vector="words").count() == 2 assert models.SearchModel.objects.filter(body_vector="world").count() == 1 assert models.SearchModel.objects.filter(title_body_vector="message").count() == 2 assert models.SearchModel.objects.filter(title_body_vector="idea").count() == 2 assert models.SearchModel.objects.filter(title_body_vector="hello").count() == 2 obj.body = "Nothing more" obj.save() assert not models.SearchModel.objects.filter(body_vector="hello").exists() assert models.SearchModel.objects.filter(title_body_vector="hello").count() == 1 def test_update_search_vector_args(): """Verifies arg checking for UpdateSearchVector""" with pytest.raises(ValueError, match='provide "vector_field"'): pgtrigger.UpdateSearchVector() with pytest.raises(ValueError, match='provide "document_fields"'): pgtrigger.UpdateSearchVector(vector_field="vector_field") def test_update_search_vector_ignore(): """Verifies UpdateSearchVector cannot be ignored""" trigger = pgtrigger.UpdateSearchVector( name="hi", vector_field="vector_field", document_fields=["hi"] ) with pytest.raises(RuntimeError, match="Cannot ignore UpdateSearchVector"): with trigger.ignore(models.SearchModel): pass @pytest.mark.django_db def test_soft_delete(): """ Verifies the SoftDelete test model has the "is_active" flag set to false """ soft_delete = ddf.G(models.SoftDelete, is_active=True) ddf.G(models.FkToSoftDelete, ref=soft_delete) soft_delete.delete() assert not models.SoftDelete.objects.get().is_active assert not models.FkToSoftDelete.objects.exists() @pytest.mark.django_db def test_customer_soft_delete(): """ Verifies the CustomSoftDelete test model has the "custom_active" flag set to false """ soft_delete = ddf.G(models.CustomSoftDelete, custom_active=True) soft_delete.delete() assert not models.CustomSoftDelete.objects.get().custom_active @pytest.mark.django_db def test_soft_delete_different_values(): """ Tests SoftDelete with different types of fields and values """ # Make the LogEntry model a soft delete model where # "level" is set to "inactive" trigger = 
pgtrigger.SoftDelete(name="soft_delete", field="level", value="inactive") with trigger.install(models.LogEntry): le = ddf.G(models.LogEntry, level="active") le.delete() assert models.LogEntry.objects.get().level == "inactive" models.LogEntry.objects.all().delete() # Make the LogEntry model a soft delete model where # "old_field" is set to None trigger = pgtrigger.SoftDelete(name="soft_delete", field="old_field", value=None) with trigger.install(models.LogEntry): le = ddf.G(models.LogEntry, old_field="something") le.delete() assert models.LogEntry.objects.get().old_field is None @pytest.mark.django_db def test_fsm(): """ Verifies the FSM test model cannot make invalid transitions """ fsm = ddf.G(models.FSM, transition="unpublished") fsm.transition = "inactive" with utils.raises_trigger_error(match="Invalid transition"): fsm.save() fsm.transition = "published" fsm.save() # Be sure we ignore FSM when there is no transition fsm.save() with utils.raises_trigger_error(match="Invalid transition"): fsm.transition = "unpublished" fsm.save() fsm.transition = "inactive" fsm.save() def test_fsm_args(): """Verifies arg checking for FSM""" with pytest.raises(ValueError, match='provide "field"'): pgtrigger.FSM() with pytest.raises(ValueError, match='provide "transitions"'): pgtrigger.FSM(field="hello") with pytest.raises(ValueError, match='contains separator ":"'): pgtrigger.FSM(field="hello", transitions=[("a", ":")]) with pytest.raises(ValueError, match='contains separator ","'): pgtrigger.FSM(field="hello", separator=",", transitions=[("a", ",")]) with pytest.raises(ValueError, match="contains quotes"): pgtrigger.FSM(field="hello", transitions=[("a", "b'")]) with pytest.raises(ValueError, match="contains quotes"): pgtrigger.FSM(field="hello", transitions=[("a", 'b"')]) with pytest.raises(ValueError, match="single character"): pgtrigger.FSM(field="hello", separator="aa", transitions=[("a", "b")]) with pytest.raises(ValueError, match="must not have quotes"): pgtrigger.FSM(field="hello", separator="'", transitions=[("a", "b")]) @pytest.mark.django_db def test_protect(): """Verify deletion protect trigger works on test model""" deletion_protected_model = ddf.G(models.TestTrigger) with utils.raises_trigger_error(match="Cannot delete rows"): deletion_protected_model.delete() @pytest.mark.django_db def test_custom_db_table_protect_trigger(): """Verify custom DB table names have successful triggers""" deletion_protected_model = ddf.G(models.CustomTableName) with utils.raises_trigger_error(match="Cannot delete rows"): deletion_protected_model.delete() django-pgtrigger-4.11.1/pgtrigger/tests/test_core.py000066400000000000000000000664101460427745000225450ustar00rootroot00000000000000import datetime as dt import ddf import django import pytest from django.contrib.auth.models import User from django.db import transaction from django.db.utils import NotSupportedError import pgtrigger from pgtrigger import core from pgtrigger.tests import models, utils def test_func(): """Tests using custom Func object""" trigger = pgtrigger.Trigger( name="example", when=pgtrigger.After, operation=pgtrigger.Delete, func=pgtrigger.Func("SELECT {columns.int_field} FROM {meta.db_table}"), ) assert trigger.render_func(models.TestModel) == "SELECT int_field FROM tests_testmodel" @pytest.mark.django_db def test_partition(): p1 = ddf.G(models.PartitionModel, timestamp=dt.datetime(2019, 1, 3)) p2 = ddf.G(models.PartitionModel, timestamp=dt.datetime(2019, 2, 4)) p3 = ddf.G(models.PartitionModel, timestamp=dt.datetime(2019, 3, 5)) default = 
ddf.G(models.PartitionModel, timestamp=dt.datetime(2019, 4, 5)) with utils.raises_trigger_error(match="Cannot delete"): p1.delete() with utils.raises_trigger_error(match="Cannot delete"): p2.delete() with utils.raises_trigger_error(match="Cannot delete"): p3.delete() with utils.raises_trigger_error(match="Cannot delete"): default.delete() with pgtrigger.ignore("tests.PartitionModel:protect_delete"): p1.delete() p2.delete() p3.delete() default.delete() @pytest.mark.django_db def test_through_model(): """ Tests the "ThroughTrigger" model to verify that triggers execute on M2M through models """ test_trigger = ddf.G(models.TestTrigger) test_trigger.m2m_field.add(ddf.G("auth.User")) with utils.raises_trigger_error(match="Cannot delete"): test_trigger.m2m_field.clear() @pytest.mark.django_db def test_statement_row_level_logging(): """ Updates "ToLogModel" entries, which have statement, row-level, and referencing statement triggers that create log entries. """ ddf.G(models.ToLogModel, n=5, field="old_field") assert not models.LogEntry.objects.exists() models.ToLogModel.objects.update(field="new_field") # The statement-level trigger without references should have produced # one log entry assert models.LogEntry.objects.filter(level="STATEMENT", old_field__isnull=True).count() == 1 # The statement-level trigger with references should have made log # entries for all of the old values and the new updated values assert models.LogEntry.objects.filter(level="STATEMENT", old_field__isnull=False).count() == 5 assert ( models.LogEntry.objects.filter( level="STATEMENT", old_field="old_field", new_field="new_field" ).count() == 5 ) # The row-level trigger should have produced five entries assert models.LogEntry.objects.filter(level="ROW").count() == 5 obj = models.ToLogModel.objects.first() obj.save() # A duplicate update shouldn't fire any more row-level log entries assert models.LogEntry.objects.filter(level="ROW").count() == 5 @pytest.mark.django_db(transaction=True) def test_deferred_trigger(): """ Tests deferrable execution of a trigger """ # Make the LogEntry model a soft delete model where # "level" is set to "inactive" trigger = pgtrigger.Protect( name="protect_delete", when=pgtrigger.After, operation=pgtrigger.Delete, timing=pgtrigger.Deferred, ) with trigger.register(models.TestModel), trigger.install(models.TestModel): obj = ddf.G(models.TestModel) deferring_worked = False with utils.raises_trigger_error(match="Cannot delete"): with transaction.atomic(): obj.delete() # Deletion works within the transaction, but fails # when the transaction commits. assert not models.TestModel.objects.exists() deferring_worked = True assert deferring_worked assert models.TestModel.objects.exists() obj = models.TestModel.objects.get() # Verify that we can ignore deferrable triggers with pgtrigger.ignore("tests.TestModel:protect_delete"): with transaction.atomic(): obj.delete() assert not models.TestModel.objects.exists() # The object should still not exist outside of the transaction assert not models.TestModel.objects.exists() @pytest.mark.django_db def test_updating_trigger_condition(): """ Tests re-installing a trigger when the condition changes """ # Make the LogEntry model a soft delete model where # "level" is set to "inactive" trigger = pgtrigger.Protect(name="protect_delete", operation=pgtrigger.Delete) with trigger.install(models.LogEntry): le = ddf.G(models.LogEntry, level="good") with utils.raises_trigger_error(match="Cannot delete"): le.delete() # Protect deletes when "level" is "bad". 
The trigger should be reinstalled
    # appropriately
    trigger.condition = pgtrigger.Q(old__level="bad")
    with trigger.install(models.LogEntry):
        le.delete()


def test_declaration_rendering():
    """Verifies that triggers with a DECLARE are rendered correctly"""

    class DeclaredTrigger(pgtrigger.Trigger):
        def get_declare(self, model):
            return [("var_name", "UUID")]

    rendered = DeclaredTrigger(
        name="test", when=pgtrigger.Before, operation=pgtrigger.Insert
    ).render_declare(None)
    assert rendered == "DECLARE var_name UUID;"

    class DeclaredTriggerMultiple(pgtrigger.Trigger):
        def get_declare(self, model):
            return [("var_name", "UUID"), ("var2_name", "UUID")]

    rendered = DeclaredTriggerMultiple(
        name="test", when=pgtrigger.Before, operation=pgtrigger.Insert
    ).render_declare(None)
    assert rendered == "DECLARE var_name UUID; var2_name UUID;"


def test_f():
    """Tests various properties of the pgtrigger.F object"""
    with pytest.raises(ValueError, match="must reference"):
        pgtrigger.F("bad_value")

    assert pgtrigger.F("old__value").resolved_name == 'OLD."value"'


@pytest.mark.django_db
def test_is_distinct_from_condition():
    """Tests triggers where the old and new are distinct from one another

    Note that distinct is not the same as not being equal since nulls are
    never equal
    """
    test_model = ddf.G(models.TestTrigger, int_field=0)

    # Protect a field from being updated to a different value
    trigger = pgtrigger.Protect(
        name="protect",
        when=pgtrigger.Before,
        operation=pgtrigger.Update,
        condition=pgtrigger.Q(old__int_field__df=pgtrigger.F("new__int_field"))
        | pgtrigger.Q(new__nullable__df=pgtrigger.F("old__nullable")),
    )
    with trigger.install(models.TestTrigger):
        with utils.raises_trigger_error(match="Cannot update rows"):
            test_model.int_field = 1
            test_model.save()

        # Ensure the null case works
        with utils.raises_trigger_error(match="Cannot update rows"):
            test_model.nullable = "1"
            test_model.save()

        # Saving the same values should work fine
        test_model.int_field = 0
        test_model.nullable = None
        test_model.save()


@pytest.mark.django_db
def test_invalid_trigger():
    """Ensures triggers with invalid syntax are not installed"""
    # Truncates can only be used on statement level triggers
    trigger = pgtrigger.Protect(
        name="test_invalid",
        operation=pgtrigger.Truncate,
    )
    with pytest.raises(NotSupportedError, match="are not supported"):
        trigger.install(models.TestTrigger)


@pytest.mark.django_db
def test_is_distinct_from_condition_fk_field():
    """Tests triggers where the old and new are distinct from one another
    on a foreign key field

    Django doesn't support custom lookups by default, and this tests some of
    the overridden behavior
    """
    test_int_fk_model = ddf.G(models.TestTrigger, fk_field=None)

    # Protect a foreign key from being updated to a different value
    trigger = pgtrigger.Protect(
        name="test_is_distinct_from_condition_fk_field1",
        when=pgtrigger.Before,
        operation=pgtrigger.Update,
        condition=pgtrigger.Q(old__fk_field__df=pgtrigger.F("new__fk_field")),
    )
    with trigger.install(models.TestTrigger):
        with utils.raises_trigger_error(match="Cannot update rows"):
            test_int_fk_model.fk_field = User(id=1)
            test_int_fk_model.save()

        # Saving the same values should work fine
        test_int_fk_model.fk_field = None
        test_int_fk_model.save()

    # Protect a non-int foreign key from being updated to a different value
    char_pk = ddf.G(models.CharPk)
    test_char_fk_model = ddf.G(models.TestTrigger, char_pk_fk_field=char_pk)
    trigger = pgtrigger.Protect(
        name="test_is_distinct_from_condition_fk_field2",
        when=pgtrigger.Before,
        operation=pgtrigger.Update,
        condition=pgtrigger.Q(old__char_pk_fk_field__df=pgtrigger.F("new__char_pk_fk_field")),
    )
    with trigger.install(models.TestTrigger):
        with utils.raises_trigger_error(match="Cannot update rows"):
            test_char_fk_model.char_pk_fk_field = None
            test_char_fk_model.save()

        # Saving the same values should work fine
        test_char_fk_model.char_pk_fk_field = char_pk
        test_char_fk_model.save()


@pytest.mark.django_db
def test_is_not_distinct_from_condition():
    """Tests triggers where the old and new are not distinct from one another

    Note that distinct is not the same as not being equal since nulls are
    never equal
    """
    test_model = ddf.G(models.TestTrigger, int_field=0)

    # Protect a field from being updated to the same value. In this case,
    # both int_field and nullable need to change in order for the update to
    # happen
    trigger = pgtrigger.Protect(
        name="test_is_not_distinct_from_condition1",
        when=pgtrigger.Before,
        operation=pgtrigger.Update,
        condition=pgtrigger.Q(old__int_field__ndf=pgtrigger.F("new__int_field"))
        | pgtrigger.Q(old__nullable__ndf=pgtrigger.F("new__nullable")),
    )
    with trigger.install(models.TestTrigger):
        with utils.raises_trigger_error(match="Cannot update rows"):
            test_model.int_field = 1
            test_model.save()

        # Ensure the null case works
        with utils.raises_trigger_error(match="Cannot update rows"):
            test_model.int_field = 0
            test_model.nullable = "1"
            test_model.save()

        # Updating both fields will ignore the trigger
        test_model.int_field = 1
        test_model.nullable = "1"
        test_model.save()


def test_max_name_length(mocker):
    """
    Verifies that a trigger with the exact MAX_NAME_LENGTH can be installed
    fine. Also checks that going above this by one character results in an
    error
    """
    trigger = pgtrigger.Protect(
        name="t" * core.MAX_NAME_LENGTH,
        operation=pgtrigger.Update,
    )
    assert trigger.get_pgid(models.TestTrigger)

    mocker.patch.object(pgtrigger.Protect, "validate_name")
    with pytest.raises(ValueError):
        trigger = pgtrigger.Protect(
            name="a" * (core.MAX_NAME_LENGTH + 1),
            operation=pgtrigger.Update,
        )
        trigger.get_pgid(models.TestTrigger)


def test_invalid_name_characters(mocker):
    """
    Verifies that trigger names must contain only alphanumeric characters,
    hyphens, or underscores
    """
    pgtrigger.Protect(
        name="hello_world-111",
        operation=pgtrigger.Update,
    )

    with pytest.raises(ValueError, match="alphanumeric"):
        pgtrigger.Protect(
            name="hello.world",
            operation=pgtrigger.Update,
        )


@pytest.mark.django_db
def test_complex_conditions():
    """Tests complex OLD and NEW trigger conditions"""
    zero_to_one = ddf.G(models.TestModel, int_field=0)

    # Don't let int_field go from 0 -> 1
    trigger = pgtrigger.Protect(
        name="test_complex_conditions1",
        when=pgtrigger.Before,
        operation=pgtrigger.Update,
        condition=pgtrigger.Q(old__int_field=0, new__int_field=1),
    )
    with trigger.install(models.TestModel):
        with utils.raises_trigger_error(match="Cannot update rows"):
            zero_to_one.int_field = 1
            zero_to_one.save()

    # Test a condition with a datetime field
    test_model = ddf.G(models.TestTrigger, int_field=0, dt_field=dt.datetime(2020, 1, 1))
    trigger = pgtrigger.Protect(
        name="test_complex_conditions2",
        when=pgtrigger.Before,
        operation=pgtrigger.Update,
        condition=(
            pgtrigger.Q(old__int_field=0, new__int_field=1)
            | pgtrigger.Q(new__dt_field__lt=dt.datetime(2020, 1, 1))
        ),
    )
    with trigger.install(models.TestTrigger):
        with utils.raises_trigger_error(match="Cannot update rows"):
            test_model.int_field = 1
test_model.save() test_model.int_field = 2 test_model.save() with utils.raises_trigger_error(match="Cannot update rows"): test_model.dt_field = dt.datetime(2019, 1, 1) test_model.save() def test_referencing_rendering(): """Verifies the rendering of the Referencing construct""" assert ( str(pgtrigger.Referencing(old="old_table")).strip() == "REFERENCING OLD TABLE AS old_table" ) assert ( str(pgtrigger.Referencing(new="new_table")).strip() == "REFERENCING NEW TABLE AS new_table" ) assert ( str(pgtrigger.Referencing(old="old_table", new="new_table")).strip() == "REFERENCING OLD TABLE AS old_table NEW TABLE AS new_table" ) def test_arg_checks(): """ There are quite a few places that check arguments in the trigger module. Enumerate these cases here to make sure they work """ with pytest.raises(ValueError, match='Must provide either "old" and/or "new"'): pgtrigger.Referencing() with pytest.raises(ValueError, match="Must provide SQL"): pgtrigger.Condition() with pytest.raises(ValueError, match="Must provide at least one"): pgtrigger.UpdateOf() with pytest.raises(ValueError, match='must have "name"'): pgtrigger.Trigger(when=pgtrigger.Before, operation=pgtrigger.Update) with pytest.raises(ValueError, match='Invalid "level"'): pgtrigger.Trigger(level="invalid") with pytest.raises(ValueError, match='Invalid "when"'): pgtrigger.Trigger(when="invalid") with pytest.raises(ValueError, match='Invalid "operation"'): pgtrigger.Trigger(when=pgtrigger.Before, operation="invalid") with pytest.raises(ValueError, match='Invalid "timing"'): pgtrigger.Trigger(when=pgtrigger.Before, operation=pgtrigger.Update, timing="timing") with pytest.raises(ValueError, match="Row-level triggers cannot have"): pgtrigger.Trigger( when=pgtrigger.Before, operation=pgtrigger.Update, referencing=pgtrigger.Referencing(old="old_table"), ) with pytest.raises(ValueError, match='must have "level" attribute'): pgtrigger.Trigger( level=pgtrigger.Statement, when=pgtrigger.After, operation=pgtrigger.Update, timing=pgtrigger.Immediate, ) with pytest.raises(ValueError, match='must have "when" attribute'): pgtrigger.Trigger( level=pgtrigger.Row, when=pgtrigger.Before, operation=pgtrigger.Update, timing=pgtrigger.Immediate, ) with pytest.raises(ValueError, match="Must define func"): pgtrigger.Trigger(name="test", when=pgtrigger.Before, operation=pgtrigger.Update).get_func( None ) with pytest.raises(ValueError, match="> 47"): pgtrigger.Trigger( # noqa when=pgtrigger.Before, operation=pgtrigger.Update, name="1" * 48 ).pgid def test_operations(): """Tests Operation objects and ORing them together""" assert str(pgtrigger.Update) == "UPDATE" assert str(pgtrigger.UpdateOf("col1")) == 'UPDATE OF "col1"' assert str(pgtrigger.UpdateOf("c1", "c2")) == 'UPDATE OF "c1", "c2"' assert str(pgtrigger.Update | pgtrigger.Delete) == "UPDATE OR DELETE" assert ( str(pgtrigger.Update | pgtrigger.Delete | pgtrigger.Insert) == "UPDATE OR DELETE OR INSERT" ) assert str(pgtrigger.Delete | pgtrigger.Update) == "DELETE OR UPDATE" @pytest.mark.django_db def test_custom_trigger_definitions(): """Test a variety of custom trigger definitions""" test_model = ddf.G(models.TestTrigger) # Protect against inserts or updates # Note: Although we could use the "protect" trigger for this, # we manually provide the trigger code to test manual declarations trigger = pgtrigger.Trigger( name="test_custom_definition1", when=pgtrigger.Before, operation=pgtrigger.Insert | pgtrigger.Update, func="RAISE EXCEPTION 'no no no!';", ) with trigger.install(test_model): # Inserts and updates are no 
longer available with utils.raises_trigger_error(match="no no no!"): models.TestTrigger.objects.create() with utils.raises_trigger_error(match="no no no!"): test_model.save() # Inserts and updates should work again ddf.G(models.TestTrigger) test_model.save() # Protect updates of a single column trigger = pgtrigger.Trigger( name="test_custom_definition2", when=pgtrigger.Before, operation=pgtrigger.UpdateOf("int_field"), func="RAISE EXCEPTION 'no no no!';", ) with trigger.install(models.TestTrigger): # "field" should be able to be updated, but other_field should not test_model.save(update_fields=["field"]) with utils.raises_trigger_error(match="no no no!"): test_model.save(update_fields=["int_field"]) # Protect statement-level creates trigger = pgtrigger.Trigger( name="test_custom_definition3", level=pgtrigger.Statement, when=pgtrigger.Before, operation=pgtrigger.Update, func="RAISE EXCEPTION 'bad statement!';", ) with trigger.install(models.TestTrigger): with utils.raises_trigger_error(match="bad statement!"): test_model.save() @pytest.mark.django_db def test_trigger_conditions(): """Tests triggers with custom conditions""" test_model = ddf.G(models.TestTrigger) # Protect against inserts only when "field" is "hello" trigger = pgtrigger.Trigger( name="test_condition1", when=pgtrigger.Before, operation=pgtrigger.Insert, func="RAISE EXCEPTION 'no no no!';", condition=pgtrigger.Q(new__field="hello"), ) with trigger.install(test_model): ddf.G(models.TestTrigger, field="hi!") with utils.raises_trigger_error(match="no no no!"): models.TestTrigger.objects.create(field="hello") # Protect updates where nothing is actually updated trigger = pgtrigger.Trigger( name="test_condition2", when=pgtrigger.Before, operation=pgtrigger.Update, func="RAISE EXCEPTION 'no no no!';", condition=pgtrigger.Condition("OLD.* IS NOT DISTINCT FROM NEW.*"), ) with trigger.install(test_model): test_model.int_field = test_model.int_field + 1 test_model.save() # Saving the same fields again will cause an error with utils.raises_trigger_error(match="no no no!"): test_model.save() # Make a model readonly when the int_field is -1 read_only = ddf.G(models.TestModel, int_field=-1) non_read_only = ddf.G(models.TestModel, int_field=-2) trigger = pgtrigger.Trigger( name="test_condition3", when=pgtrigger.Before, operation=pgtrigger.Update | pgtrigger.Delete, func="RAISE EXCEPTION 'no no no!';", condition=pgtrigger.Q(old__int_field=-1), ) with trigger.install(models.TestModel): with utils.raises_trigger_error(match="no no no!"): read_only.save() with utils.raises_trigger_error(match="no no no!"): read_only.delete() non_read_only.save() non_read_only.delete() @pytest.mark.parametrize( "condition, expected_sql", [ (pgtrigger.AnyChange(), "OLD.* IS DISTINCT FROM NEW.*"), (pgtrigger.AnyDontChange(), "OLD.* IS NOT DISTINCT FROM NEW.*"), ( pgtrigger.AllChange(), '(OLD."char_pk_fk_field_id" IS DISTINCT FROM (NEW."char_pk_fk_field_id") AND OLD."dt_field" IS DISTINCT FROM (NEW."dt_field") AND OLD."field" IS DISTINCT FROM (NEW."field") AND OLD."fk_field_id" IS DISTINCT FROM (NEW."fk_field_id") AND OLD."id" IS DISTINCT FROM (NEW."id") AND OLD."int_field" IS DISTINCT FROM (NEW."int_field") AND OLD."nullable" IS DISTINCT FROM (NEW."nullable"))', # noqa ), ( pgtrigger.AllDontChange(), '(OLD."char_pk_fk_field_id" IS NOT DISTINCT FROM (NEW."char_pk_fk_field_id") AND OLD."dt_field" IS NOT DISTINCT FROM (NEW."dt_field") AND OLD."field" IS NOT DISTINCT FROM (NEW."field") AND OLD."fk_field_id" IS NOT DISTINCT FROM (NEW."fk_field_id") AND OLD."id" IS NOT 
DISTINCT FROM (NEW."id") AND OLD."int_field" IS NOT DISTINCT FROM (NEW."int_field") AND OLD."nullable" IS NOT DISTINCT FROM (NEW."nullable"))', # noqa ), (~pgtrigger.AnyChange(), "NOT (OLD.* IS DISTINCT FROM NEW.*)"), ( ~pgtrigger.AllChange(), 'NOT ((OLD."char_pk_fk_field_id" IS DISTINCT FROM (NEW."char_pk_fk_field_id") AND OLD."dt_field" IS DISTINCT FROM (NEW."dt_field") AND OLD."field" IS DISTINCT FROM (NEW."field") AND OLD."fk_field_id" IS DISTINCT FROM (NEW."fk_field_id") AND OLD."id" IS DISTINCT FROM (NEW."id") AND OLD."int_field" IS DISTINCT FROM (NEW."int_field") AND OLD."nullable" IS DISTINCT FROM (NEW."nullable")))', # noqa ), (pgtrigger.AnyChange("field"), 'OLD."field" IS DISTINCT FROM (NEW."field")'), (pgtrigger.AllChange("field"), 'OLD."field" IS DISTINCT FROM (NEW."field")'), (~pgtrigger.AnyChange("field"), 'NOT (OLD."field" IS DISTINCT FROM (NEW."field"))'), (~pgtrigger.AllChange("field"), 'NOT (OLD."field" IS DISTINCT FROM (NEW."field"))'), ( pgtrigger.AnyChange("int_field", "dt_field"), '(OLD."dt_field" IS DISTINCT FROM (NEW."dt_field") OR OLD."int_field" IS DISTINCT FROM (NEW."int_field"))', # noqa ), ( pgtrigger.AnyDontChange("dt_field", "fk_field"), '(OLD."dt_field" IS NOT DISTINCT FROM (NEW."dt_field") OR OLD."fk_field_id" IS NOT DISTINCT FROM (NEW."fk_field_id"))', # noqa ), ( pgtrigger.AllChange("int_field", "dt_field"), '(OLD."dt_field" IS DISTINCT FROM (NEW."dt_field") AND OLD."int_field" IS DISTINCT FROM (NEW."int_field"))', # noqa ), ( pgtrigger.AllDontChange("int_field", "dt_field"), '(OLD."dt_field" IS NOT DISTINCT FROM (NEW."dt_field") AND OLD."int_field" IS NOT DISTINCT FROM (NEW."int_field"))', # noqa ), ( pgtrigger.AnyChange("int_field", "dt_field", exclude=["int_field"]), 'OLD."dt_field" IS DISTINCT FROM (NEW."dt_field")', ), ( pgtrigger.AnyChange( "fk_field", "char_pk_fk_field", "int_field", "dt_field", exclude=["char_pk_fk_field", "field"], exclude_auto=True, ), '(OLD."fk_field_id" IS DISTINCT FROM (NEW."fk_field_id") OR OLD."int_field" IS DISTINCT FROM (NEW."int_field"))', # noqa ), ( pgtrigger.AnyChange(exclude_auto=True), '(OLD."char_pk_fk_field_id" IS DISTINCT FROM (NEW."char_pk_fk_field_id") OR OLD."field" IS DISTINCT FROM (NEW."field") OR OLD."fk_field_id" IS DISTINCT FROM (NEW."fk_field_id") OR OLD."id" IS DISTINCT FROM (NEW."id") OR OLD."int_field" IS DISTINCT FROM (NEW."int_field") OR OLD."nullable" IS DISTINCT FROM (NEW."nullable"))', # noqa ), ( pgtrigger.AllChange(exclude_auto=True), '(OLD."char_pk_fk_field_id" IS DISTINCT FROM (NEW."char_pk_fk_field_id") AND OLD."field" IS DISTINCT FROM (NEW."field") AND OLD."fk_field_id" IS DISTINCT FROM (NEW."fk_field_id") AND OLD."id" IS DISTINCT FROM (NEW."id") AND OLD."int_field" IS DISTINCT FROM (NEW."int_field") AND OLD."nullable" IS DISTINCT FROM (NEW."nullable"))', # noqa ), ], ) def test_changed_condition(condition, expected_sql): """Tests the pgtrigger.Changed condition utility""" sql = condition.resolve(models.ChangedCondition) # There are subtle SQL differences in django<4 if django.VERSION[0] < 4: expected_sql = expected_sql.replace("(", "").replace(")", "") sql = sql.replace("(", "").replace(")", "") assert sql == expected_sql def test_changed_condition_bad_field(): """Verifies incorrect fields aren't allowed in changed conditions""" with pytest.raises(ValueError, match="not found on model"): pgtrigger.AnyChange("bad_field").resolve(models.ChangedCondition) with pytest.raises(ValueError, match="not found on model"): pgtrigger.AnyChange("m2m_field").resolve(models.ChangedCondition) with 
pytest.raises(ValueError, match="not found on model"): pgtrigger.AnyChange(exclude=["bad_field"]).resolve(models.ChangedCondition) @pytest.mark.django_db(databases=["default", "other"], transaction=True) @pytest.mark.order(-2) # This is a leaky test that modifies the schema. Always run last def test_trigger_management(mocker): """Verifies dropping and recreating triggers works""" deletion_protected_model = ddf.G(models.TestTrigger) # Triggers should be installed initially with utils.raises_trigger_error(match="Cannot delete rows"): deletion_protected_model.delete() # Deactivate triggers. Deletions should happen without issue. # Note: run twice for idempotency checks pgtrigger.disable() pgtrigger.disable() deletion_protected_model.delete() # Reactivate triggers. Deletions should be protected pgtrigger.enable() pgtrigger.enable() deletion_protected_model = ddf.G(models.TestTrigger) with utils.raises_trigger_error(match="Cannot delete rows"): deletion_protected_model.delete() # Do the same tests again, except this time uninstall and reinstall # triggers pgtrigger.uninstall() pgtrigger.uninstall() deletion_protected_model.delete() # Reactivate triggers. Deletions should be protected pgtrigger.install() pgtrigger.install() deletion_protected_model = ddf.G(models.TestTrigger) with utils.raises_trigger_error(match="Cannot delete rows"): deletion_protected_model.delete() # Pruning triggers should do nothing at the moment pgtrigger.prune() pgtrigger.prune() with utils.raises_trigger_error(match="Cannot delete rows"): deletion_protected_model.delete() # However, changing the trigger name will cause the old triggers to # be pruned mocker.patch( "pgtrigger.Protect.name", new_callable=mocker.PropertyMock, return_value="hi", ) pgtrigger.prune() pgtrigger.prune() deletion_protected_model.delete() django-pgtrigger-4.11.1/pgtrigger/tests/test_migrations.py000066400000000000000000000372371460427745000237720ustar00rootroot00000000000000"""Tests behavior related to migrations""" import pathlib import shutil import time import ddf import django.contrib.auth.models as auth_models import pytest from django.apps import apps from django.core.management import call_command from django.db import models from django.db.utils import ProgrammingError import pgtrigger import pgtrigger.tests.models as test_models from pgtrigger import core from pgtrigger.tests import utils @pytest.fixture(autouse=True) def disable_install_on_migrate(settings): settings.PGTRIGGER_INSTALL_ON_MIGRATE = False def migration_dir(): return pathlib.Path(__file__).parent / "migrations" def num_migration_files(): return len(list(migration_dir().glob("0*.py"))) @pytest.fixture def reset_triggers(): """Ensures all triggers are uninstalled before the tests""" pgtrigger.uninstall(database="default") yield pgtrigger.install(database="default") @pytest.fixture def reset_migrations(tmp_path, request): """Ensures the migration dir is reset after the test""" num_orig_migrations = num_migration_files() shutil.copytree(migration_dir(), tmp_path / "migrations") try: yield finally: # Migrate back to the initial migration of the test to alleviate # some of the issues when re-using a test DB call_command( "migrate", "tests", str(num_orig_migrations).rjust(4, "0"), verbosity=request.config.option.verbose, ) shutil.rmtree(migration_dir()) shutil.copytree(tmp_path / "migrations", migration_dir()) def assert_all_triggers_installed(): for model, trigger in pgtrigger.registered(): status = trigger.get_installation_status(model) assert status[0] == core.INSTALLED
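# Illustrative aside (added commentary, not part of the original suite):
# get_installation_status returns a (status, enabled) pair -- the helper above
# loops over every registered trigger, so a one-off check for a single trigger
# could look roughly like:
#
#     status, enabled = trigger.get_installation_status(model, database="default")
#     assert status == core.INSTALLED and enabled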
@pytest.mark.django_db( databases=["default", "other", "receipt", "order", "sqlite"], transaction=True ) @pytest.mark.usefixtures("reset_triggers", "reset_migrations") @pytest.mark.order(-1) # This is a possibly leaky test if it fails midway. Always run last def test_makemigrations_existing_models(settings, request): """ Create migrations for existing models and test various scenarios where triggers are dynamically added and removed """ # Verify that we've configured our test settings properly assert not settings.PGTRIGGER_INSTALL_ON_MIGRATE assert settings.PGTRIGGER_MIGRATIONS num_orig_migrations = num_migration_files() call_command("makemigrations", verbosity=request.config.option.verbose) assert num_migration_files() == num_orig_migrations + 1 call_command("migrate", verbosity=request.config.option.verbose) assert_all_triggers_installed() # Add a new trigger to the registry that should be migrated trigger = pgtrigger.Trigger( when=pgtrigger.Before, name="my_migrated_trigger", operation=pgtrigger.Insert | pgtrigger.Update, func="RAISE EXCEPTION 'no no no!';", ) with trigger.register(test_models.TestModel): call_command("makemigrations", verbosity=request.config.option.verbose) assert num_migration_files() == num_orig_migrations + 2 # As a sanity check, ensure makemigrations doesn't make dups call_command("makemigrations", verbosity=request.config.option.verbose) assert num_migration_files() == num_orig_migrations + 2 # Before migrating, I should be able to make a `TestModel` ddf.G("tests.TestModel") call_command("migrate", verbosity=request.config.option.verbose) assert_all_triggers_installed() # After migrating, test models should be protected with utils.raises_trigger_error(match="no no no!"): test_models.TestModel.objects.create() # Update the trigger to allow inserts, but not updates. # We should have a new migration trigger.operation = pgtrigger.Update call_command("makemigrations", verbosity=request.config.option.verbose) assert num_migration_files() == num_orig_migrations + 3 call_command("migrate", verbosity=request.config.option.verbose) assert_all_triggers_installed() # We should be able to make test models but not update them test_model = ddf.G("tests.TestModel") with utils.raises_trigger_error(match="no no no!"): test_model.save()
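# Added note (hedged): mutating an attribute of a still-registered trigger, as
# done with `trigger.operation` above, changes the trigger definition that the
# migration autodetector compares against model state, which is presumably why
# the next makemigrations call produces a fresh migration.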
# The trigger is now removed from the registry. It should create # a new migration call_command("makemigrations", verbosity=request.config.option.verbose) assert num_migration_files() == num_orig_migrations + 4 call_command("migrate", verbosity=request.config.option.verbose) assert_all_triggers_installed() # We should be able to create and update the test model now that # the trigger is removed test_model = ddf.G("tests.TestModel") test_model.save() # Create a protection trigger on the external user model and # migrate it trigger = pgtrigger.Trigger( when=pgtrigger.Before, name="nothing_allowed", operation=pgtrigger.Insert | pgtrigger.Update, func="RAISE EXCEPTION 'no no no!';", ) # Test that special characters migrate correctly trigger = pgtrigger.Protect( name="special_characters", operation=pgtrigger.Update, condition=pgtrigger.Q(new__char_field="%"), ) with trigger.register(test_models.TestModel): call_command("makemigrations", verbosity=request.config.option.verbose) assert num_migration_files() == num_orig_migrations + 5 call_command("migrate", verbosity=request.config.option.verbose) assert_all_triggers_installed() tm = ddf.G("tests.TestModel", char_field="hello") with utils.raises_trigger_error(match="Cannot update rows"): tm.char_field = "%" tm.save() def make_migrations(atomic: bool): """Call makemigrations, setting the atomic property of the last migration if specified""" call_command("makemigrations", name=f"a{time.time()}".replace(".", "")) last_migration = sorted(pathlib.Path(migration_dir()).glob("[0-9]*.py"))[-1] with open(last_migration, "r") as f: contents = f.read() contents = contents.replace( "class Migration(migrations.Migration):\n", f"class Migration(migrations.Migration):\n atomic = {atomic}\n", ) with open(last_migration, "w") as f: f.write(contents)
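# For reference (added commentary): after make_migrations(False), the newest
# migration file's class header reads roughly
#
#     class Migration(migrations.Migration):
#         atomic = False
#
# which is the standard Django switch for running a migration outside of a
# single wrapping transaction.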
""" assert not settings.PGTRIGGER_INSTALL_ON_MIGRATE assert settings.PGTRIGGER_MIGRATIONS num_orig_migrations = num_migration_files() num_expected_migrations = num_orig_migrations ### # Make the initial trigger migrations ### make_migrations(atomic) num_expected_migrations += 1 assert num_migration_files() == num_expected_migrations call_command("migrate") assert_all_triggers_installed() ### # Create a new model, migrate it, and verify triggers ### class BaseDynamicTestModel(models.Model): field = models.CharField(max_length=120) user = models.ForeignKey(auth_models.User, on_delete=models.CASCADE) class Meta: abstract = True triggers = [ pgtrigger.Protect( name="protect_deletes", operation=pgtrigger.Delete, condition=~pgtrigger.Q(old__field="nothing"), ), pgtrigger.Protect( name="protect_updates", operation=pgtrigger.Update, condition=~pgtrigger.Q(old__field="nothing"), ), ] class DynamicTestModel(BaseDynamicTestModel): pass test_models.DynamicTestModel = DynamicTestModel make_migrations(atomic) num_expected_migrations += 1 assert num_migration_files() == num_expected_migrations call_command("migrate") assert_all_triggers_installed() # Sanity check that we cannot delete or update a DynamicTestModel protected_model = ddf.G(test_models.DynamicTestModel) with utils.raises_trigger_error(match="Cannot update"): protected_model.field = "hello_world" protected_model.save() with utils.raises_trigger_error(match="Cannot delete"): protected_model.delete() ### # Alter the column type when a condition depends on it. This should # correctly drop the trigger, update the column type, and add # the trigger ### class DynamicTestModel(BaseDynamicTestModel): field = models.TextField() test_models.DynamicTestModel = DynamicTestModel make_migrations(atomic) num_expected_migrations += 1 assert num_migration_files() == num_expected_migrations call_command("migrate") assert_all_triggers_installed() # Sanity check that we cannot delete or update a DynamicTestModel protected_model = ddf.G(test_models.DynamicTestModel) with utils.raises_trigger_error(match="Cannot update"): protected_model.field = "hello_world" protected_model.save() with utils.raises_trigger_error(match="Cannot delete"): protected_model.delete() ### # Keep only deletion protection, migrate, and verify it's removed ### DynamicTestModel._meta.triggers = [ pgtrigger.Protect(name="protect_deletes", operation=pgtrigger.Delete) ] DynamicTestModel._meta.original_attrs["triggers"] = DynamicTestModel._meta.triggers make_migrations(atomic) num_expected_migrations += 1 assert num_migration_files() == num_expected_migrations call_command("migrate") assert_all_triggers_installed() # Updates work, but deletes dont protected_model.field = "hello_there" protected_model.save() with utils.raises_trigger_error(match="Cannot delete"): protected_model.delete() ### # Remove the model and verify it migrates ### del test_models.DynamicTestModel del apps.app_configs["tests"].models["dynamictestmodel"] apps.clear_cache() make_migrations(atomic) num_expected_migrations += 1 assert num_migration_files() == num_expected_migrations call_command("migrate") assert_all_triggers_installed() with pytest.raises(ProgrammingError): protected_model.delete() # Unapply migration where a model with a trigger is removed # Any triggers that were defined on the model when it was removed should be # recreated. 
call_command("migrate", "tests", str(num_expected_migrations - 1).rjust(4, "0")) test_models.DynamicTestModel = DynamicTestModel protected_model = ddf.G(test_models.DynamicTestModel) with utils.raises_trigger_error(match="Cannot delete"): protected_model.delete() del test_models.DynamicTestModel # Reapply the migration we just unapplied call_command("migrate") # Create a new proxy model on a third-party app and add it to the test models class DynamicProxyModel(auth_models.User): class Meta: proxy = True triggers = [ pgtrigger.Protect(name="protect_deletes", operation=pgtrigger.Delete), pgtrigger.Protect(name="protect_updates", operation=pgtrigger.Update), ] test_models.DynamicProxyModel = DynamicProxyModel make_migrations(atomic) num_expected_migrations += 1 assert num_migration_files() == num_expected_migrations call_command("migrate") assert_all_triggers_installed() # Sanity check that we cannot delete or update a user protected_model = ddf.G(auth_models.User) with utils.raises_trigger_error(match="Cannot update"): protected_model.username = "wes" protected_model.save() with utils.raises_trigger_error(match="Cannot delete"): protected_model.delete() ### # Keep only deletion protection for proxy models and migrate ### DynamicProxyModel._meta.triggers = [ pgtrigger.Protect(name="protect_deletes", operation=pgtrigger.Delete) ] DynamicProxyModel._meta.original_attrs["triggers"] = DynamicProxyModel._meta.triggers make_migrations(atomic) num_expected_migrations += 1 assert num_migration_files() == num_expected_migrations call_command("migrate") assert_all_triggers_installed() # Updates work, but deletes dont protected_model.username = "wes" protected_model.save() with utils.raises_trigger_error(match="Cannot delete"): protected_model.delete() ### # Remove the proxy model and verify it migrates ### del test_models.DynamicProxyModel del apps.app_configs["tests"].models["dynamicproxymodel"] apps.clear_cache() make_migrations(atomic) num_expected_migrations += 1 assert num_migration_files() == num_expected_migrations call_command("migrate") assert_all_triggers_installed() # We can delete the original model protected_model.delete() # Create a new proxy model on auth_models.User group relationships and add it # to the test models class DynamicThroughModel(auth_models.User.groups.through): class Meta: proxy = True triggers = [ pgtrigger.Protect(name="protect_deletes", operation=pgtrigger.Delete), pgtrigger.Protect(name="protect_inserts", operation=pgtrigger.Insert), ] test_models.DynamicThroughModel = DynamicThroughModel # Sanity check that we cannot insert or delete a group protected_model = ddf.G(auth_models.User) protected_model.groups.add(ddf.G(auth_models.Group)) make_migrations(atomic) num_expected_migrations += 1 assert num_migration_files() == num_expected_migrations call_command("migrate") assert_all_triggers_installed() with utils.raises_trigger_error(match="Cannot insert"): protected_model.groups.add(ddf.G(auth_models.Group)) with utils.raises_trigger_error(match="Cannot delete"): protected_model.groups.clear() ### # Keep only deletion protection for a dynamic through model and migrate ### DynamicThroughModel._meta.triggers = [ pgtrigger.Protect(name="protect_deletes", operation=pgtrigger.Delete) ] DynamicThroughModel._meta.original_attrs["triggers"] = DynamicThroughModel._meta.triggers make_migrations(atomic) num_expected_migrations += 1 assert num_migration_files() == num_expected_migrations call_command("migrate") assert_all_triggers_installed() # Inserts work, but deletes dont 
# Create a new proxy model on auth_models.User group relationships and add it # to the test models class DynamicThroughModel(auth_models.User.groups.through): class Meta: proxy = True triggers = [ pgtrigger.Protect(name="protect_deletes", operation=pgtrigger.Delete), pgtrigger.Protect(name="protect_inserts", operation=pgtrigger.Insert), ] test_models.DynamicThroughModel = DynamicThroughModel # Sanity check that we cannot insert or delete a group protected_model = ddf.G(auth_models.User) protected_model.groups.add(ddf.G(auth_models.Group)) make_migrations(atomic) num_expected_migrations += 1 assert num_migration_files() == num_expected_migrations call_command("migrate") assert_all_triggers_installed() with utils.raises_trigger_error(match="Cannot insert"): protected_model.groups.add(ddf.G(auth_models.Group)) with utils.raises_trigger_error(match="Cannot delete"): protected_model.groups.clear() ### # Keep only deletion protection for a dynamic through model and migrate ### DynamicThroughModel._meta.triggers = [ pgtrigger.Protect(name="protect_deletes", operation=pgtrigger.Delete) ] DynamicThroughModel._meta.original_attrs["triggers"] = DynamicThroughModel._meta.triggers make_migrations(atomic) num_expected_migrations += 1 assert num_migration_files() == num_expected_migrations call_command("migrate") assert_all_triggers_installed() # Inserts work, but deletes don't protected_model.groups.add(ddf.G(auth_models.Group)) with utils.raises_trigger_error(match="Cannot delete"): protected_model.groups.clear() # Remove the model and verify it migrates del test_models.DynamicThroughModel del apps.app_configs["tests"].models["dynamicthroughmodel"] apps.clear_cache() make_migrations(atomic) num_expected_migrations += 1 assert num_migration_files() == num_expected_migrations call_command("migrate") assert_all_triggers_installed() # We can delete the groups protected_model.groups.clear() # Django has a known issue with using a default through model as a base in # migrations. We revert the migrations we just made up until the through model # so that the test doesn't fail when it cleans up all migrations call_command("migrate", "tests", str(num_orig_migrations + 8).rjust(4, "0")) django-pgtrigger-4.11.1/pgtrigger/tests/test_multi_db.py000066400000000000000000000121641460427745000234110ustar00rootroot00000000000000"""Tests multi-database support""" import contextlib import ddf import pytest from django.contrib.auth.models import User from django.core.management import call_command from django.db import transaction import pgtrigger from pgtrigger import core from pgtrigger.tests import models, utils class ToLogRouter: """ Route the "ToLog" model to the "other" database """ route_app_labels = {"auth", "contenttypes"} def db_for_write(self, model, **hints): if model == models.ToLogModel: return "other" return None @pytest.fixture(autouse=True) def routed_db(settings): settings.DATABASE_ROUTERS = [ "pgtrigger.tests.test_multi_db.ToLogRouter", "pgtrigger.tests.models.Router", ] @pytest.mark.django_db(databases=["default", "sqlite", "other"], transaction=True) def test_multi_db_ignore(): """Tests ignoring triggers across multiple databases""" trigger = pgtrigger.Protect(operation=pgtrigger.Delete, name="protect_deletes") with contextlib.ExitStack() as stack: stack.enter_context(trigger.register(models.ToLogModel)) stack.enter_context(trigger.register(User)) stack.enter_context(trigger.install(models.ToLogModel, database="other")) stack.enter_context(trigger.install(User)) with utils.raises_trigger_error(match="Cannot delete", database="other"): log = ddf.G(models.ToLogModel) log.delete() with utils.raises_trigger_error(match="Cannot delete"): user = ddf.G(User) user.delete() with transaction.atomic(): with pgtrigger.ignore("tests.ToLogModel:protect_deletes", "auth.User:protect_deletes"): log = models.ToLogModel.objects.create() log.delete() user = ddf.G(User) user.delete() with utils.raises_trigger_error(match="Cannot delete"): user = User.objects.create(username="hi") user.delete() with utils.raises_trigger_error(match="Cannot delete", database="other"): log = models.ToLogModel.objects.create() log.delete()
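# Legend (added commentary): the escape sequences asserted in the command
# tests below are ANSI colors -- "\x1b[92m" is bright green, "\x1b[91m" bright
# red, "\x1b[94m" bright blue, "\x1b[96m" bright cyan, and "\x1b[0m" resets --
# so "\x1b[92mINSTALLED\x1b[0m" is simply a green INSTALLED label.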
@pytest.mark.django_db(databases=["default", "sqlite", "other"]) def test_full_ls(capsys): call_command("pgtrigger", "ls") captured = capsys.readouterr() lines = [line for line in captured.out.split("\n") if line] for line in lines: assert "\x1b[92mINSTALLED\x1b[0m" in line call_command("pgtrigger", "ls", "-d", "other") captured = capsys.readouterr() lines = [line for line in captured.out.split("\n") if line] for line in lines: # The router ignores partition models for the default DB if "tests.PartitionModel:protect_delete" in line: assert "\x1b[94mUNALLOWED\x1b[0m" in line else: assert "\x1b[92mINSTALLED\x1b[0m" in line call_command("pgtrigger", "ls", "-d", "sqlite") captured = capsys.readouterr() lines = [line for line in captured.out.split("\n") if line] for line in lines: assert "\x1b[94mUNALLOWED\x1b[0m" in line @pytest.mark.django_db(databases=["other"]) def test_disable_enable(capsys): call_command("pgtrigger", "disable", "-d", "other") for model, trigger in pgtrigger.registered(): expected_status = None if model == models.PartitionModel else False assert trigger.get_installation_status(model, database="other")[1] is expected_status call_command("pgtrigger", "enable", "--database", "other") for model, trigger in pgtrigger.registered(): expected_status = None if model == models.PartitionModel else True assert trigger.get_installation_status(model, database="other")[1] is expected_status @pytest.mark.django_db(databases=["sqlite"]) def test_ignore_non_postgres_dbs(): call_command("pgtrigger", "uninstall", "-d", "sqlite") call_command("pgtrigger", "install", "-d", "sqlite") call_command("pgtrigger", "install", "-d", "sqlite") call_command("pgtrigger", "prune", "-d", "sqlite") @pytest.mark.django_db(databases=["other", "default", "sqlite"]) def test_uninstall_install(): for model, trigger in pgtrigger.registered(): expected_status = core.UNALLOWED if model == models.PartitionModel else core.INSTALLED assert trigger.get_installation_status(model, database="other")[0] == expected_status call_command("pgtrigger", "uninstall", "-d", "other") call_command("pgtrigger", "uninstall", "-d", "default") for model, trigger in pgtrigger.registered(): expected_status = core.UNALLOWED if model == models.PartitionModel else core.UNINSTALLED assert trigger.get_installation_status(model, database="other")[0] == expected_status call_command("pgtrigger", "install", "--database", "other") for model, trigger in pgtrigger.registered(): expected_status = core.UNALLOWED if model == models.PartitionModel else core.INSTALLED assert trigger.get_installation_status(model, database="other")[0] == expected_status for model, trigger in pgtrigger.registered(): assert trigger.get_installation_status(model, database="default")[0] == core.UNINSTALLED django-pgtrigger-4.11.1/pgtrigger/tests/test_multi_schema.py000066400000000000000000000172741460427745000242710ustar00rootroot00000000000000"""Tests multi-schema support""" # flake8: noqa import contextlib import ddf from django.core.management import call_command import pytest import pgtrigger from pgtrigger.tests import models, utils class SchemaRouter: """ A router to control tables that should be migrated to different schemas """ def db_for_read(self, model, **hints): if model == models.OrderSchema: return "order" elif model == models.ReceiptSchema: # pragma: no branch return "receipt" def db_for_write(self, model, **hints): return self.db_for_read(model, **hints) def allow_migrate(self, db, app_label, model_name=None, **hints): if model_name == "orderschema": return db == "order" elif model_name == "receiptschema": return db == "receipt" @pytest.fixture(autouse=True) def routed_db(settings): settings.DATABASE_ROUTERS = [ "pgtrigger.tests.test_multi_schema.SchemaRouter", "pgtrigger.tests.models.Router", ] @pytest.fixture(autouse=True) def schema_triggers(): protect_deletes = pgtrigger.Protect(name="protect_deletes", operation=pgtrigger.Delete) protect_updates = pgtrigger.Protect(name="protect_updates", operation=pgtrigger.Update) with contextlib.ExitStack() as stack: stack.enter_context(protect_deletes.register(models.OrderSchema)) stack.enter_context(protect_updates.register(models.ReceiptSchema)) yield @pytest.mark.django_db(databases=["order", "receipt"], transaction=True) def test_multi_schema_triggers_work(): """Verify the
triggers in the schema_triggers fixture work""" call_command("pgtrigger", "install", "-d", "order") call_command("pgtrigger", "install", "-d", "receipt") order = ddf.G("tests.OrderSchema") receipt = ddf.G("tests.ReceiptSchema") with utils.raises_trigger_error(match="Cannot delete", database="order"): order.delete() with utils.raises_trigger_error(match="Cannot update", database="receipt"): receipt.char_field = "hello" receipt.save() receipt.delete() order = ddf.G("tests.OrderSchema") with pgtrigger.ignore("tests.OrderSchema:protect_deletes"): order.delete() @pytest.mark.django_db(databases=["order", "receipt", "default", "other"], transaction=True) def test_commands(capsys): """Verify commands work""" call_command("pgtrigger", "ls") lines = capsys.readouterr().out.split("\n") expected_lines = [ "", "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.CustomSoftDelete:soft_delete", "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.CustomTableName:protect_delete", "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.FSM:fsm", "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.SearchModel:add_body_title_to_vector", "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.SearchModel:add_body_to_vector", "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.SoftDelete:soft_delete", "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.TestDefaultThrough:protect_it", "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.TestTrigger:protect_misc_insert", "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.TestTriggerProxy:protect_delete", "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.ToLogModel:after_update_row_test", "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.ToLogModel:after_update_statement_test", "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.ToLogModel:update_of_statement_test", "\x1b[94mUNALLOWED\x1b[0m \x1b[94mN/A\x1b[0m tests.OrderSchema:protect_deletes", "\x1b[94mUNALLOWED\x1b[0m \x1b[94mN/A\x1b[0m tests.ReceiptSchema:protect_updates", ] assert set(expected_lines).issubset(set(lines)) call_command("pgtrigger", "ls", "-d", "receipt") lines = capsys.readouterr().out.split("\n") expected_lines = [ "", "\x1b[91mUNINSTALLED\x1b[0m \x1b[94mN/A\x1b[0m tests.ReceiptSchema:protect_updates", "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.CustomSoftDelete:soft_delete", "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.CustomTableName:protect_delete", "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.FSM:fsm", "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.SearchModel:add_body_title_to_vector", "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.SearchModel:add_body_to_vector", "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.SoftDelete:soft_delete", "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.TestDefaultThrough:protect_it", "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.TestTrigger:protect_misc_insert", "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.TestTriggerProxy:protect_delete", "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.ToLogModel:after_update_row_test", "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.ToLogModel:after_update_statement_test", "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.ToLogModel:update_of_statement_test", "\x1b[94mUNALLOWED\x1b[0m \x1b[94mN/A\x1b[0m tests.OrderSchema:protect_deletes", ] assert set(expected_lines).issubset(set(lines)) call_command("pgtrigger", "install", "-d", "receipt") call_command("pgtrigger", "ls", "-d", "receipt") lines 
= capsys.readouterr().out.split("\n") expected_lines = [ "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.ReceiptSchema:protect_updates", ] assert set(expected_lines).issubset(set(lines)) # Install a trigger to be pruned. protect_inserts = pgtrigger.Protect(name="protect_inserts", operation=pgtrigger.Insert) protect_inserts.install(models.OrderSchema, database="order") call_command("pgtrigger", "ls", "-d", "order") lines = capsys.readouterr().out.split("\n") expected_lines = [ "\x1b[91mUNINSTALLED\x1b[0m \x1b[94mN/A\x1b[0m tests.OrderSchema:protect_deletes", "\x1b[94mUNALLOWED\x1b[0m \x1b[94mN/A\x1b[0m tests.ReceiptSchema:protect_updates", "\x1b[96mPRUNE\x1b[0m \x1b[92mENABLED\x1b[0m tests_orderschema:pgtrigger_protect_inserts_a0767", ] assert set(expected_lines).issubset(set(lines)) call_command("pgtrigger", "prune", "-d", "order") call_command("pgtrigger", "install", "-d", "order") call_command("pgtrigger", "ls", "-d", "order") lines = capsys.readouterr().out.split("\n") for line in lines: assert "PRUNE" not in line expected_lines = [ "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.OrderSchema:protect_deletes", "\x1b[94mUNALLOWED\x1b[0m \x1b[94mN/A\x1b[0m tests.ReceiptSchema:protect_updates", ] assert set(expected_lines).issubset(set(lines)) # Set the search path to a schema and check results call_command("pgtrigger", "uninstall", "-s", "receipt") call_command("pgtrigger", "ls", "tests.CustomSoftDelete:soft_delete", "-s", "receipt") lines = capsys.readouterr().out.split("\n") expected_lines = [ "", "\x1b[91mUNINSTALLED\x1b[0m \x1b[94mN/A\x1b[0m tests.CustomSoftDelete:soft_delete", ] assert set(expected_lines) == set(lines) call_command("pgtrigger", "install", "tests.CustomSoftDelete:soft_delete", "-s", "receipt") call_command("pgtrigger", "ls", "tests.CustomSoftDelete:soft_delete", "-s", "receipt") lines = capsys.readouterr().out.split("\n") expected_lines = [ "", "\x1b[92mINSTALLED\x1b[0m \x1b[92mENABLED\x1b[0m tests.CustomSoftDelete:soft_delete", ] assert set(expected_lines) == set(lines) django-pgtrigger-4.11.1/pgtrigger/tests/test_registry.py000066400000000000000000000070631460427745000234630ustar00rootroot00000000000000import pytest import pgtrigger from pgtrigger import registry from pgtrigger.tests import models def test_registered_invalid_args(): with pytest.raises(ValueError): pgtrigger.registered("uri") def test_registry(): """ Tests dynamically registering and unregistering triggers """ init_registry_size = len(registry._registry) # The trigger registry should already be populated with our test triggers assert init_registry_size >= 6 # Add a trigger to the registry trigger = pgtrigger.Trigger( when=pgtrigger.Before, name="my_aliased_trigger", operation=pgtrigger.Insert | pgtrigger.Update, func="RAISE EXCEPTION 'no no no!';", ) # Register/unregister in context managers.
The state should be the same # at the end as the beginning with trigger.register(models.TestModel): assert len(registry._registry) == init_registry_size + 1 assert f"tests.TestModel:{trigger.name}" in registry._registry with trigger.unregister(models.TestModel): assert len(registry._registry) == init_registry_size assert f"tests.TestModel:{trigger.name}" not in registry._registry # Try obtaining trigger by alias assert pgtrigger.registered("tests.TestModel:my_aliased_trigger") assert len(registry._registry) == init_registry_size assert f"tests.TestModel:{trigger.name}" not in registry._registry with pytest.raises(KeyError, match="not found"): pgtrigger.registered(f"tests.TestModel:{trigger.name}") with pytest.raises(ValueError, match="must be in the format"): pgtrigger.registered("tests.TestMode") def test_duplicate_trigger_names(mocker): """Ensure that duplicate trigger names are properly detected""" # Add a trigger to the registry trigger1 = pgtrigger.Trigger( name="mytrigger", when=pgtrigger.Before, operation=pgtrigger.Insert ) trigger2 = pgtrigger.Protect( name="mytrigger", when=pgtrigger.Before, operation=pgtrigger.Insert ) trigger3 = pgtrigger.Trigger( name="MyTrigger", when=pgtrigger.Before, operation=pgtrigger.Insert ) assert trigger1.get_pgid(models.TestModel) == "pgtrigger_mytrigger_b34c5" assert trigger3.get_pgid(models.TestModel) == "pgtrigger_mytrigger_4a08f" # Check that a conflict cannot happen in the registry. # NOTE - use context managers to ensure we don't keep around # these registered triggers in other tests with trigger1.register(models.TestModel): with pytest.raises(KeyError, match="already used"): with trigger2.register(models.TestModel): pass mocker.patch.object(pgtrigger.Trigger, "get_pgid", return_value="duplicate") # Check that a conflict cannot happen in the generated postgres ID. # NOTE - use context managers to ensure we don't keep around # these registered triggers in other tests with pytest.raises(KeyError, match="already in use"): with trigger1.register(models.TestModel): pass def test_duplicate_trigger_names_proxy_model(mocker): """Test that duplicate trigger names are detected when using proxy models""" # TestTriggerProxy registers "protect_delete" for TestTrigger. 
# If we try to register this trigger directly on TestTrigger, it should result # in a duplicate error trigger = pgtrigger.Trigger( name="protect_delete", when=pgtrigger.Before, operation=pgtrigger.Insert ) with pytest.raises(KeyError, match="already used"): with trigger.register(models.TestTrigger): pass django-pgtrigger-4.11.1/pgtrigger/tests/test_runtime.py000066400000000000000000000211241460427745000232710ustar00rootroot00000000000000import ddf import pytest from django.db import IntegrityError, connection, transaction import pgtrigger from pgtrigger.tests import models, utils @pytest.mark.django_db def test_schema(): """Verifies behavior of pgtrigger.schema""" def _search_path(): with connection.cursor() as cursor: cursor.execute("SHOW search_path;") return cursor.fetchall()[0][0] assert _search_path() == '"$user", public' with pgtrigger.schema("hello"): assert _search_path() == "hello" with pgtrigger.schema("hello", "$user"): assert _search_path() == 'hello, "$user"' assert _search_path() == "hello" with connection.cursor() as cursor: cursor.execute("SET search_path=custom;") with transaction.atomic(): assert _search_path() == "custom" with pgtrigger.schema("hello", databases=["default"]): assert _search_path() == "hello" assert _search_path() == "custom" with pgtrigger.schema.session(databases=["default"]): assert _search_path() == "custom" assert _search_path() == "custom"
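# Background (added commentary, hedged): Postgres deferrable constraint
# triggers obey the SET CONSTRAINTS statement, which is presumably what
# pgtrigger.constraints() issues under the hood -- e.g.
#
#     SET CONSTRAINTS pgtrigger_protect_delete_<hash> IMMEDIATE;
#
# makes a trigger that was created DEFERRED fire at statement time for the
# remainder of the current transaction.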
@pytest.mark.django_db(transaction=True) def test_constraints(): """ Tests running [pgtrigger.constraints][] on deferrable triggers """ # Not every trigger is deferrable, so this should raise an error with transaction.atomic(): with pytest.raises(ValueError, match="is not deferrable"): pgtrigger.constraints(pgtrigger.Immediate) # Register a deferrable trigger that protects deletes on the test model trigger = pgtrigger.Protect( name="protect_delete", when=pgtrigger.After, operation=pgtrigger.Delete, timing=pgtrigger.Deferred, ) with trigger.register(models.TestModel), trigger.install(models.TestModel): # Verify we have to be in a transaction with pytest.raises(RuntimeError, match="not in a transaction"): pgtrigger.constraints(pgtrigger.Immediate, "tests.TestModel:protect_delete") obj = ddf.G(models.TestModel) with transaction.atomic(): # This "with" is only here to validate that ignoring the trigger will # NOT happen. After this "with" is done, the transaction still hasn't finished # and the trigger hasn't executed yet, so it won't be ignored. with pgtrigger.ignore("tests.TestModel:protect_delete"): obj.delete() # Deletion works within the transaction so far since trigger is deferred assert not models.TestModel.objects.exists() # When we set constraints to Immediate, it should fail inside # of the transaction with utils.raises_trigger_error(match="Cannot delete", transaction=False): # The first statement does nothing because the trigger is already deferred pgtrigger.constraints( pgtrigger.Deferred, "tests.TestModel:protect_delete", databases=["default"] ) pgtrigger.constraints( pgtrigger.Immediate, "tests.TestModel:protect_delete", databases=["default"] ) @pytest.mark.django_db(transaction=True) def test_ignore_nested_transactions(): """Verifies a trigger can be ignored during nested transactions""" ddf.G(models.CustomTableName, int_field=1) trigger = pgtrigger.Protect( name="protect_insert", when=pgtrigger.Before, operation=pgtrigger.Insert, ) with trigger.register(models.CustomTableName): with trigger.install(models.CustomTableName): with transaction.atomic(): with pgtrigger.ignore("tests.CustomTableName:protect_insert"): try: with transaction.atomic(): # pragma: no branch models.CustomTableName.objects.create(int_field=1) except IntegrityError: models.CustomTableName.objects.create(int_field=2) @pytest.mark.django_db(transaction=True) def test_ignore_session(): """Verifies an ignore session can be used to avoid transaction-related issues""" ddf.G(models.CustomTableName, int_field=1) trigger = pgtrigger.Protect( name="protect_insert", when=pgtrigger.Before, operation=pgtrigger.Insert, ) with trigger.register(models.CustomTableName), trigger.install(models.CustomTableName): with pgtrigger.ignore.session(): with transaction.atomic(): with pgtrigger.ignore("tests.CustomTableName:protect_insert"): try: models.CustomTableName.objects.create(int_field=1) except IntegrityError: pass @pytest.mark.django_db def test_ignore_no_transaction_leaks(): """Verify ignore does not leak during a transaction""" deletion_protected_model = ddf.G(models.TestTrigger) with pgtrigger.ignore("tests.TestTriggerProxy:protect_delete"): deletion_protected_model.delete() assert not models.TestTrigger.objects.exists() deletion_protected_model = ddf.G(models.TestTrigger) with utils.raises_trigger_error(match="Cannot delete rows"): deletion_protected_model.delete() @pytest.mark.django_db @pytest.mark.parametrize("model_class", [models.TestTriggerProxy, models.CustomTableName]) def test_basic_ignore(model_class): """Verify basic dynamic ignore functionality""" deletion_protected_model = ddf.G(model_class) with utils.raises_trigger_error(match="Cannot delete rows"): deletion_protected_model.delete() with pgtrigger.ignore(f"tests.{model_class.__name__}:protect_delete"): deletion_protected_model.delete() assert not models.TestTrigger.objects.exists() deletion_protected_model = ddf.G(model_class) with utils.raises_trigger_error(match="Cannot delete rows"): deletion_protected_model.delete() # Verify that named cursors are ignored and that valid SQL is still generated with pgtrigger.ignore(f"tests.{model_class.__name__}:protect_delete"): assert len(list(model_class.objects.all().iterator())) == 1 @pytest.mark.django_db def test_nested_ignore(): """Test nesting pgtrigger.ignore()""" deletion_protected_model1 = ddf.G(models.TestTrigger) deletion_protected_model2 = ddf.G(models.TestTrigger) with utils.raises_trigger_error(match="Cannot delete rows"): deletion_protected_model1.delete() with pgtrigger.ignore("tests.TestTriggerProxy:protect_delete"): with
pgtrigger.ignore("tests.TestTriggerProxy:protect_delete"): deletion_protected_model1.delete() deletion_protected_model2.delete() assert not models.TestTrigger.objects.exists() deletion_protected_model = ddf.G(models.TestTrigger) with utils.raises_trigger_error(match="Cannot delete rows"): deletion_protected_model.delete() with pgtrigger.ignore.session(databases=["default"]): deletion_protected_model = ddf.G(models.TestTrigger) with utils.raises_trigger_error(match="Cannot delete rows"): deletion_protected_model.delete() @pytest.mark.django_db def test_multiple_ignores(): """Tests multiple pgtrigger.ignore()""" deletion_protected_model1 = ddf.G(models.TestTrigger) ddf.G(models.TestTrigger) with utils.raises_trigger_error(match="Cannot delete rows"): deletion_protected_model1.delete() ddf.G(models.TestTrigger, field="hi!") with utils.raises_trigger_error(match="no no no!"): models.TestTrigger.objects.create(field="misc_insert") with pgtrigger.ignore("tests.TestTriggerProxy:protect_delete"): deletion_protected_model1.delete() with utils.raises_trigger_error(match="no no no!"): models.TestTrigger.objects.create(field="misc_insert") with pgtrigger.ignore("tests.TestTrigger:protect_misc_insert"): m = models.TestTrigger.objects.create(field="misc_insert") m.delete() models.TestTrigger.objects.all().delete() assert not models.TestTrigger.objects.exists() deletion_protected_model = ddf.G(models.TestTrigger) with utils.raises_trigger_error(match="Cannot delete rows"): deletion_protected_model.delete() @pytest.mark.django_db def test_custom_db_table_ignore(): """Verify we can ignore triggers on custom table names""" deletion_protected_model = ddf.G(models.CustomTableName) # Ensure we can ignore the deletion trigger with pgtrigger.ignore("tests.CustomTableName:protect_delete"): deletion_protected_model.delete() assert not models.CustomTableName.objects.exists() django-pgtrigger-4.11.1/pgtrigger/tests/test_syncdb.py000066400000000000000000000016171460427745000230750ustar00rootroot00000000000000import pytest from django.db import connection import pgtrigger.tests.syncdb_app.models as syncdb_models from pgtrigger.tests import utils @pytest.mark.django_db def test_create_model_creates_triggers(): """ Tests trigger installation with syncdb `DatabaseSchemaEditorMixin.create_model` is called when the Django app doesn't have a migrations module. `DatabaseSchemaEditorMixin.create_model` is also called during a `CreateTable` migration operation, but as the triggers aren't stored with the `CreateTable` operation, the specific code that creates triggers in `DatabaseSchemaEditorMixin.create_model` isn't executed.
""" with connection.schema_editor() as editor: editor.create_model(syncdb_models.NoMigrationModel) with utils.raises_trigger_error(match="no no no!"): syncdb_models.NoMigrationModel.objects.create(field="misc_insert", int_field=1) django-pgtrigger-4.11.1/pgtrigger/tests/utils.py000066400000000000000000000011441460427745000217070ustar00rootroot00000000000000import contextlib import pytest from django.db import DEFAULT_DB_ALIAS, connections from django.db import transaction as db_transaction from django.db.utils import DatabaseError @contextlib.contextmanager def raises_trigger_error(match=None, database=DEFAULT_DB_ALIAS, transaction=None): with contextlib.ExitStack() as stack: stack.enter_context(pytest.raises(DatabaseError, match=match)) if transaction is None: transaction = connections[database].in_atomic_block if transaction: stack.enter_context(db_transaction.atomic(using=database)) yield django-pgtrigger-4.11.1/pgtrigger/utils.py000066400000000000000000000047411460427745000205530ustar00rootroot00000000000000from django.conf import settings from django.core.exceptions import ImproperlyConfigured from django.db import DEFAULT_DB_ALIAS, connections from django.utils.version import get_version_tuple def _psycopg_version(): try: import psycopg as Database except ImportError: import psycopg2 as Database except Exception as exc: # pragma: no cover raise ImproperlyConfigured("Error loading psycopg2 or psycopg module") from exc version_tuple = get_version_tuple(Database.__version__.split(" ", 1)[0]) if version_tuple[0] not in (2, 3): # pragma: no cover raise ImproperlyConfigured(f"Pysocpg version {version_tuple[0]} not supported") return version_tuple psycopg_version = _psycopg_version() psycopg_maj_version = psycopg_version[0] class AttrDict(dict): """A dictionary where keys can be accessed as attributes""" def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.__dict__ = self def connection(database=None): """ Obtains the connection used for a trigger / model pair. The database for the connection is selected based on the write DB in the database router config. """ return connections[database or DEFAULT_DB_ALIAS] def pg_maj_version(cursor): """Return the major version of Postgres that's running""" version = getattr(cursor.connection, "server_version", cursor.connection.info.server_version) return int(str(version)[:-4]) def is_postgres(database): return connection(database).vendor == "postgresql" def postgres_databases(databases=None): """Return postgres databases from the provided list of databases. 
If no databases are provided, return all postgres databases """ databases = databases or list(settings.DATABASES) assert isinstance(databases, list) return [database for database in databases if is_postgres(database)] def exec_sql(sql, database=None, fetchall=False): if is_postgres(database): # pragma: no branch with connection(database).cursor() as cursor: cursor.execute(sql) if fetchall: return cursor.fetchall() def quote(label, char='"'): """Conditionally wraps a label in quotes""" if label.startswith(char) or label.endswith(char): return label else: return f"{char}{label}{char}" def render_uninstall(table, trigger_pgid): """Renders uninstallation SQL""" return f"DROP TRIGGER IF EXISTS {trigger_pgid} ON {quote(table)};" django-pgtrigger-4.11.1/pgtrigger/version.py000066400000000000000000000001231460427745000210660ustar00rootroot00000000000000from importlib import metadata __version__ = metadata.version("django-pgtrigger") django-pgtrigger-4.11.1/poetry.lock000066400000000000000000003664351460427745000172560ustar00rootroot00000000000000# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. [[package]] name = "ansimarkup" version = "2.0.0" description = "Produce colored terminal text with an xml-like markup" optional = false python-versions = ">=3.6" files = [ {file = "ansimarkup-2.0.0-py3-none-any.whl", hash = "sha256:7edbfc6fc9cbf67589bc510ff5502aa34a2de8db9ceed61b186864af6426a70e"}, {file = "ansimarkup-2.0.0.tar.gz", hash = "sha256:ffd040e822e6d329d42d250179bd3d9a9c9f6ed6936a30d17b5f7d56a8f03ef0"}, ] [package.dependencies] colorama = "*" [package.extras] test = ["pytest", "pytest-cov"] [[package]] name = "arrow" version = "1.3.0" description = "Better dates & times for Python" optional = false python-versions = ">=3.8" files = [ {file = "arrow-1.3.0-py3-none-any.whl", hash = "sha256:c728b120ebc00eb84e01882a6f5e7927a53960aa990ce7dd2b10f39005a67f80"}, {file = "arrow-1.3.0.tar.gz", hash = "sha256:d4540617648cb5f895730f1ad8c82a65f2dad0166f57b75f3ca54759c4d67a85"}, ] [package.dependencies] python-dateutil = ">=2.7.0" types-python-dateutil = ">=2.8.10" [package.extras] doc = ["doc8", "sphinx (>=7.0.0)", "sphinx-autobuild", "sphinx-autodoc-typehints", "sphinx_rtd_theme (>=1.3.0)"] test = ["dateparser (==1.*)", "pre-commit", "pytest", "pytest-cov", "pytest-mock", "pytz (==2021.1)", "simplejson (==3.*)"] [[package]] name = "asgiref" version = "3.7.2" description = "ASGI specs, helper code, and adapters" optional = false python-versions = ">=3.7" files = [ {file = "asgiref-3.7.2-py3-none-any.whl", hash = "sha256:89b2ef2247e3b562a16eef663bc0e2e703ec6468e2fa8a5cd61cd449786d4f6e"}, {file = "asgiref-3.7.2.tar.gz", hash = "sha256:9e0ce3aa93a819ba5b45120216b23878cf6e8525eb3848653452b4192b92afed"}, ] [package.dependencies] typing-extensions = {version = ">=4", markers = "python_version < \"3.11\""} [package.extras] tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"] [[package]] name = "babel" version = "2.13.0" description = "Internationalization utilities" optional = false python-versions = ">=3.7" files = [ {file = "Babel-2.13.0-py3-none-any.whl", hash = "sha256:fbfcae1575ff78e26c7449136f1abbefc3c13ce542eeb13d43d50d8b047216ec"}, {file = "Babel-2.13.0.tar.gz", hash = "sha256:04c3e2d28d2b7681644508f836be388ae49e0cfe91465095340395b60d00f210"}, ] [package.dependencies] pytz = {version = ">=2015.7", markers = "python_version < \"3.9\""} [package.extras] dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] [[package]] name = "backports-zoneinfo" version = 
"0.2.1" description = "Backport of the standard library zoneinfo module" optional = false python-versions = ">=3.6" files = [ {file = "backports.zoneinfo-0.2.1-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:da6013fd84a690242c310d77ddb8441a559e9cb3d3d59ebac9aca1a57b2e18bc"}, {file = "backports.zoneinfo-0.2.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:89a48c0d158a3cc3f654da4c2de1ceba85263fafb861b98b59040a5086259722"}, {file = "backports.zoneinfo-0.2.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:1c5742112073a563c81f786e77514969acb58649bcdf6cdf0b4ed31a348d4546"}, {file = "backports.zoneinfo-0.2.1-cp36-cp36m-win32.whl", hash = "sha256:e8236383a20872c0cdf5a62b554b27538db7fa1bbec52429d8d106effbaeca08"}, {file = "backports.zoneinfo-0.2.1-cp36-cp36m-win_amd64.whl", hash = "sha256:8439c030a11780786a2002261569bdf362264f605dfa4d65090b64b05c9f79a7"}, {file = "backports.zoneinfo-0.2.1-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:f04e857b59d9d1ccc39ce2da1021d196e47234873820cbeaad210724b1ee28ac"}, {file = "backports.zoneinfo-0.2.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:17746bd546106fa389c51dbea67c8b7c8f0d14b5526a579ca6ccf5ed72c526cf"}, {file = "backports.zoneinfo-0.2.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:5c144945a7752ca544b4b78c8c41544cdfaf9786f25fe5ffb10e838e19a27570"}, {file = "backports.zoneinfo-0.2.1-cp37-cp37m-win32.whl", hash = "sha256:e55b384612d93be96506932a786bbcde5a2db7a9e6a4bb4bffe8b733f5b9036b"}, {file = "backports.zoneinfo-0.2.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a76b38c52400b762e48131494ba26be363491ac4f9a04c1b7e92483d169f6582"}, {file = "backports.zoneinfo-0.2.1-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:8961c0f32cd0336fb8e8ead11a1f8cd99ec07145ec2931122faaac1c8f7fd987"}, {file = "backports.zoneinfo-0.2.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:e81b76cace8eda1fca50e345242ba977f9be6ae3945af8d46326d776b4cf78d1"}, {file = "backports.zoneinfo-0.2.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7b0a64cda4145548fed9efc10322770f929b944ce5cee6c0dfe0c87bf4c0c8c9"}, {file = "backports.zoneinfo-0.2.1-cp38-cp38-win32.whl", hash = "sha256:1b13e654a55cd45672cb54ed12148cd33628f672548f373963b0bff67b217328"}, {file = "backports.zoneinfo-0.2.1-cp38-cp38-win_amd64.whl", hash = "sha256:4a0f800587060bf8880f954dbef70de6c11bbe59c673c3d818921f042f9954a6"}, {file = "backports.zoneinfo-0.2.1.tar.gz", hash = "sha256:fadbfe37f74051d024037f223b8e001611eac868b5c5b06144ef4d8b799862f2"}, ] [package.extras] tzdata = ["tzdata"] [[package]] name = "binaryornot" version = "0.4.4" description = "Ultra-lightweight pure Python package to check if a file is binary or text." optional = false python-versions = "*" files = [ {file = "binaryornot-0.4.4-py2.py3-none-any.whl", hash = "sha256:b8b71173c917bddcd2c16070412e369c3ed7f0528926f70cac18a6c97fd563e4"}, {file = "binaryornot-0.4.4.tar.gz", hash = "sha256:359501dfc9d40632edc9fac890e19542db1a287bbcfa58175b66658392018061"}, ] [package.dependencies] chardet = ">=3.0.2" [[package]] name = "black" version = "23.9.1" description = "The uncompromising code formatter." 
optional = false python-versions = ">=3.8" files = [ {file = "black-23.9.1-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:d6bc09188020c9ac2555a498949401ab35bb6bf76d4e0f8ee251694664df6301"}, {file = "black-23.9.1-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:13ef033794029b85dfea8032c9d3b92b42b526f1ff4bf13b2182ce4e917f5100"}, {file = "black-23.9.1-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:75a2dc41b183d4872d3a500d2b9c9016e67ed95738a3624f4751a0cb4818fe71"}, {file = "black-23.9.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13a2e4a93bb8ca74a749b6974925c27219bb3df4d42fc45e948a5d9feb5122b7"}, {file = "black-23.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:adc3e4442eef57f99b5590b245a328aad19c99552e0bdc7f0b04db6656debd80"}, {file = "black-23.9.1-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:8431445bf62d2a914b541da7ab3e2b4f3bc052d2ccbf157ebad18ea126efb91f"}, {file = "black-23.9.1-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:8fc1ddcf83f996247505db6b715294eba56ea9372e107fd54963c7553f2b6dfe"}, {file = "black-23.9.1-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:7d30ec46de88091e4316b17ae58bbbfc12b2de05e069030f6b747dfc649ad186"}, {file = "black-23.9.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:031e8c69f3d3b09e1aa471a926a1eeb0b9071f80b17689a655f7885ac9325a6f"}, {file = "black-23.9.1-cp311-cp311-win_amd64.whl", hash = "sha256:538efb451cd50f43aba394e9ec7ad55a37598faae3348d723b59ea8e91616300"}, {file = "black-23.9.1-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:638619a559280de0c2aa4d76f504891c9860bb8fa214267358f0a20f27c12948"}, {file = "black-23.9.1-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:a732b82747235e0542c03bf352c126052c0fbc458d8a239a94701175b17d4855"}, {file = "black-23.9.1-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:cf3a4d00e4cdb6734b64bf23cd4341421e8953615cba6b3670453737a72ec204"}, {file = "black-23.9.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf99f3de8b3273a8317681d8194ea222f10e0133a24a7548c73ce44ea1679377"}, {file = "black-23.9.1-cp38-cp38-win_amd64.whl", hash = "sha256:14f04c990259576acd093871e7e9b14918eb28f1866f91968ff5524293f9c573"}, {file = "black-23.9.1-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:c619f063c2d68f19b2d7270f4cf3192cb81c9ec5bc5ba02df91471d0b88c4c5c"}, {file = "black-23.9.1-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:6a3b50e4b93f43b34a9d3ef00d9b6728b4a722c997c99ab09102fd5efdb88325"}, {file = "black-23.9.1-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:c46767e8df1b7beefb0899c4a95fb43058fa8500b6db144f4ff3ca38eb2f6393"}, {file = "black-23.9.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50254ebfa56aa46a9fdd5d651f9637485068a1adf42270148cd101cdf56e0ad9"}, {file = "black-23.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:403397c033adbc45c2bd41747da1f7fc7eaa44efbee256b53842470d4ac5a70f"}, {file = "black-23.9.1-py3-none-any.whl", hash = "sha256:6ccd59584cc834b6d127628713e4b6b968e5f79572da66284532525a042549f9"}, {file = "black-23.9.1.tar.gz", hash = "sha256:24b6b3ff5c6d9ea08a8888f6977eae858e1f340d7260cf56d70a49823236b62d"}, ] [package.dependencies] click = ">=8.0.0" mypy-extensions = ">=0.4.3" packaging = ">=22.0" pathspec = ">=0.9.0" platformdirs = ">=2" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} [package.extras] colorama = ["colorama (>=0.4.3)"] d = ["aiohttp 
(>=3.7.4)"] jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "cachetools" version = "5.3.1" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ {file = "cachetools-5.3.1-py3-none-any.whl", hash = "sha256:95ef631eeaea14ba2e36f06437f36463aac3a096799e876ee55e5cdccb102590"}, {file = "cachetools-5.3.1.tar.gz", hash = "sha256:dce83f2d9b4e1f732a8cd44af8e8fab2dbe46201467fc98b3ef8f269092bf62b"}, ] [[package]] name = "certifi" version = "2023.7.22" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ {file = "certifi-2023.7.22-py3-none-any.whl", hash = "sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9"}, {file = "certifi-2023.7.22.tar.gz", hash = "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082"}, ] [[package]] name = "chardet" version = "5.2.0" description = "Universal encoding detector for Python 3" optional = false python-versions = ">=3.7" files = [ {file = "chardet-5.2.0-py3-none-any.whl", hash = "sha256:e1cf59446890a00105fe7b7912492ea04b6e6f06d4b742b2c788469e34c82970"}, {file = "chardet-5.2.0.tar.gz", hash = "sha256:1b3b6ff479a8c414bc3fa2c0852995695c4a026dcd6d0633b2dd092ca39c1cf7"}, ] [[package]] name = "charset-normalizer" version = "3.3.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." optional = false python-versions = ">=3.7.0" files = [ {file = "charset-normalizer-3.3.0.tar.gz", hash = "sha256:63563193aec44bce707e0c5ca64ff69fa72ed7cf34ce6e11d5127555756fd2f6"}, {file = "charset_normalizer-3.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:effe5406c9bd748a871dbcaf3ac69167c38d72db8c9baf3ff954c344f31c4cbe"}, {file = "charset_normalizer-3.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4162918ef3098851fcd8a628bf9b6a98d10c380725df9e04caf5ca6dd48c847a"}, {file = "charset_normalizer-3.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0570d21da019941634a531444364f2482e8db0b3425fcd5ac0c36565a64142c8"}, {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5707a746c6083a3a74b46b3a631d78d129edab06195a92a8ece755aac25a3f3d"}, {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:278c296c6f96fa686d74eb449ea1697f3c03dc28b75f873b65b5201806346a69"}, {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a4b71f4d1765639372a3b32d2638197f5cd5221b19531f9245fcc9ee62d38f56"}, {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5969baeaea61c97efa706b9b107dcba02784b1601c74ac84f2a532ea079403e"}, {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3f93dab657839dfa61025056606600a11d0b696d79386f974e459a3fbc568ec"}, {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:db756e48f9c5c607b5e33dd36b1d5872d0422e960145b08ab0ec7fd420e9d649"}, {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:232ac332403e37e4a03d209a3f92ed9071f7d3dbda70e2a5e9cff1c4ba9f0678"}, {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e5c1502d4ace69a179305abb3f0bb6141cbe4714bc9b31d427329a95acfc8bdd"}, {file 
= "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:2502dd2a736c879c0f0d3e2161e74d9907231e25d35794584b1ca5284e43f596"}, {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23e8565ab7ff33218530bc817922fae827420f143479b753104ab801145b1d5b"}, {file = "charset_normalizer-3.3.0-cp310-cp310-win32.whl", hash = "sha256:1872d01ac8c618a8da634e232f24793883d6e456a66593135aeafe3784b0848d"}, {file = "charset_normalizer-3.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:557b21a44ceac6c6b9773bc65aa1b4cc3e248a5ad2f5b914b91579a32e22204d"}, {file = "charset_normalizer-3.3.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d7eff0f27edc5afa9e405f7165f85a6d782d308f3b6b9d96016c010597958e63"}, {file = "charset_normalizer-3.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6a685067d05e46641d5d1623d7c7fdf15a357546cbb2f71b0ebde91b175ffc3e"}, {file = "charset_normalizer-3.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0d3d5b7db9ed8a2b11a774db2bbea7ba1884430a205dbd54a32d61d7c2a190fa"}, {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2935ffc78db9645cb2086c2f8f4cfd23d9b73cc0dc80334bc30aac6f03f68f8c"}, {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fe359b2e3a7729010060fbca442ca225280c16e923b37db0e955ac2a2b72a05"}, {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:380c4bde80bce25c6e4f77b19386f5ec9db230df9f2f2ac1e5ad7af2caa70459"}, {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0d1e3732768fecb052d90d62b220af62ead5748ac51ef61e7b32c266cac9293"}, {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1b2919306936ac6efb3aed1fbf81039f7087ddadb3160882a57ee2ff74fd2382"}, {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f8888e31e3a85943743f8fc15e71536bda1c81d5aa36d014a3c0c44481d7db6e"}, {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:82eb849f085624f6a607538ee7b83a6d8126df6d2f7d3b319cb837b289123078"}, {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7b8b8bf1189b3ba9b8de5c8db4d541b406611a71a955bbbd7385bbc45fcb786c"}, {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5adf257bd58c1b8632046bbe43ee38c04e1038e9d37de9c57a94d6bd6ce5da34"}, {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c350354efb159b8767a6244c166f66e67506e06c8924ed74669b2c70bc8735b1"}, {file = "charset_normalizer-3.3.0-cp311-cp311-win32.whl", hash = "sha256:02af06682e3590ab952599fbadac535ede5d60d78848e555aa58d0c0abbde786"}, {file = "charset_normalizer-3.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:86d1f65ac145e2c9ed71d8ffb1905e9bba3a91ae29ba55b4c46ae6fc31d7c0d4"}, {file = "charset_normalizer-3.3.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:3b447982ad46348c02cb90d230b75ac34e9886273df3a93eec0539308a6296d7"}, {file = "charset_normalizer-3.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:abf0d9f45ea5fb95051c8bfe43cb40cda383772f7e5023a83cc481ca2604d74e"}, {file = "charset_normalizer-3.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b09719a17a2301178fac4470d54b1680b18a5048b481cb8890e1ef820cb80455"}, {file = 
"charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b3d9b48ee6e3967b7901c052b670c7dda6deb812c309439adaffdec55c6d7b78"}, {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:edfe077ab09442d4ef3c52cb1f9dab89bff02f4524afc0acf2d46be17dc479f5"}, {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3debd1150027933210c2fc321527c2299118aa929c2f5a0a80ab6953e3bd1908"}, {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86f63face3a527284f7bb8a9d4f78988e3c06823f7bea2bd6f0e0e9298ca0403"}, {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:24817cb02cbef7cd499f7c9a2735286b4782bd47a5b3516a0e84c50eab44b98e"}, {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c71f16da1ed8949774ef79f4a0260d28b83b3a50c6576f8f4f0288d109777989"}, {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:9cf3126b85822c4e53aa28c7ec9869b924d6fcfb76e77a45c44b83d91afd74f9"}, {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:b3b2316b25644b23b54a6f6401074cebcecd1244c0b8e80111c9a3f1c8e83d65"}, {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:03680bb39035fbcffe828eae9c3f8afc0428c91d38e7d61aa992ef7a59fb120e"}, {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4cc152c5dd831641e995764f9f0b6589519f6f5123258ccaca8c6d34572fefa8"}, {file = "charset_normalizer-3.3.0-cp312-cp312-win32.whl", hash = "sha256:b8f3307af845803fb0b060ab76cf6dd3a13adc15b6b451f54281d25911eb92df"}, {file = "charset_normalizer-3.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:8eaf82f0eccd1505cf39a45a6bd0a8cf1c70dcfc30dba338207a969d91b965c0"}, {file = "charset_normalizer-3.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dc45229747b67ffc441b3de2f3ae5e62877a282ea828a5bdb67883c4ee4a8810"}, {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f4a0033ce9a76e391542c182f0d48d084855b5fcba5010f707c8e8c34663d77"}, {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ada214c6fa40f8d800e575de6b91a40d0548139e5dc457d2ebb61470abf50186"}, {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b1121de0e9d6e6ca08289583d7491e7fcb18a439305b34a30b20d8215922d43c"}, {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1063da2c85b95f2d1a430f1c33b55c9c17ffaf5e612e10aeaad641c55a9e2b9d"}, {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:70f1d09c0d7748b73290b29219e854b3207aea922f839437870d8cc2168e31cc"}, {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:250c9eb0f4600361dd80d46112213dff2286231d92d3e52af1e5a6083d10cad9"}, {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:750b446b2ffce1739e8578576092179160f6d26bd5e23eb1789c4d64d5af7dc7"}, {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:fc52b79d83a3fe3a360902d3f5d79073a993597d48114c29485e9431092905d8"}, {file = 
"charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:588245972aca710b5b68802c8cad9edaa98589b1b42ad2b53accd6910dad3545"}, {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e39c7eb31e3f5b1f88caff88bcff1b7f8334975b46f6ac6e9fc725d829bc35d4"}, {file = "charset_normalizer-3.3.0-cp37-cp37m-win32.whl", hash = "sha256:abecce40dfebbfa6abf8e324e1860092eeca6f7375c8c4e655a8afb61af58f2c"}, {file = "charset_normalizer-3.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:24a91a981f185721542a0b7c92e9054b7ab4fea0508a795846bc5b0abf8118d4"}, {file = "charset_normalizer-3.3.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:67b8cc9574bb518ec76dc8e705d4c39ae78bb96237cb533edac149352c1f39fe"}, {file = "charset_normalizer-3.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ac71b2977fb90c35d41c9453116e283fac47bb9096ad917b8819ca8b943abecd"}, {file = "charset_normalizer-3.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3ae38d325b512f63f8da31f826e6cb6c367336f95e418137286ba362925c877e"}, {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:542da1178c1c6af8873e143910e2269add130a299c9106eef2594e15dae5e482"}, {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:30a85aed0b864ac88309b7d94be09f6046c834ef60762a8833b660139cfbad13"}, {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aae32c93e0f64469f74ccc730a7cb21c7610af3a775157e50bbd38f816536b38"}, {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15b26ddf78d57f1d143bdf32e820fd8935d36abe8a25eb9ec0b5a71c82eb3895"}, {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7f5d10bae5d78e4551b7be7a9b29643a95aded9d0f602aa2ba584f0388e7a557"}, {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:249c6470a2b60935bafd1d1d13cd613f8cd8388d53461c67397ee6a0f5dce741"}, {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c5a74c359b2d47d26cdbbc7845e9662d6b08a1e915eb015d044729e92e7050b7"}, {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:b5bcf60a228acae568e9911f410f9d9e0d43197d030ae5799e20dca8df588287"}, {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:187d18082694a29005ba2944c882344b6748d5be69e3a89bf3cc9d878e548d5a"}, {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:81bf654678e575403736b85ba3a7867e31c2c30a69bc57fe88e3ace52fb17b89"}, {file = "charset_normalizer-3.3.0-cp38-cp38-win32.whl", hash = "sha256:85a32721ddde63c9df9ebb0d2045b9691d9750cb139c161c80e500d210f5e26e"}, {file = "charset_normalizer-3.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:468d2a840567b13a590e67dd276c570f8de00ed767ecc611994c301d0f8c014f"}, {file = "charset_normalizer-3.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e0fc42822278451bc13a2e8626cf2218ba570f27856b536e00cfa53099724828"}, {file = "charset_normalizer-3.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:09c77f964f351a7369cc343911e0df63e762e42bac24cd7d18525961c81754f4"}, {file = "charset_normalizer-3.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:12ebea541c44fdc88ccb794a13fe861cc5e35d64ed689513a5c03d05b53b7c82"}, {file = 
"charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:805dfea4ca10411a5296bcc75638017215a93ffb584c9e344731eef0dcfb026a"}, {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:96c2b49eb6a72c0e4991d62406e365d87067ca14c1a729a870d22354e6f68115"}, {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aaf7b34c5bc56b38c931a54f7952f1ff0ae77a2e82496583b247f7c969eb1479"}, {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:619d1c96099be5823db34fe89e2582b336b5b074a7f47f819d6b3a57ff7bdb86"}, {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a0ac5e7015a5920cfce654c06618ec40c33e12801711da6b4258af59a8eff00a"}, {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:93aa7eef6ee71c629b51ef873991d6911b906d7312c6e8e99790c0f33c576f89"}, {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7966951325782121e67c81299a031f4c115615e68046f79b85856b86ebffc4cd"}, {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:02673e456dc5ab13659f85196c534dc596d4ef260e4d86e856c3b2773ce09843"}, {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:c2af80fb58f0f24b3f3adcb9148e6203fa67dd3f61c4af146ecad033024dde43"}, {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:153e7b6e724761741e0974fc4dcd406d35ba70b92bfe3fedcb497226c93b9da7"}, {file = "charset_normalizer-3.3.0-cp39-cp39-win32.whl", hash = "sha256:d47ecf253780c90ee181d4d871cd655a789da937454045b17b5798da9393901a"}, {file = "charset_normalizer-3.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:d97d85fa63f315a8bdaba2af9a6a686e0eceab77b3089af45133252618e70884"}, {file = "charset_normalizer-3.3.0-py3-none-any.whl", hash = "sha256:e46cd37076971c1040fc8c41273a8b3e2c624ce4f2be3f5dfcb7a430c1d3acc2"}, ] [[package]] name = "click" version = "8.1.7" description = "Composable command line interface toolkit" optional = false python-versions = ">=3.7" files = [ {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, ] [package.dependencies] colorama = {version = "*", markers = "platform_system == \"Windows\""} [[package]] name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] [[package]] name = "cookiecutter" version = "1.7.3" description = "A command-line utility that creates projects from project templates, e.g. creating a Python package project from a Python package project template." 
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
files = [
    {file = "cookiecutter-1.7.3-py2.py3-none-any.whl", hash = "sha256:f8671531fa96ab14339d0c59b4f662a4f12a2ecacd94a0f70a3500843da588e2"},
    {file = "cookiecutter-1.7.3.tar.gz", hash = "sha256:6b9a4d72882e243be077a7397d0f1f76fe66cf3df91f3115dbb5330e214fa457"},
]

[package.dependencies]
binaryornot = ">=0.4.4"
click = ">=7.0"
Jinja2 = ">=2.7,<4.0.0"
jinja2-time = ">=0.2.0"
poyo = ">=0.5.0"
python-slugify = ">=4.0.0"
requests = ">=2.23.0"
six = ">=1.10"

[[package]]
name = "coverage"
version = "7.3.2"
description = "Code coverage measurement for Python"
optional = false
python-versions = ">=3.8"
files = [
    {file = "coverage-7.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d872145f3a3231a5f20fd48500274d7df222e291d90baa2026cc5152b7ce86bf"},
    {file = "coverage-7.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:310b3bb9c91ea66d59c53fa4989f57d2436e08f18fb2f421a1b0b6b8cc7fffda"},
    {file = "coverage-7.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f47d39359e2c3779c5331fc740cf4bce6d9d680a7b4b4ead97056a0ae07cb49a"},
    {file = "coverage-7.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa72dbaf2c2068404b9870d93436e6d23addd8bbe9295f49cbca83f6e278179c"},
    {file = "coverage-7.3.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:beaa5c1b4777f03fc63dfd2a6bd820f73f036bfb10e925fce067b00a340d0f3f"},
    {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:dbc1b46b92186cc8074fee9d9fbb97a9dd06c6cbbef391c2f59d80eabdf0faa6"},
    {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:315a989e861031334d7bee1f9113c8770472db2ac484e5b8c3173428360a9148"},
    {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d1bc430677773397f64a5c88cb522ea43175ff16f8bfcc89d467d974cb2274f9"},
    {file = "coverage-7.3.2-cp310-cp310-win32.whl", hash = "sha256:a889ae02f43aa45032afe364c8ae84ad3c54828c2faa44f3bfcafecb5c96b02f"},
    {file = "coverage-7.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:c0ba320de3fb8c6ec16e0be17ee1d3d69adcda99406c43c0409cb5c41788a611"},
    {file = "coverage-7.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ac8c802fa29843a72d32ec56d0ca792ad15a302b28ca6203389afe21f8fa062c"},
    {file = "coverage-7.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:89a937174104339e3a3ffcf9f446c00e3a806c28b1841c63edb2b369310fd074"},
    {file = "coverage-7.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e267e9e2b574a176ddb983399dec325a80dbe161f1a32715c780b5d14b5f583a"},
    {file = "coverage-7.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2443cbda35df0d35dcfb9bf8f3c02c57c1d6111169e3c85fc1fcc05e0c9f39a3"},
    {file = "coverage-7.3.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4175e10cc8dda0265653e8714b3174430b07c1dca8957f4966cbd6c2b1b8065a"},
    {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0cbf38419fb1a347aaf63481c00f0bdc86889d9fbf3f25109cf96c26b403fda1"},
    {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5c913b556a116b8d5f6ef834038ba983834d887d82187c8f73dec21049abd65c"},
    {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1981f785239e4e39e6444c63a98da3a1db8e971cb9ceb50a945ba6296b43f312"},
"sha256:1981f785239e4e39e6444c63a98da3a1db8e971cb9ceb50a945ba6296b43f312"}, {file = "coverage-7.3.2-cp311-cp311-win32.whl", hash = "sha256:43668cabd5ca8258f5954f27a3aaf78757e6acf13c17604d89648ecc0cc66640"}, {file = "coverage-7.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10c39c0452bf6e694511c901426d6b5ac005acc0f78ff265dbe36bf81f808a2"}, {file = "coverage-7.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:4cbae1051ab791debecc4a5dcc4a1ff45fc27b91b9aee165c8a27514dd160836"}, {file = "coverage-7.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12d15ab5833a997716d76f2ac1e4b4d536814fc213c85ca72756c19e5a6b3d63"}, {file = "coverage-7.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c7bba973ebee5e56fe9251300c00f1579652587a9f4a5ed8404b15a0471f216"}, {file = "coverage-7.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fe494faa90ce6381770746077243231e0b83ff3f17069d748f645617cefe19d4"}, {file = "coverage-7.3.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6e9589bd04d0461a417562649522575d8752904d35c12907d8c9dfeba588faf"}, {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d51ac2a26f71da1b57f2dc81d0e108b6ab177e7d30e774db90675467c847bbdf"}, {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:99b89d9f76070237975b315b3d5f4d6956ae354a4c92ac2388a5695516e47c84"}, {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fa28e909776dc69efb6ed975a63691bc8172b64ff357e663a1bb06ff3c9b589a"}, {file = "coverage-7.3.2-cp312-cp312-win32.whl", hash = "sha256:289fe43bf45a575e3ab10b26d7b6f2ddb9ee2dba447499f5401cfb5ecb8196bb"}, {file = "coverage-7.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:7dbc3ed60e8659bc59b6b304b43ff9c3ed858da2839c78b804973f613d3e92ed"}, {file = "coverage-7.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f94b734214ea6a36fe16e96a70d941af80ff3bfd716c141300d95ebc85339738"}, {file = "coverage-7.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:af3d828d2c1cbae52d34bdbb22fcd94d1ce715d95f1a012354a75e5913f1bda2"}, {file = "coverage-7.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:630b13e3036e13c7adc480ca42fa7afc2a5d938081d28e20903cf7fd687872e2"}, {file = "coverage-7.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c9eacf273e885b02a0273bb3a2170f30e2d53a6d53b72dbe02d6701b5296101c"}, {file = "coverage-7.3.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8f17966e861ff97305e0801134e69db33b143bbfb36436efb9cfff6ec7b2fd9"}, {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b4275802d16882cf9c8b3d057a0839acb07ee9379fa2749eca54efbce1535b82"}, {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:72c0cfa5250f483181e677ebc97133ea1ab3eb68645e494775deb6a7f6f83901"}, {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cb536f0dcd14149425996821a168f6e269d7dcd2c273a8bff8201e79f5104e76"}, {file = "coverage-7.3.2-cp38-cp38-win32.whl", hash = "sha256:307adb8bd3abe389a471e649038a71b4eb13bfd6b7dd9a129fa856f5c695cf92"}, {file = "coverage-7.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:88ed2c30a49ea81ea3b7f172e0269c182a44c236eb394718f976239892c0a27a"}, {file = "coverage-7.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:b631c92dfe601adf8f5ebc7fc13ced6bb6e9609b19d9a8cd59fa47c4186ad1ce"}, {file = "coverage-7.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d3d9df4051c4a7d13036524b66ecf7a7537d14c18a384043f30a303b146164e9"}, {file = "coverage-7.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f7363d3b6a1119ef05015959ca24a9afc0ea8a02c687fe7e2d557705375c01f"}, {file = "coverage-7.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2f11cc3c967a09d3695d2a6f03fb3e6236622b93be7a4b5dc09166a861be6d25"}, {file = "coverage-7.3.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:149de1d2401ae4655c436a3dced6dd153f4c3309f599c3d4bd97ab172eaf02d9"}, {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3a4006916aa6fee7cd38db3bfc95aa9c54ebb4ffbfc47c677c8bba949ceba0a6"}, {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9028a3871280110d6e1aa2df1afd5ef003bab5fb1ef421d6dc748ae1c8ef2ebc"}, {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9f805d62aec8eb92bab5b61c0f07329275b6f41c97d80e847b03eb894f38d083"}, {file = "coverage-7.3.2-cp39-cp39-win32.whl", hash = "sha256:d1c88ec1a7ff4ebca0219f5b1ef863451d828cccf889c173e1253aa84b1e07ce"}, {file = "coverage-7.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b4767da59464bb593c07afceaddea61b154136300881844768037fd5e859353f"}, {file = "coverage-7.3.2-pp38.pp39.pp310-none-any.whl", hash = "sha256:ae97af89f0fbf373400970c0a21eef5aa941ffeed90aee43650b81f7d7f47637"}, {file = "coverage-7.3.2.tar.gz", hash = "sha256:be32ad29341b0170e795ca590e1c07e81fc061cb5b10c74ce7203491484404ef"}, ] [package.dependencies] tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} [package.extras] toml = ["tomli"] [[package]] name = "distlib" version = "0.3.7" description = "Distribution utilities" optional = false python-versions = "*" files = [ {file = "distlib-0.3.7-py2.py3-none-any.whl", hash = "sha256:2e24928bc811348f0feb63014e97aaae3037f2cf48712d51ae61df7fd6075057"}, {file = "distlib-0.3.7.tar.gz", hash = "sha256:9dafe54b34a028eafd95039d5e5d4851a13734540f1331060d31c9916e7147a8"}, ] [[package]] name = "dj-database-url" version = "2.1.0" description = "Use Database URLs in your Django Application." optional = false python-versions = "*" files = [ {file = "dj-database-url-2.1.0.tar.gz", hash = "sha256:f2042cefe1086e539c9da39fad5ad7f61173bf79665e69bf7e4de55fa88b135f"}, {file = "dj_database_url-2.1.0-py3-none-any.whl", hash = "sha256:04bc34b248d4c21aaa13e4ab419ae6575ef5f10f3df735ce7da97722caa356e0"}, ] [package.dependencies] Django = ">=3.2" typing-extensions = ">=3.10.0.0" [[package]] name = "django" version = "4.2.6" description = "A high-level Python web framework that encourages rapid development and clean, pragmatic design." 
optional = false
python-versions = ">=3.8"
files = [
    {file = "Django-4.2.6-py3-none-any.whl", hash = "sha256:a64d2487cdb00ad7461434320ccc38e60af9c404773a2f95ab0093b4453a3215"},
    {file = "Django-4.2.6.tar.gz", hash = "sha256:08f41f468b63335aea0d904c5729e0250300f6a1907bf293a65499496cdbc68f"},
]

[package.dependencies]
asgiref = ">=3.6.0,<4"
"backports.zoneinfo" = {version = "*", markers = "python_version < \"3.9\""}
sqlparse = ">=0.3.1"
tzdata = {version = "*", markers = "sys_platform == \"win32\""}

[package.extras]
argon2 = ["argon2-cffi (>=19.1.0)"]
bcrypt = ["bcrypt"]

[[package]]
name = "django-dynamic-fixture"
version = "4.0.1"
description = "A full library to create dynamic model instances for testing purposes."
optional = false
python-versions = "*"
files = [
    {file = "django-dynamic-fixture-4.0.1.tar.gz", hash = "sha256:2a2197578b7702db8f5eed9ad704f6be33bac8bf0111c7c92f6063c2a4d02933"},
    {file = "django_dynamic_fixture-4.0.1-py3-none-any.whl", hash = "sha256:d0611b6dc594fb1bccad1fd94dade89cc8deca12385bc2763baded3e48322547"},
]

[[package]]
name = "django-postgres-extra"
version = "2.0.4"
description = "Bringing all of PostgreSQL's awesomeness to Django."
optional = false
python-versions = ">=3.6"
files = [
    {file = "django-postgres-extra-2.0.4.tar.gz", hash = "sha256:95030211916fcc782b514ca73bede24aefa0ab73a80b2a7e0b59d6f168db3708"},
    {file = "django_postgres_extra-2.0.4-py3-none-any.whl", hash = "sha256:5c5d35e1edc0d6aa5374a72f92b0d4858ca35f9772c540e3fa8280c086a8c7f7"},
]

[package.dependencies]
ansimarkup = ">=1.4.0,<=2.0.0"
Django = ">=2.0"
python-dateutil = ">=2.8.0,<=3.0.0"

[package.extras]
analysis = ["autoflake (==1.4)", "autopep8 (==1.6.0)", "black (==21.10b0)", "docformatter (==1.4)", "flake8 (==4.0.1)", "isort (==5.10.0)"]
docs = ["Sphinx (==2.2.0)", "docutils (<0.18)", "sphinx-rtd-theme (==0.4.3)"]
test = ["coveralls (==3.3.0)", "dj-database-url (==0.5.0)", "freezegun (==1.1.0)", "psycopg2 (>=2.8.4,<3.0.0)", "pytest (==6.2.5)", "pytest-benchmark (==3.4.1)", "pytest-cov (==3.0.0)", "pytest-django (==4.4.0)", "snapshottest (==0.6.0)", "tox (==3.24.4)"]

[[package]]
name = "exceptiongroup"
version = "1.1.3"
description = "Backport of PEP 654 (exception groups)"
optional = false
python-versions = ">=3.7"
files = [
    {file = "exceptiongroup-1.1.3-py3-none-any.whl", hash = "sha256:343280667a4585d195ca1cf9cef84a4e178c4b6cf2274caef9859782b567d5e3"},
    {file = "exceptiongroup-1.1.3.tar.gz", hash = "sha256:097acd85d473d75af5bb98e41b61ff7fe35efe6675e4f9370ec6ec5126d160e9"},
]

[package.extras]
test = ["pytest (>=6)"]

[[package]]
name = "filelock"
version = "3.12.4"
description = "A platform independent file lock."
optional = false
python-versions = ">=3.8"
files = [
    {file = "filelock-3.12.4-py3-none-any.whl", hash = "sha256:08c21d87ded6e2b9da6728c3dff51baf1dcecf973b768ef35bcbc3447edb9ad4"},
    {file = "filelock-3.12.4.tar.gz", hash = "sha256:2e6f249f1f3654291606e046b09f1fd5eac39b360664c27f5aad072012f8bcbd"},
]

[package.extras]
docs = ["furo (>=2023.7.26)", "sphinx (>=7.1.2)", "sphinx-autodoc-typehints (>=1.24)"]
testing = ["covdefaults (>=2.3)", "coverage (>=7.3)", "diff-cover (>=7.7)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)", "pytest-timeout (>=2.1)"]
typing = ["typing-extensions (>=4.7.1)"]

[[package]]
name = "footing"
version = "0.1.4"
description = "Keep templated projects in sync with their template"
optional = false
python-versions = ">=3.7.0,<4"
files = [
    {file = "footing-0.1.4-py3-none-any.whl", hash = "sha256:ae9589a8c9fb9620ca544cf8de517d6119541ee6273ec449d19438a463f21013"},
    {file = "footing-0.1.4.tar.gz", hash = "sha256:5d7243bbe6a519a33b769e6c91f14834e54f8e622c7487e3ad76fa2c5ce63a7d"},
]

[package.dependencies]
click = ">=6.7"
cookiecutter = "<2.0.0"
python-gitlab = ">=2.10.1"
pyyaml = ">=3.12"
requests = ">=2.13.0"
tldextract = ">=3.1.2"

[[package]]
name = "formaldict"
version = "1.0.5"
description = "Formal structured dictionaries parsed from a schema"
optional = false
python-versions = ">=3.7.0,<4"
files = [
    {file = "formaldict-1.0.5-py3-none-any.whl", hash = "sha256:8fdf65620052f34df2808f16c6aaee6e3f866348b1b1a441568d7cf79af94222"},
    {file = "formaldict-1.0.5.tar.gz", hash = "sha256:9cf2d0035275858076b84ff07cafcbdef6af67e71a86dbe90e02264f3ec29cf1"},
]

[package.dependencies]
kmatch = ">=0.3.0"
prompt-toolkit = ">=3.0.2"
python-dateutil = ">=2.8.1"

[[package]]
name = "ghp-import"
version = "2.1.0"
description = "Copy your docs directly to the gh-pages branch."
optional = false
python-versions = "*"
files = [
    {file = "ghp-import-2.1.0.tar.gz", hash = "sha256:9c535c4c61193c2df8871222567d7fd7e5014d835f97dc7b7439069e2413d343"},
    {file = "ghp_import-2.1.0-py3-none-any.whl", hash = "sha256:8337dd7b50877f163d4c0289bc1f1c7f127550241988d568c1db512c4324a619"},
]

[package.dependencies]
python-dateutil = ">=2.8.1"

[package.extras]
dev = ["flake8", "markdown", "twine", "wheel"]

[[package]]
name = "git-tidy"
version = "1.2.0"
description = "Tidy git commit messages, linting, and logging"
optional = false
python-versions = ">=3.7.0,<4"
files = [
    {file = "git-tidy-1.2.0.tar.gz", hash = "sha256:14d04a4ee2de0ae4c72b1aeb1128c65fd6be6786618d5a0747ec182364fe657b"},
    {file = "git_tidy-1.2.0-py3-none-any.whl", hash = "sha256:8d88520390963c698babec05e9c121fd2bb8e3f21865096dbe445f082881dd76"},
]

[package.dependencies]
click = ">7.0"
formaldict = ">0.2.0"
jinja2 = ">2.10.3"
packaging = ">20.0"
python-dateutil = ">2.8.1"
pyyaml = ">5.1.2"
requests = ">2.22.0"

[[package]]
name = "griffe"
version = "0.36.5"
description = "Signatures for entire Python programs. Extract the structure, the frame, the skeleton of your project, to generate API documentation or find breaking changes in your API."
optional = false
python-versions = ">=3.8"
files = [
    {file = "griffe-0.36.5-py3-none-any.whl", hash = "sha256:62af1ca94a5ac73259278b7692d300bf1c6bd8f9beeabaeaa229009bb82d09c6"},
    {file = "griffe-0.36.5.tar.gz", hash = "sha256:b8a672c54b99e958b985b3cfbf1de09e25d686dd8a667aa5ec2d0b1601a542fc"},
]

[package.dependencies]
colorama = ">=0.4"

[[package]]
name = "idna"
version = "3.4"
description = "Internationalized Domain Names in Applications (IDNA)"
optional = false
python-versions = ">=3.5"
files = [
    {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"},
    {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"},
]

[[package]]
name = "importlib-metadata"
version = "6.8.0"
description = "Read metadata from Python packages"
optional = false
python-versions = ">=3.8"
files = [
    {file = "importlib_metadata-6.8.0-py3-none-any.whl", hash = "sha256:3ebb78df84a805d7698245025b975d9d67053cd94c79245ba4b3eb694abe68bb"},
    {file = "importlib_metadata-6.8.0.tar.gz", hash = "sha256:dbace7892d8c0c4ac1ad096662232f831d4e64f4c4545bd53016a3e9d4654743"},
]

[package.dependencies]
zipp = ">=0.5"

[package.extras]
docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
perf = ["ipython"]
testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"]

[[package]]
name = "iniconfig"
version = "2.0.0"
description = "brain-dead simple config-ini parsing"
optional = false
python-versions = ">=3.7"
files = [
    {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"},
    {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"},
]

[[package]]
name = "jinja2"
version = "3.1.2"
description = "A very fast and expressive template engine."
optional = false
python-versions = ">=3.7"
files = [
    {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"},
    {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"},
]

[package.dependencies]
MarkupSafe = ">=2.0"

[package.extras]
i18n = ["Babel (>=2.7)"]

[[package]]
name = "jinja2-time"
version = "0.2.0"
description = "Jinja2 Extension for Dates and Times"
optional = false
python-versions = "*"
files = [
    {file = "jinja2-time-0.2.0.tar.gz", hash = "sha256:d14eaa4d315e7688daa4969f616f226614350c48730bfa1692d2caebd8c90d40"},
    {file = "jinja2_time-0.2.0-py2.py3-none-any.whl", hash = "sha256:d3eab6605e3ec8b7a0863df09cc1d23714908fa61aa6986a845c20ba488b4efa"},
]

[package.dependencies]
arrow = "*"
jinja2 = "*"

[[package]]
name = "kmatch"
version = "0.5.0"
description = "A language for matching/validating/filtering Python dictionaries"
optional = false
python-versions = "*"
files = [
    {file = "kmatch-0.5.0-py3-none-any.whl", hash = "sha256:93045cd2bc359c6d4e7dde114a4e13bd24926192a217c290538b798b027dc719"},
    {file = "kmatch-0.5.0.tar.gz", hash = "sha256:7690d16606c87a77a72691f04728b6935f65b0c819dcf070a7c95946ce2b5ac0"},
]

[[package]]
name = "markdown"
version = "3.5"
description = "Python implementation of John Gruber's Markdown."
optional = false
python-versions = ">=3.8"
files = [
    {file = "Markdown-3.5-py3-none-any.whl", hash = "sha256:4afb124395ce5fc34e6d9886dab977fd9ae987fc6e85689f08278cf0c69d4bf3"},
    {file = "Markdown-3.5.tar.gz", hash = "sha256:a807eb2e4778d9156c8f07876c6e4d50b5494c5665c4834f67b06459dfd877b3"},
]

[package.dependencies]
importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""}

[package.extras]
docs = ["mdx-gh-links (>=0.2)", "mkdocs (>=1.5)", "mkdocs-gen-files", "mkdocs-literate-nav", "mkdocs-nature (>=0.6)", "mkdocs-section-index", "mkdocstrings[python]"]
testing = ["coverage", "pyyaml"]

[[package]]
name = "markupsafe"
version = "2.1.3"
description = "Safely add untrusted strings to HTML/XML markup."
optional = false
python-versions = ">=3.7"
files = [
    {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa"},
    {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57"},
    {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f"},
    {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52"},
    {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00"},
    {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6"},
    {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779"},
    {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7"},
    {file = "MarkupSafe-2.1.3-cp310-cp310-win32.whl", hash = "sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431"},
    {file = "MarkupSafe-2.1.3-cp310-cp310-win_amd64.whl", hash = "sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559"},
    {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c"},
    {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575"},
    {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee"},
    {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2"},
    {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9"},
    {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc"},
    {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9"},
    {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"},
"sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"}, {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"}, {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"}, {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"}, {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"}, {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"}, {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"}, {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"}, {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"}, {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-win32.whl", hash = "sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-win_amd64.whl", hash = "sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24"}, {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4"}, {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0"}, {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee"}, {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be"}, {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e"}, {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8"}, {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3"}, {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d"}, {file = "MarkupSafe-2.1.3-cp38-cp38-win32.whl", hash = "sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5"}, {file = "MarkupSafe-2.1.3-cp38-cp38-win_amd64.whl", hash = "sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc"}, {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198"}, {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b"}, {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58"}, {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e"}, {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c"}, {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636"}, {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea"}, {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e"}, {file = "MarkupSafe-2.1.3-cp39-cp39-win32.whl", hash = "sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2"}, {file = "MarkupSafe-2.1.3-cp39-cp39-win_amd64.whl", hash = "sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba"}, {file = "MarkupSafe-2.1.3.tar.gz", hash = "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"}, ] [[package]] name = "mergedeep" version = "1.3.4" description = "A deep merge function for 🐍." optional = false python-versions = ">=3.6" files = [ {file = "mergedeep-1.3.4-py3-none-any.whl", hash = "sha256:70775750742b25c0d8f36c55aed03d24c3384d17c951b3175d898bd778ef0307"}, {file = "mergedeep-1.3.4.tar.gz", hash = "sha256:0096d52e9dad9939c3d975a774666af186eda617e6ca84df4c94dec30004f2a8"}, ] [[package]] name = "mkdocs" version = "1.5.3" description = "Project documentation with Markdown." 
optional = false
python-versions = ">=3.7"
files = [
    {file = "mkdocs-1.5.3-py3-none-any.whl", hash = "sha256:3b3a78e736b31158d64dbb2f8ba29bd46a379d0c6e324c2246c3bc3d2189cfc1"},
    {file = "mkdocs-1.5.3.tar.gz", hash = "sha256:eb7c99214dcb945313ba30426c2451b735992c73c2e10838f76d09e39ff4d0e2"},
]

[package.dependencies]
click = ">=7.0"
colorama = {version = ">=0.4", markers = "platform_system == \"Windows\""}
ghp-import = ">=1.0"
importlib-metadata = {version = ">=4.3", markers = "python_version < \"3.10\""}
jinja2 = ">=2.11.1"
markdown = ">=3.2.1"
markupsafe = ">=2.0.1"
mergedeep = ">=1.3.4"
packaging = ">=20.5"
pathspec = ">=0.11.1"
platformdirs = ">=2.2.0"
pyyaml = ">=5.1"
pyyaml-env-tag = ">=0.1"
watchdog = ">=2.0"

[package.extras]
i18n = ["babel (>=2.9.0)"]
min-versions = ["babel (==2.9.0)", "click (==7.0)", "colorama (==0.4)", "ghp-import (==1.0)", "importlib-metadata (==4.3)", "jinja2 (==2.11.1)", "markdown (==3.2.1)", "markupsafe (==2.0.1)", "mergedeep (==1.3.4)", "packaging (==20.5)", "pathspec (==0.11.1)", "platformdirs (==2.2.0)", "pyyaml (==5.1)", "pyyaml-env-tag (==0.1)", "typing-extensions (==3.10)", "watchdog (==2.0)"]

[[package]]
name = "mkdocs-autorefs"
version = "0.5.0"
description = "Automatically link across pages in MkDocs."
optional = false
python-versions = ">=3.8"
files = [
    {file = "mkdocs_autorefs-0.5.0-py3-none-any.whl", hash = "sha256:7930fcb8ac1249f10e683967aeaddc0af49d90702af111a5e390e8b20b3d97ff"},
    {file = "mkdocs_autorefs-0.5.0.tar.gz", hash = "sha256:9a5054a94c08d28855cfab967ada10ed5be76e2bfad642302a610b252c3274c0"},
]

[package.dependencies]
Markdown = ">=3.3"
mkdocs = ">=1.1"

[[package]]
name = "mkdocs-material"
version = "9.4.4"
description = "Documentation that simply works"
optional = false
python-versions = ">=3.8"
files = [
    {file = "mkdocs_material-9.4.4-py3-none-any.whl", hash = "sha256:86fe79253afccc7f085f89a2d8e9e3300f82c4813d9b910d9081ce57a7e68380"},
    {file = "mkdocs_material-9.4.4.tar.gz", hash = "sha256:ab84a7cfaf009c47cd2926cdd7e6040b8cc12c3806cc533e8b16d57bd16d9c47"},
]

[package.dependencies]
babel = ">=2.10,<3.0"
colorama = ">=0.4,<1.0"
jinja2 = ">=3.0,<4.0"
markdown = ">=3.2,<4.0"
mkdocs = ">=1.5.3,<2.0"
mkdocs-material-extensions = ">=1.2,<2.0"
paginate = ">=0.5,<1.0"
pygments = ">=2.16,<3.0"
pymdown-extensions = ">=10.2,<11.0"
regex = ">=2022.4"
requests = ">=2.26,<3.0"

[package.extras]
git = ["mkdocs-git-committers-plugin-2 (>=1.1,<2.0)", "mkdocs-git-revision-date-localized-plugin (>=1.2,<2.0)"]
imaging = ["cairosvg (>=2.6,<3.0)", "pillow (>=9.4,<10.0)"]
recommended = ["mkdocs-minify-plugin (>=0.7,<1.0)", "mkdocs-redirects (>=1.2,<2.0)", "mkdocs-rss-plugin (>=1.6,<2.0)"]

[[package]]
name = "mkdocs-material-extensions"
version = "1.2"
description = "Extension pack for Python Markdown and MkDocs Material."
optional = false
python-versions = ">=3.7"
files = [
    {file = "mkdocs_material_extensions-1.2-py3-none-any.whl", hash = "sha256:c767bd6d6305f6420a50f0b541b0c9966d52068839af97029be14443849fb8a1"},
    {file = "mkdocs_material_extensions-1.2.tar.gz", hash = "sha256:27e2d1ed2d031426a6e10d5ea06989d67e90bb02acd588bc5673106b5ee5eedf"},
]

[[package]]
name = "mkdocstrings"
version = "0.23.0"
description = "Automatic documentation from sources, for MkDocs."
optional = false
python-versions = ">=3.8"
files = [
    {file = "mkdocstrings-0.23.0-py3-none-any.whl", hash = "sha256:051fa4014dfcd9ed90254ae91de2dbb4f24e166347dae7be9a997fe16316c65e"},
    {file = "mkdocstrings-0.23.0.tar.gz", hash = "sha256:d9c6a37ffbe7c14a7a54ef1258c70b8d394e6a33a1c80832bce40b9567138d1c"},
]

[package.dependencies]
importlib-metadata = {version = ">=4.6", markers = "python_version < \"3.10\""}
Jinja2 = ">=2.11.1"
Markdown = ">=3.3"
MarkupSafe = ">=1.1"
mkdocs = ">=1.2"
mkdocs-autorefs = ">=0.3.1"
pymdown-extensions = ">=6.3"
typing-extensions = {version = ">=4.1", markers = "python_version < \"3.10\""}

[package.extras]
crystal = ["mkdocstrings-crystal (>=0.3.4)"]
python = ["mkdocstrings-python (>=0.5.2)"]
python-legacy = ["mkdocstrings-python-legacy (>=0.2.1)"]

[[package]]
name = "mkdocstrings-python"
version = "1.7.2"
description = "A Python handler for mkdocstrings."
optional = false
python-versions = ">=3.8"
files = [
    {file = "mkdocstrings_python-1.7.2-py3-none-any.whl", hash = "sha256:2d005729a90f1b86d6d71fad4953d787140996adec5b00a25fafc6ee48e1b79a"},
    {file = "mkdocstrings_python-1.7.2.tar.gz", hash = "sha256:75b6af86f9dcdc2d864072d8fed5b1d45ad94dd2ce97843ef52ca87ad53d9b26"},
]

[package.dependencies]
griffe = ">=0.35"
mkdocstrings = ">=0.20"

[[package]]
name = "mypy-extensions"
version = "1.0.0"
description = "Type system extensions for programs checked with the mypy type checker."
optional = false
python-versions = ">=3.5"
files = [
    {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"},
    {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"},
]

[[package]]
name = "packaging"
version = "23.2"
description = "Core utilities for Python packages"
optional = false
python-versions = ">=3.7"
files = [
    {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"},
    {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"},
]

[[package]]
name = "paginate"
version = "0.5.6"
description = "Divides large result sets into pages for easier browsing"
optional = false
python-versions = "*"
files = [
    {file = "paginate-0.5.6.tar.gz", hash = "sha256:5e6007b6a9398177a7e1648d04fdd9f8c9766a1a945bceac82f1929e8c78af2d"},
]

[[package]]
name = "pathspec"
version = "0.11.2"
description = "Utility library for gitignore style pattern matching of file paths."
optional = false
python-versions = ">=3.7"
files = [
    {file = "pathspec-0.11.2-py3-none-any.whl", hash = "sha256:1d6ed233af05e679efb96b1851550ea95bbb64b7c490b0f5aa52996c11e92a20"},
    {file = "pathspec-0.11.2.tar.gz", hash = "sha256:e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3"},
]

[[package]]
name = "platformdirs"
version = "3.11.0"
description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
optional = false
python-versions = ">=3.7"
files = [
    {file = "platformdirs-3.11.0-py3-none-any.whl", hash = "sha256:e9d171d00af68be50e9202731309c4e658fd8bc76f55c11c7dd760d023bda68e"},
    {file = "platformdirs-3.11.0.tar.gz", hash = "sha256:cf8ee52a3afdb965072dcc652433e0c7e3e40cf5ea1477cd4b3b1d2eb75495b3"},
]

[package.extras]
docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"]
test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"]

[[package]]
name = "pluggy"
version = "1.3.0"
description = "plugin and hook calling mechanisms for python"
optional = false
python-versions = ">=3.8"
files = [
    {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"},
    {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"},
]

[package.extras]
dev = ["pre-commit", "tox"]
testing = ["pytest", "pytest-benchmark"]

[[package]]
name = "poetry-core"
version = "1.7.0"
description = "Poetry PEP 517 Build Backend"
optional = false
python-versions = ">=3.8,<4.0"
files = [
    {file = "poetry_core-1.7.0-py3-none-any.whl", hash = "sha256:38e174cdb00a84ee4a1cab66a378b435747f72414f5573bc18cfc3850a94df38"},
    {file = "poetry_core-1.7.0.tar.gz", hash = "sha256:8f679b83bd9c820082637beca1204124d5d2a786e4818da47ec8acefd0353b74"},
]

[[package]]
name = "poyo"
version = "0.5.0"
description = "A lightweight YAML Parser for Python. 🐓"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
files = [
    {file = "poyo-0.5.0-py2.py3-none-any.whl", hash = "sha256:3e2ca8e33fdc3c411cd101ca395668395dd5dc7ac775b8e809e3def9f9fe041a"},
    {file = "poyo-0.5.0.tar.gz", hash = "sha256:e26956aa780c45f011ca9886f044590e2d8fd8b61db7b1c1cf4e0869f48ed4dd"},
]

[[package]]
name = "prompt-toolkit"
version = "3.0.39"
description = "Library for building powerful interactive command lines in Python"
optional = false
python-versions = ">=3.7.0"
files = [
    {file = "prompt_toolkit-3.0.39-py3-none-any.whl", hash = "sha256:9dffbe1d8acf91e3de75f3b544e4842382fc06c6babe903ac9acb74dc6e08d88"},
    {file = "prompt_toolkit-3.0.39.tar.gz", hash = "sha256:04505ade687dc26dc4284b1ad19a83be2f2afe83e7a828ace0c72f3a1df72aac"},
]

[package.dependencies]
wcwidth = "*"

[[package]]
name = "psycopg2-binary"
version = "2.9.9"
description = "psycopg2 - Python-PostgreSQL Database Adapter"
optional = false
python-versions = ">=3.7"
files = [
    {file = "psycopg2-binary-2.9.9.tar.gz", hash = "sha256:7f01846810177d829c7692f1f5ada8096762d9172af1b1a28d4ab5b77c923c1c"},
    {file = "psycopg2_binary-2.9.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c2470da5418b76232f02a2fcd2229537bb2d5a7096674ce61859c3229f2eb202"},
    {file = "psycopg2_binary-2.9.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c6af2a6d4b7ee9615cbb162b0738f6e1fd1f5c3eda7e5da17861eacf4c717ea7"},
    {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:75723c3c0fbbf34350b46a3199eb50638ab22a0228f93fb472ef4d9becc2382b"},
    {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83791a65b51ad6ee6cf0845634859d69a038ea9b03d7b26e703f94c7e93dbcf9"},
    {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0ef4854e82c09e84cc63084a9e4ccd6d9b154f1dbdd283efb92ecd0b5e2b8c84"},
    {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed1184ab8f113e8d660ce49a56390ca181f2981066acc27cf637d5c1e10ce46e"},
"psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed1184ab8f113e8d660ce49a56390ca181f2981066acc27cf637d5c1e10ce46e"}, {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d2997c458c690ec2bc6b0b7ecbafd02b029b7b4283078d3b32a852a7ce3ddd98"}, {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b58b4710c7f4161b5e9dcbe73bb7c62d65670a87df7bcce9e1faaad43e715245"}, {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:0c009475ee389757e6e34611d75f6e4f05f0cf5ebb76c6037508318e1a1e0d7e"}, {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8dbf6d1bc73f1d04ec1734bae3b4fb0ee3cb2a493d35ede9badbeb901fb40f6f"}, {file = "psycopg2_binary-2.9.9-cp310-cp310-win32.whl", hash = "sha256:3f78fd71c4f43a13d342be74ebbc0666fe1f555b8837eb113cb7416856c79682"}, {file = "psycopg2_binary-2.9.9-cp310-cp310-win_amd64.whl", hash = "sha256:876801744b0dee379e4e3c38b76fc89f88834bb15bf92ee07d94acd06ec890a0"}, {file = "psycopg2_binary-2.9.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ee825e70b1a209475622f7f7b776785bd68f34af6e7a46e2e42f27b659b5bc26"}, {file = "psycopg2_binary-2.9.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1ea665f8ce695bcc37a90ee52de7a7980be5161375d42a0b6c6abedbf0d81f0f"}, {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:143072318f793f53819048fdfe30c321890af0c3ec7cb1dfc9cc87aa88241de2"}, {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c332c8d69fb64979ebf76613c66b985414927a40f8defa16cf1bc028b7b0a7b0"}, {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7fc5a5acafb7d6ccca13bfa8c90f8c51f13d8fb87d95656d3950f0158d3ce53"}, {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:977646e05232579d2e7b9c59e21dbe5261f403a88417f6a6512e70d3f8a046be"}, {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b6356793b84728d9d50ead16ab43c187673831e9d4019013f1402c41b1db9b27"}, {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bc7bb56d04601d443f24094e9e31ae6deec9ccb23581f75343feebaf30423359"}, {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:77853062a2c45be16fd6b8d6de2a99278ee1d985a7bd8b103e97e41c034006d2"}, {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:78151aa3ec21dccd5cdef6c74c3e73386dcdfaf19bced944169697d7ac7482fc"}, {file = "psycopg2_binary-2.9.9-cp311-cp311-win32.whl", hash = "sha256:dc4926288b2a3e9fd7b50dc6a1909a13bbdadfc67d93f3374d984e56f885579d"}, {file = "psycopg2_binary-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:b76bedd166805480ab069612119ea636f5ab8f8771e640ae103e05a4aae3e417"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:8532fd6e6e2dc57bcb3bc90b079c60de896d2128c5d9d6f24a63875a95a088cf"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f8544b092a29a6ddd72f3556a9fcf249ec412e10ad28be6a0c0d948924f2212"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2d423c8d8a3c82d08fe8af900ad5b613ce3632a1249fd6a223941d0735fce493"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:2e5afae772c00980525f6d6ecf7cbca55676296b580c0e6abb407f15f3706996"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e6f98446430fdf41bd36d4faa6cb409f5140c1c2cf58ce0bbdaf16af7d3f119"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c77e3d1862452565875eb31bdb45ac62502feabbd53429fdc39a1cc341d681ba"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:cb16c65dcb648d0a43a2521f2f0a2300f40639f6f8c1ecbc662141e4e3e1ee07"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:911dda9c487075abd54e644ccdf5e5c16773470a6a5d3826fda76699410066fb"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:57fede879f08d23c85140a360c6a77709113efd1c993923c59fde17aa27599fe"}, {file = "psycopg2_binary-2.9.9-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2293b001e319ab0d869d660a704942c9e2cce19745262a8aba2115ef41a0a42a"}, {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03ef7df18daf2c4c07e2695e8cfd5ee7f748a1d54d802330985a78d2a5a6dca9"}, {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a602ea5aff39bb9fac6308e9c9d82b9a35c2bf288e184a816002c9fae930b77"}, {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8359bf4791968c5a78c56103702000105501adb557f3cf772b2c207284273984"}, {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:275ff571376626195ab95a746e6a04c7df8ea34638b99fc11160de91f2fef503"}, {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:f9b5571d33660d5009a8b3c25dc1db560206e2d2f89d3df1cb32d72c0d117d52"}, {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:420f9bbf47a02616e8554e825208cb947969451978dceb77f95ad09c37791dae"}, {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:4154ad09dac630a0f13f37b583eae260c6aa885d67dfbccb5b02c33f31a6d420"}, {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a148c5d507bb9b4f2030a2025c545fccb0e1ef317393eaba42e7eabd28eb6041"}, {file = "psycopg2_binary-2.9.9-cp37-cp37m-win32.whl", hash = "sha256:68fc1f1ba168724771e38bee37d940d2865cb0f562380a1fb1ffb428b75cb692"}, {file = "psycopg2_binary-2.9.9-cp37-cp37m-win_amd64.whl", hash = "sha256:281309265596e388ef483250db3640e5f414168c5a67e9c665cafce9492eda2f"}, {file = "psycopg2_binary-2.9.9-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:60989127da422b74a04345096c10d416c2b41bd7bf2a380eb541059e4e999980"}, {file = "psycopg2_binary-2.9.9-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:246b123cc54bb5361588acc54218c8c9fb73068bf227a4a531d8ed56fa3ca7d6"}, {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34eccd14566f8fe14b2b95bb13b11572f7c7d5c36da61caf414d23b91fcc5d94"}, {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18d0ef97766055fec15b5de2c06dd8e7654705ce3e5e5eed3b6651a1d2a9a152"}, {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d3f82c171b4ccd83bbaf35aa05e44e690113bd4f3b7b6cc54d2219b132f3ae55"}, {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:ead20f7913a9c1e894aebe47cccf9dc834e1618b7aa96155d2091a626e59c972"}, {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ca49a8119c6cbd77375ae303b0cfd8c11f011abbbd64601167ecca18a87e7cdd"}, {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:323ba25b92454adb36fa425dc5cf6f8f19f78948cbad2e7bc6cdf7b0d7982e59"}, {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:1236ed0952fbd919c100bc839eaa4a39ebc397ed1c08a97fc45fee2a595aa1b3"}, {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:729177eaf0aefca0994ce4cffe96ad3c75e377c7b6f4efa59ebf003b6d398716"}, {file = "psycopg2_binary-2.9.9-cp38-cp38-win32.whl", hash = "sha256:804d99b24ad523a1fe18cc707bf741670332f7c7412e9d49cb5eab67e886b9b5"}, {file = "psycopg2_binary-2.9.9-cp38-cp38-win_amd64.whl", hash = "sha256:a6cdcc3ede532f4a4b96000b6362099591ab4a3e913d70bcbac2b56c872446f7"}, {file = "psycopg2_binary-2.9.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:72dffbd8b4194858d0941062a9766f8297e8868e1dd07a7b36212aaa90f49472"}, {file = "psycopg2_binary-2.9.9-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:30dcc86377618a4c8f3b72418df92e77be4254d8f89f14b8e8f57d6d43603c0f"}, {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:31a34c508c003a4347d389a9e6fcc2307cc2150eb516462a7a17512130de109e"}, {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:15208be1c50b99203fe88d15695f22a5bed95ab3f84354c494bcb1d08557df67"}, {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1873aade94b74715be2246321c8650cabf5a0d098a95bab81145ffffa4c13876"}, {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a58c98a7e9c021f357348867f537017057c2ed7f77337fd914d0bedb35dace7"}, {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4686818798f9194d03c9129a4d9a702d9e113a89cb03bffe08c6cf799e053291"}, {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ebdc36bea43063116f0486869652cb2ed7032dbc59fbcb4445c4862b5c1ecf7f"}, {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:ca08decd2697fdea0aea364b370b1249d47336aec935f87b8bbfd7da5b2ee9c1"}, {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ac05fb791acf5e1a3e39402641827780fe44d27e72567a000412c648a85ba860"}, {file = "psycopg2_binary-2.9.9-cp39-cp39-win32.whl", hash = "sha256:9dba73be7305b399924709b91682299794887cbbd88e38226ed9f6712eabee90"}, {file = "psycopg2_binary-2.9.9-cp39-cp39-win_amd64.whl", hash = "sha256:f7ae5d65ccfbebdfa761585228eb4d0df3a8b15cfb53bd953e713e09fbb12957"}, ] [[package]] name = "pygments" version = "2.16.1" description = "Pygments is a syntax highlighting package written in Python." optional = false python-versions = ">=3.7" files = [ {file = "Pygments-2.16.1-py3-none-any.whl", hash = "sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692"}, {file = "Pygments-2.16.1.tar.gz", hash = "sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29"}, ] [package.extras] plugins = ["importlib-metadata"] [[package]] name = "pymdown-extensions" version = "10.3" description = "Extension pack for Python Markdown." 
optional = false python-versions = ">=3.8" files = [ {file = "pymdown_extensions-10.3-py3-none-any.whl", hash = "sha256:77a82c621c58a83efc49a389159181d570e370fff9f810d3a4766a75fc678b66"}, {file = "pymdown_extensions-10.3.tar.gz", hash = "sha256:94a0d8a03246712b64698af223848fd80aaf1ae4c4be29c8c61939b0467b5722"}, ] [package.dependencies] markdown = ">=3.2" pyyaml = "*" [package.extras] extra = ["pygments (>=2.12)"] [[package]] name = "pyproject-api" version = "1.6.1" description = "API to interact with the python pyproject.toml based projects" optional = false python-versions = ">=3.8" files = [ {file = "pyproject_api-1.6.1-py3-none-any.whl", hash = "sha256:4c0116d60476b0786c88692cf4e325a9814965e2469c5998b830bba16b183675"}, {file = "pyproject_api-1.6.1.tar.gz", hash = "sha256:1817dc018adc0d1ff9ca1ed8c60e1623d5aaca40814b953af14a9cf9a5cae538"}, ] [package.dependencies] packaging = ">=23.1" tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""} [package.extras] docs = ["furo (>=2023.8.19)", "sphinx (<7.2)", "sphinx-autodoc-typehints (>=1.24)"] testing = ["covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)", "setuptools (>=68.1.2)", "wheel (>=0.41.2)"] [[package]] name = "pytest" version = "7.4.2" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.7" files = [ {file = "pytest-7.4.2-py3-none-any.whl", hash = "sha256:1d881c6124e08ff0a1bb75ba3ec0bfd8b5354a01c194ddd5a0a870a48d99b002"}, {file = "pytest-7.4.2.tar.gz", hash = "sha256:a766259cfab564a2ad52cb1aae1b881a75c3eb7e34ca3779697c23ed47c47069"}, ] [package.dependencies] colorama = {version = "*", markers = "sys_platform == \"win32\""} exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} iniconfig = "*" packaging = "*" pluggy = ">=0.12,<2.0" tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} [package.extras] testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] [[package]] name = "pytest-cov" version = "4.1.0" description = "Pytest plugin for measuring coverage." optional = false python-versions = ">=3.7" files = [ {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, ] [package.dependencies] coverage = {version = ">=5.2.1", extras = ["toml"]} pytest = ">=4.6" [package.extras] testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] [[package]] name = "pytest-django" version = "4.5.2" description = "A Django plugin for pytest." 
optional = false python-versions = ">=3.5" files = [ {file = "pytest-django-4.5.2.tar.gz", hash = "sha256:d9076f759bb7c36939dbdd5ae6633c18edfc2902d1a69fdbefd2426b970ce6c2"}, {file = "pytest_django-4.5.2-py3-none-any.whl", hash = "sha256:c60834861933773109334fe5a53e83d1ef4828f2203a1d6a0fa9972f4f75ab3e"}, ] [package.dependencies] pytest = ">=5.4.0" [package.extras] docs = ["sphinx", "sphinx-rtd-theme"] testing = ["Django", "django-configurations (>=2.0)"] [[package]] name = "pytest-dotenv" version = "0.5.2" description = "A py.test plugin that parses environment files before running tests" optional = false python-versions = "*" files = [ {file = "pytest-dotenv-0.5.2.tar.gz", hash = "sha256:2dc6c3ac6d8764c71c6d2804e902d0ff810fa19692e95fe138aefc9b1aa73732"}, {file = "pytest_dotenv-0.5.2-py3-none-any.whl", hash = "sha256:40a2cece120a213898afaa5407673f6bd924b1fa7eafce6bda0e8abffe2f710f"}, ] [package.dependencies] pytest = ">=5.0.0" python-dotenv = ">=0.9.1" [[package]] name = "pytest-mock" version = "3.6.1" description = "Thin-wrapper around the mock package for easier use with pytest" optional = false python-versions = ">=3.6" files = [ {file = "pytest-mock-3.6.1.tar.gz", hash = "sha256:40217a058c52a63f1042f0784f62009e976ba824c418cced42e88d5f40ab0e62"}, {file = "pytest_mock-3.6.1-py3-none-any.whl", hash = "sha256:30c2f2cc9759e76eee674b81ea28c9f0b94f8f0445a1b87762cadf774f0df7e3"}, ] [package.dependencies] pytest = ">=5.0" [package.extras] dev = ["pre-commit", "pytest-asyncio", "tox"] [[package]] name = "pytest-order" version = "1.1.0" description = "pytest plugin to run your tests in a specific order" optional = false python-versions = ">=3.6" files = [ {file = "pytest-order-1.1.0.tar.gz", hash = "sha256:139d25b30826b78eebb42722f747eab14c44b88059d7a71d4f79d14a057269a5"}, {file = "pytest_order-1.1.0-py3-none-any.whl", hash = "sha256:3b3730969c97900fa5cd31ecff80847680ed56b2490954565c14949ba60d9371"}, ] [package.dependencies] pytest = [ {version = ">=5.0", markers = "python_version < \"3.10\""}, {version = ">=6.2.4", markers = "python_version >= \"3.10\""}, ] [[package]] name = "python-dateutil" version = "2.8.2" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, ] [package.dependencies] six = ">=1.5" [[package]] name = "python-dotenv" version = "1.0.0" description = "Read key-value pairs from a .env file and set them as environment variables" optional = false python-versions = ">=3.8" files = [ {file = "python-dotenv-1.0.0.tar.gz", hash = "sha256:a8df96034aae6d2d50a4ebe8216326c61c3eb64836776504fcca410e5937a3ba"}, {file = "python_dotenv-1.0.0-py3-none-any.whl", hash = "sha256:f5971a9226b701070a4bf2c38c89e5a3f0d64de8debda981d1db98583009122a"}, ] [package.extras] cli = ["click (>=5.0)"] [[package]] name = "python-gitlab" version = "3.15.0" description = "Interact with GitLab API" optional = false python-versions = ">=3.7.0" files = [ {file = "python-gitlab-3.15.0.tar.gz", hash = "sha256:c9e65eb7612a9fbb8abf0339972eca7fd7a73d4da66c9b446ffe528930aff534"}, {file = "python_gitlab-3.15.0-py3-none-any.whl", hash = "sha256:8f8d1c0d387f642eb1ac7bf5e8e0cd8b3dd49c6f34170cee3c7deb7d384611f3"}, ] [package.dependencies] requests = ">=2.25.0" requests-toolbelt = 
">=0.10.1" [package.extras] autocompletion = ["argcomplete (>=1.10.0,<3)"] yaml = ["PyYaml (>=5.2)"] [[package]] name = "python-slugify" version = "8.0.1" description = "A Python slugify application that also handles Unicode" optional = false python-versions = ">=3.7" files = [ {file = "python-slugify-8.0.1.tar.gz", hash = "sha256:ce0d46ddb668b3be82f4ed5e503dbc33dd815d83e2eb6824211310d3fb172a27"}, {file = "python_slugify-8.0.1-py2.py3-none-any.whl", hash = "sha256:70ca6ea68fe63ecc8fa4fcf00ae651fc8a5d02d93dcd12ae6d4fc7ca46c4d395"}, ] [package.dependencies] text-unidecode = ">=1.3" [package.extras] unidecode = ["Unidecode (>=1.1.1)"] [[package]] name = "pytz" version = "2023.3.post1" description = "World timezone definitions, modern and historical" optional = false python-versions = "*" files = [ {file = "pytz-2023.3.post1-py2.py3-none-any.whl", hash = "sha256:ce42d816b81b68506614c11e8937d3aa9e41007ceb50bfdcb0749b921bf646c7"}, {file = "pytz-2023.3.post1.tar.gz", hash = "sha256:7b4fddbeb94a1eba4b557da24f19fdf9db575192544270a9101d8509f9f43d7b"}, ] [[package]] name = "pyyaml" version = "6.0.1" description = "YAML parser and emitter for Python" optional = false python-versions = ">=3.6" files = [ {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = 
"sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = 
"sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, ] [[package]] name = "pyyaml-env-tag" version = "0.1" description = "A custom YAML tag for referencing environment variables in YAML files. " optional = false python-versions = ">=3.6" files = [ {file = "pyyaml_env_tag-0.1-py3-none-any.whl", hash = "sha256:af31106dec8a4d68c60207c1886031cbf839b68aa7abccdb19868200532c2069"}, {file = "pyyaml_env_tag-0.1.tar.gz", hash = "sha256:70092675bda14fdec33b31ba77e7543de9ddc88f2e5b99160396572d11525bdb"}, ] [package.dependencies] pyyaml = "*" [[package]] name = "regex" version = "2023.10.3" description = "Alternative regular expression module, to replace re." 
optional = false python-versions = ">=3.7" files = [ {file = "regex-2023.10.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4c34d4f73ea738223a094d8e0ffd6d2c1a1b4c175da34d6b0de3d8d69bee6bcc"}, {file = "regex-2023.10.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a8f4e49fc3ce020f65411432183e6775f24e02dff617281094ba6ab079ef0915"}, {file = "regex-2023.10.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4cd1bccf99d3ef1ab6ba835308ad85be040e6a11b0977ef7ea8c8005f01a3c29"}, {file = "regex-2023.10.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:81dce2ddc9f6e8f543d94b05d56e70d03a0774d32f6cca53e978dc01e4fc75b8"}, {file = "regex-2023.10.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c6b4d23c04831e3ab61717a707a5d763b300213db49ca680edf8bf13ab5d91b"}, {file = "regex-2023.10.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c15ad0aee158a15e17e0495e1e18741573d04eb6da06d8b84af726cfc1ed02ee"}, {file = "regex-2023.10.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6239d4e2e0b52c8bd38c51b760cd870069f0bdf99700a62cd509d7a031749a55"}, {file = "regex-2023.10.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4a8bf76e3182797c6b1afa5b822d1d5802ff30284abe4599e1247be4fd6b03be"}, {file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d9c727bbcf0065cbb20f39d2b4f932f8fa1631c3e01fcedc979bd4f51fe051c5"}, {file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:3ccf2716add72f80714b9a63899b67fa711b654be3fcdd34fa391d2d274ce767"}, {file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:107ac60d1bfdc3edb53be75e2a52aff7481b92817cfdddd9b4519ccf0e54a6ff"}, {file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:00ba3c9818e33f1fa974693fb55d24cdc8ebafcb2e4207680669d8f8d7cca79a"}, {file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f0a47efb1dbef13af9c9a54a94a0b814902e547b7f21acb29434504d18f36e3a"}, {file = "regex-2023.10.3-cp310-cp310-win32.whl", hash = "sha256:36362386b813fa6c9146da6149a001b7bd063dabc4d49522a1f7aa65b725c7ec"}, {file = "regex-2023.10.3-cp310-cp310-win_amd64.whl", hash = "sha256:c65a3b5330b54103e7d21cac3f6bf3900d46f6d50138d73343d9e5b2900b2353"}, {file = "regex-2023.10.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:90a79bce019c442604662d17bf69df99090e24cdc6ad95b18b6725c2988a490e"}, {file = "regex-2023.10.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c7964c2183c3e6cce3f497e3a9f49d182e969f2dc3aeeadfa18945ff7bdd7051"}, {file = "regex-2023.10.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ef80829117a8061f974b2fda8ec799717242353bff55f8a29411794d635d964"}, {file = "regex-2023.10.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5addc9d0209a9afca5fc070f93b726bf7003bd63a427f65ef797a931782e7edc"}, {file = "regex-2023.10.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c148bec483cc4b421562b4bcedb8e28a3b84fcc8f0aa4418e10898f3c2c0eb9b"}, {file = "regex-2023.10.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d1f21af4c1539051049796a0f50aa342f9a27cde57318f2fc41ed50b0dbc4ac"}, {file = "regex-2023.10.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:0b9ac09853b2a3e0d0082104036579809679e7715671cfbf89d83c1cb2a30f58"}, {file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ebedc192abbc7fd13c5ee800e83a6df252bec691eb2c4bedc9f8b2e2903f5e2a"}, {file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:d8a993c0a0ffd5f2d3bda23d0cd75e7086736f8f8268de8a82fbc4bd0ac6791e"}, {file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:be6b7b8d42d3090b6c80793524fa66c57ad7ee3fe9722b258aec6d0672543fd0"}, {file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4023e2efc35a30e66e938de5aef42b520c20e7eda7bb5fb12c35e5d09a4c43f6"}, {file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0d47840dc05e0ba04fe2e26f15126de7c755496d5a8aae4a08bda4dd8d646c54"}, {file = "regex-2023.10.3-cp311-cp311-win32.whl", hash = "sha256:9145f092b5d1977ec8c0ab46e7b3381b2fd069957b9862a43bd383e5c01d18c2"}, {file = "regex-2023.10.3-cp311-cp311-win_amd64.whl", hash = "sha256:b6104f9a46bd8743e4f738afef69b153c4b8b592d35ae46db07fc28ae3d5fb7c"}, {file = "regex-2023.10.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:bff507ae210371d4b1fe316d03433ac099f184d570a1a611e541923f78f05037"}, {file = "regex-2023.10.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:be5e22bbb67924dea15039c3282fa4cc6cdfbe0cbbd1c0515f9223186fc2ec5f"}, {file = "regex-2023.10.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a992f702c9be9c72fa46f01ca6e18d131906a7180950958f766c2aa294d4b41"}, {file = "regex-2023.10.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7434a61b158be563c1362d9071358f8ab91b8d928728cd2882af060481244c9e"}, {file = "regex-2023.10.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c2169b2dcabf4e608416f7f9468737583ce5f0a6e8677c4efbf795ce81109d7c"}, {file = "regex-2023.10.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9e908ef5889cda4de038892b9accc36d33d72fb3e12c747e2799a0e806ec841"}, {file = "regex-2023.10.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:12bd4bc2c632742c7ce20db48e0d99afdc05e03f0b4c1af90542e05b809a03d9"}, {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bc72c231f5449d86d6c7d9cc7cd819b6eb30134bb770b8cfdc0765e48ef9c420"}, {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bce8814b076f0ce5766dc87d5a056b0e9437b8e0cd351b9a6c4e1134a7dfbda9"}, {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:ba7cd6dc4d585ea544c1412019921570ebd8a597fabf475acc4528210d7c4a6f"}, {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b0c7d2f698e83f15228ba41c135501cfe7d5740181d5903e250e47f617eb4292"}, {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5a8f91c64f390ecee09ff793319f30a0f32492e99f5dc1c72bc361f23ccd0a9a"}, {file = "regex-2023.10.3-cp312-cp312-win32.whl", hash = "sha256:ad08a69728ff3c79866d729b095872afe1e0557251da4abb2c5faff15a91d19a"}, {file = "regex-2023.10.3-cp312-cp312-win_amd64.whl", hash = "sha256:39cdf8d141d6d44e8d5a12a8569d5a227f645c87df4f92179bd06e2e2705e76b"}, {file = "regex-2023.10.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4a3ee019a9befe84fa3e917a2dd378807e423d013377a884c1970a3c2792d293"}, {file = "regex-2023.10.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:76066d7ff61ba6bf3cb5efe2428fc82aac91802844c022d849a1f0f53820502d"}, {file = "regex-2023.10.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bfe50b61bab1b1ec260fa7cd91106fa9fece57e6beba05630afe27c71259c59b"}, {file = "regex-2023.10.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fd88f373cb71e6b59b7fa597e47e518282455c2734fd4306a05ca219a1991b0"}, {file = "regex-2023.10.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3ab05a182c7937fb374f7e946f04fb23a0c0699c0450e9fb02ef567412d2fa3"}, {file = "regex-2023.10.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dac37cf08fcf2094159922edc7a2784cfcc5c70f8354469f79ed085f0328ebdf"}, {file = "regex-2023.10.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e54ddd0bb8fb626aa1f9ba7b36629564544954fff9669b15da3610c22b9a0991"}, {file = "regex-2023.10.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3367007ad1951fde612bf65b0dffc8fd681a4ab98ac86957d16491400d661302"}, {file = "regex-2023.10.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:16f8740eb6dbacc7113e3097b0a36065a02e37b47c936b551805d40340fb9971"}, {file = "regex-2023.10.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:f4f2ca6df64cbdd27f27b34f35adb640b5d2d77264228554e68deda54456eb11"}, {file = "regex-2023.10.3-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:39807cbcbe406efca2a233884e169d056c35aa7e9f343d4e78665246a332f597"}, {file = "regex-2023.10.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:7eece6fbd3eae4a92d7c748ae825cbc1ee41a89bb1c3db05b5578ed3cfcfd7cb"}, {file = "regex-2023.10.3-cp37-cp37m-win32.whl", hash = "sha256:ce615c92d90df8373d9e13acddd154152645c0dc060871abf6bd43809673d20a"}, {file = "regex-2023.10.3-cp37-cp37m-win_amd64.whl", hash = "sha256:0f649fa32fe734c4abdfd4edbb8381c74abf5f34bc0b3271ce687b23729299ed"}, {file = "regex-2023.10.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9b98b7681a9437262947f41c7fac567c7e1f6eddd94b0483596d320092004533"}, {file = "regex-2023.10.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:91dc1d531f80c862441d7b66c4505cd6ea9d312f01fb2f4654f40c6fdf5cc37a"}, {file = "regex-2023.10.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82fcc1f1cc3ff1ab8a57ba619b149b907072e750815c5ba63e7aa2e1163384a4"}, {file = "regex-2023.10.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7979b834ec7a33aafae34a90aad9f914c41fd6eaa8474e66953f3f6f7cbd4368"}, {file = "regex-2023.10.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ef71561f82a89af6cfcbee47f0fabfdb6e63788a9258e913955d89fdd96902ab"}, {file = "regex-2023.10.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd829712de97753367153ed84f2de752b86cd1f7a88b55a3a775eb52eafe8a94"}, {file = "regex-2023.10.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:00e871d83a45eee2f8688d7e6849609c2ca2a04a6d48fba3dff4deef35d14f07"}, {file = "regex-2023.10.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:706e7b739fdd17cb89e1fbf712d9dc21311fc2333f6d435eac2d4ee81985098c"}, {file = "regex-2023.10.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:cc3f1c053b73f20c7ad88b0d1d23be7e7b3901229ce89f5000a8399746a6e039"}, {file = "regex-2023.10.3-cp38-cp38-musllinux_1_1_i686.whl", hash = 
"sha256:6f85739e80d13644b981a88f529d79c5bdf646b460ba190bffcaf6d57b2a9863"}, {file = "regex-2023.10.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:741ba2f511cc9626b7561a440f87d658aabb3d6b744a86a3c025f866b4d19e7f"}, {file = "regex-2023.10.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:e77c90ab5997e85901da85131fd36acd0ed2221368199b65f0d11bca44549711"}, {file = "regex-2023.10.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:979c24cbefaf2420c4e377ecd1f165ea08cc3d1fbb44bdc51bccbbf7c66a2cb4"}, {file = "regex-2023.10.3-cp38-cp38-win32.whl", hash = "sha256:58837f9d221744d4c92d2cf7201c6acd19623b50c643b56992cbd2b745485d3d"}, {file = "regex-2023.10.3-cp38-cp38-win_amd64.whl", hash = "sha256:c55853684fe08d4897c37dfc5faeff70607a5f1806c8be148f1695be4a63414b"}, {file = "regex-2023.10.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2c54e23836650bdf2c18222c87f6f840d4943944146ca479858404fedeb9f9af"}, {file = "regex-2023.10.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:69c0771ca5653c7d4b65203cbfc5e66db9375f1078689459fe196fe08b7b4930"}, {file = "regex-2023.10.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ac965a998e1388e6ff2e9781f499ad1eaa41e962a40d11c7823c9952c77123e"}, {file = "regex-2023.10.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1c0e8fae5b27caa34177bdfa5a960c46ff2f78ee2d45c6db15ae3f64ecadde14"}, {file = "regex-2023.10.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6c56c3d47da04f921b73ff9415fbaa939f684d47293f071aa9cbb13c94afc17d"}, {file = "regex-2023.10.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ef1e014eed78ab650bef9a6a9cbe50b052c0aebe553fb2881e0453717573f52"}, {file = "regex-2023.10.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d29338556a59423d9ff7b6eb0cb89ead2b0875e08fe522f3e068b955c3e7b59b"}, {file = "regex-2023.10.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9c6d0ced3c06d0f183b73d3c5920727268d2201aa0fe6d55c60d68c792ff3588"}, {file = "regex-2023.10.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:994645a46c6a740ee8ce8df7911d4aee458d9b1bc5639bc968226763d07f00fa"}, {file = "regex-2023.10.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:66e2fe786ef28da2b28e222c89502b2af984858091675044d93cb50e6f46d7af"}, {file = "regex-2023.10.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:11175910f62b2b8c055f2b089e0fedd694fe2be3941b3e2633653bc51064c528"}, {file = "regex-2023.10.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:06e9abc0e4c9ab4779c74ad99c3fc10d3967d03114449acc2c2762ad4472b8ca"}, {file = "regex-2023.10.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:fb02e4257376ae25c6dd95a5aec377f9b18c09be6ebdefa7ad209b9137b73d48"}, {file = "regex-2023.10.3-cp39-cp39-win32.whl", hash = "sha256:3b2c3502603fab52d7619b882c25a6850b766ebd1b18de3df23b2f939360e1bd"}, {file = "regex-2023.10.3-cp39-cp39-win_amd64.whl", hash = "sha256:adbccd17dcaff65704c856bd29951c58a1bd4b2b0f8ad6b826dbd543fe740988"}, {file = "regex-2023.10.3.tar.gz", hash = "sha256:3fef4f844d2290ee0ba57addcec17eec9e3df73f10a2748485dfd6a3a188cc0f"}, ] [[package]] name = "requests" version = "2.31.0" description = "Python HTTP for Humans." 
optional = false python-versions = ">=3.7" files = [ {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, ] [package.dependencies] certifi = ">=2017.4.17" charset-normalizer = ">=2,<4" idna = ">=2.5,<4" urllib3 = ">=1.21.1,<3" [package.extras] socks = ["PySocks (>=1.5.6,!=1.5.7)"] use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "requests-file" version = "1.5.1" description = "File transport adapter for Requests" optional = false python-versions = "*" files = [ {file = "requests-file-1.5.1.tar.gz", hash = "sha256:07d74208d3389d01c38ab89ef403af0cfec63957d53a0081d8eca738d0247d8e"}, {file = "requests_file-1.5.1-py2.py3-none-any.whl", hash = "sha256:dfe5dae75c12481f68ba353183c53a65e6044c923e64c24b2209f6c7570ca953"}, ] [package.dependencies] requests = ">=1.0.0" six = "*" [[package]] name = "requests-toolbelt" version = "1.0.0" description = "A utility belt for advanced users of python-requests" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ {file = "requests-toolbelt-1.0.0.tar.gz", hash = "sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6"}, {file = "requests_toolbelt-1.0.0-py2.py3-none-any.whl", hash = "sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06"}, ] [package.dependencies] requests = ">=2.0.1,<3.0.0" [[package]] name = "ruff" version = "0.0.292" description = "An extremely fast Python linter, written in Rust." optional = false python-versions = ">=3.7" files = [ {file = "ruff-0.0.292-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:02f29db018c9d474270c704e6c6b13b18ed0ecac82761e4fcf0faa3728430c96"}, {file = "ruff-0.0.292-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:69654e564342f507edfa09ee6897883ca76e331d4bbc3676d8a8403838e9fade"}, {file = "ruff-0.0.292-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c3c91859a9b845c33778f11902e7b26440d64b9d5110edd4e4fa1726c41e0a4"}, {file = "ruff-0.0.292-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f4476f1243af2d8c29da5f235c13dca52177117935e1f9393f9d90f9833f69e4"}, {file = "ruff-0.0.292-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be8eb50eaf8648070b8e58ece8e69c9322d34afe367eec4210fdee9a555e4ca7"}, {file = "ruff-0.0.292-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:9889bac18a0c07018aac75ef6c1e6511d8411724d67cb879103b01758e110a81"}, {file = "ruff-0.0.292-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6bdfabd4334684a4418b99b3118793f2c13bb67bf1540a769d7816410402a205"}, {file = "ruff-0.0.292-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa7c77c53bfcd75dbcd4d1f42d6cabf2485d2e1ee0678da850f08e1ab13081a8"}, {file = "ruff-0.0.292-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e087b24d0d849c5c81516ec740bf4fd48bf363cfb104545464e0fca749b6af9"}, {file = "ruff-0.0.292-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:f160b5ec26be32362d0774964e218f3fcf0a7da299f7e220ef45ae9e3e67101a"}, {file = "ruff-0.0.292-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:ac153eee6dd4444501c4bb92bff866491d4bfb01ce26dd2fff7ca472c8df9ad0"}, {file = "ruff-0.0.292-py3-none-musllinux_1_2_i686.whl", hash = 
"sha256:87616771e72820800b8faea82edd858324b29bb99a920d6aa3d3949dd3f88fb0"}, {file = "ruff-0.0.292-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:b76deb3bdbea2ef97db286cf953488745dd6424c122d275f05836c53f62d4016"}, {file = "ruff-0.0.292-py3-none-win32.whl", hash = "sha256:e854b05408f7a8033a027e4b1c7f9889563dd2aca545d13d06711e5c39c3d003"}, {file = "ruff-0.0.292-py3-none-win_amd64.whl", hash = "sha256:f27282bedfd04d4c3492e5c3398360c9d86a295be00eccc63914438b4ac8a83c"}, {file = "ruff-0.0.292-py3-none-win_arm64.whl", hash = "sha256:7f67a69c8f12fbc8daf6ae6d36705037bde315abf8b82b6e1f4c9e74eb750f68"}, {file = "ruff-0.0.292.tar.gz", hash = "sha256:1093449e37dd1e9b813798f6ad70932b57cf614e5c2b5c51005bf67d55db33ac"}, ] [[package]] name = "setuptools" version = "68.2.2" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ {file = "setuptools-68.2.2-py3-none-any.whl", hash = "sha256:b454a35605876da60632df1a60f736524eb73cc47bbc9f3f1ef1b644de74fd2a"}, {file = "setuptools-68.2.2.tar.gz", hash = "sha256:4ac1475276d2f1c48684874089fefcd83bd7162ddaafb81fac866ba0db282a87"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" version = "1.16.0" description = "Python 2 and 3 compatibility utilities" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" files = [ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] [[package]] name = "sqlparse" version = "0.4.4" description = "A non-validating SQL parser." 
optional = false python-versions = ">=3.5" files = [ {file = "sqlparse-0.4.4-py3-none-any.whl", hash = "sha256:5430a4fe2ac7d0f93e66f1efc6e1338a41884b7ddf2a350cedd20ccc4d9d28f3"}, {file = "sqlparse-0.4.4.tar.gz", hash = "sha256:d446183e84b8349fa3061f0fe7f06ca94ba65b426946ffebe6e3e8295332420c"}, ] [package.extras] dev = ["build", "flake8"] doc = ["sphinx"] test = ["pytest", "pytest-cov"] [[package]] name = "text-unidecode" version = "1.3" description = "The most basic Text::Unidecode port" optional = false python-versions = "*" files = [ {file = "text-unidecode-1.3.tar.gz", hash = "sha256:bad6603bb14d279193107714b288be206cac565dfa49aa5b105294dd5c4aab93"}, {file = "text_unidecode-1.3-py2.py3-none-any.whl", hash = "sha256:1311f10e8b895935241623731c2ba64f4c455287888b18189350b67134a822e8"}, ] [[package]] name = "tldextract" version = "3.6.0" description = "Accurately separates a URL's subdomain, domain, and public suffix, using the Public Suffix List (PSL). By default, this includes the public ICANN TLDs and their exceptions. You can optionally support the Public Suffix List's private domains as well." optional = false python-versions = ">=3.7" files = [ {file = "tldextract-3.6.0-py3-none-any.whl", hash = "sha256:30a492de80f4de215aa998588ba5c2e625ee74ace3a2705cfb52b0021053bcbe"}, {file = "tldextract-3.6.0.tar.gz", hash = "sha256:a5d8b6583791daca268a7592ebcf764152fa49617983c49916ee9de99b366222"}, ] [package.dependencies] filelock = ">=3.0.8" idna = "*" requests = ">=2.1.0" requests-file = ">=1.4" [[package]] name = "tomli" version = "2.0.1" description = "A lil' TOML parser" optional = false python-versions = ">=3.7" files = [ {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] [[package]] name = "tox" version = "4.11.3" description = "tox is a generic virtualenv management and test command line tool" optional = false python-versions = ">=3.8" files = [ {file = "tox-4.11.3-py3-none-any.whl", hash = "sha256:599af5e5bb0cad0148ac1558a0b66f8fff219ef88363483b8d92a81e4246f28f"}, {file = "tox-4.11.3.tar.gz", hash = "sha256:5039f68276461fae6a9452a3b2c7295798f00a0e92edcd9a3b78ba1a73577951"}, ] [package.dependencies] cachetools = ">=5.3.1" chardet = ">=5.2" colorama = ">=0.4.6" filelock = ">=3.12.3" packaging = ">=23.1" platformdirs = ">=3.10" pluggy = ">=1.3" pyproject-api = ">=1.6.1" tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""} virtualenv = ">=20.24.3" [package.extras] docs = ["furo (>=2023.8.19)", "sphinx (>=7.2.4)", "sphinx-argparse-cli (>=1.11.1)", "sphinx-autodoc-typehints (>=1.24)", "sphinx-copybutton (>=0.5.2)", "sphinx-inline-tabs (>=2023.4.21)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] testing = ["build[virtualenv] (>=0.10)", "covdefaults (>=2.3)", "detect-test-pollution (>=1.1.1)", "devpi-process (>=1)", "diff-cover (>=7.7)", "distlib (>=0.3.7)", "flaky (>=3.7)", "hatch-vcs (>=0.3)", "hatchling (>=1.18)", "psutil (>=5.9.5)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)", "pytest-xdist (>=3.3.1)", "re-assert (>=1.1)", "time-machine (>=2.12)", "wheel (>=0.41.2)"] [[package]] name = "types-python-dateutil" version = "2.8.19.14" description = "Typing stubs for python-dateutil" optional = false python-versions = "*" files = [ {file = "types-python-dateutil-2.8.19.14.tar.gz", hash = "sha256:1f4f10ac98bb8b16ade9dbee3518d9ace017821d94b057a425b069f834737f4b"}, 
{file = "types_python_dateutil-2.8.19.14-py3-none-any.whl", hash = "sha256:f977b8de27787639986b4e28963263fd0e5158942b3ecef91b9335c130cb1ce9"}, ] [[package]] name = "typing-extensions" version = "4.8.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ {file = "typing_extensions-4.8.0-py3-none-any.whl", hash = "sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0"}, {file = "typing_extensions-4.8.0.tar.gz", hash = "sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef"}, ] [[package]] name = "tzdata" version = "2023.3" description = "Provider of IANA time zone data" optional = false python-versions = ">=2" files = [ {file = "tzdata-2023.3-py2.py3-none-any.whl", hash = "sha256:7e65763eef3120314099b6939b5546db7adce1e7d6f2e179e3df563c70511eda"}, {file = "tzdata-2023.3.tar.gz", hash = "sha256:11ef1e08e54acb0d4f95bdb1be05da659673de4acbd21bf9c69e94cc5e907a3a"}, ] [[package]] name = "urllib3" version = "2.0.6" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.7" files = [ {file = "urllib3-2.0.6-py3-none-any.whl", hash = "sha256:7a7c7003b000adf9e7ca2a377c9688bbc54ed41b985789ed576570342a375cd2"}, {file = "urllib3-2.0.6.tar.gz", hash = "sha256:b19e1a85d206b56d7df1d5e683df4a7725252a964e3993648dd0fb5a1c157564"}, ] [package.extras] brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] secure = ["certifi", "cryptography (>=1.9)", "idna (>=2.0.0)", "pyopenssl (>=17.1.0)", "urllib3-secure-extra"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] [[package]] name = "virtualenv" version = "20.24.5" description = "Virtual Python Environment builder" optional = false python-versions = ">=3.7" files = [ {file = "virtualenv-20.24.5-py3-none-any.whl", hash = "sha256:b80039f280f4919c77b30f1c23294ae357c4c8701042086e3fc005963e4e537b"}, {file = "virtualenv-20.24.5.tar.gz", hash = "sha256:e8361967f6da6fbdf1426483bfe9fca8287c242ac0bc30429905721cefbff752"}, ] [package.dependencies] distlib = ">=0.3.7,<1" filelock = ">=3.12.2,<4" platformdirs = ">=3.9.1,<4" [package.extras] docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] [[package]] name = "watchdog" version = "3.0.0" description = "Filesystem events monitoring" optional = false python-versions = ">=3.7" files = [ {file = "watchdog-3.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:336adfc6f5cc4e037d52db31194f7581ff744b67382eb6021c868322e32eef41"}, {file = "watchdog-3.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a70a8dcde91be523c35b2bf96196edc5730edb347e374c7de7cd20c43ed95397"}, {file = "watchdog-3.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:adfdeab2da79ea2f76f87eb42a3ab1966a5313e5a69a0213a3cc06ef692b0e96"}, {file = "watchdog-3.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2b57a1e730af3156d13b7fdddfc23dea6487fceca29fc75c5a868beed29177ae"}, {file = "watchdog-3.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7ade88d0d778b1b222adebcc0927428f883db07017618a5e684fd03b83342bd9"}, {file = 
"watchdog-3.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7e447d172af52ad204d19982739aa2346245cc5ba6f579d16dac4bfec226d2e7"}, {file = "watchdog-3.0.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:9fac43a7466eb73e64a9940ac9ed6369baa39b3bf221ae23493a9ec4d0022674"}, {file = "watchdog-3.0.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8ae9cda41fa114e28faf86cb137d751a17ffd0316d1c34ccf2235e8a84365c7f"}, {file = "watchdog-3.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:25f70b4aa53bd743729c7475d7ec41093a580528b100e9a8c5b5efe8899592fc"}, {file = "watchdog-3.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4f94069eb16657d2c6faada4624c39464f65c05606af50bb7902e036e3219be3"}, {file = "watchdog-3.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7c5f84b5194c24dd573fa6472685b2a27cc5a17fe5f7b6fd40345378ca6812e3"}, {file = "watchdog-3.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3aa7f6a12e831ddfe78cdd4f8996af9cf334fd6346531b16cec61c3b3c0d8da0"}, {file = "watchdog-3.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:233b5817932685d39a7896b1090353fc8efc1ef99c9c054e46c8002561252fb8"}, {file = "watchdog-3.0.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:13bbbb462ee42ec3c5723e1205be8ced776f05b100e4737518c67c8325cf6100"}, {file = "watchdog-3.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:8f3ceecd20d71067c7fd4c9e832d4e22584318983cabc013dbf3f70ea95de346"}, {file = "watchdog-3.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:c9d8c8ec7efb887333cf71e328e39cffbf771d8f8f95d308ea4125bf5f90ba64"}, {file = "watchdog-3.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:0e06ab8858a76e1219e68c7573dfeba9dd1c0219476c5a44d5333b01d7e1743a"}, {file = "watchdog-3.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:d00e6be486affb5781468457b21a6cbe848c33ef43f9ea4a73b4882e5f188a44"}, {file = "watchdog-3.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:c07253088265c363d1ddf4b3cdb808d59a0468ecd017770ed716991620b8f77a"}, {file = "watchdog-3.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:5113334cf8cf0ac8cd45e1f8309a603291b614191c9add34d33075727a967709"}, {file = "watchdog-3.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:51f90f73b4697bac9c9a78394c3acbbd331ccd3655c11be1a15ae6fe289a8c83"}, {file = "watchdog-3.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:ba07e92756c97e3aca0912b5cbc4e5ad802f4557212788e72a72a47ff376950d"}, {file = "watchdog-3.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:d429c2430c93b7903914e4db9a966c7f2b068dd2ebdd2fa9b9ce094c7d459f33"}, {file = "watchdog-3.0.0-py3-none-win32.whl", hash = "sha256:3ed7c71a9dccfe838c2f0b6314ed0d9b22e77d268c67e015450a29036a81f60f"}, {file = "watchdog-3.0.0-py3-none-win_amd64.whl", hash = "sha256:4c9956d27be0bb08fc5f30d9d0179a855436e655f046d288e2bcc11adfae893c"}, {file = "watchdog-3.0.0-py3-none-win_ia64.whl", hash = "sha256:5d9f3a10e02d7371cd929b5d8f11e87d4bad890212ed3901f9b4d68767bee759"}, {file = "watchdog-3.0.0.tar.gz", hash = "sha256:4d98a320595da7a7c5a18fc48cb633c2e73cda78f93cac2ef42d42bf609a33f9"}, ] [package.extras] watchmedo = ["PyYAML (>=3.10)"] [[package]] name = "wcwidth" version = "0.2.8" description = "Measures the displayed width of unicode strings in a terminal" optional = false python-versions = "*" files = [ {file = "wcwidth-0.2.8-py2.py3-none-any.whl", hash = "sha256:77f719e01648ed600dfa5402c347481c0992263b81a027344f3e1ba25493a704"}, {file = "wcwidth-0.2.8.tar.gz", hash = 
"sha256:8705c569999ffbb4f6a87c6d1b80f324bd6db952f5eb0b95bc07517f4c1813d4"}, ] [[package]] name = "zipp" version = "3.17.0" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.8" files = [ {file = "zipp-3.17.0-py3-none-any.whl", hash = "sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31"}, {file = "zipp-3.17.0.tar.gz", hash = "sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"] [metadata] lock-version = "2.0" python-versions = ">=3.8.0,<4" content-hash = "14d7c2aa262308f3014d1e7c25f6564da25656ac9fe57c0ddd9e76499483fb23" django-pgtrigger-4.11.1/pyproject.toml000066400000000000000000000046011460427745000177560ustar00rootroot00000000000000[build-system] requires = ["poetry_core>=1.0.0"] build-backend = "poetry.core.masonry.api" [tool.black] line-length = 99 target-version = ['py38'] [tool.coverage.run] branch = true source = ["pgtrigger"] [tool.coverage.report] exclude_lines = [ "pragma: no cover", "raise AssertionError", "raise NotImplementedError", "pass", "pytest.mark.skip", "if TYPE_CHECKING:" ] show_missing = true fail_under = 100 omit = ["pgtrigger/tests/migrations/*"] [tool.poetry] name = "django-pgtrigger" packages = [ { include = "pgtrigger" } ] exclude = [ "*/tests/" ] version = "4.11.1" description = "Postgres trigger support integrated with Django models." 
authors = ["Wes Kendall"] classifiers = [ "Framework :: Django", "Framework :: Django :: 3.2", "Framework :: Django :: 4.0", "Framework :: Django :: 4.1", "Framework :: Django :: 4.2", "Framework :: Django :: 5.0", "Intended Audience :: Developers", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3 :: Only", "Framework :: Django", ] license = "BSD-3-Clause" readme = "README.md" homepage = "https://github.com/Opus10/django-pgtrigger" repository = "https://github.com/Opus10/django-pgtrigger" documentation = "https://django-pgtrigger.readthedocs.io" [tool.poetry.dependencies] python = ">=3.8.0,<4" django = ">=3" [tool.poetry.dev-dependencies] black = "23.9.1" dj-database-url = "2.1.0" git-tidy = "1.2.0" psycopg2-binary = "2.9.9" pytest = "7.4.2" pytest-cov = "4.1.0" pytest-dotenv = "0.5.2" pytest-django = "4.5.2" pytest-mock = "3.6.1" pytest-order = "1.1.0" django-dynamic-fixture = "4.0.1" django-postgres-extra = "2.0.4" tox = "4.11.3" ruff = "0.0.292" mkdocs = "1.5.3" mkdocs-material = "9.4.4" mkdocstrings-python = "1.7.2" footing = "*" setuptools = "*" poetry-core = "*" [tool.pytest.ini_options] xfail_strict = true addopts = "--reuse-db -m 'not independent'" testpaths = "pgtrigger/tests" norecursedirs = ".venv" DJANGO_SETTINGS_MODULE = "test_settings" markers = [ "independent: marks tests that should run independently of normal suite" ] [tool.ruff] select = ["E", "F", "B", "I", "G", "C4"] line-length = 99 target-version = "py38" django-pgtrigger-4.11.1/settings.py000066400000000000000000000025601460427745000172560ustar00rootroot00000000000000import copy import os import django import dj_database_url SECRET_KEY = "django-pgtrigger" # Install the tests as an app so that we can make test models INSTALLED_APPS = [ "pgtrigger", # For testing purposes "django.contrib.auth", "django.contrib.contenttypes", "django.contrib.postgres", "psqlextra", "pgtrigger.tests", "pgtrigger.tests.syncdb_app", ] # Database url comes from the DATABASE_URL env var # We have some multi-database and multi-schema tests DATABASES = { "default": dj_database_url.config(), "sqlite": {"ENGINE": "django.db.backends.sqlite3", "NAME": "test_sqlite"}, } DATABASES["other"] = copy.deepcopy(DATABASES["default"]) DATABASES["other"]["NAME"] += "_other" DATABASES["default"]["ENGINE"] = "psqlextra.backend" DATABASES["order"] = copy.deepcopy(DATABASES["default"]) DATABASES["order"]["OPTIONS"] = {"options": "-c search_path=order"} DATABASES["receipt"] = copy.deepcopy(DATABASES["default"]) DATABASES["receipt"]["OPTIONS"] = {"options": "-c search_path=receipt"} DEFAULT_AUTO_FIELD = "django.db.models.AutoField" # Ensure partitioned models dont get migrated for non-default DBs DATABASE_ROUTERS = ["pgtrigger.tests.models.Router"] # Turn off pgtrigger migrations for normal manage.py use PGTRIGGER_MIGRATIONS = False # Ensure that we always install triggers if running locally PGTRIGGER_INSTALL_ON_MIGRATE = True django-pgtrigger-4.11.1/test_settings.py000066400000000000000000000004451460427745000203150ustar00rootroot00000000000000from settings import * # Turn on pgtrigger migrations in the test suite PGTRIGGER_MIGRATIONS = True # We turn this on in tests to ensure that triggers are installed # when the test database is set up. We dynamically turn it off # when testing migrations. 

DEFAULT_AUTO_FIELD = "django.db.models.AutoField"

# Ensure partitioned models don't get migrated for non-default DBs
DATABASE_ROUTERS = ["pgtrigger.tests.models.Router"]

# Turn off pgtrigger migrations for normal manage.py use
PGTRIGGER_MIGRATIONS = False

# Ensure that we always install triggers if running locally
PGTRIGGER_INSTALL_ON_MIGRATE = True

django-pgtrigger-4.11.1/test_settings.py

from settings import *

# Turn on pgtrigger migrations in the test suite
PGTRIGGER_MIGRATIONS = True

# We turn this on in tests to ensure that triggers are installed
# when the test database is set up. We dynamically turn it off
# when testing migrations.
PGTRIGGER_INSTALL_ON_MIGRATE = True

django-pgtrigger-4.11.1/tox.ini

[tox]
isolated_build = true
envlist =
    py{38,39,310,311,312}-django32-psycopg2
    py{38,39,310,311,312}-django42-psycopg2
    py312-django42-psycopg3
    py{310,311,312}-django50-psycopg2
    py312-django50-psycopg3
    report

[testenv]
install_command = pip install {opts} --no-compile {packages}
deps =
    django32: Django>=3.2,<3.3
    django42: Django>=4.2,<4.3
    django50: Django>=5.0rc1,<5.1
    psycopg2: psycopg2-binary
    psycopg3: psycopg[binary]
allowlist_externals =
    poetry
    bash
    grep
passenv =
    DATABASE_URL
    PYTHONDONTWRITEBYTECODE
skip_install = true
commands =
    bash -c 'poetry export --with dev --without-hashes -f requirements.txt | grep -v "^[dD]jango==" | grep -v "^psycopg2-binary==" | pip install --no-compile -q --no-deps -r /dev/stdin'
    pip install --no-compile -q --no-deps --no-build-isolation -e .
    pytest --create-db --cov --cov-fail-under=0 --cov-append --cov-config pyproject.toml {posargs}
    # Some tests must run independently of the normal suite because they make
    # dynamic models
    pytest --create-db --cov --cov-fail-under=0 --cov-append --cov-config pyproject.toml -m independent {posargs}

[testenv:report]
allowlist_externals =
    coverage
skip_install = true
depends =
    py{38,39,310,311,312}-django{32,42}-psycopg2
    py312-django42-psycopg3
    py{310,311,312}-django50-psycopg2
    py312-django50-psycopg3
parallel_show_output = true
commands =
    coverage report --fail-under 100
    coverage erase
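
# Run a single environment locally (assumes tox is installed and DATABASE_URL
# points at a running Postgres), e.g.:
#   tox -e py312-django42-psycopg2
# The "report" environment then checks the combined coverage from all runs.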