pax_global_header 0000666 0000000 0000000 00000000064 14175513017 0014516 g ustar 00root root 0000000 0000000 52 comment=03d71ae0041bd63d0010333d797e623385a50331
django-postgres-extra-2.0.4/ 0000775 0000000 0000000 00000000000 14175513017 0015750 5 ustar 00root root 0000000 0000000 django-postgres-extra-2.0.4/.circleci/ 0000775 0000000 0000000 00000000000 14175513017 0017603 5 ustar 00root root 0000000 0000000 django-postgres-extra-2.0.4/.circleci/config.yml 0000664 0000000 0000000 00000005255 14175513017 0021602 0 ustar 00root root 0000000 0000000 version: 2.1
executors:
python:
parameters:
version:
type: string
docker:
- image: python:<< parameters.version >>-alpine
- image: postgres:11.0
environment:
POSTGRES_DB: 'psqlextra'
POSTGRES_USER: 'psqlextra'
POSTGRES_PASSWORD: 'psqlextra'
commands:
install-dependencies:
parameters:
extra:
type: string
steps:
- run:
name: Install packages
command: apk add postgresql-libs gcc musl-dev postgresql-dev git
- run:
name: Install Python packages
command: pip install --progress-bar off .[<< parameters.extra >>]
run-tests:
parameters:
pyversion:
type: integer
djversions:
type: string
steps:
- run:
name: Run tests
command: tox -e 'py<< parameters.pyversion >>-dj{<< parameters.djversions >>}'
environment:
DATABASE_URL: 'postgres://psqlextra:psqlextra@localhost:5432/psqlextra'
jobs:
test-python36:
executor:
name: python
version: "3.6"
steps:
- checkout
- install-dependencies:
extra: test
- run-tests:
pyversion: 36
djversions: 20,21,22,30,31,32
test-python37:
executor:
name: python
version: "3.7"
steps:
- checkout
- install-dependencies:
extra: test
- run-tests:
pyversion: 37
djversions: 20,21,22,30,31,32
test-python38:
executor:
name: python
version: "3.8"
steps:
- checkout
- install-dependencies:
extra: test
- run-tests:
pyversion: 38
djversions: 20,21,22,30,31,32
test-python39:
executor:
name: python
version: "3.9"
steps:
- checkout
- install-dependencies:
extra: test
- run-tests:
pyversion: 39
djversions: 21,22,30,31,32
test-python310:
executor:
name: python
version: "3.10"
steps:
- checkout
- install-dependencies:
extra: test
- run-tests:
pyversion: 310
djversions: 21,22,30,31,32
- store_test_results:
path: reports
- run:
name: Upload coverage report
command: coveralls
analysis:
executor:
name: python
version: "3.9"
steps:
- checkout
- install-dependencies:
extra: analysis
- run:
name: Verify
command: python setup.py verify
workflows:
version: 2
build:
jobs:
- test-python36
- test-python37
- test-python38
- test-python39
- test-python310
- analysis
django-postgres-extra-2.0.4/.coveragerc 0000664 0000000 0000000 00000000071 14175513017 0020067 0 ustar 00root root 0000000 0000000 [run]
include = psqlextra/*
omit = *migrations*, *tests*
django-postgres-extra-2.0.4/.gitignore 0000664 0000000 0000000 00000000601 14175513017 0017735 0 ustar 00root root 0000000 0000000 # Ignore virtual environments
env/
.env/
# Ignore Python byte code cache
*.pyc
__pycache__
.cache
# Ignore coverage reports
.coverage
reports/
# Ignore build results
*.egg-info/
pip-wheel-metadata/
dist/
# Ignore stupid .DS_Store
.DS_Store
# Ignore benchmark results
.benchmarks/
# Ignore temporary tox environments
.tox/
.pytest_cache/
# Ignore PyCharm / IntelliJ files
.idea/
django-postgres-extra-2.0.4/.readthedocs.yml 0000664 0000000 0000000 00000000322 14175513017 0021033 0 ustar 00root root 0000000 0000000 version: 2
sphinx:
builder: html
configuration: docs/source/conf.py
python:
version: 3.7
install:
- method: pip
path: .
extra_requirements:
- docs
- test
django-postgres-extra-2.0.4/CONTRIBUTING.md 0000664 0000000 0000000 00000002766 14175513017 0020214 0 ustar 00root root 0000000 0000000 # Contributing
Contributions to `django-postgres-extra` are definitely welcome! Any contribution that implements a PostgreSQL feature in the Django ORM is welcome.
Please use GitHub pull requests to contribute changes.
##
Information on how to run tests and how to hack on the code can be found at the bottom of the [README](https://github.com/SectorLabs/django-postgres-extra#working-with-the-code).
##
If you're unsure whether your change would be a good fit for `django-postgres-extra`, please submit an issue with the [idea](https://github.com/SectorLabs/django-postgres-extra/labels/idea) label and we can talk about it.
## Requirements
* All contributions must pass our CI.
* Existing tests pass.
* PyLint passes.
* PEP8 passes.
* Features that allow creating custom indexes or fields must also implement the associated migrations. `django-postgres-extra` prides itself on the fact that it integrates smoothly with Django migrations. We'd like to keep it that way for all features.
* Sufficiently complicated changes must be accomponied by tests.
## Our promise
* We'll promise to reply to each pull request within 24 hours of submission.
* We'll let you know whether we welcome the change or not within that timeframe.
* This avoids you wasting time on a feature that we feel is not a good fit.
We feel that these promises are fair to whomever decides its worth spending their free time to contribute to `django-postgres-extra`. Please do let us know if you feel we are not living up to these promises.
django-postgres-extra-2.0.4/LICENSE 0000664 0000000 0000000 00000002054 14175513017 0016756 0 ustar 00root root 0000000 0000000 MIT License
Copyright (c) 2017 Sector Labs
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
django-postgres-extra-2.0.4/README.md 0000664 0000000 0000000 00000010546 14175513017 0017235 0 ustar 00root root 0000000 0000000
| | | |
|--------------------|---------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| :white_check_mark: | **Tests** | [](https://circleci.com/gh/SectorLabs/django-postgres-extra/tree/master) |
| :memo: | **License** | [](http://doge.mit-license.org) |
| :package: | **PyPi** | [](https://pypi.python.org/pypi/django-postgres-extra) |
| :four_leaf_clover: | **Code coverage** | [](https://coveralls.io/github/SectorLabs/django-postgres-extra?branch=master) |
|
| **Django Versions** | 2.0, 2.1, 2.2, 3.0, 3.1, 3.2 |
|
| **Python Versions** | 3.6, 3.7, 3.8, 3.9, 3.10 |
| :book: | **Documentation** | [Read The Docs](https://django-postgres-extra.readthedocs.io/en/master/) |
| :warning: | **Upgrade** | [Upgrade from v1.x](https://django-postgres-extra.readthedocs.io/en/master/major_releases.html#new-features)
| :checkered_flag: | **Installation** | [Installation Guide](https://django-postgres-extra.readthedocs.io/en/master/installation.html) |
| :fire: | **Features** | [Features & Documentation](https://django-postgres-extra.readthedocs.io/en/master/index.html#features) |
| :droplet: | **Future enhancements** | [Potential features](https://github.com/SectorLabs/django-postgres-extra/issues?q=is%3Aopen+is%3Aissue+label%3Aenhancement) |
`django-postgres-extra` aims to make all of PostgreSQL's awesome features available through the Django ORM. We do this by taking care of all the hassle. As opposed to the many small packages that are available to try to bring a single feature to Django with minimal effort. ``django-postgres-extra`` goes the extra mile, with well tested implementations, seamless migrations and much more.
With seamless we mean that any features we add will work truly seamlessly. You should not have to manually modify your migrations to work with fields and objects provided by this package.
---
:warning: **This README is for v2. See the `v1` branch for v1.x.**
---
## Major features
[See the full list](http://django-postgres-extra.readthedocs.io/#features)
* **Native upserts**
* Single query
* Concurrency safe
* With bulk support (single query)
* **Extended support for HStoreField**
* Unique constraints
* Null constraints
* Select individual keys using ``.values()`` or ``.values_list()``
* **PostgreSQL 11.x declarative table partitioning**
* Supports both range and list partitioning
* **Faster deletes**
* Truncate tables (with cascade)
* **Indexes**
* Conditional unique index.
* Case sensitive unique index.
## Working with the code
### Prerequisites
* PostgreSQL 10 or newer.
* Django 2.0 or newer (including 3.x).
* Python 3.6 or newer.
### Getting started
1. Clone the repository:
λ git clone https://github.com/SectorLabs/django-postgres-extra.git
2. Create a virtual environment:
λ cd django-postgres-extra
λ virtualenv env
λ source env/bin/activate
3. Create a postgres user for use in tests (skip if your default user is a postgres superuser):
λ createuser --superuser psqlextra --pwprompt
λ export DATABASE_URL=postgres://psqlextra:@localhost/psqlextra
Hint: if you're using virtualenvwrapper, you might find it beneficial to put
the ``export`` line in ``$VIRTUAL_ENV/bin/postactivate`` so that it's always
available when using this virtualenv.
4. Install the development/test dependencies:
λ pip install .[test] .[analysis]
5. Run the tests:
λ tox
6. Run the benchmarks:
λ py.test -c pytest-benchmark.ini
7. Auto-format code, sort imports and auto-fix linting errors:
λ python setup.py fix
django-postgres-extra-2.0.4/docs/ 0000775 0000000 0000000 00000000000 14175513017 0016700 5 ustar 00root root 0000000 0000000 django-postgres-extra-2.0.4/docs/.gitignore 0000664 0000000 0000000 00000000007 14175513017 0020665 0 ustar 00root root 0000000 0000000 build/
django-postgres-extra-2.0.4/docs/Makefile 0000664 0000000 0000000 00000001176 14175513017 0020345 0 ustar 00root root 0000000 0000000 # Minimal makefile for Sphinx documentation
#
# You can set these variables from the command line, and also
# from the environment for the first two.
SPHINXOPTS ?=
SPHINXBUILD ?= sphinx-build
SOURCEDIR = source
BUILDDIR = build
# Put it first so that "make" without argument is like "make help".
help:
@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
.PHONY: help Makefile
# Catch-all target: route all unknown targets to Sphinx using the new
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
%: Makefile
@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
django-postgres-extra-2.0.4/docs/make.bat 0000664 0000000 0000000 00000001437 14175513017 0020312 0 ustar 00root root 0000000 0000000 @ECHO OFF
pushd %~dp0
REM Command file for Sphinx documentation
if "%SPHINXBUILD%" == "" (
set SPHINXBUILD=sphinx-build
)
set SOURCEDIR=source
set BUILDDIR=build
if "%1" == "" goto help
%SPHINXBUILD% >NUL 2>NUL
if errorlevel 9009 (
echo.
echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
echo.installed, then set the SPHINXBUILD environment variable to point
echo.to the full path of the 'sphinx-build' executable. Alternatively you
echo.may add the Sphinx directory to PATH.
echo.
echo.If you don't have Sphinx installed, grab it from
echo.http://sphinx-doc.org/
exit /b 1
)
%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
goto end
:help
%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
:end
popd
django-postgres-extra-2.0.4/docs/source/ 0000775 0000000 0000000 00000000000 14175513017 0020200 5 ustar 00root root 0000000 0000000 django-postgres-extra-2.0.4/docs/source/annotations.rst 0000664 0000000 0000000 00000001264 14175513017 0023272 0 ustar 00root root 0000000 0000000 .. include:: ./snippets/postgres_doc_links.rst
.. include:: ./snippets/manager_model_warning.rst
.. _annotations_page:
Annotations
===========
Renaming annotations
--------------------
Django does allow you to create an annotation that conflicts with a field on the model. :meth:`psqlextra.query.QuerySet.rename_annotation` makes it possible to do just that.
.. code-block:: python
from psqlextra.models import PostgresModel
from django.db.models import Upper
class MyModel(PostgresModel):
name = models.TextField()
MyModel.objects.annotate(name=Upper('name'))
# OR
MyModel.objects.annotate(name_upper=Upper('name')).rename_annotations(name='name_upper')
django-postgres-extra-2.0.4/docs/source/api_reference.rst 0000664 0000000 0000000 00000001533 14175513017 0023523 0 ustar 00root root 0000000 0000000 API Reference
-------------
.. automodule:: psqlextra.manager
.. autoclass:: PostgresManager
:members:
.. automodule:: psqlextra.query
.. autoclass:: PostgresQuerySet
:members:
:exclude-members: annotate, rename_annotations
.. automodule:: psqlextra.models
:members:
.. automodule:: psqlextra.fields
.. autoclass:: HStoreField
:members:
:exclude-members: deconstruct, get_prep_value
.. automethod:: __init__
.. automodule:: psqlextra.expressions
.. autoclass:: HStoreRef
.. autoclass:: DateTimeEpoch
.. autoclass:: ExcludedCol
.. automodule:: psqlextra.indexes
.. autoclass:: UniqueIndex
.. autoclass:: ConditionalUniqueIndex
.. autoclass:: CaseInsensitiveUniqueIndex
.. automodule:: psqlextra.types
:members:
:undoc-members:
.. automodule:: psqlextra.util
:members:
django-postgres-extra-2.0.4/docs/source/conf.py 0000664 0000000 0000000 00000001216 14175513017 0021477 0 ustar 00root root 0000000 0000000 import os
import sys
import sphinx_rtd_theme
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
sys.path.insert(0, os.path.abspath("../.."))
import django
django.setup()
project = "django-postgres-extra"
copyright = "2019-2021, Sector Labs"
author = "Sector Labs"
extensions = [
"sphinx_rtd_theme",
"sphinx.ext.intersphinx",
"sphinx.ext.autodoc",
"sphinx.ext.coverage",
"sphinx.ext.napoleon",
]
templates_path = ["_templates"]
exclude_patterns = []
html_theme = "sphinx_rtd_theme"
intersphinx_mapping = {
"django": ("https://docs.djangoproject.com/en/stable/", "https://docs.djangoproject.com/en/stable/_objects/"),
}
django-postgres-extra-2.0.4/docs/source/conflict_handling.rst 0000664 0000000 0000000 00000025164 14175513017 0024407 0 ustar 00root root 0000000 0000000 .. include:: ./snippets/postgres_doc_links.rst
.. include:: ./snippets/manager_model_warning.rst
.. _conflict_handling_page:
Conflict handling
=================
The :class:`~psqlextra.manager.PostgresManager` comes with full support for PostgreSQL's `ON CONFLICT`_ clause.
This is an extremely useful feature for doing concurrency safe inserts. Often, when you want to insert a row, you want to overwrite it already exists, or simply leave the existing data there. This would require a ``SELECT`` first and then possibly a ``INSERT``. Within those two queries, another process might make a change to the row.
The alternative of trying to insert, ignoring the error and then doing a ``UPDATE`` is also not good. That would result in a lot of write overhead (due to logging).
.. code-block:: python
from django.db import models
from psqlextra.models import PostgresModel
from psqlextra.query import ConflictAction
class MyModel(PostgresModel):
myfield = models.CharField(max_length=255, unique=True)
# insert or update if already exists, then fetch, all in a single query
obj2 = (
MyModel.objects
.on_conflict(['myfield'], ConflictAction.UPDATE)
.insert_and_get(myfield='beer')
)
# insert, or do nothing if it already exists, then fetch
obj1 = (
MyModel.objects
.on_conflict(['myfield'], ConflictAction.NOTHING)
.insert_and_get(myfield='beer')
)
# insert or update if already exists, then fetch only the primary key
id = (
MyModel.objects
.on_conflict(['myfield'], ConflictAction.UPDATE)
.insert(myfield='beer')
)
.. warning::
The standard Django methods for inserting/updating are not affected by :meth:`~psqlextra.query.PostgresQuerySet.on_conflict`. It was a conscious decision to not override or change their behavior. The following completely ignores the :meth:`~psqlextra.query.PostgresQuerySet.on_conflict`:
.. code-block:: python
obj = (
MyModel.objects
.on_conflict(['first_name', 'last_name'], ConflictAction.UPDATE)
.create(first_name='Henk', last_name='Jansen')
)
The same applies to methods such as :meth:`~django:django.db.models.query.QuerySet.update`, :meth:`~django:django.db.models.query.QuerySet.get_or_create` or :meth:`~django:django.db.models.query.QuerySet.update_or_create` etc.
Constraint specification
------------------------
The :meth:`~psqlextra.query.PostgresQuerySet.on_conflict` function's first parameter denotes the name of the column(s) in which the conflict might occur. Although you can specify multiple columns, these columns must somehow have a single constraint. For example, in case of a :attr:`~django:django.db.models.Options.unique_together` constraint.
Multiple columns
****************
Specifying multiple columns is necessary in case of a constraint that spans multiple columns, such as when using Django's :attr:`~django:django.db.models.Options.unique_together`.
.. code-block:: python
from django.db import models
from psqlextra.models import PostgresModel
class MyModel(PostgresModel)
class Meta:
unique_together = ('first_name', 'last_name',)
first_name = models.CharField(max_length=255)
last_name = models.CharField(max_length=255)
obj = (
MyModel.objects
.on_conflict(['first_name', 'last_name'], ConflictAction.UPDATE)
.insert_and_get(first_name='Henk', last_name='Jansen')
)
HStore keys
***********
Catching conflicts in columns with a ``UNIQUE`` constraint on a :class:`~psqlextra.fields.HStoreField` key is also supported:
.. code-block:: python
from django.db import models
from psqlextra.models import PostgresModel
from psqlextra.fields import HStoreField
class MyModel(PostgresModel)
name = HStoreField(uniqueness=['en'])
id = (
MyModel.objects
.on_conflict([('name', 'en')], ConflictAction.NOTHING)
.insert(name={'en': 'Swen'})
)
This also applies to "unique together" constraints in a :class:`~psqlextra.fields.HStoreField` field:
.. code-block:: python
class MyModel(PostgresModel)
name = HStoreField(uniqueness=[('en', 'ar')])
id = (
MyModel.objects
.on_conflict([('name', 'en'), ('name', 'ar')], ConflictAction.NOTHING)
.insert(name={'en': 'Swen'})
)
insert vs insert_and_get
------------------------
After specifying :meth:`~psqlextra.query.PostgresQuerySet.on_conflict` you can use either :meth:`~psqlextra.query.PostgresQuerySet.insert` or :meth:`~psqlextra.query.PostgresQuerySet.insert_and_get` to perform the insert.
Conflict actions
----------------
There's currently two actions that can be taken when encountering a conflict. The second parameter of :meth:`~psqlextra.query.PostgresQuerySet.on_conflict` allows you to specify that should happen.
ConflictAction.UPDATE
*********************
:attr:`psqlextra.types.ConflictAction.UPDATE`
* If the row does **not exist**, insert a new one.
* If the row **exists**, update it.
This is also known as a "upsert".
Condition
"""""""""
Optionally, a condition can be added. PostgreSQL will then only apply the update if the condition holds true. A condition is specified as a custom expression.
A row level lock is acquired before evaluating the condition and proceeding with the update.
.. note::
The update condition is translated as a condition for `ON CONFLICT`_. The PostgreSQL documentation states the following:
An expression that returns a value of type boolean. Only rows for which this expression returns true will be updated, although all rows will be locked when the ON CONFLICT DO UPDATE action is taken. Note that condition is evaluated last, after a conflict has been identified as a candidate to update.
.. code-block:: python
from psqlextra.expressions import CombinedExpression, ExcludedCol
pk = (
MyModel
.objects
.on_conflict(
['name'],
ConflictAction.UPDATE,
update_condition=CombinedExpression(
MyModel._meta.get_field('priority').get_col(MyModel._meta.db_table),
'>',
ExcludedCol('priority'),
)
)
.insert(
name='henk',
priority=1,
)
)
if pk:
print('update applied or inserted')
else:
print('condition was false-ish and no changes were made')
When writing expressions, refer to the data you're trying to upsert with the :class:`psqlextra.expressions.ExcludedCol` expression.
Alternatively, with Django 3.1 or newer, :class:`~django:django.db.models.Q` objects can be used instead:
.. code-block:: python
from django.db.models import Q
from psqlextra.expressions import ExcludedCol
Q(name=ExcludedCol('name'))
Q(name__isnull=True)
Q(name__gt=ExcludedCol('priority'))
ConflictAction.NOTHING
**********************
:attr:`psqlextra.types.ConflictAction.NOTHING`
* If the row does **not exist**, insert a new one.
* If the row **exists**, do nothing.
This is preferable when the data you're about to insert is the same as the one that already exists. This is more performant because it avoids a write in case the row already exists.
.. warning::
When using :attr:`~psqlextra.types.ConflictAction.NOTHING`, PostgreSQL only returns the row(s) that were created. Conflicting rows are not returned. See example below:
.. code-block:: python
# obj1 is _not_ none
obj1 = MyModel.objects.on_conflict(['name'], ConflictAction.NOTHING).insert(name="me")
# obj2 is none! object alreaddy exists
obj2 = MyModel.objects.on_conflict(['name'], ConflictAction.NOTHING).insert(name="me")
This applies to both :meth:`~psqlextra.query.PostgresQuerySet.insert` and :meth:`~psqlextra.query.PostgresQuerySet.bulk_insert`
Bulk
----
:meth:`~psqlextra.query.PostgresQuerySet.bulk_insert` allows your to use conflict resolution for bulk inserts:
.. code-block:: python
from django.db import models
from psqlextra.models import PostgresModel
class MyModel(PostgresModel):
name = models.CharField(max_length=255, unique=True)
obj = (
MyModel.objects
.on_conflict(['name'], ConflictAction.UPDATE)
.bulk_insert([
dict(name='swen'),
dict(name='henk'),
dict(name='adela')
])
)
:meth:`~psqlextra.query.PostgresQuerySet.bulk_insert` uses a single query to insert all specified rows at once. It returns a ``list`` of ``dict`` with each ``dict`` being a merge of the ``dict`` passed in along with any index returned from Postgres.
.. note::
In order to stick to the "everything in one query" principle, various, more advanced usages of :meth:`~psqlextra.query.PostgresQuerySet.bulk_insert` are impossible. It is not possible to have different rows specify different amounts of columns. The following example does **not work**:
.. code-block:: python
from django.db import models
from psqlextra.models import PostgresModel
class MyModel(PostgresModel):
first_name = models.CharField(max_length=255, unique=True)
last_name = models.CharField(max_length=255, default='kooij')
obj = (
MyModel.objects
.on_conflict(['name'], ConflictAction.UPDATE)
.bulk_insert([
dict(name='swen'),
dict(name='henk', last_name='poepjes'), # invalid, different column configuration
dict(name='adela')
])
)
An exception is thrown if this behavior is detected.
Shorthands
----------
The :meth:`~psqlextra.query.PostgresQuerySet.on_conflict`, :meth:`~psqlextra.query.PostgresQuerySet.insert` and :meth:`~psqlextra.query.PostgresQuerySet.insert_or_create` methods were only added in v1.6. Before that, only :attr:`~psqlextra.types.ConflictAction.UPDATE` was supported in the following form:
.. code-block:: python
from django.db import models
from psqlextra.models import PostgresModel
class MyModel(PostgresModel):
myfield = models.CharField(max_length=255, unique=True)
obj = (
MyModel.objects
.upsert_and_get(
conflict_target=['myfield']
fields=dict(myfield='beer')
)
)
id = (
MyModel.objects
.upsert(
conflict_target=['myfield']
fields=dict(myfield='beer')
)
)
(
MyModel.objects
.bulk_upsert(
conflict_target=['myfield']
rows=[
dict(myfield='beer'),
dict(myfield='wine')
]
)
)
These two short hands still exist and **are not** deprecated. They behave exactly the same as :attr:`~psqlextra.types.ConflictAction.UPDATE` and are there for convenience. It is up to you to decide what to use.
django-postgres-extra-2.0.4/docs/source/deletion.rst 0000664 0000000 0000000 00000003217 14175513017 0022540 0 ustar 00root root 0000000 0000000 .. include:: ./snippets/postgres_doc_links.rst
Deletion
========
.. _truncate_page:
Truncate
--------
In standard Django, deleting all records in a table is quite slow and cumbersome. It requires retrieving all rows from the database and deleting them one by one (unless you use bulk delete). Postgres has a standard statement for emptying out a table: `TRUNCATE TABLE`_.
Using the :meth:`~psqlextra.manager.PostgresManager.truncate` method on the :class:`~psqlextra.manager.PostgresManager` allows you to delete all records in a table in the blink of an eye:
.. code-block:: python
from django.db import models
from psqlextra.models import PostgresModel
class MyModel(PostgresModel):
myfield = models.CharField(max_length=255, unique=True)
MyModel.objects.create(myfield="1")
MyModel.objects.truncate() # table is empty after this
print(MyModel.objects.count()) # zero records left
Cascade
*******
By default, Postgres will raise an error if any other table is referencing one of the rows you're trying to delete. One can tell Postgres to cascade the truncate operation to all related rows.
.. code-block:: python
from django.db import models
from psqlextra.models import PostgresModel
class MyModel1(PostgresModel):
myfield = models.CharField(max_length=255, unique=True)
class MyModel2(PostgresModel):
mymodel1 = models.ForeignKey(Model1, on_delete=models.CASCADE)
obj1 = MyModel1.objects.create(myfield="1")
MyModel2.objects.create(mymodel1=obj1)
MyModel.objects.truncate(cascade=True)
print(MyModel1.objects.count()) # zero records left
print(MyModel2.objects.count()) # zero records left
django-postgres-extra-2.0.4/docs/source/expressions.rst 0000664 0000000 0000000 00000006146 14175513017 0023323 0 ustar 00root root 0000000 0000000 .. include:: ./snippets/postgres_doc_links.rst
.. include:: ./snippets/manager_model_warning.rst
.. _expressions_page:
Expressions
===========
Selecting an individual HStore key
----------------------------------
Use the :class:`~psqlextra.expressions.HStoreRef` expression to select an indvidiual `hstore`_ key:
.. code-block:: python
from psqlextra.models import PostgresModel
from psqlextra.fields import HStoreField
from psqlextra.expressions import HStoreRef
class MyModel(PostgresModel):
bla = HStoreField()
MyModel.objects.create(bla={'a': '1', 'b': '2'})
# '1'
a = (
MyModel.objects
.annotate(a=HStoreRef('bla', 'a'))
.values_list('a', flat=True)
.first()
)
Selecting a datetime as a UNIX epoch timestamp
----------------------------------------------
Use the :class:`~psqlextra.expressions.DateTimeEpoch` expression to select the value of a :class:`~django:django.db.models.DateTimeField` as a UNIX epoch timestamp.
.. code-block:: python
from psqlextra.models import PostgresModel
from psqlextra.fields import HStoreField
from psqlextra.expressions import DateTimeEpoch
class MyModel(PostgresModel):
datetime = DateTimeField(auto_now_add=True)
MyModel.objects.create()
timestamp = (
MyModel.objects
.annotate(timestamp=DateTimeEpoch('datetime'))
.values_list('timestamp', flat=True)
.first()
)
Multi-field coalesce
--------------------
Use the :class:`~psqlextra.expressions.IsNotNone` expression to perform something similar to a `coalesce`, but with multiple fields. The first non-null value encountered is selected.
.. code-block:: python
from psqlextra.models import PostgresModel
from psqlextra.fields import HStoreField
from psqlextra.expressions import IsNotNone
class MyModel(PostgresModel):
name_1 = models.TextField(null=True)
name_2 = models.TextField(null=True)
name_3 = models.TextField(null=True)
MyModel.objects.create(name_3='test')
# 'test'
name = (
MyModel.objects
.annotate(name=IsNotNone('name_1', 'name_2', 'name_3', default='buh'))
.values_list('name', flat=True)
.first()
)
# 'buh'
name = (
MyModel.objects
.annotate(name=IsNotNone('name_1', 'name_2', default='buh'))
.values_list('name', flat=True)
.first()
)
Excluded column
---------------
Use the :class:`~psqlextra.expressions.ExcludedCol` expression when performing an upsert using `ON CONFLICT`_ to refer to a column/field in the data is about to be upserted.
PostgreSQL keeps that data to be upserted in a special table named `EXCLUDED`. This expression is used to refer to a column in that table.
.. code-block:: python
from django.db.models import Q
from psqlextra.expressions import ExcludedCol
(
MyModel
.objects
.on_conflict(
['name'],
ConflictAction.UPDATE,
# translates to `priority > EXCLUDED.priority`
update_condition=Q(priority__gt=ExcludedCol('priority')),
)
.insert(
name='henk',
priority=1,
)
)
django-postgres-extra-2.0.4/docs/source/hstore.rst 0000664 0000000 0000000 00000004450 14175513017 0022241 0 ustar 00root root 0000000 0000000 .. include:: ./snippets/postgres_doc_links.rst
.. include:: ./snippets/manager_model_warning.rst
.. _hstore_page:
HStore
======
:class:`psqlextra.fields.HStoreField` is based on Django's :class:`~django:django.contrib.postgres.fields.HStoreField` and therefore supports everything Django does natively and more.
Constraints
-----------
Unique
******
The ``uniqueness`` constraint can be added on one or more `hstore`_ keys, similar to how a ``UNIQUE`` constraint can be added to a column. Setting this option causes unique indexes to be created on the specified keys.
You can specify a ``list`` of strings to specify the keys that must be marked as unique:
.. code-block:: python
from psqlextra.fields import HStoreField
from psqlextra.models import PostgresModel
class MyModel(PostgresModel):
myfield = HStoreField(uniqueness=['key1']
MyModel.objects.create(myfield={'key1': 'value1'})
MyModel.objects.create(myfield={'key1': 'value1'})
The second :meth:`~django:django.db.models.query.QuerySet.create` call will fail with a :class:`~django:django.db.IntegrityError` because there's already a row with ``key1=value1``.
Uniqueness can also be enforced "together", similar to Django's :attr:`~django:django.db.models.Options.unique_together` by specifying a tuple of fields rather than a single string:
.. code-block:: python
myfield = HStoreField(uniqueness=[('key1', 'key2'), 'key3'])
In the example above, ``key1`` and ``key2`` must unique **together**, and ``key3`` must unique on its own. By default, none of the keys are marked as "unique".
Required
********
The ``required`` option can be added to ensure that the specified `hstore`_ keys are set for every row. This is similar to a ``NOT NULL`` constraint on a column. You can specify a list of `hstore`_ keys that are required:
.. code-block:: python
from psqlextra.fields import HStoreField
from psqlextra.models import PostgresModel
class MyModel(PostgresModel):
myfield = HStoreField(required=['key1'])
MyModel.objects.create(myfield={'key1': None})
MyModel.objects.create(myfield={'key2': 'value1'})
Both calls to :meth:`~django:django.db.models.query.QuerySet.create` would fail in the example above since they do not provide a non-null value for ``key1``. By default, none of the keys are required.
django-postgres-extra-2.0.4/docs/source/index.rst 0000664 0000000 0000000 00000003650 14175513017 0022045 0 ustar 00root root 0000000 0000000 .. include:: ./snippets/postgres_doc_links.rst
Welcome
=======
``django-postgres-extra`` aims to make all of PostgreSQL's awesome features available through the Django ORM. We do this by taking care of all the hassle. As opposed to the many small packages that are available to try to bring a single feature to Django with minimal effort, ``django-postgres-extra`` goes the extra mile with well tested implementations, seamless migrations and much more.
By seamless, we mean that any features we add will work truly seamlessly. You should not have to manually modify your migrations to work with fields and objects provided by this package.
Features
--------
Explore the documentation to learn about all features:
* :ref:`Conflict handling `
Adds support for PostgreSQL's ``ON CONFLICT`` syntax for inserts. Supports ``DO UPDATE`` and ``DO NOTHING``. In other words: single statement, atomic, concurrency safe upserts.
* :ref:`HStore `
Built on top of Django's built-in support for `hstore`_ fields. Adds support for indices on keys and unique/required constraints. All of these features integrate well with Django's migrations system.
* :ref:`Partial unique index `
Partial (unique) index that only applies when a certain condition is true.
* :ref:`Case insensitive index `
Case insensitive index, allows searching a column and ignoring the casing.
* :ref:`Table partitioning `
Adds support for PostgreSQL 11.x declarative table partitioning.
* :ref:`Truncating tables `
Support for ``TRUNCATE TABLE`` statements (including cascading).
.. toctree::
:maxdepth: 2
:caption: Overview
installation
managers_models
hstore
indexes
conflict_handling
deletion
table_partitioning
expressions
annotations
settings
api_reference
major_releases
django-postgres-extra-2.0.4/docs/source/indexes.rst 0000664 0000000 0000000 00000005476 14175513017 0022405 0 ustar 00root root 0000000 0000000 .. _indexes_page:
Indexes
=======
.. _unique_index_page:
Unique Index
-----------------------------
The :class:`~psqlextra.indexes.UniqueIndex` lets you create a unique index. Normally Django only allows you to create unique indexes by specifying ``unique=True`` on the model field.
Although it can be used on any Django model, it is most useful on views and materialized views where ``unique=True`` does not work.
.. code-block:: python
from django.db import models
from psqlextra.indexes import UniqueIndex
class Model(models.Model):
class Meta:
indexes = [
UniqueIndex(fields=['name']),
]
name = models.CharField(max_length=255)
Model.objects.create(name='henk')
Model.objects.create(name='henk') # raises IntegrityError
.. _conditional_unique_index_page:
Conditional Unique Index
------------------------
The :class:`~psqlextra.indexes.ConditionalUniqueIndex` lets you create partial unique indexes in case you ever need :attr:`~django:django.db.models.Options.unique_together` constraints
on nullable columns.
.. warning::
In Django 3.1 or newer, you might want to use :attr:`~django.db.models.indexes.condition` instead.
Before:
.. code-block:: python
from django.db import models
class Model(models.Model):
class Meta:
unique_together = ['a', 'b']
a = models.ForeignKey('some_model', null=True)
b = models.ForeignKey('some_other_model')
# Works like a charm!
b = B()
Model.objects.create(a=None, b=b)
Model.objects.create(a=None, b=b)
After:
.. code-block:: python
from django.db import models
from psqlextra.indexes import ConditionalUniqueIndex
class Model(models.Model):
class Meta:
indexes = [
ConditionalUniqueIndex(fields=['a', 'b'], condition='"a" IS NOT NULL'),
ConditionalUniqueIndex(fields=['b'], condition='"a" IS NULL')
]
a = models.ForeignKey('some_model', null=True)
b = models.ForeignKey('some_other_model')
# Integrity Error!
b = B()
Model.objects.create(a=None, b=b)
Model.objects.create(a=None, b=b)
.. _case_insensitive_unique_index_page:
Case Insensitive Unique Index
-----------------------------
The :class:`~psqlextra.indexes.CaseInsensitiveUniqueIndex` lets you create an index that ignores the casing for the specified field(s).
This makes the field(s) behave more like a text field in MySQL.
.. code-block:: python
from django.db import models
from psqlextra.indexes import CaseInsensitiveUniqueIndex
class Model(models.Model):
class Meta:
indexes = [
CaseInsensitiveUniqueIndex(fields=['name']),
]
name = models.CharField(max_length=255)
Model.objects.create(name='henk')
Model.objects.create(name='Henk') # raises IntegrityError
django-postgres-extra-2.0.4/docs/source/installation.rst 0000664 0000000 0000000 00000001367 14175513017 0023442 0 ustar 00root root 0000000 0000000 .. _installation:
Installation
============
1. Install the package from PyPi:
.. code-block:: bash
$ pip install django-postgres-extra
2. Add ``django.contrib.postgres`` and ``psqlextra`` to your ``INSTALLED_APPS``:
.. code-block:: python
INSTALLED_APPS = [
...
"django.contrib.postgres",
"psqlextra",
]
3. Set the database engine to ``psqlextra.backend``:
.. code-block:: python
DATABASES = {
"default": {
...
"ENGINE": "psqlextra.backend",
},
}
.. note::
Already using a custom back-end? Set :ref:`POSTGRES_EXTRA_DB_BACKEND_BASE ` to your custom back-end.
django-postgres-extra-2.0.4/docs/source/major_releases.rst 0000664 0000000 0000000 00000004617 14175513017 0023735 0 ustar 00root root 0000000 0000000 Major releases
==============
1.x
---
* First release.
2.x
---
New features
************
* Support for PostgreSQL 11.x declarative table partitioning.
* Support for ``TRUNCATE TABLE``
* Case insensitive index
Other changes
*************
* Uses Django 2.x's mechanism for overriding queries and compilers. ``django-postgres-extra`` is extensible in the same way that Django is extensible now.
* Removes hacks because Django 2.x is more extensible.
Breaking changes
****************
* Removes support for ``psqlextra.signals``. Switch to standard Django signals.
* Inserts with ``ConflictAction.NOTHING`` only returns new rows. Conflicting rows are not returned.
* Drop support for Python 3.5.
* Drop support for Django 1.x.
* Removes ``psqlextra.expressions.Min``, ``psqlextra.expressions.Max``, these are natively supported by Django.
FAQ
***
1. Why was ``psqlextra.signals`` removed?
In order to make ``psqlextra.signals.update`` work, ``django-postgres-extra`` hooked into Django's :meth:`django:django.db.models.query.QuerySet.update` method to add a ``RETURNING id`` clause to the statement. This slowed down all update queries, even if no signal handler was registered. To fix the performance impact, a breaking change was needed.
The feature had little to do with PostgreSQL itself. This package focuses on making PostgreSQL specific features available in Django.
Signals being a rarely used feature that slows down unrelated queries was enough motivation to permanently remove it.
2. Why are inserts with ``ConflictAction.NOTHING`` not returning conflicting rows anymore?
This is standard PostgreSQL behavior. ``django-postgres-extra`` v1.x tried to work around this by doing a void ``ON CONFLICT UPDATE``. This trick only worked when inserting one row.
The work-around had a significant performance impact and was confusing when performing bulk inserts. In that case, only one row would be returned.
To avoid further confusion, ``ConflictAction.NOTHING`` now follows standard PostgreSQL behavior.
3. Why was support dropped for Python 3.5?
Python 3.6 added support for dataclasses.
4. Why was support dropped for Django 1.x?
Mainstream support for the last Django 1.11 was dropped in December 2017. Supporting both Django 1.x and Django 2.x was a major pain point. Dropping support for 1.x simplifies ``django-postgres-extra`` and speeds up the development of new features.
django-postgres-extra-2.0.4/docs/source/managers_models.rst 0000664 0000000 0000000 00000003762 14175513017 0024102 0 ustar 00root root 0000000 0000000 .. _managers_models:
Managers & Models
=================
:class:`~psqlextra.manager.PostgresManager` exposes a lot of functionality. Your model must use this manager in order to use most of this package's functionality.
There are four ways to do this:
* Inherit your model from :class:`psqlextra.models.PostgresModel`:
.. code-block:: python
from psqlextra.models import PostgresModel
class MyModel(PostgresModel):
myfield = models.CharField(max_length=255)
* Override default manager with :class:`psqlextra.manager.PostgresManager`:
.. code-block:: python
from django.db import models
from psqlextra.manager import PostgresManager
class MyModel(models.Model):
# override default django manager
objects = PostgresManager()
myfield = models.CharField(max_length=255)
* Provide :class:`psqlextra.manager.PostgresManager` as a custom manager:
.. code-block:: python
from django.db import models
from psqlextra.manager import PostgresManager
class MyModel(models.Model):
# custom manager name
beer = PostgresManager()
myfield = models.CharField(max_length=255)
# use like this:
MyModel.beer.upsert(..)
# not like this:
MyModel.objects.upsert(..) # error!
* Use the :meth:`psqlextra.util.postgres_manager` on the fly:
This allows the manager to be used **anywhere** on **any** model, but only within the context. This is especially useful if you want to do upserts into Django's :class:`~django:django.db.models.ManyToManyField` generated :attr:`~django:django.db.models.ManyToManyField.through` table:
.. code-block:: python
from django.db import models
from psqlextra.util import postgres_manager
class MyModel(models.Model):
myself = models.ManyToManyField('self')
# within the context, you can access psqlextra features
with postgres_manager(MyModel.myself.through) as manager:
manager.upsert(...)
django-postgres-extra-2.0.4/docs/source/settings.rst 0000664 0000000 0000000 00000002406 14175513017 0022574 0 ustar 00root root 0000000 0000000 .. _settings:
Settings
========
.. _POSTGRES_EXTRA_DB_BACKEND_BASE:
* ``POSTGRES_EXTRA_DB_BACKEND_BASE``
``DATABASES[db_name]['ENGINE']`` must be set to ``"psqlextra.backend"``. If you're already using a custom back-end, set ``POSTGRES_EXTRA_DB_BACKEND_BASE`` to your custom back-end. This will instruct ``django-postgres-extra`` to wrap the back-end you specified.
A good example of where this might be needed is if you are using the PostGIS back-end: ``django.contrib.gis.db.backends.postgis``.
**Default value**: ``django.db.backends.postgresql``
.. warning::
The custom back-end you specify must derive from the standard ``django.db.backends.postgresql``.
.. _POSTGRES_EXTRA_AUTO_EXTENSION_SET_UP:
* ``POSTGRES_EXTRA_AUTO_EXTENSION_SET_UP``
You can stop ``django-postgres-extra`` from automatically trying to enable the ``hstore`` extension on your database. Enabling extensions using ``CREATE EXTENSION`` requires superuser permissions. Disable this behaviour if you are not connecting to your database server using a superuser.
**Default value:** ``True``
.. note::
If set to ``False``, you must ensure that the ``hstore`` extension is enabled on your database manually. If not enabled, any ``hstore`` related functionality will not work.
django-postgres-extra-2.0.4/docs/source/snippets/ 0000775 0000000 0000000 00000000000 14175513017 0022045 5 ustar 00root root 0000000 0000000 django-postgres-extra-2.0.4/docs/source/snippets/manager_model_warning.rst 0000664 0000000 0000000 00000000376 14175513017 0027124 0 ustar 00root root 0000000 0000000 .. warning::
In order for any of the features described below to work, you must use the :class:`~psqlextra.manager.PostgresManager` or inherit your models from :class:`~psqlextra.models.PostgresModel`. Read more about this in :ref:`managers_models`.
django-postgres-extra-2.0.4/docs/source/snippets/postgres_doc_links.rst 0000664 0000000 0000000 00000000543 14175513017 0026474 0 ustar 00root root 0000000 0000000 .. _ON CONFLICT: https://www.postgresql.org/docs/11/sql-insert.html#SQL-ON-CONFLICT
.. _TRUNCATE TABLE: https://www.postgresql.org/docs/9.1/sql-truncate.html
.. _hstore: https://www.postgresql.org/docs/11/hstore.html
.. _PostgreSQL Declarative Table Partitioning: https://www.postgresql.org/docs/current/ddl-partitioning.html#DDL-PARTITIONING-DECLARATIVE
django-postgres-extra-2.0.4/docs/source/table_partitioning.rst 0000664 0000000 0000000 00000027273 14175513017 0024623 0 ustar 00root root 0000000 0000000 .. include:: ./snippets/postgres_doc_links.rst
.. warning::
Table partitioning is a relatively new and advanced PostgreSQL feature. It provides plenty of ways to shoot yourself in the foot.
We HIGHLY RECOMMEND you only use this feature if you're already deeply familiar with table partitioning and aware of its advantages and disadvantages.
Do study the PostgreSQL documentation carefully.
.. _table_partitioning_page:
Table partitioning
==================
:class:`~psqlextra.models.PostgresPartitionedModel` adds support for `PostgreSQL Declarative Table Partitioning`_.
The following partitioning methods are available:
* ``PARTITION BY RANGE``
* ``PARTITION BY LIST``
* ``PARTITION BY HASH``
.. note::
Although table partitioning is available in PostgreSQL 10.x, it is highly recommended you use PostgreSQL 11.x. Table partitioning got a major upgrade in PostgreSQL 11.x.
PostgreSQL 10.x does not support creating foreign keys to/from partitioned tables and does not automatically create an index across all partitions.
Creating partitioned tables
---------------------------
Partitioned tables are declared like regular Django models with a special base class and two extra options to set the partitioning method and key. Once declared, they behave like regular Django models.
Declaring the model
*******************
Inherit your model from :class:`psqlextra.models.PostgresPartitionedModel` and declare a child class named ``PartitioningMeta``. On the meta class, specify the partitioning method and key.
* Use :attr:`psqlextra.types.PostgresPartitioningMethod.RANGE` to ``PARTITION BY RANGE``
* Use :attr:`psqlextra.types.PostgresPartitioningMethod.LIST` to ``PARTITION BY LIST``
* Use :attr:`psqlextra.types.PostgresPartitioningMethod.HASH` to ``PARTITION BY HASH``
.. code-block:: python
from django.db import models
from psqlextra.types import PostgresPartitioningMethod
from psqlextra.models import PostgresPartitionedModel
class MyModel(PostgresPartitionedModel):
class PartitioningMeta:
method = PostgresPartitioningMethod.RANGE
key = ["timestamp"]
name = models.TextField()
timestamp = models.DateTimeField()
Generating a migration
**********************
Run the following command to automatically generate a migration:
.. code-block:: bash
python manage.py pgmakemigrations
This will generate a migration that creates the partitioned table with a default partition.
.. warning::
Always use ``python manage.py pgmakemigrations`` for partitioned models.
The model must be created by the :class:`~psqlextra.backend.migrations.operations.PostgresCreatePartitionedModel` operation.
Do not use the standard ``python manage.py makemigrations`` command for partitioned models. Django will issue a standard :class:`~django:django.db.migrations.operations.CreateModel` operation. Doing this will not create a partitioned table and all subsequent operations will fail.
Adding/removing partitions manually
-----------------------------------
Postgres does not have support for automatically creating new partitions as needed. Therefore, one must manually add new partitions. Depending on the partitioning method you have chosen, the partition has to be created differently.
Partitions are tables. Each partition must be given a unique name. :class:`~psqlextra.models.PostgresPartitionedModel` does not require you to create a model for each partition because you are not supposed to query partitions directly.
Adding a range partition
~~~~~~~~~~~~~~~~~~~~~~~~
Use the :class:`~psqlextra.backend.migrations.operations.PostgresAddRangePartition` operation to add a new range partition. Only use this operation when your partitioned model uses the :attr:`psqlextra.types.PostgresPartitioningMethod.RANGE`.
.. code-block:: python
from django.db import migrations, models
from psqlextra.backend.migrations.operations import PostgresAddRangePartition
class Migration(migrations.Migration):
operations = [
PostgresAddRangePartition(
model_name="mypartitionedmodel",
name="pt1",
from_values="2019-01-01",
to_values="2019-02-01",
),
]
Adding a list partition
~~~~~~~~~~~~~~~~~~~~~~~
Use the :class:`~psqlextra.backend.migrations.operations.PostgresAddListPartition` operation to add a new list partition. Only use this operation when your partitioned model uses the :attr:`psqlextra.types.PostgresPartitioningMethod.LIST`.
.. code-block:: python
from django.db import migrations, models
from psqlextra.backend.migrations.operations import PostgresAddListPartition
class Migration(migrations.Migration):
operations = [
PostgresAddListPartition(
model_name="mypartitionedmodel",
name="pt1",
values=["car", "boat"],
),
]
Adding a default partition
~~~~~~~~~~~~~~~~~~~~~~~~~~
Use the :class:`~psqlextra.backend.migrations.operations.PostgresAddDefaultPartition` operation to add a new default partition. A default partition is the partition where records get saved that couldn't fit in any other partition.
Note that you can only have one default partition per partitioned table/model.
.. code-block:: python
from django.db import migrations, models
from psqlextra.backend.migrations.operations import PostgresAddDefaultPartition
class Migration(migrations.Migration):
operations = [
PostgresAddDefaultPartition(
model_name="mypartitionedmodel",
name="default",
),
]
Deleting a default partition
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Use the :class:`~psqlextra.backend.migrations.operations.PostgresDeleteDefaultPartition` operation to delete an existing default partition.
.. code-block:: python
from django.db import migrations, models
from psqlextra.backend.migrations.operations import PostgresDeleteDefaultPartition
class Migration(migrations.Migration):
operations = [
PostgresDeleteDefaultPartition(
model_name="mypartitionedmodel",
name="pt1",
),
]
Deleting a range partition
~~~~~~~~~~~~~~~~~~~~~~~~~~
Use the :class:`psqlextra.backend.migrations.operations.PostgresDeleteRangePartition` operation to delete an existing range partition.
.. code-block:: python
from django.db import migrations, models
from psqlextra.backend.migrations.operations import PostgresDeleteRangePartition
class Migration(migrations.Migration):
operations = [
PostgresDeleteRangePartition(
model_name="mypartitionedmodel",
name="pt1",
),
]
Deleting a list partition
~~~~~~~~~~~~~~~~~~~~~~~~~
Use the :class:`~psqlextra.backend.migrations.operations.PostgresDeleteListPartition` operation to delete an existing list partition.
.. code-block:: python
from django.db import migrations, models
from psqlextra.backend.migrations.operations import PostgresDeleteListPartition
class Migration(migrations.Migration):
operations = [
PostgresDeleteListPartition(
model_name="mypartitionedmodel",
name="pt1",
),
]
Using the schema editor
***********************
Use the :class:`psqlextra.backend.PostgresSchemaEditor` to manage partitions directly in a more imperative fashion. The schema editor is used by the migration operations described above.
Adding a range partition
~~~~~~~~~~~~~~~~~~~~~~~~
.. code-block:: python
from django.db import connection
connection.schema_editor().add_range_partition(
model=MyPartitionedModel,
name="pt1",
from_values="2019-01-01",
to_values="2019-02-01",
)
Adding a list partition
~~~~~~~~~~~~~~~~~~~~~~~
.. code-block:: python
from django.db import connection
connection.schema_editor().add_list_partition(
model=MyPartitionedModel,
name="pt1",
values=["car", "boat"],
)
Adding a default partition
~~~~~~~~~~~~~~~~~~~~~~~~~~
.. code-block:: python
from django.db import connection
connection.schema_editor().add_default_partition(
model=MyPartitionedModel,
name="default",
)
Deleting a partition
~~~~~~~~~~~~~~~~~~~~
.. code-block:: python
from django.db import connection
connection.schema_editor().delete_partition(
model=MyPartitionedModel,
name="default",
)
Adding/removing partitions automatically
----------------------------------------
:class:`psqlextra.partitioning.PostgresPartitioningManager` an experimental helper class that can be called periodically to automatically create new partitions if you're using range partitioning.
.. note::
There is currently no scheduler or command to automatically create new partitions. You'll have to run this function in your own cron jobs.
The auto partitioner supports automatically creating yearly, monthly, weekly or daily partitions. Use the ``count`` parameter to configure how many partitions it should create ahead.
Partitioning strategies
***********************
Time-based partitioning
~~~~~~~~~~~~~~~~~~~~~~~
.. code-block:: python
from dateutil.relativedelta import relativedelta
from psqlextra.partitioning import (
    PostgresPartitioningManager,
    PostgresPartitioningConfig,
    PostgresCurrentTimePartitioningStrategy,
    PostgresTimePartitionSize,
    partition_by_current_time,
)
manager = PostgresPartitioningManager([
# 3 partitions ahead, each partition is one month
# delete partitions older than 6 months
# partitions will be named `[table_name]_[year]_[3-letter month name]`.
PostgresPartitioningConfig(
model=MyPartitionedModel,
strategy=PostgresCurrentTimePartitioningStrategy(
size=PostgresTimePartitionSize(months=1),
count=3,
max_age=relativedelta(months=6),
),
),
# 6 partitions ahead, each partition is two weeks
# delete partitions older than 8 months
# partitions will be named `[table_name]_[year]_week_[week number]`.
PostgresPartitioningConfig(
model=MyPartitionedModel,
strategy=PostgresCurrentTimePartitioningStrategy(
size=PostgresTimePartitionSize(weeks=2),
count=6,
max_age=relativedelta(months=8),
),
),
# 12 partitions ahead, each partition is 5 days
# old partitions are never deleted, `max_age` is not set
# partitions will be named `[table_name]_[year]_[month]_[month day number]`.
PostgresPartitioningConfig(
model=MyPartitionedModel,
strategy=PostgresCurrentTimePartitioningStrategy(
size=PostgresTimePartitionSize(days=5),
count=12,
),
),
])
# these are the default arguments
partitioning_plan = manager.plan(
skip_create=False,
skip_delete=False,
using='default'
)
# prints a list of partitions to be created/deleted
partitioning_plan.print()
# apply the plan
partitioning_plan.apply(using='default')
Custom strategy
~~~~~~~~~~~~~~~
You can create a custom partitioning strategy by implementing the :class:`psqlextra.partitioning.PostgresPartitioningStrategy` interface.
You can look at :class:`psqlextra.partitioning.PostgresCurrentTimePartitioningStrategy` as an example.
Switching partitioning strategies
*********************************
When switching partitioning strategies, you might encounter the problem that partitions for part of a particular range already exist. In order to combat this, you can use the :class:`psqlextra.partitioning.PostgresTimePartitioningStrategy` and specify the `start_datetime` parameter. As a result, no partitions will be created before the given date/time.
django-postgres-extra-2.0.4/manage.py 0000664 0000000 0000000 00000000407 14175513017 0017553 0 ustar 00root root 0000000 0000000 #!/usr/bin/env python
import os
import sys
# Entry point: hand the command-line arguments over to Django's
# management command framework.
if __name__ == '__main__':
    # Fall back to the local ``settings`` module unless the caller
    # already configured one in the environment.
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'settings')

    from django.core.management import execute_from_command_line

    execute_from_command_line(sys.argv)
django-postgres-extra-2.0.4/psqlextra/ 0000775 0000000 0000000 00000000000 14175513017 0017773 5 ustar 00root root 0000000 0000000 django-postgres-extra-2.0.4/psqlextra/__init__.py 0000664 0000000 0000000 00000000200 14175513017 0022074 0 ustar 00root root 0000000 0000000 import django
# On Django < 3.2, apps must point at their AppConfig subclass explicitly
# via ``default_app_config``; newer Django versions discover it
# automatically (and deprecate this attribute), hence the version guard.
if django.VERSION < (3, 2):  # pragma: no cover
    default_app_config = "psqlextra.apps.PostgresExtraAppConfig"
django-postgres-extra-2.0.4/psqlextra/apps.py 0000664 0000000 0000000 00000000212 14175513017 0021303 0 ustar 00root root 0000000 0000000 from django.apps import AppConfig
class PostgresExtraAppConfig(AppConfig):
    """Django application configuration for the ``psqlextra`` package."""

    # Dotted Python path of the app; Django derives the app label from it.
    name = "psqlextra"
    # Human-readable name, shown e.g. in the Django admin.
    verbose_name = "PostgreSQL Extra"
django-postgres-extra-2.0.4/psqlextra/backend/ 0000775 0000000 0000000 00000000000 14175513017 0021362 5 ustar 00root root 0000000 0000000 django-postgres-extra-2.0.4/psqlextra/backend/__init__.py 0000664 0000000 0000000 00000000000 14175513017 0023461 0 ustar 00root root 0000000 0000000 django-postgres-extra-2.0.4/psqlextra/backend/base.py 0000664 0000000 0000000 00000003102 14175513017 0022642 0 ustar 00root root 0000000 0000000 import logging
from django.conf import settings
from django.db import ProgrammingError
from . import base_impl
from .introspection import PostgresIntrospection
from .operations import PostgresOperations
from .schema import PostgresSchemaEditor
logger = logging.getLogger(__name__)
class DatabaseWrapper(base_impl.backend()):
    """PostgreSQL database back-end with psqlextra extensions.

    Derives from the configured base back-end and swaps in the custom
    schema editor, introspection and operations classes. Also attempts
    to enable the ``hstore`` extension when the database is prepared.
    """

    SchemaEditorClass = PostgresSchemaEditor
    introspection_class = PostgresIntrospection
    ops_class = PostgresOperations

    def prepare_database(self):
        """Prepares the configured database for use.

        Enables the ``hstore`` extension unless the
        ``POSTGRES_EXTRA_AUTO_EXTENSION_SET_UP`` setting disables it.
        """
        super().prepare_database()

        # Respect the opt-out: ``CREATE EXTENSION`` requires superuser
        # rights, which not every deployment connects with.
        if not getattr(
            settings, "POSTGRES_EXTRA_AUTO_EXTENSION_SET_UP", True
        ):
            return False

        with self.cursor() as cursor:
            try:
                cursor.execute("CREATE EXTENSION IF NOT EXISTS hstore")
            except ProgrammingError:
                # Most likely a permission error; warn instead of failing
                # so setups without hstore columns can still proceed.
                logger.warning(
                    'Failed to create "hstore" extension. '
                    "Tables with hstore columns may fail to migrate. "
                    "If hstore is needed, make sure you are connected "
                    "to the database as a superuser "
                    "or add the extension manually.",
                    exc_info=True,
                )
django-postgres-extra-2.0.4/psqlextra/backend/base_impl.py 0000664 0000000 0000000 00000004350 14175513017 0023671 0 ustar 00root root 0000000 0000000 import importlib
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.db.backends.postgresql.base import ( # isort:skip
DatabaseWrapper as Psycopg2DatabaseWrapper,
)
def backend():
    """Gets the base class for the custom database back-end.

    This should be the Django PostgreSQL back-end. However,
    some people are already using a custom back-end from
    another package. We are nice people and expose an option
    that allows them to configure the back-end we base upon.

    As long as the specified base eventually also has
    the PostgreSQL back-end as a base, then everything should
    work as intended.

    Returns:
        The ``DatabaseWrapper`` class of the configured base back-end.

    Raises:
        ImproperlyConfigured: If the configured module does not define
            a ``DatabaseWrapper`` class, or that class does not derive
            from the standard PostgreSQL back-end.
    """
    base_class_name = getattr(
        settings,
        "POSTGRES_EXTRA_DB_BACKEND_BASE",
        "django.db.backends.postgresql",
    )

    base_class_module = importlib.import_module(base_class_name + ".base")
    base_class = getattr(base_class_module, "DatabaseWrapper", None)

    if not base_class:
        raise ImproperlyConfigured(
            (
                "'%s' is not a valid database back-end."
                " The module does not define a DatabaseWrapper class."
                " Check the value of POSTGRES_EXTRA_DB_BACKEND_BASE."
            )
            % base_class_name
        )

    # ``base_class`` is a class object, so ``issubclass`` (not
    # ``isinstance``) is the correct check. The previous ``isinstance``
    # test could never be true and silently accepted back-ends that do
    # not derive from the PostgreSQL back-end.
    if not issubclass(base_class, Psycopg2DatabaseWrapper):
        raise ImproperlyConfigured(
            (
                "'%s' is not a valid database back-end."
                " It does not inherit from the PostgreSQL back-end."
                " Check the value of POSTGRES_EXTRA_DB_BACKEND_BASE."
            )
            % base_class_name
        )

    return base_class
def schema_editor():
    """Gets the schema editor class of the configured base back-end.

    The custom schema editor must extend whatever editor the base
    back-end provides.
    """
    base_backend = backend()
    return base_backend.SchemaEditorClass
def introspection():
    """Gets the introspection class of the configured base back-end.

    The custom introspection class must extend whatever class the base
    back-end provides.
    """
    base_backend = backend()
    return base_backend.introspection_class
def operations():
    """Gets the operations class of the configured base back-end.

    The custom operations class must extend whatever class the base
    back-end provides.
    """
    base_backend = backend()
    return base_backend.ops_class
django-postgres-extra-2.0.4/psqlextra/backend/introspection.py 0000664 0000000 0000000 00000013156 14175513017 0024642 0 ustar 00root root 0000000 0000000 from dataclasses import dataclass
from typing import List, Optional
from psqlextra.types import PostgresPartitioningMethod
from . import base_impl
PARTITIONING_STRATEGY_TO_METHOD = {
"r": PostgresPartitioningMethod.RANGE,
"l": PostgresPartitioningMethod.LIST,
"h": PostgresPartitioningMethod.HASH,
}
@dataclass
class PostgresIntrospectedPartitionTable:
    """Data container for information about a partition."""

    # Partition name with the parent table's ``<table>_`` prefix stripped.
    name: str
    # Full table name of the partition as it exists in the database.
    full_name: str
    # Contents of the table's COMMENT, if any.
    comment: Optional[str]
@dataclass
class PostgresIntrospectedPartitonedTable:
    """Data container for information about a partitioned table."""

    name: str
    method: PostgresPartitioningMethod
    key: List[str]
    partitions: List[PostgresIntrospectedPartitionTable]

    def partition_by_name(
        self, name: str
    ) -> Optional[PostgresIntrospectedPartitionTable]:
        """Finds the partition with the specified name.

        Returns None when no partition matches.
        """
        for candidate in self.partitions:
            if candidate.name == name:
                return candidate

        return None
class PostgresIntrospection(base_impl.introspection()):
"""Adds introspection features specific to PostgreSQL."""
def get_partitioned_tables(
self, cursor
) -> PostgresIntrospectedPartitonedTable:
"""Gets a list of partitioned tables."""
sql = """
SELECT
pg_class.relname,
pg_partitioned_table.partstrat
FROM
pg_partitioned_table
JOIN
pg_class
ON
pg_class.oid = pg_partitioned_table.partrelid
"""
cursor.execute(sql)
return [
PostgresIntrospectedPartitonedTable(
name=row[0],
method=PARTITIONING_STRATEGY_TO_METHOD[row[1]],
key=self.get_partition_key(cursor, row[0]),
partitions=self.get_partitions(cursor, row[0]),
)
for row in cursor.fetchall()
]
def get_partitioned_table(self, cursor, table_name: str):
"""Gets a single partitioned table."""
return next(
(
table
for table in self.get_partitioned_tables(cursor)
if table.name == table_name
),
None,
)
def get_partitions(
self, cursor, table_name
) -> List[PostgresIntrospectedPartitionTable]:
"""Gets a list of partitions belonging to the specified partitioned
table."""
sql = """
SELECT
child.relname,
pg_description.description
FROM pg_inherits
JOIN
pg_class parent
ON
pg_inherits.inhparent = parent.oid
JOIN
pg_class child
ON
pg_inherits.inhrelid = child.oid
JOIN
pg_namespace nmsp_parent
ON
nmsp_parent.oid = parent.relnamespace
JOIN
pg_namespace nmsp_child
ON
nmsp_child.oid = child.relnamespace
LEFT JOIN
pg_description
ON
pg_description.objoid = child.oid
WHERE
parent.relname = %s
"""
cursor.execute(sql, (table_name,))
return [
PostgresIntrospectedPartitionTable(
name=row[0].replace(f"{table_name}_", ""),
full_name=row[0],
comment=row[1] or None,
)
for row in cursor.fetchall()
]
def get_partition_key(self, cursor, table_name: str) -> List[str]:
"""Gets the partition key for the specified partitioned table.
Returns:
A list of column names that are part of the
partition key.
"""
sql = """
SELECT
col.column_name
FROM
(SELECT partrelid,
partnatts,
CASE partstrat
WHEN 'l' THEN 'list'
WHEN 'r' THEN 'range'
WHEN 'h' THEN 'hash'
END AS partition_strategy,
Unnest(partattrs) column_index
FROM pg_partitioned_table) pt
JOIN
pg_class par
ON par.oid = pt.partrelid
JOIN
information_schema.COLUMNS col
ON
col.table_schema = par.relnamespace :: regnamespace :: text
AND col.table_name = par.relname
AND ordinal_position = pt.column_index
WHERE
table_name = %s
"""
cursor.execute(sql, (table_name,))
return [row[0] for row in cursor.fetchall()]
def get_constraints(self, cursor, table_name: str):
    """Retrieve any constraints or keys (unique, pk, fk, check, index)
    across one or more columns.

    Also retrieve the definition of expression-based indexes.
    """

    constraints = super().get_constraints(cursor, table_name)

    # Django's stock implementation only includes a definition for
    # constraints, not for indexes; look the missing ones up in
    # pg_indexes and fill in the blanks.
    cursor.execute(
        "SELECT indexname, indexdef FROM pg_indexes WHERE tablename = %s",
        (table_name,),
    )

    for index_name, index_definition in cursor.fetchall():
        constraint = constraints[index_name]
        if constraint.get("definition") is None:
            constraint["definition"] = index_definition

    return constraints
django-postgres-extra-2.0.4/psqlextra/backend/migrations/ 0000775 0000000 0000000 00000000000 14175513017 0023536 5 ustar 00root root 0000000 0000000 django-postgres-extra-2.0.4/psqlextra/backend/migrations/README.md 0000664 0000000 0000000 00000006674 14175513017 0025032 0 ustar 00root root 0000000 0000000 ## What's up with the shady patch functions?
Django currently does not provide a way to extend certain classes that are used when auto-generating migrations using `makemigrations`. The patch functions use Python's standard mocking framework to direct certain functions to a custom implementation.
These patches allow `django-postgres-extra` to let Django auto-generate migrations for `PostgresPartitionedModel`, `PostgresViewModel` and `PostgresMaterializedView`.
None of the patches fundamentally change how Django works. They let Django do most of the work and only customize for Postgres-specific models. All of the patches call the original implementation and then patch the results instead of copying the entire implementation.
### Using the patches
The patches are all context managers. The top level `postgres_patched_migrations` context manager applies all patches for the duration of the context.
This is used in the custom `pgmakemigrations` command to extend the migration autodetector for `PostgresPartitionedModel`, `PostgresViewModel` and `PostgresMaterializedView`.
### Patches
#### Autodetector patch
* Patches `django.db.migrations.autodetector.MigrationAutodetector.add_operation`
This function is called every time the autodetector adds a new operation. For example, if Django detects a new model, `add_operation` is called with a new `CreateModel` operation instance.
The patch hooks into the `add_operation` function to transform the following operations:
* `CreateModel` into a `PostgresCreatePartitionedModel` operation if the model is a `PostgresPartitionedModel` and adds a `PostgresAddDefaultPartition` operation to create a default partition.
* `DeleteModel` into a `PostgresDeletePartitionedModel` operation if the model is a `PostgresPartitionedModel`.
* `CreateModel` into a `PostgresCreateViewModel` operation if the model is a `PostgresViewModel`.
* `DeleteModel` into a `PostgresDeleteViewModel` operation if the model is a `PostgresViewModel`.
* `CreateModel` into a `PostgresCreateMaterializedViewModel` operation if the model is a `PostgresMaterializedViewModel`.
* `DeleteModel` into a `PostgresDeleteMaterializedViewModel` operation if the model is a `PostgresMaterializedViewModel`.
* `AddField` into `ApplyState` migration if the model is a `PostgresViewModel` or `PostgresMaterializedViewModel`.
* `AlterField` into `ApplyState` migration if the model is a `PostgresViewModel` or `PostgresMaterializedViewModel`.
* `RenameField` into `ApplyState` migration if the model is a `PostgresViewModel` or `PostgresMaterializedViewModel`.
* `RemoveField` into `ApplyState` migration if the model is a `PostgresViewModel` or `PostgresMaterializedViewModel`.
#### ProjectState patch
* Patches `django.db.migrations.state.ProjectState.from_apps`
This function is called to build up the current migration state from all the installed apps. For each model, a `ModelState` is created.
The patch hooks into the `from_apps` function to transform the following:
* Create `PostgresPartitionedModelState` from the model if the model is a `PostgresPartitionedModel`.
* Create `PostgresViewModelState` from the model if the model is a `PostgresViewModel`.
* Create `PostgresMaterializedViewModelState` from the model if the model is a `PostgresMaterializedViewModel`.
These custom model states are needed to track partitioning and view options (`PartitioningMeta` and `ViewMeta`) in migrations. Without this, the partitioning and view options would not end up in migrations.
django-postgres-extra-2.0.4/psqlextra/backend/migrations/__init__.py 0000664 0000000 0000000 00000000147 14175513017 0025651 0 ustar 00root root 0000000 0000000 from .patched_migrations import postgres_patched_migrations
__all__ = ["postgres_patched_migrations"]
django-postgres-extra-2.0.4/psqlextra/backend/migrations/operations/ 0000775 0000000 0000000 00000000000 14175513017 0025721 5 ustar 00root root 0000000 0000000 django-postgres-extra-2.0.4/psqlextra/backend/migrations/operations/__init__.py 0000664 0000000 0000000 00000002701 14175513017 0030032 0 ustar 00root root 0000000 0000000 from .add_default_partition import PostgresAddDefaultPartition
from .add_hash_partition import PostgresAddHashPartition
from .add_list_partition import PostgresAddListPartition
from .add_range_partition import PostgresAddRangePartition
from .apply_state import ApplyState
from .create_materialized_view_model import PostgresCreateMaterializedViewModel
from .create_partitioned_model import PostgresCreatePartitionedModel
from .create_view_model import PostgresCreateViewModel
from .delete_default_partition import PostgresDeleteDefaultPartition
from .delete_hash_partition import PostgresDeleteHashPartition
from .delete_list_partition import PostgresDeleteListPartition
from .delete_materialized_view_model import PostgresDeleteMaterializedViewModel
from .delete_partitioned_model import PostgresDeletePartitionedModel
from .delete_range_partition import PostgresDeleteRangePartition
from .delete_view_model import PostgresDeleteViewModel
__all__ = [
"ApplyState",
"PostgresAddHashPartition",
"PostgresAddListPartition",
"PostgresAddRangePartition",
"PostgresAddDefaultPartition",
"PostgresDeleteDefaultPartition",
"PostgresDeleteHashPartition",
"PostgresDeleteListPartition",
"PostgresDeleteRangePartition",
"PostgresCreatePartitionedModel",
"PostgresDeletePartitionedModel",
"PostgresCreateViewModel",
"PostgresCreateMaterializedViewModel",
"PostgresDeleteViewModel",
"PostgresDeleteMaterializedViewModel",
]
django-postgres-extra-2.0.4/psqlextra/backend/migrations/operations/add_default_partition.py 0000664 0000000 0000000 00000002526 14175513017 0032625 0 ustar 00root root 0000000 0000000 from psqlextra.backend.migrations.state import PostgresPartitionState
from .partition import PostgresPartitionOperation
class PostgresAddDefaultPartition(PostgresPartitionOperation):
    """Adds a new default partition to a :see:PartitionedPostgresModel."""

    def state_forwards(self, app_label, state):
        """Records the new default partition in the migration state."""

        partition = PostgresPartitionState(
            app_label=app_label, model_name=self.model_name, name=self.name
        )

        model_state = state.models[(app_label, self.model_name_lower)]
        model_state.add_partition(partition)

        state.reload_model(app_label, self.model_name_lower)

    def database_forwards(self, app_label, schema_editor, from_state, to_state):
        """Creates the default partition table in the database."""

        model = to_state.apps.get_model(app_label, self.model_name)
        if not self.allow_migrate_model(schema_editor.connection.alias, model):
            return

        schema_editor.add_default_partition(model, self.name)

    def database_backwards(
        self, app_label, schema_editor, from_state, to_state
    ):
        """Drops the default partition table from the database."""

        model = from_state.apps.get_model(app_label, self.model_name)
        if not self.allow_migrate_model(schema_editor.connection.alias, model):
            return

        schema_editor.delete_partition(model, self.name)

    def describe(self) -> str:
        return "Creates default partition '%s' on %s" % (
            self.name,
            self.model_name,
        )
django-postgres-extra-2.0.4/psqlextra/backend/migrations/operations/add_hash_partition.py 0000664 0000000 0000000 00000004706 14175513017 0032126 0 ustar 00root root 0000000 0000000 from psqlextra.backend.migrations.state import PostgresHashPartitionState
from .partition import PostgresPartitionOperation
class PostgresAddHashPartition(PostgresPartitionOperation):
    """Adds a new hash partition to a :see:PartitionedPostgresModel.

    Each partition will hold the rows for which the hash value of the
    partition key divided by the specified modulus will produce the
    specified remainder.
    """

    def __init__(
        self, model_name: str, name: str, modulus: int, remainder: int
    ):
        """Initializes new instance of :see:PostgresAddHashPartition.

        Arguments:
            model_name:
                The name of the :see:PartitionedPostgresModel.

            name:
                The name to give to the new partition table.

            modulus:
                Integer value by which the key is divided.

            remainder:
                The remainder of the hash value when divided by modulus.
        """

        super().__init__(model_name, name)

        self.modulus = modulus
        self.remainder = remainder

    def state_forwards(self, app_label, state):
        """Records the new hash partition in the migration state."""

        partition_state = PostgresHashPartitionState(
            app_label=app_label,
            model_name=self.model_name,
            name=self.name,
            modulus=self.modulus,
            remainder=self.remainder,
        )

        model_state = state.models[(app_label, self.model_name_lower)]
        model_state.add_partition(partition_state)

        state.reload_model(app_label, self.model_name_lower)

    def database_forwards(self, app_label, schema_editor, from_state, to_state):
        """Creates the hash partition table in the database."""

        model = to_state.apps.get_model(app_label, self.model_name)
        if not self.allow_migrate_model(schema_editor.connection.alias, model):
            return

        schema_editor.add_hash_partition(
            model, self.name, self.modulus, self.remainder
        )

    def database_backwards(
        self, app_label, schema_editor, from_state, to_state
    ):
        """Drops the hash partition table from the database."""

        model = from_state.apps.get_model(app_label, self.model_name)
        if not self.allow_migrate_model(schema_editor.connection.alias, model):
            return

        schema_editor.delete_partition(model, self.name)

    def deconstruct(self):
        name, args, kwargs = super().deconstruct()
        kwargs.update(modulus=self.modulus, remainder=self.remainder)
        return name, args, kwargs

    def describe(self) -> str:
        return "Creates hash partition %s on %s" % (self.name, self.model_name)
django-postgres-extra-2.0.4/psqlextra/backend/migrations/operations/add_list_partition.py 0000664 0000000 0000000 00000004001 14175513017 0032142 0 ustar 00root root 0000000 0000000 from psqlextra.backend.migrations.state import PostgresListPartitionState
from .partition import PostgresPartitionOperation
class PostgresAddListPartition(PostgresPartitionOperation):
    """Adds a new list partition to a :see:PartitionedPostgresModel."""

    def __init__(self, model_name, name, values):
        """Initializes new instance of :see:PostgresAddListPartition.

        Arguments:
            model_name:
                The name of the :see:PartitionedPostgresModel.

            name:
                The name to give to the new partition table.

            values:
                Partition key values that should be
                stored in this partition.
        """

        super().__init__(model_name, name)

        self.values = values

    def state_forwards(self, app_label, state):
        """Records the new list partition in the migration state."""

        partition_state = PostgresListPartitionState(
            app_label=app_label,
            model_name=self.model_name,
            name=self.name,
            values=self.values,
        )

        model_state = state.models[(app_label, self.model_name_lower)]
        model_state.add_partition(partition_state)

        state.reload_model(app_label, self.model_name_lower)

    def database_forwards(self, app_label, schema_editor, from_state, to_state):
        """Creates the list partition table in the database."""

        model = to_state.apps.get_model(app_label, self.model_name)
        if not self.allow_migrate_model(schema_editor.connection.alias, model):
            return

        schema_editor.add_list_partition(model, self.name, self.values)

    def database_backwards(
        self, app_label, schema_editor, from_state, to_state
    ):
        """Drops the list partition table from the database."""

        model = from_state.apps.get_model(app_label, self.model_name)
        if not self.allow_migrate_model(schema_editor.connection.alias, model):
            return

        schema_editor.delete_partition(model, self.name)

    def deconstruct(self):
        name, args, kwargs = super().deconstruct()
        kwargs.update(values=self.values)
        return name, args, kwargs

    def describe(self) -> str:
        return "Creates list partition %s on %s" % (self.name, self.model_name)
django-postgres-extra-2.0.4/psqlextra/backend/migrations/operations/add_range_partition.py 0000664 0000000 0000000 00000004673 14175513017 0032302 0 ustar 00root root 0000000 0000000 from psqlextra.backend.migrations.state import PostgresRangePartitionState
from .partition import PostgresPartitionOperation
class PostgresAddRangePartition(PostgresPartitionOperation):
    """Adds a new range partition to a :see:PartitionedPostgresModel."""

    def __init__(self, model_name: str, name: str, from_values, to_values):
        """Initializes new instance of :see:PostgresAddRangePartition.

        Arguments:
            model_name:
                The name of the :see:PartitionedPostgresModel.

            name:
                The name to give to the new partition table.

            from_values:
                Start of the partitioning key range of
                values that need to be stored in this
                partition.

            to_values:
                End of the partitioning key range of
                values that need to be stored in this
                partition.
        """

        super().__init__(model_name, name)

        self.from_values = from_values
        self.to_values = to_values

    def state_forwards(self, app_label, state):
        """Records the new range partition in the migration state."""

        partition_state = PostgresRangePartitionState(
            app_label=app_label,
            model_name=self.model_name,
            name=self.name,
            from_values=self.from_values,
            to_values=self.to_values,
        )

        model_state = state.models[(app_label, self.model_name_lower)]
        model_state.add_partition(partition_state)

        state.reload_model(app_label, self.model_name_lower)

    def database_forwards(self, app_label, schema_editor, from_state, to_state):
        """Creates the range partition table in the database."""

        model = to_state.apps.get_model(app_label, self.model_name)
        if not self.allow_migrate_model(schema_editor.connection.alias, model):
            return

        schema_editor.add_range_partition(
            model, self.name, self.from_values, self.to_values
        )

    def database_backwards(
        self, app_label, schema_editor, from_state, to_state
    ):
        """Drops the range partition table from the database."""

        model = from_state.apps.get_model(app_label, self.model_name)
        if not self.allow_migrate_model(schema_editor.connection.alias, model):
            return

        schema_editor.delete_partition(model, self.name)

    def deconstruct(self):
        name, args, kwargs = super().deconstruct()
        kwargs.update(from_values=self.from_values, to_values=self.to_values)
        return name, args, kwargs

    def describe(self) -> str:
        return "Creates range partition %s on %s" % (self.name, self.model_name)
django-postgres-extra-2.0.4/psqlextra/backend/migrations/operations/apply_state.py 0000664 0000000 0000000 00000002360 14175513017 0030621 0 ustar 00root root 0000000 0000000 from django.db.migrations.operations.base import Operation
class ApplyState(Operation):
"""Takes an abritrary operation and migrates the project state but does not
apply the operation to the database.
This is very similar to the :see:RunSQL `state_operations`
parameter. This is useful if you want to tell Django that an
operation was applied without actually applying it.
"""
reduces_to_sql = False
def __init__(self, state_operation: Operation) -> None:
self.state_operation = state_operation
def deconstruct(self):
kwargs = {"state_operation": self.state_operation}
return (self.__class__.__qualname__, [], kwargs)
@property
def reversible(self):
return True
def state_forwards(self, app_label, state):
self.state_operation.state_forwards(app_label, state)
def state_backwards(self, app_label, state):
self.state_operation.state_backwards(app_label, state)
def database_forwards(self, app_label, schema_editor, from_state, to_state):
pass
def database_backwards(
self, app_label, schema_editor, from_state, to_state
):
pass
def describe(self):
return "Apply state: " + self.state_operation.describe()
create_materialized_view_model.py 0000664 0000000 0000000 00000004326 14175513017 0034430 0 ustar 00root root 0000000 0000000 django-postgres-extra-2.0.4/psqlextra/backend/migrations/operations from django.db.migrations.operations.models import CreateModel
from psqlextra.backend.migrations.state import (
PostgresMaterializedViewModelState,
)
class PostgresCreateMaterializedViewModel(CreateModel):
    """Creates the model as a native PostgreSQL 11.x materialized view.

    Behaves like Django's :see:CreateModel, but additionally tracks
    the view options in the migration state and issues the
    Postgres-specific DDL through the schema editor.
    """

    serialization_expand_args = [
        "fields",
        "options",
        "managers",
        "view_options",
    ]

    def __init__(
        self,
        name,
        fields,
        options=None,
        view_options=None,
        bases=None,
        managers=None,
    ):
        """Initializes new instance of :see:PostgresCreateMaterializedViewModel.

        Arguments:
            name / fields / options / bases / managers:
                Same as for :see:CreateModel.

            view_options:
                Dictionary of view options (e.g. the backing query)
                to track in the migration state. Defaults to an
                empty dict.
        """
        super().__init__(name, fields, options, bases, managers)

        # Default changed from a shared mutable `{}` to `None` to avoid
        # the mutable-default-argument pitfall; `None` and `{}` behave
        # identically thanks to the `or {}` below.
        self.view_options = view_options or {}

    def state_forwards(self, app_label, state):
        """Registers the materialized view model (incl. view options) in
        the project state."""

        state.add_model(
            PostgresMaterializedViewModelState(
                app_label=app_label,
                name=self.name,
                fields=list(self.fields),
                options=dict(self.options),
                bases=tuple(self.bases),
                managers=list(self.managers),
                view_options=dict(self.view_options),
            )
        )

    def database_forwards(self, app_label, schema_editor, from_state, to_state):
        """Apply this migration operation forwards."""

        model = to_state.apps.get_model(app_label, self.name)
        if self.allow_migrate_model(schema_editor.connection.alias, model):
            schema_editor.create_materialized_view_model(model)

    def database_backwards(
        self, app_label, schema_editor, from_state, to_state
    ):
        """Apply this migration operation backwards."""

        model = from_state.apps.get_model(app_label, self.name)
        if self.allow_migrate_model(schema_editor.connection.alias, model):
            schema_editor.delete_materialized_view_model(model)

    def deconstruct(self):
        name, args, kwargs = super().deconstruct()

        # Only serialize the view options when set; keeps generated
        # migrations clean.
        if self.view_options:
            kwargs["view_options"] = self.view_options

        return name, args, kwargs

    def describe(self):
        """Gets a human readable text describing this migration."""
        description = super().describe()
        description = description.replace("model", "materialized view model")
        return description
django-postgres-extra-2.0.4/psqlextra/backend/migrations/operations/create_partitioned_model.py 0000664 0000000 0000000 00000005523 14175513017 0033325 0 ustar 00root root 0000000 0000000 from django.db.migrations.operations.models import CreateModel
from psqlextra.backend.migrations.state import PostgresPartitionedModelState
class PostgresCreatePartitionedModel(CreateModel):
    """Creates the model as a native PostgreSQL 11.x partitioned table.

    Behaves like Django's :see:CreateModel, but additionally tracks
    the partitioning options in the migration state and issues the
    Postgres-specific DDL through the schema editor.
    """

    serialization_expand_args = [
        "fields",
        "options",
        "managers",
        "partitioning_options",
    ]

    def __init__(
        self,
        name,
        fields,
        options=None,
        partitioning_options=None,
        bases=None,
        managers=None,
    ):
        """Initializes new instance of :see:PostgresCreatePartitionedModel.

        Arguments:
            name / fields / options / bases / managers:
                Same as for :see:CreateModel.

            partitioning_options:
                Dictionary of partitioning options (method and key)
                to track in the migration state. Defaults to an
                empty dict.
        """
        super().__init__(name, fields, options, bases, managers)

        # Default changed from a shared mutable `{}` to `None` to avoid
        # the mutable-default-argument pitfall; `None` and `{}` behave
        # identically thanks to the `or {}` below.
        self.partitioning_options = partitioning_options or {}

    def state_forwards(self, app_label, state):
        """Registers the partitioned model (incl. partitioning options)
        in the project state."""

        state.add_model(
            PostgresPartitionedModelState(
                app_label=app_label,
                name=self.name,
                fields=list(self.fields),
                options=dict(self.options),
                bases=tuple(self.bases),
                managers=list(self.managers),
                partitioning_options=dict(self.partitioning_options),
            )
        )

    def database_forwards(self, app_label, schema_editor, from_state, to_state):
        """Apply this migration operation forwards."""

        model = to_state.apps.get_model(app_label, self.name)
        if self.allow_migrate_model(schema_editor.connection.alias, model):
            schema_editor.create_partitioned_model(model)

    def database_backwards(
        self, app_label, schema_editor, from_state, to_state
    ):
        """Apply this migration operation backwards."""

        model = from_state.apps.get_model(app_label, self.name)
        if self.allow_migrate_model(schema_editor.connection.alias, model):
            schema_editor.delete_partitioned_model(model)

    def deconstruct(self):
        name, args, kwargs = super().deconstruct()

        # Only serialize the partitioning options when set; keeps
        # generated migrations clean.
        if self.partitioning_options:
            kwargs["partitioning_options"] = self.partitioning_options

        return name, args, kwargs

    def describe(self):
        """Gets a human readable text describing this migration."""
        description = super().describe()
        description = description.replace("model", "partitioned model")
        return description

    def reduce(self, *args, **kwargs):
        result = super().reduce(*args, **kwargs)

        # The optimizer may collapse this operation with others into a
        # plain CreateModel; swap any such operation back for a
        # PostgresCreatePartitionedModel so that the partitioning
        # options are not lost.
        if isinstance(result, list) and result:
            for i, op in enumerate(result):
                if isinstance(op, CreateModel):
                    _, args, kwargs = op.deconstruct()
                    result[i] = PostgresCreatePartitionedModel(
                        *args,
                        **kwargs,
                        partitioning_options=self.partitioning_options
                    )

        return result
django-postgres-extra-2.0.4/psqlextra/backend/migrations/operations/create_view_model.py 0000664 0000000 0000000 00000004166 14175513017 0031757 0 ustar 00root root 0000000 0000000 from django.db.migrations.operations.models import CreateModel
from psqlextra.backend.migrations.state import PostgresViewModelState
class PostgresCreateViewModel(CreateModel):
    """Creates the model as a native PostgreSQL 11.x view.

    Behaves like Django's :see:CreateModel, but additionally tracks
    the view options in the migration state and issues the
    Postgres-specific DDL through the schema editor.
    """

    serialization_expand_args = [
        "fields",
        "options",
        "managers",
        "view_options",
    ]

    def __init__(
        self,
        name,
        fields,
        options=None,
        view_options=None,
        bases=None,
        managers=None,
    ):
        """Initializes new instance of :see:PostgresCreateViewModel.

        Arguments:
            name / fields / options / bases / managers:
                Same as for :see:CreateModel.

            view_options:
                Dictionary of view options (e.g. the backing query)
                to track in the migration state. Defaults to an
                empty dict.
        """
        super().__init__(name, fields, options, bases, managers)

        # Default changed from a shared mutable `{}` to `None` to avoid
        # the mutable-default-argument pitfall; `None` and `{}` behave
        # identically thanks to the `or {}` below.
        self.view_options = view_options or {}

    def state_forwards(self, app_label, state):
        """Registers the view model (incl. view options) in the project
        state."""

        state.add_model(
            PostgresViewModelState(
                app_label=app_label,
                name=self.name,
                fields=list(self.fields),
                options=dict(self.options),
                bases=tuple(self.bases),
                managers=list(self.managers),
                view_options=dict(self.view_options),
            )
        )

    def database_forwards(self, app_label, schema_editor, from_state, to_state):
        """Apply this migration operation forwards."""

        model = to_state.apps.get_model(app_label, self.name)
        if self.allow_migrate_model(schema_editor.connection.alias, model):
            schema_editor.create_view_model(model)

    def database_backwards(
        self, app_label, schema_editor, from_state, to_state
    ):
        """Apply this migration operation backwards."""

        model = from_state.apps.get_model(app_label, self.name)
        if self.allow_migrate_model(schema_editor.connection.alias, model):
            schema_editor.delete_view_model(model)

    def deconstruct(self):
        name, args, kwargs = super().deconstruct()

        # Only serialize the view options when set; keeps generated
        # migrations clean.
        if self.view_options:
            kwargs["view_options"] = self.view_options

        return name, args, kwargs

    def describe(self):
        """Gets a human readable text describing this migration."""
        description = super().describe()
        description = description.replace("model", "view model")
        return description
django-postgres-extra-2.0.4/psqlextra/backend/migrations/operations/delete_default_partition.py 0000664 0000000 0000000 00000001471 14175513017 0033335 0 ustar 00root root 0000000 0000000 from .delete_partition import PostgresDeletePartition
class PostgresDeleteDefaultPartition(PostgresDeletePartition):
    """Deletes a default partition that's part of a
    :see:PartitionedPostgresModel."""

    def database_backwards(
        self, app_label, schema_editor, from_state, to_state
    ):
        """Re-creates the default partition when reversing the deletion."""

        model = to_state.apps.get_model(app_label, self.model_name)
        model_state = to_state.models[(app_label, self.model_name_lower)]

        if not self.allow_migrate_model(schema_editor.connection.alias, model):
            return

        partition_state = model_state.partitions[self.name]
        schema_editor.add_default_partition(model, partition_state.name)

    def describe(self) -> str:
        return "Deletes default partition '%s' on %s" % (
            self.name,
            self.model_name,
        )
django-postgres-extra-2.0.4/psqlextra/backend/migrations/operations/delete_hash_partition.py 0000664 0000000 0000000 00000001660 14175513017 0032634 0 ustar 00root root 0000000 0000000 from .delete_partition import PostgresDeletePartition
class PostgresDeleteHashPartition(PostgresDeletePartition):
    """Deletes a hash partition that's part of a
    :see:PartitionedPostgresModel."""

    def database_backwards(
        self, app_label, schema_editor, from_state, to_state
    ):
        """Re-creates the hash partition when reversing the deletion."""

        model = to_state.apps.get_model(app_label, self.model_name)
        model_state = to_state.models[(app_label, self.model_name_lower)]

        if not self.allow_migrate_model(schema_editor.connection.alias, model):
            return

        partition_state = model_state.partitions[self.name]
        schema_editor.add_hash_partition(
            model,
            partition_state.name,
            partition_state.modulus,
            partition_state.remainder,
        )

    def describe(self) -> str:
        return "Deletes hash partition '%s' on %s" % (
            self.name,
            self.model_name,
        )
django-postgres-extra-2.0.4/psqlextra/backend/migrations/operations/delete_list_partition.py 0000664 0000000 0000000 00000001543 14175513017 0032664 0 ustar 00root root 0000000 0000000 from .delete_partition import PostgresDeletePartition
class PostgresDeleteListPartition(PostgresDeletePartition):
    """Deletes a list partition that's part of a
    :see:PartitionedPostgresModel."""

    def database_backwards(
        self, app_label, schema_editor, from_state, to_state
    ):
        """Re-creates the list partition when reversing the deletion."""

        model = to_state.apps.get_model(app_label, self.model_name)
        model_state = to_state.models[(app_label, self.model_name_lower)]

        if not self.allow_migrate_model(schema_editor.connection.alias, model):
            return

        partition_state = model_state.partitions[self.name]
        schema_editor.add_list_partition(
            model, partition_state.name, partition_state.values
        )

    def describe(self) -> str:
        return "Deletes list partition '%s' on %s" % (
            self.name,
            self.model_name,
        )
delete_materialized_view_model.py 0000664 0000000 0000000 00000002134 14175513017 0034422 0 ustar 00root root 0000000 0000000 django-postgres-extra-2.0.4/psqlextra/backend/migrations/operations from django.db.migrations.operations.models import DeleteModel
class PostgresDeleteMaterializedViewModel(DeleteModel):
"""Deletes the specified materialized view model."""
def database_forwards(self, app_label, schema_editor, from_state, to_state):
"""Apply this migration operation forwards."""
model = from_state.apps.get_model(app_label, self.name)
if self.allow_migrate_model(schema_editor.connection.alias, model):
schema_editor.delete_materialized_view_model(model)
def database_backwards(
self, app_label, schema_editor, from_state, to_state
):
"""Apply this migration operation backwards."""
model = to_state.apps.get_model(app_label, self.name)
if self.allow_migrate_model(schema_editor.connection.alias, model):
schema_editor.delete_materialized_view_model(model)
def describe(self):
"""Gets a human readable text describing this migration."""
description = super().describe()
description = description.replace("model", "materialized view model")
return description
django-postgres-extra-2.0.4/psqlextra/backend/migrations/operations/delete_partition.py 0000664 0000000 0000000 00000002346 14175513017 0031633 0 ustar 00root root 0000000 0000000 from .partition import PostgresPartitionOperation
class PostgresDeletePartition(PostgresPartitionOperation):
"""Deletes a partition that's part of a :see:PartitionedPostgresModel."""
def state_forwards(self, app_label, state):
model = state.models[(app_label, self.model_name_lower)]
model.delete_partition(self.name)
state.reload_model(app_label, self.model_name_lower)
def database_forwards(self, app_label, schema_editor, from_state, to_state):
model = from_state.apps.get_model(app_label, self.model_name)
if self.allow_migrate_model(schema_editor.connection.alias, model):
schema_editor.delete_partition(model, self.name)
def database_backwards(
self, app_label, schema_editor, from_state, to_state
):
model = to_state.apps.get_model(app_label, self.model_name)
model_state = to_state.models[(app_label, self.model_name)]
if self.allow_migrate_model(schema_editor.connection.alias, model):
partition_state = model_state.partitions[self.name]
schema_editor.add_default_partition(model, partition_state.name)
def describe(self) -> str:
return "Deletes partition %s on %s" % (self.name, self.model_name)
django-postgres-extra-2.0.4/psqlextra/backend/migrations/operations/delete_partitioned_model.py 0000664 0000000 0000000 00000002077 14175513017 0033325 0 ustar 00root root 0000000 0000000 from django.db.migrations.operations.models import DeleteModel
class PostgresDeletePartitionedModel(DeleteModel):
    """Deletes the specified partitioned model."""

    def database_forwards(self, app_label, schema_editor, from_state, to_state):
        """Apply this migration operation forwards."""

        model = from_state.apps.get_model(app_label, self.name)
        if not self.allow_migrate_model(schema_editor.connection.alias, model):
            return

        schema_editor.delete_partitioned_model(model)

    def database_backwards(
        self, app_label, schema_editor, from_state, to_state
    ):
        """Apply this migration operation backwards (re-creates the
        partitioned table)."""

        model = to_state.apps.get_model(app_label, self.name)
        if not self.allow_migrate_model(schema_editor.connection.alias, model):
            return

        schema_editor.create_partitioned_model(model)

    def describe(self):
        """Gets a human readable text describing this migration."""
        return super().describe().replace("model", "partitioned model")
django-postgres-extra-2.0.4/psqlextra/backend/migrations/operations/delete_range_partition.py 0000664 0000000 0000000 00000001670 14175513017 0033006 0 ustar 00root root 0000000 0000000 from .delete_partition import PostgresDeletePartition
class PostgresDeleteRangePartition(PostgresDeletePartition):
    """Deletes a range partition that's part of a
    :see:PartitionedPostgresModel."""

    def database_backwards(
        self, app_label, schema_editor, from_state, to_state
    ):
        """Re-creates the range partition when reversing the deletion."""

        model = to_state.apps.get_model(app_label, self.model_name)
        model_state = to_state.models[(app_label, self.model_name_lower)]

        if not self.allow_migrate_model(schema_editor.connection.alias, model):
            return

        partition_state = model_state.partitions[self.name]
        schema_editor.add_range_partition(
            model,
            partition_state.name,
            partition_state.from_values,
            partition_state.to_values,
        )

    def describe(self) -> str:
        return "Deletes range partition '%s' on %s" % (
            self.name,
            self.model_name,
        )
django-postgres-extra-2.0.4/psqlextra/backend/migrations/operations/delete_view_model.py 0000664 0000000 0000000 00000002034 14175513017 0031746 0 ustar 00root root 0000000 0000000 from django.db.migrations.operations.models import DeleteModel
class PostgresDeleteViewModel(DeleteModel):
    """Deletes the specified view model."""

    def database_forwards(self, app_label, schema_editor, from_state, to_state):
        """Apply this migration operation forwards."""

        model = from_state.apps.get_model(app_label, self.name)
        if not self.allow_migrate_model(schema_editor.connection.alias, model):
            return

        schema_editor.delete_view_model(model)

    def database_backwards(
        self, app_label, schema_editor, from_state, to_state
    ):
        """Apply this migration operation backwards (re-creates the view)."""

        model = to_state.apps.get_model(app_label, self.name)
        if not self.allow_migrate_model(schema_editor.connection.alias, model):
            return

        schema_editor.create_view_model(model)

    def describe(self):
        """Gets a human readable text describing this migration."""
        return super().describe().replace("model", "view model")
django-postgres-extra-2.0.4/psqlextra/backend/migrations/operations/partition.py 0000664 0000000 0000000 00000001664 14175513017 0030313 0 ustar 00root root 0000000 0000000 from django.db.migrations.operations.base import Operation
class PostgresPartitionOperation(Operation):
    """Base class for operations that add or delete a partition on a
    :see:PartitionedPostgresModel."""

    def __init__(self, model_name: str, name: str) -> None:
        """Initializes new instance of :see:PostgresPartitionOperation.

        Arguments:
            model_name:
                The name of the :see:PartitionedPostgresModel.

            name:
                The name to give to the new partition table.
        """

        self.model_name = model_name
        # Migration state keys models by lowercased name.
        self.model_name_lower = model_name.lower()
        self.name = name

    def deconstruct(self):
        return (
            self.__class__.__qualname__,
            [],
            {"model_name": self.model_name, "name": self.name},
        )

    def state_forwards(self, *args, **kwargs):
        pass

    def state_backwards(self, *args, **kwargs):
        pass

    def reduce(self, *args, **kwargs):
        # Partition operations never block the migration optimizer.
        return True
django-postgres-extra-2.0.4/psqlextra/backend/migrations/patched_autodetector.py 0000664 0000000 0000000 00000021253 14175513017 0030305 0 ustar 00root root 0000000 0000000 from contextlib import contextmanager
from unittest import mock
from django.db.migrations import (
AddField,
AlterField,
CreateModel,
DeleteModel,
RemoveField,
RenameField,
)
from django.db.migrations.autodetector import MigrationAutodetector
from django.db.migrations.operations.base import Operation
from django.db.models import Model
from psqlextra.models import (
PostgresMaterializedViewModel,
PostgresPartitionedModel,
PostgresViewModel,
)
from psqlextra.types import PostgresPartitioningMethod
from . import operations
# original `MigrationAutodetector.add_operation`
# function, saved here so the patched version can
# call the original
add_operation = MigrationAutodetector.add_operation
class AddOperationHandler:
"""Handler for when operations are being added to a new migration.
This is where we intercept operations such as
:see:CreateModel to replace it with our own.
"""
def __init__(self, autodetector, app_label, args, kwargs):
    """Initializes the handler.

    Arguments:
        autodetector:
            The :see:MigrationAutodetector instance whose
            `add_operation` call is being intercepted.

        app_label:
            Label of the app the operation is being added for.

        args:
            Remaining positional arguments of the intercepted
            `add_operation` call, forwarded verbatim later.

        kwargs:
            Remaining keyword arguments of the intercepted
            `add_operation` call, forwarded verbatim later.
    """
    self.autodetector = autodetector
    self.app_label = app_label
    self.args = args
    self.kwargs = kwargs
def add(self, operation):
"""Adds the specified operation to the list of operations to execute in
the migration."""
return add_operation(
self.autodetector,
self.app_label,
operation,
*self.args,
**self.kwargs,
)
def add_field(self, operation: AddField):
"""Adds the specified :see:AddField operation to the list of operations
to execute in the migration."""
return self._transform_view_field_operations(operation)
def remove_field(self, operation: RemoveField):
"""Adds the specified :see:RemoveField operation to the list of
operations to execute in the migration."""
return self._transform_view_field_operations(operation)
def alter_field(self, operation: AlterField):
"""Adds the specified :see:AlterField operation to the list of
operations to execute in the migration."""
return self._transform_view_field_operations(operation)
def rename_field(self, operation: RenameField):
"""Adds the specified :see:RenameField operation to the list of
operations to execute in the migration."""
return self._transform_view_field_operations(operation)
def _transform_view_field_operations(self, operation: Operation):
"""Transforms operations on fields on a (materialized) view into state
only operations.
One cannot add/remove/delete fields on a (materialized) view,
however, we do want Django's migration system to keep track of
these kind of changes to the model. The :see:ApplyState
operation just tells Django the operation was applied without
actually applying it.
"""
model = self.autodetector.new_apps.get_model(
self.app_label, operation.model_name
)
if issubclass(model, PostgresViewModel):
return self.add(operations.ApplyState(state_operation=operation))
return self.add(operation)
def add_create_model(self, operation: CreateModel):
"""Adds the specified :see:CreateModel operation to the list of
operations to execute in the migration."""
model = self.autodetector.new_apps.get_model(
self.app_label, operation.name
)
if issubclass(model, PostgresPartitionedModel):
return self.add_create_partitioned_model(model, operation)
elif issubclass(model, PostgresMaterializedViewModel):
return self.add_create_materialized_view_model(model, operation)
elif issubclass(model, PostgresViewModel):
return self.add_create_view_model(model, operation)
return self.add(operation)
def add_delete_model(self, operation: DeleteModel):
"""Adds the specified :see:Deletemodel operation to the list of
operations to execute in the migration."""
model = self.autodetector.old_apps.get_model(
self.app_label, operation.name
)
if issubclass(model, PostgresPartitionedModel):
return self.add_delete_partitioned_model(model, operation)
elif issubclass(model, PostgresMaterializedViewModel):
return self.add_delete_materialized_view_model(model, operation)
elif issubclass(model, PostgresViewModel):
return self.add_delete_view_model(model, operation)
return self.add(operation)
def add_create_partitioned_model(
self, model: Model, operation: CreateModel
):
"""Adds a :see:PostgresCreatePartitionedModel operation to the list of
operations to execute in the migration."""
partitioning_options = model._partitioning_meta.original_attrs
_, args, kwargs = operation.deconstruct()
if partitioning_options["method"] != PostgresPartitioningMethod.HASH:
self.add(
operations.PostgresAddDefaultPartition(
model_name=model.__name__, name="default"
)
)
self.add(
operations.PostgresCreatePartitionedModel(
*args, **kwargs, partitioning_options=partitioning_options
)
)
def add_delete_partitioned_model(
self, model: Model, operation: DeleteModel
):
"""Adds a :see:PostgresDeletePartitionedModel operation to the list of
operations to execute in the migration."""
_, args, kwargs = operation.deconstruct()
return self.add(
operations.PostgresDeletePartitionedModel(*args, **kwargs)
)
def add_create_view_model(self, model: Model, operation: CreateModel):
"""Adds a :see:PostgresCreateViewModel operation to the list of
operations to execute in the migration."""
view_options = model._view_meta.original_attrs
_, args, kwargs = operation.deconstruct()
self.add(
operations.PostgresCreateViewModel(
*args, **kwargs, view_options=view_options
)
)
def add_delete_view_model(self, model: Model, operation: DeleteModel):
"""Adds a :see:PostgresDeleteViewModel operation to the list of
operations to execute in the migration."""
_, args, kwargs = operation.deconstruct()
return self.add(operations.PostgresDeleteViewModel(*args, **kwargs))
def add_create_materialized_view_model(
self, model: Model, operation: CreateModel
):
"""Adds a :see:PostgresCreateMaterializedViewModel operation to the
list of operations to execute in the migration."""
view_options = model._view_meta.original_attrs
_, args, kwargs = operation.deconstruct()
self.add(
operations.PostgresCreateMaterializedViewModel(
*args, **kwargs, view_options=view_options
)
)
def add_delete_materialized_view_model(
self, model: Model, operation: DeleteModel
):
"""Adds a :see:PostgresDeleteMaterializedViewModel operation to the
list of operations to execute in the migration."""
_, args, kwargs = operation.deconstruct()
return self.add(
operations.PostgresDeleteMaterializedViewModel(*args, **kwargs)
)
@contextmanager
def patched_autodetector():
    """Patches the standard Django :see:MigrationAutodetector for the duration
    of the context.

    The patch intercepts the `add_operation` function to
    customize how new operations are added.

    We have to do this because there is no way in Django
    to extend the auto detector otherwise.
    """

    add_operation_path = (
        "django.db.migrations.autodetector"
        ".MigrationAutodetector.add_operation"
    )

    def _patched(autodetector, app_label, operation, *args, **kwargs):
        handler = AddOperationHandler(autodetector, app_label, args, kwargs)

        # (operation type, handler) pairs, checked in order
        dispatch = [
            (CreateModel, handler.add_create_model),
            (DeleteModel, handler.add_delete_model),
            (AddField, handler.add_field),
            (RemoveField, handler.remove_field),
            (AlterField, handler.alter_field),
            (RenameField, handler.rename_field),
        ]

        for operation_type, handle in dispatch:
            if isinstance(operation, operation_type):
                return handle(operation)

        # anything else passes through unchanged
        return handler.add(operation)

    with mock.patch(add_operation_path, new=_patched):
        yield
django-postgres-extra-2.0.4/psqlextra/backend/migrations/patched_migrations.py 0000664 0000000 0000000 00000000770 14175513017 0027760 0 ustar 00root root 0000000 0000000 from contextlib import contextmanager
from .patched_autodetector import patched_autodetector
from .patched_project_state import patched_project_state
@contextmanager
def postgres_patched_migrations():
    """Patches migration related classes/functions to extend how Django
    generates and applies migrations.

    This adds support for automatically detecting changes in Postgres
    specific models.
    """

    # both patches must be active at the same time; the project state
    # patch feeds the model states the autodetector patch inspects
    with patched_project_state(), patched_autodetector():
        yield
django-postgres-extra-2.0.4/psqlextra/backend/migrations/patched_project_state.py 0000664 0000000 0000000 00000004167 14175513017 0030456 0 ustar 00root root 0000000 0000000 from contextlib import contextmanager
from unittest import mock
from django.db.migrations.state import ProjectState
from psqlextra.models import (
PostgresMaterializedViewModel,
PostgresPartitionedModel,
PostgresViewModel,
)
from .state import (
PostgresMaterializedViewModelState,
PostgresPartitionedModelState,
PostgresViewModelState,
)
# original `ProjectState.from_apps` function,
# saved here so the patched version can call
# the original
original_from_apps = ProjectState.from_apps
def project_state_from_apps(apps):
    """Creates a :see:ProjectState instance from the specified list of apps."""

    project_state = original_from_apps(apps)

    # (model base class, specialized model state class) pairs; checked
    # in order, so the materialized view entry comes before the more
    # general view entry
    state_classes = [
        (PostgresPartitionedModel, PostgresPartitionedModelState),
        (PostgresMaterializedViewModel, PostgresMaterializedViewModelState),
        (PostgresViewModel, PostgresViewModelState),
    ]

    for model in apps.get_models(include_swapped=True):
        # for some of our custom models, use the more specific model
        # state.. for everything else, business as usual
        for base_class, state_class in state_classes:
            if issubclass(model, base_class):
                model_state = state_class.from_model(model)
                break
        else:
            continue

        key = (model_state.app_label, model_state.name_lower)
        project_state.models[key] = model_state

    return project_state
@contextmanager
def patched_project_state():
    """Patches the standard Django :see:ProjectState.from_apps for the duration
    of the context.

    The patch intercepts the `from_apps` function to control
    how model state is created. We want to use our custom
    model state classes for certain types of models.

    We have to do this because there is no way in Django
    to extend the project state otherwise.
    """

    from_apps_path = "django.db.migrations.state.ProjectState.from_apps"

    with mock.patch(from_apps_path, new=project_state_from_apps):
        yield
django-postgres-extra-2.0.4/psqlextra/backend/migrations/state/ 0000775 0000000 0000000 00000000000 14175513017 0024656 5 ustar 00root root 0000000 0000000 django-postgres-extra-2.0.4/psqlextra/backend/migrations/state/__init__.py 0000664 0000000 0000000 00000001052 14175513017 0026765 0 ustar 00root root 0000000 0000000 from .materialized_view import PostgresMaterializedViewModelState
from .partitioning import (
PostgresHashPartitionState,
PostgresListPartitionState,
PostgresPartitionedModelState,
PostgresPartitionState,
PostgresRangePartitionState,
)
from .view import PostgresViewModelState
__all__ = [
"PostgresPartitionState",
"PostgresRangePartitionState",
"PostgresHashPartitionState",
"PostgresListPartitionState",
"PostgresPartitionedModelState",
"PostgresViewModelState",
"PostgresMaterializedViewModelState",
]
django-postgres-extra-2.0.4/psqlextra/backend/migrations/state/materialized_view.py 0000664 0000000 0000000 00000000765 14175513017 0030744 0 ustar 00root root 0000000 0000000 from typing import Type
from psqlextra.models import PostgresMaterializedViewModel
from .view import PostgresViewModelState
class PostgresMaterializedViewModelState(PostgresViewModelState):
    """Represents the state of a :see:PostgresMaterializedViewModel in the
    migrations."""

    @classmethod
    def _get_base_model_class(cls) -> Type[PostgresMaterializedViewModel]:
        """Gets the class to use as a base class for rendered models."""

        # fixed: the first parameter of a classmethod is the class
        # itself and is conventionally named `cls`, not `self`
        return PostgresMaterializedViewModel
django-postgres-extra-2.0.4/psqlextra/backend/migrations/state/model.py 0000664 0000000 0000000 00000007432 14175513017 0026336 0 ustar 00root root 0000000 0000000 from collections.abc import Mapping
from typing import Type
from django.db.migrations.state import ModelState
from django.db.models import Model
from psqlextra.models import PostgresModel
class PostgresModelState(ModelState):
    """Base for custom model states.

    We need this base class to create some hooks into rendering models,
    creating new states and cloning state. Most of the logic resides
    here in the base class. Our derived classes implement the `_pre_*`
    methods.
    """

    @classmethod
    def from_model(
        cls, model: PostgresModel, *args, **kwargs
    ) -> "PostgresModelState":
        """Creates a new :see:PostgresModelState object from the specified
        model.

        We override this so derived classes get the chance to attach
        additional information to the newly created model state.

        We also need to patch up the base class for the model.
        """

        model_state = super().from_model(model, *args, **kwargs)
        model_state = cls._pre_new(model, model_state)

        # django does not add abstract bases as a base in migrations
        # because it assumes the base does not add anything important
        # in a migration.. but it does, so we replace the Model
        # base with the actual base
        # NOTE(review): assumes every entry in `bases` is a class; a
        # string reference would make issubclass() raise - confirm
        bases = tuple()
        for base in model_state.bases:
            if issubclass(base, Model):
                bases += (cls._get_base_model_class(),)
            else:
                bases += (base,)

        model_state.bases = bases
        return model_state

    def clone(self) -> "PostgresModelState":
        """Gets an exact copy of this :see:PostgresModelState."""

        model_state = super().clone()
        return self._pre_clone(model_state)

    def render(self, apps):
        """Renders this state into an actual model."""

        # TODO: figure out a way to do this without pretty much
        # copying the base class's implementation
        try:
            bases = tuple(
                (apps.get_model(base) if isinstance(base, str) else base)
                for base in self.bases
            )
        except LookupError:
            # TODO: this should be a InvalidBaseError
            raise ValueError(
                "Cannot resolve one or more bases from %r" % (self.bases,)
            )

        if isinstance(self.fields, Mapping):
            # In Django 3.1 `self.fields` became a `dict`
            fields = {
                name: field.clone() for name, field in self.fields.items()
            }
        else:
            # In Django < 3.1 `self.fields` is a list of (name, field) tuples
            fields = {name: field.clone() for name, field in self.fields}

        # fake Meta class so the rendered model registers against the
        # provided (historical) app registry
        meta = type(
            "Meta",
            (),
            {"app_label": self.app_label, "apps": apps, **self.options},
        )

        attributes = {
            **fields,
            "Meta": meta,
            "__module__": "__fake__",
            **dict(self.construct_managers()),
        }

        # `_pre_render` lets derived classes inject extra attributes
        # (e.g. partitioning/view meta) before the type is created
        return type(*self._pre_render(self.name, bases, attributes))

    @classmethod
    def _pre_new(
        cls, model: PostgresModel, model_state: "PostgresModelState"
    ) -> "PostgresModelState":
        """Called when a new model state is created from the specified
        model."""

        # default hook: no extra state attached
        return model_state

    def _pre_clone(
        self, model_state: "PostgresModelState"
    ) -> "PostgresModelState":
        """Called when this model state is cloned."""

        # default hook: nothing extra to copy
        return model_state

    def _pre_render(self, name: str, bases, attributes):
        """Called when this model state is rendered into a model."""

        # default hook: render unchanged
        return name, bases, attributes

    @classmethod
    def _get_base_model_class(self) -> Type[PostgresModel]:
        """Gets the class to use as a base class for rendered models."""

        return PostgresModel
django-postgres-extra-2.0.4/psqlextra/backend/migrations/state/partitioning.py 0000664 0000000 0000000 00000010051 14175513017 0027734 0 ustar 00root root 0000000 0000000 from typing import Dict, List, Type
from psqlextra.models import PostgresPartitionedModel
from .model import PostgresModelState
class PostgresPartitionState:
    """Represents the state of a partition for a :see:PostgresPartitionedModel
    during a migration."""

    def __init__(self, app_label: str, model_name: str, name: str) -> None:
        # identity of the partition: which app/model it belongs
        # to and what the partition itself is called
        self.name = name
        self.model_name = model_name
        self.app_label = app_label
class PostgresRangePartitionState(PostgresPartitionState):
    """Represents the state of a range partition for a
    :see:PostgresPartitionedModel during a migration."""

    def __init__(
        self,
        app_label: str,
        model_name: str,
        name: str,
        from_values,
        to_values,
    ):
        super().__init__(app_label=app_label, model_name=model_name, name=name)

        # bounds of the partitioning key range stored in this partition
        self.from_values = from_values
        self.to_values = to_values
class PostgresListPartitionState(PostgresPartitionState):
    """Represents the state of a list partition for a
    :see:PostgresPartitionedModel during a migration."""

    def __init__(self, app_label: str, model_name: str, name: str, values):
        super().__init__(app_label=app_label, model_name=model_name, name=name)

        # explicit partitioning key values routed to this partition
        self.values = values
class PostgresHashPartitionState(PostgresPartitionState):
    """Represents the state of a hash partition for a
    :see:PostgresPartitionedModel during a migration."""

    def __init__(
        self,
        app_label: str,
        model_name: str,
        name: str,
        modulus: int,
        remainder: int,
    ):
        super().__init__(app_label=app_label, model_name=model_name, name=name)

        # divisor applied to the hashed key and the remainder
        # that selects this partition
        self.modulus = modulus
        self.remainder = remainder
class PostgresPartitionedModelState(PostgresModelState):
    """Represents the state of a :see:PostgresPartitionedModel in the
    migrations."""

    def __init__(
        self, *args, partitions=None, partitioning_options=None, **kwargs
    ):
        """Initializes a new instance of :see:PostgresPartitionedModelState.

        Arguments:
            partitions:
                Optional list of :see:PostgresPartitionState objects
                to start out with.

            partitioning_options:
                Dictionary of options for partitioning.

                See: PostgresPartitionedModelMeta for a list.
        """

        super().__init__(*args, **kwargs)

        # defaults are `None` instead of mutable `[]`/`{}` so a single
        # default instance is never shared between calls
        self.partitions: Dict[str, PostgresPartitionState] = {
            partition.name: partition for partition in (partitions or [])
        }
        self.partitioning_options = dict(partitioning_options or {})

    def add_partition(self, partition: PostgresPartitionState):
        """Adds a partition to this partitioned model state."""

        self.partitions[partition.name] = partition

    def delete_partition(self, name: str):
        """Deletes a partition from this partitioned model state.

        Raises:
            KeyError:
                When no partition with the specified name exists.
        """

        del self.partitions[name]

    @classmethod
    def _pre_new(
        cls,
        model: PostgresPartitionedModel,
        model_state: "PostgresPartitionedModelState",
    ) -> "PostgresPartitionedModelState":
        """Called when a new model state is created from the specified
        model."""

        model_state.partitions = dict()
        model_state.partitioning_options = dict(
            model._partitioning_meta.original_attrs
        )
        return model_state

    def _pre_clone(
        self, model_state: "PostgresPartitionedModelState"
    ) -> "PostgresPartitionedModelState":
        """Called when this model state is cloned."""

        # copy the containers so the clone and the original
        # do not share mutable state
        model_state.partitions = dict(self.partitions)
        model_state.partitioning_options = dict(self.partitioning_options)
        return model_state

    def _pre_render(self, name: str, bases, attributes):
        """Called when this model state is rendered into a model."""

        # attach a `PartitioningMeta` class to the rendered model,
        # mirroring the one declared on real partitioned models
        partitioning_meta = type(
            "PartitioningMeta", (), dict(self.partitioning_options)
        )
        return (
            name,
            bases,
            {**attributes, "PartitioningMeta": partitioning_meta},
        )

    @classmethod
    def _get_base_model_class(cls) -> Type[PostgresPartitionedModel]:
        """Gets the class to use as a base class for rendered models."""

        return PostgresPartitionedModel
django-postgres-extra-2.0.4/psqlextra/backend/migrations/state/view.py 0000664 0000000 0000000 00000003173 14175513017 0026206 0 ustar 00root root 0000000 0000000 from typing import Type
from psqlextra.models import PostgresViewModel
from .model import PostgresModelState
class PostgresViewModelState(PostgresModelState):
    """Represents the state of a :see:PostgresViewModel in the migrations."""

    def __init__(self, *args, view_options=None, **kwargs):
        """Initializes a new instance of :see:PostgresViewModelState.

        Arguments:
            view_options:
                Dictionary of options for views.

                See: PostgresViewModelMeta for a list.
        """

        super().__init__(*args, **kwargs)

        # default of `None` instead of a mutable `{}` so a single
        # default dict is never shared between calls
        self.view_options = dict(view_options or {})

    @classmethod
    def _pre_new(
        cls, model: PostgresViewModel, model_state: "PostgresViewModelState"
    ) -> "PostgresViewModelState":
        """Called when a new model state is created from the specified
        model."""

        model_state.view_options = dict(model._view_meta.original_attrs)
        return model_state

    def _pre_clone(
        self, model_state: "PostgresViewModelState"
    ) -> "PostgresViewModelState":
        """Called when this model state is cloned."""

        # copy so the clone and the original do not share the dict
        model_state.view_options = dict(self.view_options)
        return model_state

    def _pre_render(self, name: str, bases, attributes):
        """Called when this model state is rendered into a model."""

        # attach a `ViewMeta` class to the rendered model, mirroring
        # the one declared on real view models
        view_meta = type("ViewMeta", (), dict(self.view_options))
        return name, bases, {**attributes, "ViewMeta": view_meta}

    @classmethod
    def _get_base_model_class(cls) -> Type[PostgresViewModel]:
        """Gets the class to use as a base class for rendered models."""

        return PostgresViewModel
django-postgres-extra-2.0.4/psqlextra/backend/operations.py 0000664 0000000 0000000 00000001422 14175513017 0024116 0 ustar 00root root 0000000 0000000 from importlib import import_module
from . import base_impl
class PostgresOperations(base_impl.operations()):
    """Simple operations specific to PostgreSQL."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

        # lazily-imported module holding our custom SQL compilers
        self._compiler_cache = None

    def compiler(self, compiler_name: str):
        """Gets the SQL compiler with the specified name."""

        try:
            # django gets the first shot at resolving the compiler
            return super().compiler(compiler_name)
        except AttributeError:
            # not a standard compiler; resolve it from our own
            # module, imported lazily and cached on the instance
            if self._compiler_cache is None:
                self._compiler_cache = import_module("psqlextra.compiler")
            return getattr(self._compiler_cache, compiler_name)
django-postgres-extra-2.0.4/psqlextra/backend/schema.py 0000664 0000000 0000000 00000043177 14175513017 0023210 0 ustar 00root root 0000000 0000000 from typing import Any, List, Optional
from unittest import mock
from django.core.exceptions import (
FieldDoesNotExist,
ImproperlyConfigured,
SuspiciousOperation,
)
from django.db import transaction
from django.db.models import Field, Model
from psqlextra.type_assertions import is_sql_with_params
from psqlextra.types import PostgresPartitioningMethod
from . import base_impl
from .introspection import PostgresIntrospection
from .side_effects import (
HStoreRequiredSchemaEditorSideEffect,
HStoreUniqueSchemaEditorSideEffect,
)
class PostgresSchemaEditor(base_impl.schema_editor()):
"""Schema editor that adds extra methods for PostgreSQL specific features
and hooks into existing implementations to add side effects specific to
PostgreSQL."""
sql_create_view = "CREATE VIEW %s AS (%s)"
sql_replace_view = "CREATE OR REPLACE VIEW %s AS (%s)"
sql_drop_view = "DROP VIEW IF EXISTS %s"
sql_create_materialized_view = (
"CREATE MATERIALIZED VIEW %s AS (%s) WITH DATA"
)
sql_drop_materialized_view = "DROP MATERIALIZED VIEW %s"
sql_refresh_materialized_view = "REFRESH MATERIALIZED VIEW %s"
sql_refresh_materialized_view_concurrently = (
"REFRESH MATERIALIZED VIEW CONCURRENTLY %s"
)
sql_partition_by = " PARTITION BY %s (%s)"
sql_add_default_partition = "CREATE TABLE %s PARTITION OF %s DEFAULT"
sql_add_hash_partition = "CREATE TABLE %s PARTITION OF %s FOR VALUES WITH (MODULUS %s, REMAINDER %s)"
sql_add_range_partition = (
"CREATE TABLE %s PARTITION OF %s FOR VALUES FROM (%s) TO (%s)"
)
sql_add_list_partition = (
"CREATE TABLE %s PARTITION OF %s FOR VALUES IN (%s)"
)
sql_delete_partition = "DROP TABLE %s"
sql_table_comment = "COMMENT ON TABLE %s IS %s"
side_effects = [
HStoreUniqueSchemaEditorSideEffect(),
HStoreRequiredSchemaEditorSideEffect(),
]
def __init__(self, connection, collect_sql=False, atomic=True):
super().__init__(connection, collect_sql, atomic)
for side_effect in self.side_effects:
side_effect.execute = self.execute
side_effect.quote_name = self.quote_name
self.deferred_sql = []
self.introspection = PostgresIntrospection(self.connection)
def create_model(self, model: Model) -> None:
"""Creates a new model."""
super().create_model(model)
for side_effect in self.side_effects:
side_effect.create_model(model)
def delete_model(self, model: Model) -> None:
"""Drops/deletes an existing model."""
for side_effect in self.side_effects:
side_effect.delete_model(model)
super().delete_model(model)
def refresh_materialized_view_model(
self, model: Model, concurrently: bool = False
) -> None:
"""Refreshes a materialized view."""
sql_template = (
self.sql_refresh_materialized_view_concurrently
if concurrently
else self.sql_refresh_materialized_view
)
sql = sql_template % self.quote_name(model._meta.db_table)
self.execute(sql)
def create_view_model(self, model: Model) -> None:
"""Creates a new view model."""
self._create_view_model(self.sql_create_view, model)
def replace_view_model(self, model: Model) -> None:
"""Replaces a view model with a newer version.
This is used to alter the backing query of a view.
"""
self._create_view_model(self.sql_replace_view, model)
def delete_view_model(self, model: Model) -> None:
"""Deletes a view model."""
sql = self.sql_drop_view % self.quote_name(model._meta.db_table)
self.execute(sql)
def create_materialized_view_model(self, model: Model) -> None:
"""Creates a new materialized view model."""
self._create_view_model(self.sql_create_materialized_view, model)
def replace_materialized_view_model(self, model: Model) -> None:
"""Replaces a materialized view with a newer version.
This is used to alter the backing query of a materialized view.
Replacing a materialized view is a lot trickier than a normal view.
For normal views we can use `CREATE OR REPLACE VIEW`, but for
materialized views, we have to create the new view, copy all
indexes and constraints and drop the old one.
This operation is atomic as it runs in a transaction.
"""
with self.connection.cursor() as cursor:
constraints = self.introspection.get_constraints(
cursor, model._meta.db_table
)
with transaction.atomic():
self.delete_materialized_view_model(model)
self.create_materialized_view_model(model)
for constraint_name, constraint_options in constraints.items():
if not constraint_options["definition"]:
raise SuspiciousOperation(
"Table %s has a constraint '%s' that no definition could be generated for",
(model._meta.db_tabel, constraint_name),
)
self.execute(constraint_options["definition"])
def delete_materialized_view_model(self, model: Model) -> None:
"""Deletes a materialized view model."""
sql = self.sql_drop_materialized_view % self.quote_name(
model._meta.db_table
)
self.execute(sql)
def create_partitioned_model(self, model: Model) -> None:
"""Creates a new partitioned model."""
meta = self._partitioning_properties_for_model(model)
# get the sql statement that django creates for normal
# table creations..
sql, params = self._extract_sql(self.create_model, model)
partitioning_key_sql = ", ".join(
self.quote_name(field_name) for field_name in meta.key
)
# create a composite key that includes the partitioning key
sql = sql.replace(" PRIMARY KEY", "")
sql = sql[:-1] + ", PRIMARY KEY (%s, %s))" % (
self.quote_name(model._meta.pk.name),
partitioning_key_sql,
)
# extend the standard CREATE TABLE statement with
# 'PARTITION BY ...'
sql += self.sql_partition_by % (
meta.method.upper(),
partitioning_key_sql,
)
self.execute(sql, params)
def delete_partitioned_model(self, model: Model) -> None:
"""Drops the specified partitioned model."""
return self.delete_model(model)
def add_range_partition(
self,
model: Model,
name: str,
from_values: Any,
to_values: Any,
comment: Optional[str] = None,
) -> None:
"""Creates a new range partition for the specified partitioned model.
Arguments:
model:
Partitioned model to create a partition for.
name:
Name to give to the new partition.
Final name will be "{table_name}_{partition_name}"
from_values:
Start of the partitioning key range of
values that need to be stored in this
partition.
to_values:
End of the partitioning key range of
values that need to be stored in this
partition.
comment:
Optionally, a comment to add on this
partition table.
"""
# asserts the model is a model set up for partitioning
self._partitioning_properties_for_model(model)
table_name = self.create_partition_table_name(model, name)
sql = self.sql_add_range_partition % (
self.quote_name(table_name),
self.quote_name(model._meta.db_table),
"%s",
"%s",
)
with transaction.atomic():
self.execute(sql, (from_values, to_values))
if comment:
self.set_comment_on_table(table_name, comment)
def add_list_partition(
self,
model: Model,
name: str,
values: List[Any],
comment: Optional[str] = None,
) -> None:
"""Creates a new list partition for the specified partitioned model.
Arguments:
model:
Partitioned model to create a partition for.
name:
Name to give to the new partition.
Final name will be "{table_name}_{partition_name}"
values:
Partition key values that should be
stored in this partition.
comment:
Optionally, a comment to add on this
partition table.
"""
# asserts the model is a model set up for partitioning
self._partitioning_properties_for_model(model)
table_name = self.create_partition_table_name(model, name)
sql = self.sql_add_list_partition % (
self.quote_name(table_name),
self.quote_name(model._meta.db_table),
",".join(["%s" for _ in range(len(values))]),
)
with transaction.atomic():
self.execute(sql, values)
if comment:
self.set_comment_on_table(table_name, comment)
def add_hash_partition(
self,
model: Model,
name: str,
modulus: int,
remainder: int,
comment: Optional[str] = None,
) -> None:
"""Creates a new hash partition for the specified partitioned model.
Arguments:
model:
Partitioned model to create a partition for.
name:
Name to give to the new partition.
Final name will be "{table_name}_{partition_name}"
modulus:
Integer value by which the key is divided.
remainder:
The remainder of the hash value when divided by modulus.
comment:
Optionally, a comment to add on this partition table.
"""
# asserts the model is a model set up for partitioning
self._partitioning_properties_for_model(model)
table_name = self.create_partition_table_name(model, name)
sql = self.sql_add_hash_partition % (
self.quote_name(table_name),
self.quote_name(model._meta.db_table),
"%s",
"%s",
)
with transaction.atomic():
self.execute(sql, (modulus, remainder))
if comment:
self.set_comment_on_table(table_name, comment)
def add_default_partition(
self, model: Model, name: str, comment: Optional[str] = None
) -> None:
"""Creates a new default partition for the specified partitioned model.
A default partition is a partition where rows are routed to when
no more specific partition is a match.
Arguments:
model:
Partitioned model to create a partition for.
name:
Name to give to the new partition.
Final name will be "{table_name}_{partition_name}"
comment:
Optionally, a comment to add on this
partition table.
"""
# asserts the model is a model set up for partitioning
self._partitioning_properties_for_model(model)
table_name = self.create_partition_table_name(model, name)
sql = self.sql_add_default_partition % (
self.quote_name(table_name),
self.quote_name(model._meta.db_table),
)
with transaction.atomic():
self.execute(sql)
if comment:
self.set_comment_on_table(table_name, comment)
def delete_partition(self, model: Model, name: str) -> None:
"""Deletes the partition with the specified name."""
sql = self.sql_delete_partition % self.quote_name(
self.create_partition_table_name(model, name)
)
self.execute(sql)
def alter_db_table(
self, model: Model, old_db_table: str, new_db_table: str
) -> None:
"""Alters a table/model."""
super().alter_db_table(model, old_db_table, new_db_table)
for side_effect in self.side_effects:
side_effect.alter_db_table(model, old_db_table, new_db_table)
def add_field(self, model: Model, field: Field) -> None:
"""Adds a new field to an exisiting model."""
super().add_field(model, field)
for side_effect in self.side_effects:
side_effect.add_field(model, field)
def remove_field(self, model: Model, field: Field) -> None:
"""Removes a field from an existing model."""
for side_effect in self.side_effects:
side_effect.remove_field(model, field)
super().remove_field(model, field)
def alter_field(
self,
model: Model,
old_field: Field,
new_field: Field,
strict: bool = False,
) -> None:
"""Alters an existing field on an existing model."""
super().alter_field(model, old_field, new_field, strict)
for side_effect in self.side_effects:
side_effect.alter_field(model, old_field, new_field, strict)
def set_comment_on_table(self, table_name: str, comment: str) -> None:
"""Sets the comment on the specified table."""
sql = self.sql_table_comment % (self.quote_name(table_name), "%s")
self.execute(sql, (comment,))
def _create_view_model(self, sql: str, model: Model) -> None:
"""Creates a new view model using the specified SQL query."""
meta = self._view_properties_for_model(model)
with self.connection.cursor() as cursor:
view_sql = cursor.mogrify(*meta.query).decode("utf-8")
self.execute(sql % (self.quote_name(model._meta.db_table), view_sql))
def _extract_sql(self, method, *args):
"""Calls the specified method with the specified arguments and
intercepts the SQL statement it WOULD execute.
We use this to figure out the exact SQL statement Django would
execute. We can then make a small modification and execute it
ourselves.
"""
with mock.patch.object(self, "execute") as execute:
method(*args)
return tuple(execute.mock_calls[0])[1]
@staticmethod
def _view_properties_for_model(model: Model):
"""Gets the view options for the specified model.
Raises:
ImproperlyConfigured:
When the specified model is not set up
as a view.
"""
meta = getattr(model, "_view_meta", None)
if not meta:
raise ImproperlyConfigured(
(
"Model '%s' is not properly configured to be a view."
" Create the `ViewMeta` class as a child of '%s'."
)
% (model.__name__, model.__name__)
)
if not is_sql_with_params(meta.query):
raise ImproperlyConfigured(
(
"Model '%s' is not properly configured to be a view."
" Set the `query` and `key` attribute on the"
" `ViewMeta` class as a child of '%s'"
)
% (model.__name__, model.__name__)
)
return meta
    @staticmethod
    def _partitioning_properties_for_model(model: Model):
        """Gets the partitioning options for the specified model.

        Arguments:
            model:
                The model to get the partitioning options for.

        Returns:
            The model's `PartitioningMeta` options.

        Raises:
            ImproperlyConfigured:
                When the specified model is not set up
                for partitioning.
        """
        meta = getattr(model, "_partitioning_meta", None)
        if not meta:
            raise ImproperlyConfigured(
                (
                    "Model '%s' is not properly configured to be partitioned."
                    " Create the `PartitioningMeta` class as a child of '%s'."
                )
                % (model.__name__, model.__name__)
            )

        # both the partitioning method and the key are mandatory
        if not meta.method or not meta.key:
            raise ImproperlyConfigured(
                (
                    "Model '%s' is not properly configured to be partitioned."
                    " Set the `method` and `key` attributes on the"
                    " `PartitioningMeta` class as a child of '%s'"
                )
                % (model.__name__, model.__name__)
            )

        if meta.method not in PostgresPartitioningMethod:
            raise ImproperlyConfigured(
                (
                    "Model '%s' is not properly configured to be partitioned."
                    " '%s' is not a member of the PostgresPartitioningMethod enum."
                )
                % (model.__name__, meta.method)
            )

        if not isinstance(meta.key, list):
            raise ImproperlyConfigured(
                (
                    "Model '%s' is not properly configured to be partitioned."
                    " Partitioning key should be a list (of field names or values,"
                    " depending on the partitioning method)."
                )
                % model.__name__
            )

        # every member of the partitioning key must resolve to an actual
        # field on the model
        try:
            for field_name in meta.key:
                model._meta.get_field(field_name)
        except FieldDoesNotExist:
            raise ImproperlyConfigured(
                (
                    "Model '%s' is not properly configured to be partitioned."
                    " Field '%s' in partitioning key %s is not a valid field on"
                    " '%s'."
                )
                % (model.__name__, field_name, meta.key, model.__name__)
            )

        return meta
def create_partition_table_name(self, model: Model, name: str) -> str:
return "%s_%s" % (model._meta.db_table.lower(), name.lower())
django-postgres-extra-2.0.4/psqlextra/backend/side_effects/ 0000775 0000000 0000000 00000000000 14175513017 0024005 5 ustar 00root root 0000000 0000000 django-postgres-extra-2.0.4/psqlextra/backend/side_effects/__init__.py 0000664 0000000 0000000 00000000345 14175513017 0026120 0 ustar 00root root 0000000 0000000 from .hstore_required import HStoreRequiredSchemaEditorSideEffect
from .hstore_unique import HStoreUniqueSchemaEditorSideEffect
__all__ = [
"HStoreUniqueSchemaEditorSideEffect",
"HStoreRequiredSchemaEditorSideEffect",
]
django-postgres-extra-2.0.4/psqlextra/backend/side_effects/hstore_required.py 0000664 0000000 0000000 00000013322 14175513017 0027564 0 ustar 00root root 0000000 0000000 from psqlextra.fields import HStoreField
class HStoreRequiredSchemaEditorSideEffect:
    """Schema editor side effect that creates, renames and drops CHECK
    constraints for hstore keys marked as required (through the
    :see:HStoreField `required` option).

    Intended to be mixed into/driven by a schema editor: it relies on
    the host providing `quote_name` and `execute`.
    """

    sql_hstore_required_create = (
        "ALTER TABLE {table} "
        "ADD CONSTRAINT {name} "
        "CHECK (({field}->'{key}') "
        "IS NOT NULL)"
    )

    sql_hstore_required_rename = (
        "ALTER TABLE {table} "
        "RENAME CONSTRAINT "
        "{old_name} "
        "TO "
        "{new_name}"
    )

    sql_hstore_required_drop = (
        "ALTER TABLE {table} " "DROP CONSTRAINT IF EXISTS {name}"
    )

    def create_model(self, model):
        """Ran when a new model is created."""
        for field in model._meta.local_fields:
            if not isinstance(field, HStoreField):
                continue

            self.add_field(model, field)

    def delete_model(self, model):
        """Ran when a model is being deleted."""
        for field in model._meta.local_fields:
            if not isinstance(field, HStoreField):
                continue

            self.remove_field(model, field)

    def alter_db_table(self, model, old_db_table, new_db_table):
        """Ran when the name of a model is changed.

        Constraint names embed the table name, so each constraint is
        renamed to match the new table name.
        """
        for field in model._meta.local_fields:
            if not isinstance(field, HStoreField):
                continue

            for key in self._iterate_required_keys(field):
                self._rename_hstore_required(
                    old_db_table, new_db_table, field, field, key
                )

    def add_field(self, model, field):
        """Ran when a field is added to a model."""
        for key in self._iterate_required_keys(field):
            self._create_hstore_required(model._meta.db_table, field, key)

    def remove_field(self, model, field):
        """Ran when a field is removed from a model."""
        for key in self._iterate_required_keys(field):
            self._drop_hstore_required(model._meta.db_table, field, key)

    def alter_field(self, model, old_field, new_field, strict=False):
        """Ran when the configuration on a field changed."""
        is_old_field_hstore = isinstance(old_field, HStoreField)
        is_new_field_hstore = isinstance(new_field, HStoreField)

        if not is_old_field_hstore and not is_new_field_hstore:
            return

        old_required = getattr(old_field, "required", []) or []
        new_required = getattr(new_field, "required", []) or []

        # handle field renames before moving on
        if str(old_field.column) != str(new_field.column):
            for key in self._iterate_required_keys(old_field):
                self._rename_hstore_required(
                    model._meta.db_table,
                    model._meta.db_table,
                    old_field,
                    new_field,
                    key,
                )

        # drop the constraints for keys that have been removed
        for key in old_required:
            if key not in new_required:
                self._drop_hstore_required(model._meta.db_table, old_field, key)

        # create new constraints for keys that have been added
        for key in new_required:
            if key not in old_required:
                self._create_hstore_required(
                    model._meta.db_table, new_field, key
                )

    def _create_hstore_required(self, table_name, field, key):
        """Creates a REQUIRED CONSTRAINT for the specified hstore key."""
        name = self._required_constraint_name(table_name, field, key)

        sql = self.sql_hstore_required_create.format(
            name=self.quote_name(name),
            table=self.quote_name(table_name),
            field=self.quote_name(field.column),
            key=key,
        )
        self.execute(sql)

    def _rename_hstore_required(
        self, old_table_name, new_table_name, old_field, new_field, key
    ):
        """Renames an existing REQUIRED CONSTRAINT for the specified hstore
        key."""
        old_name = self._required_constraint_name(
            old_table_name, old_field, key
        )
        new_name = self._required_constraint_name(
            new_table_name, new_field, key
        )

        sql = self.sql_hstore_required_rename.format(
            table=self.quote_name(new_table_name),
            old_name=self.quote_name(old_name),
            new_name=self.quote_name(new_name),
        )
        self.execute(sql)

    def _drop_hstore_required(self, table_name, field, key):
        """Drops a REQUIRED CONSTRAINT for the specified hstore key."""
        name = self._required_constraint_name(table_name, field, key)

        sql = self.sql_hstore_required_drop.format(
            table=self.quote_name(table_name), name=self.quote_name(name)
        )
        self.execute(sql)

    @staticmethod
    def _required_constraint_name(table: str, field, key):
        """Gets the name for a CHECK constraint that enforces the presence
        of a single hstore key.

        Arguments:
            table:
                The name of the table the field is
                a part of.

            field:
                The hstore field the constraint
                applies to.

            key:
                The name of the hstore key
                to create the name for.

        Returns:
            The name for the CHECK constraint.
        """
        return "{table}_{field}_required_{postfix}".format(
            table=table, field=field.column, postfix=key
        )

    @staticmethod
    def _iterate_required_keys(field):
        """Iterates over the keys marked as "required" in the specified field.

        Yields nothing when the field has no `required` option.

        Arguments:
            field:
                The field of which keys to
                iterate over.
        """
        required_keys = getattr(field, "required", None)
        if not required_keys:
            return

        yield from required_keys
django-postgres-extra-2.0.4/psqlextra/backend/side_effects/hstore_unique.py 0000664 0000000 0000000 00000013613 14175513017 0027255 0 ustar 00root root 0000000 0000000 from psqlextra.fields import HStoreField
class HStoreUniqueSchemaEditorSideEffect:
    """Schema editor side effect that creates, renames and drops UNIQUE
    indexes for hstore keys marked as unique (through the
    :see:HStoreField `uniqueness` option).

    Intended to be mixed into/driven by a schema editor: it relies on
    the host providing `quote_name` and `execute`.
    """

    sql_hstore_unique_create = (
        "CREATE UNIQUE INDEX IF NOT EXISTS " "{name} ON {table} " "({columns})"
    )

    sql_hstore_unique_rename = (
        "ALTER INDEX " "{old_name} " "RENAME TO " "{new_name}"
    )

    sql_hstore_unique_drop = "DROP INDEX IF EXISTS {name}"

    def create_model(self, model):
        """Ran when a new model is created."""
        for field in model._meta.local_fields:
            if not isinstance(field, HStoreField):
                continue

            self.add_field(model, field)

    def delete_model(self, model):
        """Ran when a model is being deleted."""
        for field in model._meta.local_fields:
            if not isinstance(field, HStoreField):
                continue

            self.remove_field(model, field)

    def alter_db_table(self, model, old_db_table, new_db_table):
        """Ran when the name of a model is changed.

        Index names embed the table name, so each index is renamed
        to match the new table name.
        """
        for field in model._meta.local_fields:
            if not isinstance(field, HStoreField):
                continue

            for keys in self._iterate_uniqueness_keys(field):
                self._rename_hstore_unique(
                    old_db_table, new_db_table, field, field, keys
                )

    def add_field(self, model, field):
        """Ran when a field is added to a model."""
        for keys in self._iterate_uniqueness_keys(field):
            self._create_hstore_unique(model, field, keys)

    def remove_field(self, model, field):
        """Ran when a field is removed from a model."""
        for keys in self._iterate_uniqueness_keys(field):
            self._drop_hstore_unique(model, field, keys)

    def alter_field(self, model, old_field, new_field, strict=False):
        """Ran when the configuration on a field changed."""
        is_old_field_hstore = isinstance(old_field, HStoreField)
        is_new_field_hstore = isinstance(new_field, HStoreField)

        if not is_old_field_hstore and not is_new_field_hstore:
            return

        old_uniqueness = getattr(old_field, "uniqueness", []) or []
        new_uniqueness = getattr(new_field, "uniqueness", []) or []

        # handle field renames before moving on
        if str(old_field.column) != str(new_field.column):
            for keys in self._iterate_uniqueness_keys(old_field):
                self._rename_hstore_unique(
                    model._meta.db_table,
                    model._meta.db_table,
                    old_field,
                    new_field,
                    keys,
                )

        # drop the indexes for keys that have been removed
        for keys in old_uniqueness:
            if keys not in new_uniqueness:
                self._drop_hstore_unique(
                    model, old_field, self._compose_keys(keys)
                )

        # create new indexes for keys that have been added
        for keys in new_uniqueness:
            if keys not in old_uniqueness:
                self._create_hstore_unique(
                    model, new_field, self._compose_keys(keys)
                )

    def _create_hstore_unique(self, model, field, keys):
        """Creates a UNIQUE constraint for the specified hstore keys."""
        name = self._unique_constraint_name(model._meta.db_table, field, keys)
        # each key becomes one indexed expression: (column->'key')
        columns = ["(%s->'%s')" % (field.column, key) for key in keys]
        sql = self.sql_hstore_unique_create.format(
            name=self.quote_name(name),
            table=self.quote_name(model._meta.db_table),
            columns=",".join(columns),
        )
        self.execute(sql)

    def _rename_hstore_unique(
        self, old_table_name, new_table_name, old_field, new_field, keys
    ):
        """Renames an existing UNIQUE constraint for the specified hstore
        keys."""
        old_name = self._unique_constraint_name(old_table_name, old_field, keys)
        new_name = self._unique_constraint_name(new_table_name, new_field, keys)

        sql = self.sql_hstore_unique_rename.format(
            old_name=self.quote_name(old_name),
            new_name=self.quote_name(new_name),
        )
        self.execute(sql)

    def _drop_hstore_unique(self, model, field, keys):
        """Drops a UNIQUE constraint for the specified hstore keys."""
        name = self._unique_constraint_name(model._meta.db_table, field, keys)
        sql = self.sql_hstore_unique_drop.format(name=self.quote_name(name))
        self.execute(sql)

    @staticmethod
    def _unique_constraint_name(table: str, field, keys):
        """Gets the name for a UNIQUE INDEX that applies to one or more keys in
        a hstore field.

        Arguments:
            table:
                The name of the table the field is
                a part of.

            field:
                The hstore field to create a
                UNIQUE INDEX for.

            keys:
                The names of the hstore keys
                (one or more) to create
                the name for.

        Returns:
            The name for the UNIQUE index.
        """
        postfix = "_".join(keys)
        return "{table}_{field}_unique_{postfix}".format(
            table=table, field=field.column, postfix=postfix
        )

    def _iterate_uniqueness_keys(self, field):
        """Iterates over the keys marked as "unique" in the specified field.

        Each yielded value is a list of key names (a single-key
        constraint is normalized to a one-element list).

        Arguments:
            field:
                The field of which keys to
                iterate over.
        """
        uniqueness = getattr(field, "uniqueness", None)
        if not uniqueness:
            return

        for keys in uniqueness:
            composed_keys = self._compose_keys(keys)
            yield composed_keys

    @staticmethod
    def _compose_keys(constraint):
        """Turns a string into a list of string or returns it as a list."""
        if isinstance(constraint, str):
            return [constraint]

        return constraint
django-postgres-extra-2.0.4/psqlextra/compiler.py 0000664 0000000 0000000 00000027371 14175513017 0022171 0 ustar 00root root 0000000 0000000 from collections.abc import Iterable
from typing import Tuple, Union
import django
from django.core.exceptions import SuspiciousOperation
from django.db.models import Expression, Model, Q
from django.db.models.fields.related import RelatedField
from django.db.models.sql.compiler import SQLInsertCompiler, SQLUpdateCompiler
from django.db.utils import ProgrammingError
from .expressions import HStoreValue
from .types import ConflictAction
class PostgresUpdateCompiler(SQLUpdateCompiler):
    """Compiler for SQL UPDATE statements that allows us to use expressions
    inside HStore values.

    Like:

        .update(name=dict(en=F('test')))
    """

    def as_sql(self):
        # convert plain dict values into HStoreValue expressions before
        # letting Django build the statement
        self._prepare_query_values()
        return super().as_sql()

    def _prepare_query_values(self):
        """Extra prep on query values by converting dictionaries into.

        :see:HStoreValue expressions.

        This allows putting expressions in a dictionary. The
        :see:HStoreValue will take care of resolving the expressions
        inside the dictionary.
        """
        if not self.query.values:
            return

        new_query_values = []
        for field, model, val in self.query.values:
            # leave non-dict values untouched
            if not isinstance(val, dict):
                new_query_values.append((field, model, val))
                continue

            # plain dicts without expressions keep Django's default handling
            if not self._does_dict_contain_expression(val):
                new_query_values.append((field, model, val))
                continue

            expression = HStoreValue(dict(val))
            new_query_values.append((field, model, expression))

        self.query.values = new_query_values

    @staticmethod
    def _does_dict_contain_expression(data: dict) -> bool:
        """Gets whether the specified dictionary contains any expressions that
        need to be resolved."""
        for value in data.values():
            # anything that quacks like a Django expression counts
            if hasattr(value, "resolve_expression"):
                return True

            if hasattr(value, "as_sql"):
                return True

        return False
class PostgresInsertCompiler(SQLInsertCompiler):
    """Compiler for SQL INSERT statements that rewrites Django's generated
    INSERT to carry an `ON CONFLICT` clause (upserts)."""

    def __init__(self, *args, **kwargs):
        """Initializes a new instance of :see:PostgresInsertCompiler."""
        super().__init__(*args, **kwargs)

        # shorthand for the backend's identifier quoting function
        self.qn = self.connection.ops.quote_name

    def as_sql(self, return_id=False):
        """Builds the SQL INSERT statement.

        Returns:
            A list of (sql, params) tuples, one per statement Django
            generated, each rewritten to include the ON CONFLICT clause.
        """
        queries = [
            self._rewrite_insert(sql, params, return_id)
            for sql, params in super().as_sql()
        ]

        return queries

    def execute_sql(self, return_id=False):
        # execute all the generate queries
        with self.connection.cursor() as cursor:
            rows = []
            for sql, params in self.as_sql(return_id):
                cursor.execute(sql, params)
                try:
                    rows.extend(cursor.fetchall())
                except ProgrammingError:
                    # statement produced no result set (e.g. DO NOTHING
                    # that matched an existing row)
                    pass

            # create a mapping between column names and column value
            return [
                {
                    column.name: row[column_index]
                    for column_index, column in enumerate(cursor.description)
                    if row
                }
                for row in rows
            ]

    def _rewrite_insert(self, sql, params, return_id=False):
        """Rewrites a formed SQL INSERT query to include the ON CONFLICT
        clause.

        Arguments:
            sql:
                The SQL INSERT query to rewrite.

            params:
                The parameters passed to the query.

            return_id:
                Whether to RETURN only the primary key (True)
                or all columns (False).

        Returns:
            A tuple of the rewritten SQL query and new params.
        """
        returning = (
            self.qn(self.query.model._meta.pk.attname) if return_id else "*"
        )

        return self._rewrite_insert_on_conflict(
            sql, params, self.query.conflict_action.value, returning
        )

    def _rewrite_insert_on_conflict(
        self, sql, params, conflict_action: ConflictAction, returning
    ):
        """Rewrites a normal SQL INSERT query to add the 'ON CONFLICT'
        clause."""
        update_columns = ", ".join(
            [
                # EXCLUDED refers to the row that was proposed for insertion
                "{0} = EXCLUDED.{0}".format(self.qn(field.column))
                for field in self.query.update_fields
            ]
        )

        # build the conflict target, the columns to watch
        # for conflicts
        conflict_target = self._build_conflict_target()
        index_predicate = self.query.index_predicate
        update_condition = self.query.conflict_update_condition

        rewritten_sql = f"{sql} ON CONFLICT {conflict_target}"

        # partial-index predicate narrows which rows conflict
        if index_predicate:
            expr_sql, expr_params = self._compile_expression(index_predicate)
            rewritten_sql += f" WHERE {expr_sql}"
            params += tuple(expr_params)

        rewritten_sql += f" DO {conflict_action}"

        if conflict_action == "UPDATE":
            rewritten_sql += f" SET {update_columns}"

            if update_condition:
                expr_sql, expr_params = self._compile_expression(
                    update_condition
                )
                rewritten_sql += f" WHERE {expr_sql}"
                params += tuple(expr_params)

        rewritten_sql += f" RETURNING {returning}"

        return (rewritten_sql, params)

    def _build_conflict_target(self):
        """Builds the `conflict_target` for the ON CONFLICT clause."""
        if not isinstance(self.query.conflict_target, Iterable):
            raise SuspiciousOperation(
                (
                    "%s is not a valid conflict target, specify "
                    "a list of column names, or tuples with column "
                    "names and hstore key."
                )
                % str(self.query.conflict_target)
            )

        # prefer matching against a declared index; fall back to raw
        # field/column names
        conflict_target = self._build_conflict_target_by_index()
        if conflict_target:
            return conflict_target

        return self._build_conflict_target_by_fields()

    def _build_conflict_target_by_fields(self):
        """Builds the `conflict_target` for the ON CONFLICT clauses by matching
        the fields specified in the specified conflict target against the
        model's fields.

        This requires some special handling because the fields names
        might not be same as the column names.
        """
        conflict_target = []

        for field_name in self.query.conflict_target:
            self._assert_valid_field(field_name)

            # special handling for hstore keys
            if isinstance(field_name, tuple):
                conflict_target.append(
                    "(%s->'%s')"
                    % (self._format_field_name(field_name), field_name[1])
                )
            else:
                conflict_target.append(self._format_field_name(field_name))

        return "(%s)" % ",".join(conflict_target)

    def _build_conflict_target_by_index(self):
        """Builds the `conflict_target` for the ON CONFLICT clause by trying to
        find an index that matches the specified conflict target on the query.

        Conflict targets must match some unique constraint, usually this
        is a `UNIQUE INDEX`.
        """
        matching_index = next(
            (
                index
                for index in self.query.model._meta.indexes
                if list(index.fields) == list(self.query.conflict_target)
            ),
            None,
        )

        if not matching_index:
            return None

        # re-use the column list Django would generate for the index's
        # CREATE statement so expressions match exactly
        with self.connection.schema_editor() as schema_editor:
            stmt = matching_index.create_sql(self.query.model, schema_editor)
            return "(%s)" % stmt.parts["columns"]

    def _get_model_field(self, name: str):
        """Gets the field on a model with the specified name.

        Arguments:
            name:
                The name of the field to look for.

                This can be both the actual field name, or
                the name of the column, both will work :)

        Returns:
            The field with the specified name or None if
            no such field exists.
        """
        field_name = self._normalize_field_name(name)

        # 'pk' has special meaning and always refers to the primary
        # key of a model, we have to respect this de-facto standard behaviour
        if field_name == "pk" and self.query.model._meta.pk:
            return self.query.model._meta.pk

        for field in self.query.model._meta.local_concrete_fields:
            if field.name == field_name or field.column == field_name:
                return field

        return None

    def _format_field_name(self, field_name) -> str:
        """Formats a field's name for usage in SQL.

        Arguments:
            field_name:
                The field name to format.

        Returns:
            The specified field name formatted for
            usage in SQL.
        """
        field = self._get_model_field(field_name)
        return self.qn(field.column)

    def _format_field_value(self, field_name) -> str:
        """Formats a field's value for usage in SQL.

        Arguments:
            field_name:
                The name of the field to format
                the value of.

        Returns:
            The field's value formatted for usage
            in SQL.
        """
        field_name = self._normalize_field_name(field_name)
        field = self._get_model_field(field_name)

        value = getattr(self.query.objs[0], field.attname)

        if isinstance(field, RelatedField) and isinstance(value, Model):
            value = value.pk

        return SQLInsertCompiler.prepare_value(
            self,
            field,
            # Note: this deliberately doesn't use `pre_save_val` as we don't
            # want things like auto_now on DateTimeField (etc.) to change the
            # value. We rely on pre_save having already been done by the
            # underlying compiler so that things like FileField have already had
            # the opportunity to save out their data.
            value,
        )

    def _compile_expression(
        self, expression: Union[Expression, Q, str]
    ) -> Tuple[str, Union[tuple, list]]:
        """Compiles an expression, Q object or raw SQL string into SQL and
        tuple of parameters."""
        if isinstance(expression, Q):
            if django.VERSION < (3, 1):
                raise SuspiciousOperation(
                    "Q objects in psqlextra can only be used with Django 3.1 and newer"
                )

            return self.query.build_where(expression).as_sql(
                self, self.connection
            )

        elif isinstance(expression, Expression):
            return self.compile(expression)

        # raw SQL string passes through untouched, with no parameters
        return expression, tuple()

    def _assert_valid_field(self, field_name: str):
        """Asserts that a field with the specified name exists on the model and
        raises :see:SuspiciousOperation if it does not."""
        field_name = self._normalize_field_name(field_name)
        if self._get_model_field(field_name):
            return

        raise SuspiciousOperation(
            (
                "%s is not a valid conflict target, specify "
                "a list of column names, or tuples with column "
                "names and hstore key."
            )
            % str(field_name)
        )

    @staticmethod
    def _normalize_field_name(field_name: str) -> str:
        """Normalizes a field name into a string by extracting the field name
        if it was specified as a reference to a HStore key (as a tuple).

        Arguments:
            field_name:
                The field name to normalize.

        Returns:
            The normalized field name.
        """
        if isinstance(field_name, tuple):
            field_name, _ = field_name

        return field_name
django-postgres-extra-2.0.4/psqlextra/expressions.py 0000664 0000000 0000000 00000014151 14175513017 0022731 0 ustar 00root root 0000000 0000000 from django.db.models import CharField, expressions
class HStoreValue(expressions.Expression):
    """Represents a HStore value.

    The base PostgreSQL implementation Django provides, always
    represents HStore values as dictionaries, but this doesn't work if
    you want to use expressions inside hstore values.
    """

    def __init__(self, value):
        """Initializes a new instance.

        Arguments:
            value:
                A dict mapping hstore keys to plain values
                or Django expressions.
        """
        self.value = value

    def resolve_expression(self, *args, **kwargs):
        """Resolves expressions inside the dictionary."""
        result = dict()
        for key, value in self.value.items():
            if hasattr(value, "resolve_expression"):
                result[key] = value.resolve_expression(*args, **kwargs)
            else:
                result[key] = value

        return HStoreValue(result)

    def as_sql(self, compiler, connection):
        """Compiles the HStore value into SQL.

        Compiles expressions contained in the values
        of HStore entries as well.

        Given a dictionary like:

            dict(key1='val1', key2='val2')

        The resulting SQL will be:

            hstore(hstore('key1', 'val1'), hstore('key2', 'val2'))
        """
        sql = []
        params = []

        for key, value in self.value.items():
            if hasattr(value, "as_sql"):
                # nested expression: compile it and splice its SQL in
                inner_sql, inner_params = value.as_sql(compiler, connection)
                sql.append(f"hstore(%s, {inner_sql})")
                params.append(key)
                params.extend(inner_params)
            elif value is not None:
                sql.append("hstore(%s, %s)")
                params.append(key)
                params.append(str(value))
            else:
                # hstore values may legitimately be NULL
                sql.append("hstore(%s, NULL)")
                params.append(key)

        # hstore pairs are merged together with the || operator
        return " || ".join(sql), params
class HStoreColumn(expressions.Col):
    """HStoreColumn expression.

    Generates expressions like:

        [db table].[column]->'[hstore key]'
    """

    contains_column_references = True

    def __init__(self, alias, target, hstore_key):
        """Initializes a new instance of :see:HStoreColumn.

        Arguments:
            alias:
                The table name.

            target:
                The field instance.

            hstore_key:
                The name of the hstore key to include
                in the expression.
        """
        super().__init__(alias, target, output_field=target)
        self.alias, self.target, self.hstore_key = alias, target, hstore_key

    def __repr__(self):
        """Gets a textual representation of this expression."""
        return "{}({}, {}->'{}')".format(
            self.__class__.__name__, self.alias, self.target, self.hstore_key
        )

    def as_sql(self, compiler, connection):
        """Compiles this expression into SQL."""
        qn = compiler.quote_name_unless_alias
        return (
            "%s.%s->'%s'"
            % (qn(self.alias), qn(self.target.column), self.hstore_key),
            [],
        )

    def relabeled_clone(self, relabels):
        """Gets a re-labeled clone of this expression.

        Bug fix: the previous implementation passed `self.output_field`
        as a fourth positional argument, but `__init__` only accepts
        (alias, target, hstore_key), which raised a TypeError whenever
        Django relabeled this expression (e.g. in subqueries).
        """
        return self.__class__(
            relabels.get(self.alias, self.alias),
            self.target,
            self.hstore_key,
        )
class HStoreRef(expressions.F):
    """Inline reference to a HStore key.

    Allows selecting individual keys in annotations.
    """

    def __init__(self, name: str, key: str):
        """Initializes a new instance of :see:HStoreRef.

        Arguments:
            name:
                The name of the column/field to resolve.

            key:
                The name of the HStore key to select.
        """
        super().__init__(name)
        self.key = key

    def resolve_expression(self, *args, **kwargs):
        """Resolves the expression into a :see:HStoreColumn expression."""
        # let F resolve the column reference first, then wrap it so the
        # generated SQL selects only the requested hstore key
        original_expression: expressions.Col = super().resolve_expression(
            *args, **kwargs
        )
        expression = HStoreColumn(
            original_expression.alias, original_expression.target, self.key
        )
        return expression
class DateTimeEpochColumn(expressions.Col):
    """Gets the date/time column as a UNIX epoch timestamp."""

    contains_column_references = True

    def as_sql(self, compiler, connection):
        """Compiles this expression into SQL, wrapping the column in an
        EXTRACT(epoch FROM ...) call."""
        inner_sql, params = super().as_sql(compiler, connection)
        return "EXTRACT(epoch FROM {})".format(inner_sql), params

    def get_group_by_cols(self):
        """This expression contributes nothing to GROUP BY."""
        return []
class DateTimeEpoch(expressions.F):
    """Gets the date/time column as a UNIX epoch timestamp."""

    contains_aggregate = False

    def resolve_expression(self, *args, **kwargs):
        """Resolves the column reference and wraps it in a
        :see:DateTimeEpochColumn so it compiles to EXTRACT(epoch ...)."""
        original_expression = super().resolve_expression(*args, **kwargs)
        expression = DateTimeEpochColumn(
            original_expression.alias, original_expression.target
        )
        return expression
def IsNotNone(*fields, default=None):
    """Selects whichever field is not None, in the specified order.

    Arguments:
        fields:
            The fields to attempt to get a value from,
            in order.

        default:
            The value to return in case all values are None.

    Returns:
        A Case-When expression that tries each field and
        returns the specified default value when all of
        them are None.
    """
    # build the When clauses in reverse so the first field listed ends
    # up with the highest precedence in the resulting CASE expression
    cases = []
    for field in reversed(fields):
        not_null = ~expressions.Q(**{field: None})
        cases.append(expressions.When(not_null, then=expressions.F(field)))

    return expressions.Case(
        *cases,
        default=expressions.Value(default),
        output_field=CharField(),
    )
class ExcludedCol(expressions.Expression):
    """References a column in PostgreSQL's special EXCLUDED column, which is
    used in upserts to refer to the data about to be inserted/updated.

    See: https://www.postgresql.org/docs/9.5/sql-insert.html#SQL-ON-CONFLICT
    """

    def __init__(self, name: str):
        # name: the unquoted column name to reference
        self.name = name

    def as_sql(self, compiler, connection):
        """Compiles this expression into `EXCLUDED.<quoted name>` with no
        query parameters."""
        quoted_name = connection.ops.quote_name(self.name)
        return f"EXCLUDED.{quoted_name}", tuple()
django-postgres-extra-2.0.4/psqlextra/fields/ 0000775 0000000 0000000 00000000000 14175513017 0021241 5 ustar 00root root 0000000 0000000 django-postgres-extra-2.0.4/psqlextra/fields/__init__.py 0000664 0000000 0000000 00000000101 14175513017 0023342 0 ustar 00root root 0000000 0000000 from .hstore_field import HStoreField
__all__ = ["HStoreField"]
django-postgres-extra-2.0.4/psqlextra/fields/hstore_field.py 0000664 0000000 0000000 00000004424 14175513017 0024266 0 ustar 00root root 0000000 0000000 from typing import List, Optional, Tuple, Union
from django.contrib.postgres.fields import HStoreField as DjangoHStoreField
from django.db.models.expressions import Expression
from django.db.models.fields import Field
class HStoreField(DjangoHStoreField):
    """Improved version of Django's :see:HStoreField that adds support for
    database-level constraints.

    Notes:
        - For the implementation of uniqueness, see the
          custom database back-end.
    """

    def __init__(
        self,
        *args,
        uniqueness: Optional[List[Union[str, Tuple[str, ...]]]] = None,
        required: Optional[List[str]] = None,
        **kwargs
    ):
        """Initializes a new instance of :see:HStoreField.

        Arguments:
            uniqueness:
                List of keys to enforce as unique. Use tuples
                to enforce multiple keys together to be unique.

            required:
                List of keys that should be enforced as required.
        """
        super(HStoreField, self).__init__(*args, **kwargs)

        self.uniqueness = uniqueness
        self.required = required

    def get_prep_value(self, value):
        """Override the base class so it doesn't cast all values to strings.

        psqlextra supports expressions in hstore fields, so casting all
        values to strings is a bad idea.
        """
        # deliberately skip DjangoHStoreField.get_prep_value, which would
        # stringify everything, including expressions
        value = Field.get_prep_value(self, value)

        if isinstance(value, dict):
            prep_value = {}
            for key, val in value.items():
                if isinstance(val, Expression):
                    # expressions are resolved/compiled later
                    prep_value[key] = val
                elif val is not None:
                    prep_value[key] = str(val)
                else:
                    prep_value[key] = val

            value = prep_value

        if isinstance(value, list):
            value = [str(item) for item in value]

        return value

    def deconstruct(self):
        """Gets the values to pass to :see:__init__ when re-creating this
        object."""
        name, path, args, kwargs = super(HStoreField, self).deconstruct()

        if self.uniqueness is not None:
            kwargs["uniqueness"] = self.uniqueness

        if self.required is not None:
            kwargs["required"] = self.required

        return name, path, args, kwargs
django-postgres-extra-2.0.4/psqlextra/indexes/ 0000775 0000000 0000000 00000000000 14175513017 0021432 5 ustar 00root root 0000000 0000000 django-postgres-extra-2.0.4/psqlextra/indexes/__init__.py 0000664 0000000 0000000 00000000413 14175513017 0023541 0 ustar 00root root 0000000 0000000 from .case_insensitive_unique_index import CaseInsensitiveUniqueIndex
from .conditional_unique_index import ConditionalUniqueIndex
from .unique_index import UniqueIndex
__all__ = [
"UniqueIndex",
"ConditionalUniqueIndex",
"CaseInsensitiveUniqueIndex",
]
django-postgres-extra-2.0.4/psqlextra/indexes/case_insensitive_unique_index.py 0000664 0000000 0000000 00000002534 14175513017 0030120 0 ustar 00root root 0000000 0000000 from django.db.models.indexes import Index
class CaseInsensitiveUniqueIndex(Index):
    """Unique index that compares values case-insensitively by indexing
    LOWER(column) for every column."""

    sql_create_unique_index = (
        "CREATE UNIQUE INDEX %(name)s ON %(table)s (%(columns)s)%(extra)s"
    )

    def create_sql(self, model, schema_editor, using="", **kwargs):
        """Creates the SQL statement for this index, wrapping each column
        in LOWER(...) and forcing the index to be UNIQUE."""
        statement = super().create_sql(model, schema_editor, using)
        statement.template = self.sql_create_unique_index

        column_collection = statement.parts["columns"]
        statement.parts["columns"] = ", ".join(
            [
                "LOWER(%s)" % self._quote_column(column_collection, column, idx)
                for idx, column in enumerate(column_collection.columns)
            ]
        )

        return statement

    def deconstruct(self):
        """Serializes the :see:CaseInsensitiveUniqueIndex for the migrations
        file."""
        _, args, kwargs = super().deconstruct()

        path = "%s.%s" % (self.__class__.__module__, self.__class__.__name__)
        path = path.replace("django.db.models.indexes", "django.db.models")

        return path, args, kwargs

    @staticmethod
    def _quote_column(column_collection, column, idx):
        """Quotes the column name, appending its suffix (e.g. DESC) when one
        exists for that position.

        Fixed redundancy: the IndexError fallback re-computed
        `quote_name(column)` even though the result was already held in
        `quoted_name`.
        """
        quoted_name = column_collection.quote_name(column)
        try:
            return quoted_name + column_collection.col_suffixes[idx]
        except IndexError:
            # no suffix for this column
            return quoted_name
django-postgres-extra-2.0.4/psqlextra/indexes/conditional_unique_index.py 0000664 0000000 0000000 00000004103 14175513017 0027062 0 ustar 00root root 0000000 0000000 import django
from django.db.models.indexes import Index
class ConditionalUniqueIndex(Index):
    """Creates a partial unique index based on a given condition.

    Useful, for example, if you need unique combination of foreign keys, but you might want to include
    NULL as a valid value. In that case, you can just use:

    >>> class Meta:
    >>>     indexes = [
    >>>         ConditionalUniqueIndex(fields=['a', 'b', 'c'], condition='"c" IS NOT NULL'),
    >>>         ConditionalUniqueIndex(fields=['a', 'b'], condition='"c" IS NULL')
    >>>     ]
    """

    sql_create_index = "CREATE UNIQUE INDEX %(name)s ON %(table)s (%(columns)s)%(extra)s WHERE %(condition)s"

    def __init__(self, condition: str, fields=None, name=None):
        """Initializes a new instance of :see:ConditionalUniqueIndex.

        Arguments:
            condition:
                Raw SQL predicate that limits which rows the unique
                index applies to.

            fields:
                The fields to index. Defaults to no fields.

            name:
                Optional explicit name for the index.
        """
        # `fields` previously defaulted to a mutable `[]`, which is the
        # classic shared-mutable-default hazard; use None as the sentinel
        # and build a fresh list per call instead.
        super().__init__(fields=fields if fields is not None else [], name=name)
        self._condition = condition

    def create_sql(self, model, schema_editor, using="", **kwargs):
        """Creates the actual SQL used when applying the migration."""
        if django.VERSION >= (2, 0):
            statement = super().create_sql(model, schema_editor, using)
            statement.template = self.sql_create_index
            statement.parts["condition"] = self._condition

            return statement
        else:
            # Django < 2.0 has no Statement objects; render the raw SQL
            sql_create_index = self.sql_create_index
            sql_parameters = {
                **Index.get_sql_create_template_values(
                    self, model, schema_editor, using
                ),
                "condition": self._condition,
            }

            return sql_create_index % sql_parameters

    def deconstruct(self):
        """Serializes the :see:ConditionalUniqueIndex for the migrations
        file."""
        path = "%s.%s" % (self.__class__.__module__, self.__class__.__name__)
        path = path.replace("django.db.models.indexes", "django.db.models")

        return (
            path,
            (),
            {
                "fields": self.fields,
                "name": self.name,
                "condition": self._condition,
            },
        )
django-postgres-extra-2.0.4/psqlextra/indexes/unique_index.py 0000664 0000000 0000000 00000001033 14175513017 0024476 0 ustar 00root root 0000000 0000000 import django
from django.db.models.indexes import Index
class UniqueIndex(Index):
    """Index that PostgreSQL enforces as UNIQUE."""

    def create_sql(self, *args, **kwargs):
        """Builds the index creation SQL, upgraded to a UNIQUE index."""
        if django.VERSION < (2, 0):
            # Older Django returns the SQL as a plain string.
            return self._rewrite_sql(super().create_sql(*args, **kwargs))

        # Django >= 2.0 returns a Statement object; patch its template.
        statement = super().create_sql(*args, **kwargs)
        statement.template = self._rewrite_sql(statement.template)
        return statement

    @staticmethod
    def _rewrite_sql(sql: str) -> str:
        """Turns a CREATE INDEX statement into CREATE UNIQUE INDEX."""
        return sql.replace("CREATE INDEX", "CREATE UNIQUE INDEX")
django-postgres-extra-2.0.4/psqlextra/management/ 0000775 0000000 0000000 00000000000 14175513017 0022107 5 ustar 00root root 0000000 0000000 django-postgres-extra-2.0.4/psqlextra/management/__init__.py 0000664 0000000 0000000 00000000000 14175513017 0024206 0 ustar 00root root 0000000 0000000 django-postgres-extra-2.0.4/psqlextra/management/commands/ 0000775 0000000 0000000 00000000000 14175513017 0023710 5 ustar 00root root 0000000 0000000 django-postgres-extra-2.0.4/psqlextra/management/commands/__init__.py 0000664 0000000 0000000 00000000000 14175513017 0026007 0 ustar 00root root 0000000 0000000 django-postgres-extra-2.0.4/psqlextra/management/commands/pgmakemigrations.py 0000664 0000000 0000000 00000000603 14175513017 0027622 0 ustar 00root root 0000000 0000000 from django.core.management.commands import makemigrations
from psqlextra.backend.migrations import postgres_patched_migrations
class Command(makemigrations.Command):
    """`makemigrations` variant that is aware of the PostgreSQL specific
    model/migration features provided by this package."""

    help = "Creates new PostgreSQL specific migration(s) for apps."

    def handle(self, *app_labels, **options):
        """Runs the standard makemigrations flow while the migration
        autodetector is patched for PostgreSQL specific models."""
        with postgres_patched_migrations():
            return super().handle(*app_labels, **options)
django-postgres-extra-2.0.4/psqlextra/management/commands/pgpartition.py 0000664 0000000 0000000 00000007272 14175513017 0026632 0 ustar 00root root 0000000 0000000 import sys
from typing import Optional
import colorama
from ansimarkup import ansiprint, ansistring
from django.conf import settings
from django.core.management.base import BaseCommand
from django.utils.module_loading import import_string
from psqlextra.partitioning import PostgresPartitioningError
class Command(BaseCommand):
    """Create new partitions and delete old ones according to the configured
    partitioning strategies."""

    help = "Create new partitions and delete old ones using the configured partitioning manager. The PSQLEXTRA_PARTITIONING_MANAGER setting must be configured."

    def add_arguments(self, parser):
        """Registers the command-line flags this command accepts."""
        argument_specs = [
            (
                ("--dry", "-d"),
                {
                    "action": "store_true",
                    "help": "When specified, no partition will be created/deleted. Just a simulation.",
                    "required": False,
                    "default": False,
                },
            ),
            (
                ("--yes", "-y"),
                {
                    "action": "store_true",
                    "help": "Answer yes to all questions. WARNING: You will not be asked before deleting a partition.",
                    "required": False,
                    "default": False,
                },
            ),
            (
                ("--using", "-u"),
                {
                    "help": "Name of the database connection to use.",
                    "default": "default",
                },
            ),
            (
                ("--skip-create",),
                {
                    "action": "store_true",
                    "help": "Do not create partitions.",
                    "required": False,
                    "default": False,
                },
            ),
            (
                ("--skip-delete",),
                {
                    "action": "store_true",
                    "help": "Do not delete partitions.",
                    "required": False,
                    "default": False,
                },
            ),
        ]

        for names, options in argument_specs:
            parser.add_argument(*names, **options)

    def handle(
        self,
        dry: bool,
        yes: bool,
        using: Optional[str],
        skip_create: bool,
        skip_delete: bool,
        *args,
        **kwargs,
    ):
        """Plans the partitioning changes and applies them after an optional
        confirmation prompt."""
        # disable coloring if no terminal is attached
        if not sys.stdout.isatty():
            colorama.init(strip=True)

        manager = self._partitioning_manager()
        plan = manager.plan(
            skip_create=skip_create, skip_delete=skip_delete, using=using
        )

        if not (plan.creations or plan.deletions):
            ansiprint("Nothing to be done.")
            return

        plan.print()

        if dry:
            return

        if not yes:
            sys.stdout.write(
                ansistring(
                    "Do you want to proceed? (y/N) "
                )
            )
            if not self._ask_for_confirmation():
                ansiprint("Operation aborted.")
                return

        plan.apply(using=using)
        ansiprint("Operations applied.")

    @staticmethod
    def _ask_for_confirmation() -> bool:
        """Reads an answer from stdin; only answers starting with 'y' (or
        exactly 'yes') count as confirmation, everything else is a no."""
        answer = input("").lower()
        # empty string slices to "", which is not "y" -> False, matching
        # the original empty/other handling
        return answer[:1] == "y"

    @staticmethod
    def _partitioning_manager():
        """Resolves the partitioning manager from settings, importing it when
        configured as a dotted-path string.

        Raises:
            PostgresPartitioningError:
                When the setting is configured but empty/None.
        """
        manager = getattr(settings, "PSQLEXTRA_PARTITIONING_MANAGER")
        if not manager:
            raise PostgresPartitioningError(
                "You must configure the PSQLEXTRA_PARTITIONING_MANAGER setting "
                "for automatic partitioning to work."
            )

        if isinstance(manager, str):
            manager = import_string(manager)

        return manager
django-postgres-extra-2.0.4/psqlextra/management/commands/pgrefreshmv.py 0000664 0000000 0000000 00000003024 14175513017 0026611 0 ustar 00root root 0000000 0000000 from django.apps import apps
from django.core.management.base import BaseCommand
from django.db.utils import NotSupportedError, OperationalError
from psqlextra.models import PostgresMaterializedViewModel
class Command(BaseCommand):
    """Refreshes a :see:PostgresMaterializedViewModel."""

    help = "Refreshes the specified materialized view."

    def add_arguments(self, parser):
        """Registers the positional and optional arguments."""
        parser.add_argument(
            "app_label",
            type=str,
            help="Label of the app the materialized view model is in.",
        )

        parser.add_argument(
            "model_name",
            type=str,
            help="Name of the materialized view model to refresh.",
        )

        parser.add_argument(
            "--concurrently",
            "-c",
            action="store_true",
            help="Whether to refresh the materialized view model concurrently.",
            required=False,
            default=False,
        )

    def handle(self, *app_labels, **options):
        """Looks up the model and refreshes the materialized view behind it.

        Raises:
            OperationalError:
                When no model with the specified name can be found.

            NotSupportedError:
                When the model is not a materialized view model.
        """
        app_label = options.get("app_label")
        model_name = options.get("model_name")
        concurrently = options.get("concurrently")

        # apps.get_model raises LookupError instead of returning None, so
        # the previous `if not model` check was unreachable; translate the
        # exception into the documented error instead.
        try:
            model = apps.get_model(app_label, model_name)
        except LookupError:
            raise OperationalError(f"Cannot find a model named '{model_name}'")

        if not issubclass(model, PostgresMaterializedViewModel):
            raise NotSupportedError(
                f"Model {model.__name__} is not a `PostgresMaterializedViewModel`"
            )

        model.refresh(concurrently=concurrently)
django-postgres-extra-2.0.4/psqlextra/manager/ 0000775 0000000 0000000 00000000000 14175513017 0021405 5 ustar 00root root 0000000 0000000 django-postgres-extra-2.0.4/psqlextra/manager/__init__.py 0000664 0000000 0000000 00000000405 14175513017 0023515 0 ustar 00root root 0000000 0000000 # this should not be here, but there are users depending
# on this being here, so let's leave it here so we don't
# break them
from psqlextra.query import PostgresQuerySet
from .manager import PostgresManager
__all__ = ["PostgresManager", "PostgresQuerySet"]
django-postgres-extra-2.0.4/psqlextra/manager/manager.py 0000664 0000000 0000000 00000003777 14175513017 0023407 0 ustar 00root root 0000000 0000000 from typing import Optional
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.db import connections
from django.db.models import Manager
from psqlextra.query import PostgresQuerySet
class PostgresManager(Manager.from_queryset(PostgresQuerySet)):
    """Adds support for PostgreSQL specifics."""

    use_in_migrations = True

    def __init__(self, *args, **kwargs):
        """Initializes a new instance of :see:PostgresManager.

        Raises:
            ImproperlyConfigured:
                When none of the configured databases use
                the 'psqlextra' backend.
        """
        super().__init__(*args, **kwargs)

        # make sure our back-end is set in at least one db and refuse to
        # proceed; a generator expression avoids the needless intermediate
        # list the previous any([...]) built
        has_psqlextra_backend = any(
            "psqlextra" in db_settings["ENGINE"]
            for db_settings in settings.DATABASES.values()
        )

        if not has_psqlextra_backend:
            raise ImproperlyConfigured(
                (
                    "Could not locate the 'psqlextra.backend'. "
                    "django-postgres-extra cannot function without "
                    "the 'psqlextra.backend'. Set DATABASES.ENGINE."
                )
            )

    def truncate(
        self, cascade: bool = False, using: Optional[str] = None
    ) -> None:
        """Truncates this model/table using the TRUNCATE statement.

        This DELETES ALL ROWS. No signals will be fired.

        See: https://www.postgresql.org/docs/9.1/sql-truncate.html

        Arguments:
            cascade:
                Whether to delete dependent rows. If set to
                False, an error will be raised if there
                are rows in other tables referencing
                the rows you're trying to delete.

            using:
                Optionally, the name of the database connection
                to use. Defaults to the 'default' connection.
        """
        connection = connections[using or "default"]
        table_name = connection.ops.quote_name(self.model._meta.db_table)

        with connection.cursor() as cursor:
            sql = "TRUNCATE TABLE %s" % table_name
            if cascade:
                sql += " CASCADE"

            cursor.execute(sql)
django-postgres-extra-2.0.4/psqlextra/models/ 0000775 0000000 0000000 00000000000 14175513017 0021256 5 ustar 00root root 0000000 0000000 django-postgres-extra-2.0.4/psqlextra/models/__init__.py 0000664 0000000 0000000 00000000427 14175513017 0023372 0 ustar 00root root 0000000 0000000 from .base import PostgresModel
from .partitioned import PostgresPartitionedModel
from .view import PostgresMaterializedViewModel, PostgresViewModel
__all__ = [
"PostgresModel",
"PostgresViewModel",
"PostgresMaterializedViewModel",
"PostgresPartitionedModel",
]
django-postgres-extra-2.0.4/psqlextra/models/base.py 0000664 0000000 0000000 00000000460 14175513017 0022542 0 ustar 00root root 0000000 0000000 from django.db import models
from psqlextra.manager import PostgresManager
class PostgresModel(models.Model):
    """Base class for taking advantage of PostgreSQL specific features."""

    class Meta:
        # Abstract: no table is created for this base class itself.
        abstract = True
        # Make Django use the PostgresManager below as the base manager.
        base_manager_name = "objects"

    # Default manager providing the PostgreSQL specific query set.
    objects = PostgresManager()
django-postgres-extra-2.0.4/psqlextra/models/options.py 0000664 0000000 0000000 00000001722 14175513017 0023325 0 ustar 00root root 0000000 0000000 from typing import Dict, List, Optional, Union
from psqlextra.types import PostgresPartitioningMethod, SQLWithParams
class PostgresPartitionedModelOptions:
    """Container for :see:PostgresPartitionedModel options.

    This is where attributes copied from the model's `PartitioningMeta`
    are held.
    """

    def __init__(self, method: PostgresPartitioningMethod, key: List[str]):
        """Stores the partitioning method and key, keeping the raw values in
        `original_attrs` (mirroring Django's `Meta.original_attrs`)."""
        self.method = method
        self.key = key

        original: Dict[str, Union[PostgresPartitioningMethod, List[str]]] = {
            "method": method,
            "key": key,
        }
        self.original_attrs = original
class PostgresViewOptions:
    """Container for :see:PostgresView and :see:PostgresMaterializedView
    options.

    This is where attributes copied from the model's `ViewMeta` are
    held.
    """

    def __init__(self, query: Optional[SQLWithParams]):
        """Stores the view's backing query, keeping the raw value in
        `original_attrs` (mirroring Django's `Meta.original_attrs`)."""
        self.query = query

        original: Dict[str, Optional[SQLWithParams]] = {"query": self.query}
        self.original_attrs = original
django-postgres-extra-2.0.4/psqlextra/models/partitioned.py 0000664 0000000 0000000 00000002536 14175513017 0024160 0 ustar 00root root 0000000 0000000 from django.db.models.base import ModelBase
from psqlextra.types import PostgresPartitioningMethod
from .base import PostgresModel
from .options import PostgresPartitionedModelOptions
class PostgresPartitionedModelMeta(ModelBase):
    """Custom meta class for :see:PostgresPartitionedModel.

    This meta class extracts attributes from the inner
    `PartitioningMeta` class and copies it onto a `_partitioning_meta`
    attribute. This is similar to how Django's `_meta` works.
    """

    # defaults used when the model declares no PartitioningMeta, or
    # declares one without `method`/`key`
    default_method = PostgresPartitioningMethod.RANGE
    default_key = []

    def __new__(cls, name, bases, attrs, **kwargs):
        new_class = super().__new__(cls, name, bases, attrs, **kwargs)
        meta_class = attrs.pop("PartitioningMeta", None)

        method = getattr(meta_class, "method", None)
        key = getattr(meta_class, "key", None)

        # fixed misspelled local name (was `patitioning_meta`)
        partitioning_meta = PostgresPartitionedModelOptions(
            method=method or cls.default_method, key=key or cls.default_key
        )

        new_class.add_to_class("_partitioning_meta", partitioning_meta)
        return new_class
class PostgresPartitionedModel(
    PostgresModel, metaclass=PostgresPartitionedModelMeta
):
    """Base class for taking advantage of PostgreSQL's 11.x native support for
    table partitioning."""

    class Meta:
        # Abstract: concrete partitioned models derive from this class.
        abstract = True
        base_manager_name = "objects"
django-postgres-extra-2.0.4/psqlextra/models/view.py 0000664 0000000 0000000 00000010547 14175513017 0022611 0 ustar 00root root 0000000 0000000 from typing import Callable, Optional, Union
from django.core.exceptions import ImproperlyConfigured
from django.db import connections
from django.db.models import Model
from django.db.models.base import ModelBase
from django.db.models.query import QuerySet
from psqlextra.type_assertions import is_query_set, is_sql, is_sql_with_params
from psqlextra.types import SQL, SQLWithParams
from .base import PostgresModel
from .options import PostgresViewOptions
ViewQueryValue = Union[QuerySet, SQLWithParams, SQL]
ViewQuery = Optional[Union[ViewQueryValue, Callable[[], ViewQueryValue]]]
class PostgresViewModelMeta(ModelBase):
    """Custom meta class for :see:PostgresView and
    :see:PostgresMaterializedView.

    This meta class extracts attributes from the inner
    `ViewMeta` class and copies it onto a `_view_meta`
    attribute. This is similar to how Django's `_meta` works.
    """

    def __new__(cls, name, bases, attrs, **kwargs):
        new_class = super().__new__(cls, name, bases, attrs, **kwargs)
        meta_class = attrs.pop("ViewMeta", None)
        view_query = getattr(meta_class, "query", None)

        # normalize whatever the model declared into (sql, params)
        sql_with_params = cls._view_query_as_sql_with_params(
            new_class, view_query
        )

        view_meta = PostgresViewOptions(query=sql_with_params)
        new_class.add_to_class("_view_meta", view_meta)
        return new_class

    @staticmethod
    def _view_query_as_sql_with_params(
        model: Model, view_query: ViewQuery
    ) -> Optional[SQLWithParams]:
        """Gets the query associated with the view as a raw SQL query with bind
        parameters.

        The query can be specified as a query set, raw SQL with params
        or without params. The query can also be specified as a callable
        which returns any of the above.

        When copying the meta options from the model, we convert any
        from the above to a raw SQL query with bind parameters. We do
        this is because it is what the SQL driver understands and
        we can easily serialize it into a migration.

        Raises:
            ImproperlyConfigured:
                When the specified query is not a query set,
                SQL string or (sql, params) tuple.
        """

        # might be a callable to support delayed imports
        view_query = view_query() if callable(view_query) else view_query

        # make sure we don't do a boolean check on query sets,
        # because that might evaluate the query set
        if not is_query_set(view_query) and not view_query:
            return None

        is_valid_view_query = (
            is_query_set(view_query)
            or is_sql_with_params(view_query)
            or is_sql(view_query)
        )

        if not is_valid_view_query:
            raise ImproperlyConfigured(
                (
                    "Model '%s' is not properly configured to be a view."
                    " Set the `query` attribute on the `ViewMeta` class"
                    " to be a valid `django.db.models.query.QuerySet`"
                    " SQL string, or tuple of SQL string and params."
                )
                % (model.__name__)
            )

        # querysets can easily be converted into sql, params
        if is_query_set(view_query):
            return view_query.query.sql_with_params()

        # query was already specified in the target format
        if is_sql_with_params(view_query):
            return view_query

        # plain SQL string: no bind parameters
        return view_query, tuple()
class PostgresViewModel(PostgresModel, metaclass=PostgresViewModelMeta):
    """Base class for creating a model that is a view."""

    class Meta:
        # Abstract: no view is created for this base class itself.
        abstract = True
        base_manager_name = "objects"
class PostgresMaterializedViewModel(
    PostgresViewModel, metaclass=PostgresViewModelMeta
):
    """Base class for creating a model that is a materialized view."""

    class Meta:
        abstract = True
        base_manager_name = "objects"

    @classmethod
    def refresh(
        cls, concurrently: bool = False, using: Optional[str] = None
    ) -> None:
        """Refreshes this materialized view.

        Arguments:
            concurrently:
                Whether to tell PostgreSQL to refresh this
                materialized view concurrently.

            using:
                Optionally, the name of the database connection
                to use for refreshing the materialized view.
        """
        connection = connections[using or "default"]

        with connection.schema_editor() as schema_editor:
            schema_editor.refresh_materialized_view_model(cls, concurrently)
django-postgres-extra-2.0.4/psqlextra/partitioning/ 0000775 0000000 0000000 00000000000 14175513017 0022502 5 ustar 00root root 0000000 0000000 django-postgres-extra-2.0.4/psqlextra/partitioning/__init__.py 0000664 0000000 0000000 00000002201 14175513017 0024606 0 ustar 00root root 0000000 0000000 from .config import PostgresPartitioningConfig
from .current_time_strategy import PostgresCurrentTimePartitioningStrategy
from .error import PostgresPartitioningError
from .manager import PostgresPartitioningManager
from .partition import PostgresPartition
from .plan import PostgresModelPartitioningPlan, PostgresPartitioningPlan
from .range_partition import PostgresRangePartition

# this import was missing: "PostgresRangePartitioningStrategy" is listed in
# __all__ below, and without it `from psqlextra.partitioning import *`
# raises an AttributeError
from .range_strategy import PostgresRangePartitioningStrategy
from .shorthands import partition_by_current_time
from .strategy import PostgresPartitioningStrategy
from .time_partition import PostgresTimePartition
from .time_partition_size import PostgresTimePartitionSize
from .time_strategy import PostgresTimePartitioningStrategy

__all__ = [
    "PostgresPartitioningManager",
    "partition_by_current_time",
    "PostgresPartitioningError",
    "PostgresPartitioningPlan",
    "PostgresModelPartitioningPlan",
    "PostgresPartition",
    "PostgresRangePartition",
    "PostgresTimePartition",
    "PostgresPartitioningStrategy",
    "PostgresTimePartitioningStrategy",
    "PostgresCurrentTimePartitioningStrategy",
    "PostgresRangePartitioningStrategy",
    "PostgresPartitioningConfig",
    "PostgresTimePartitionSize",
]
django-postgres-extra-2.0.4/psqlextra/partitioning/config.py 0000664 0000000 0000000 00000000741 14175513017 0024323 0 ustar 00root root 0000000 0000000 from psqlextra.models import PostgresPartitionedModel
from .strategy import PostgresPartitioningStrategy
class PostgresPartitioningConfig:
    """Configuration for partitioning a specific model according to the
    specified strategy."""

    def __init__(
        self,
        model: PostgresPartitionedModel,
        strategy: PostgresPartitioningStrategy,
    ) -> None:
        """Couples the partitioned model to the strategy that manages its
        partitions."""
        self.strategy = strategy
        self.model = model
__all__ = ["PostgresPartitioningConfig"]
django-postgres-extra-2.0.4/psqlextra/partitioning/constants.py 0000664 0000000 0000000 00000000531 14175513017 0025067 0 ustar 00root root 0000000 0000000 # comment placed on partition tables created by the partitioner
# partition tables that do not have this comment will _never_
# be deleted by the partitioner, this is a safety mechanism so
# manually created partitions aren't accidentally cleaned up
AUTO_PARTITIONED_COMMENT = "psqlextra_auto_partitioned"
__all__ = ["AUTO_PARTITIONED_COMMENT"]
django-postgres-extra-2.0.4/psqlextra/partitioning/current_time_strategy.py 0000664 0000000 0000000 00000004346 14175513017 0027505 0 ustar 00root root 0000000 0000000 from datetime import datetime, timezone
from typing import Generator, Optional
from dateutil.relativedelta import relativedelta
from .range_strategy import PostgresRangePartitioningStrategy
from .time_partition import PostgresTimePartition
from .time_partition_size import PostgresTimePartitionSize
class PostgresCurrentTimePartitioningStrategy(
    PostgresRangePartitioningStrategy
):
    """Implements a time based partitioning strategy where each partition
    contains values for a specific time period.

    All buckets will be equal in size and start at the start of the
    unit. With monthly partitioning, partitions start on the 1st and
    with weekly partitioning, partitions start on monday.
    """

    def __init__(
        self,
        size: PostgresTimePartitionSize,
        count: int,
        max_age: Optional[relativedelta] = None,
    ) -> None:
        """Initializes a new instance of :see:PostgresTimePartitioningStrategy.

        Arguments:
            size:
                The size of each partition.

            count:
                The amount of partitions to create ahead
                from the current date/time.

            max_age:
                Maximum age of a partition. Partitions
                older than this are deleted during
                auto cleanup.
        """
        self.size = size
        self.count = count
        self.max_age = max_age

    def to_create(self) -> Generator[PostgresTimePartition, None, None]:
        """Yields the next `count` partitions, aligned to the unit start."""
        point_in_time = self.size.start(self.get_start_datetime())

        for _ in range(self.count):
            yield PostgresTimePartition(
                start_datetime=point_in_time, size=self.size
            )
            point_in_time += self.size.as_delta()

    def to_delete(self) -> Generator[PostgresTimePartition, None, None]:
        """Yields ever-older partitions starting `max_age` back in time; the
        consumer is expected to stop iterating once a yielded partition no
        longer exists."""
        if not self.max_age:
            return

        point_in_time = self.size.start(
            self.get_start_datetime() - self.max_age
        )

        while True:
            yield PostgresTimePartition(
                start_datetime=point_in_time, size=self.size
            )
            point_in_time -= self.size.as_delta()

    def get_start_datetime(self) -> datetime:
        """Returns the current moment in UTC as the anchor point."""
        return datetime.now(timezone.utc)
class PostgresPartitioningError(RuntimeError):
    """Raised when the partitioning configuration is broken or automatically
    creating/deleting partitions fails."""
__all__ = ["PostgresPartitioningError"]
django-postgres-extra-2.0.4/psqlextra/partitioning/manager.py 0000664 0000000 0000000 00000010672 14175513017 0024474 0 ustar 00root root 0000000 0000000 from typing import List, Optional, Tuple
from django.db import connections
from psqlextra.models import PostgresPartitionedModel
from .config import PostgresPartitioningConfig
from .constants import AUTO_PARTITIONED_COMMENT
from .error import PostgresPartitioningError
from .partition import PostgresPartition
from .plan import PostgresModelPartitioningPlan, PostgresPartitioningPlan
PartitionList = List[Tuple[PostgresPartitionedModel, List[PostgresPartition]]]
class PostgresPartitioningManager:
    """Helps managing partitions by automatically creating new partitions and
    deleting old ones according to the configuration."""

    def __init__(self, configs: List[PostgresPartitioningConfig]) -> None:
        """Initializes the manager with one config per partitioned model.

        Raises:
            PostgresPartitioningError:
                When more than one config targets the same model.
        """
        self.configs = configs
        self._validate_configs(self.configs)

    def plan(
        self,
        skip_create: bool = False,
        skip_delete: bool = False,
        using: Optional[str] = None,
    ) -> PostgresPartitioningPlan:
        """Plans which partitions should be deleted/created.

        Arguments:
            skip_create:
                If set to True, no partitions will be marked
                for creation, regardless of the configuration.

            skip_delete:
                If set to True, no partitions will be marked
                for deletion, regardless of the configuration.

            using:
                Name of the database connection to use.

        Returns:
            A plan describing what partitions would be created
            and deleted if the plan is applied.
        """
        model_plans = []

        for config in self.configs:
            model_plan = self._plan_for_config(
                config,
                skip_create=skip_create,
                skip_delete=skip_delete,
                using=using,
            )
            if not model_plan:
                continue

            model_plans.append(model_plan)

        return PostgresPartitioningPlan(model_plans)

    def find_config_for_model(
        self, model: PostgresPartitionedModel
    ) -> Optional[PostgresPartitioningConfig]:
        """Finds the partitioning config for the specified model."""
        return next(
            (config for config in self.configs if config.model == model), None
        )

    def _plan_for_config(
        self,
        config: PostgresPartitioningConfig,
        skip_create: bool = False,
        skip_delete: bool = False,
        using: Optional[str] = None,
    ) -> Optional[PostgresModelPartitioningPlan]:
        """Creates a partitioning plan for one partitioning config.

        Returns None when there is nothing to create/delete.
        """
        connection = connections[using or "default"]
        table = self._get_partitioned_table(connection, config.model)

        model_plan = PostgresModelPartitioningPlan(config)

        if not skip_create:
            for partition in config.strategy.to_create():
                # partition already exists; nothing to do for this one
                if table.partition_by_name(name=partition.name()):
                    continue

                model_plan.creations.append(partition)

        if not skip_delete:
            for partition in config.strategy.to_delete():
                introspected_partition = table.partition_by_name(
                    name=partition.name()
                )
                # to_delete() yields increasingly old partitions; once one
                # does not exist in the database, older ones won't either
                if not introspected_partition:
                    break

                # safety: never delete partitions we did not create ourselves
                if introspected_partition.comment != AUTO_PARTITIONED_COMMENT:
                    continue

                model_plan.deletions.append(partition)

        if len(model_plan.creations) == 0 and len(model_plan.deletions) == 0:
            return None

        return model_plan

    @staticmethod
    def _get_partitioned_table(connection, model: PostgresPartitionedModel):
        """Introspects the partitioned table backing the specified model.

        Raises:
            PostgresPartitioningError:
                When the table does not exist in the database.
        """
        with connection.cursor() as cursor:
            table = connection.introspection.get_partitioned_table(
                cursor, model._meta.db_table
            )

        if not table:
            raise PostgresPartitioningError(
                f"Model {model.__name__}, with table "
                f"{model._meta.db_table} does not exists in the "
                "database. Did you run `python manage.py migrate`?"
            )

        return table

    @staticmethod
    def _validate_configs(configs: List[PostgresPartitioningConfig]):
        """Ensures there is only one config per model."""
        # set comprehension instead of set([...]) (no intermediate list)
        models = {config.model.__name__ for config in configs}
        if len(models) != len(configs):
            raise PostgresPartitioningError(
                "Only one partitioning config per model is allowed"
            )
django-postgres-extra-2.0.4/psqlextra/partitioning/partition.py 0000664 0000000 0000000 00000001751 14175513017 0025071 0 ustar 00root root 0000000 0000000 from abc import abstractmethod
from typing import Optional
from psqlextra.backend.schema import PostgresSchemaEditor
from psqlextra.models import PostgresPartitionedModel
class PostgresPartition:
    """Base class for a PostgreSQL table partition."""

    # NOTE(review): @abstractmethod has no enforcement effect here because
    # this class does not use an ABC metaclass (e.g. abc.ABC); subclasses
    # that omit these methods are not rejected at instantiation time.
    @abstractmethod
    def name(self) -> str:
        """Generates/computes the name for this partition."""

    @abstractmethod
    def create(
        self,
        model: PostgresPartitionedModel,
        schema_editor: PostgresSchemaEditor,
        comment: Optional[str] = None,
    ) -> None:
        """Creates this partition in the database."""

    @abstractmethod
    def delete(
        self,
        model: PostgresPartitionedModel,
        schema_editor: PostgresSchemaEditor,
    ) -> None:
        """Deletes this partition from the database."""

    def deconstruct(self) -> dict:
        """Deconstructs this partition into a dict of attributes/fields."""
        return {"name": self.name()}
__all__ = ["PostgresPartition"]
django-postgres-extra-2.0.4/psqlextra/partitioning/plan.py 0000664 0000000 0000000 00000007301 14175513017 0024007 0 ustar 00root root 0000000 0000000 from dataclasses import dataclass, field
from typing import List, Optional
from ansimarkup import ansiprint
from django.db import connections, transaction
from .config import PostgresPartitioningConfig
from .constants import AUTO_PARTITIONED_COMMENT
from .partition import PostgresPartition
@dataclass
class PostgresModelPartitioningPlan:
    """Describes the partitions that are going to be created/deleted for a
    particular partitioning config.

    A "partitioning config" applies to one model.
    """

    config: PostgresPartitioningConfig
    creations: List[PostgresPartition] = field(default_factory=list)
    deletions: List[PostgresPartition] = field(default_factory=list)

    def apply(self, using: Optional[str]) -> None:
        """Applies this partitioning plan by creating and deleting the planned
        partitions.

        Applying the plan runs in a transaction.

        Arguments:
            using:
                Name of the database connection to use.
        """
        connection = connections[using or "default"]

        with transaction.atomic(), connection.schema_editor() as schema_editor:
            for new_partition in self.creations:
                new_partition.create(
                    self.config.model,
                    schema_editor,
                    comment=AUTO_PARTITIONED_COMMENT,
                )

            for old_partition in self.deletions:
                old_partition.delete(self.config.model, schema_editor)

    def print(self) -> None:
        """Prints this model plan to the terminal in a readable format."""
        ansiprint(f"{self.config.model.__name__}:")

        for old_partition in self.deletions:
            ansiprint(" - %s" % old_partition.name())
            for key, value in old_partition.deconstruct().items():
                ansiprint(f" {key}: {value}")

        for new_partition in self.creations:
            ansiprint(" + %s" % new_partition.name())
            for key, value in new_partition.deconstruct().items():
                ansiprint(f" {key}: {value}")
@dataclass
class PostgresPartitioningPlan:
    """Describes the partitions that are going to be created/deleted."""

    model_plans: List[PostgresModelPartitioningPlan]

    @property
    def creations(self) -> List[PostgresPartition]:
        """Gets a complete flat list of the partitions that are going to be
        created."""
        return [
            partition
            for model_plan in self.model_plans
            for partition in model_plan.creations
        ]

    @property
    def deletions(self) -> List[PostgresPartition]:
        """Gets a complete flat list of the partitions that are going to be
        deleted."""
        return [
            partition
            for model_plan in self.model_plans
            for partition in model_plan.deletions
        ]

    def apply(self, using: Optional[str] = None) -> None:
        """Applies this plan by creating/deleting all planned partitions."""
        for model_plan in self.model_plans:
            model_plan.apply(using=using)

    def print(self) -> None:
        """Prints this plan to the terminal in a readable format."""
        for model_plan in self.model_plans:
            model_plan.print()
            print("")

        delete_count = len(self.deletions)
        create_count = len(self.creations)

        ansiprint(
            f"{delete_count} partitions will be deleted"
        )
        ansiprint(
            f"{create_count} partitions will be created"
        )
__all__ = ["PostgresPartitioningPlan", "PostgresModelPartitioningPlan"]
django-postgres-extra-2.0.4/psqlextra/partitioning/range_partition.py 0000664 0000000 0000000 00000002370 14175513017 0026243 0 ustar 00root root 0000000 0000000 from typing import Any, Optional
from psqlextra.backend.schema import PostgresSchemaEditor
from psqlextra.models import PostgresPartitionedModel
from .partition import PostgresPartition
class PostgresRangePartition(PostgresPartition):
    """Base class for a PostgreSQL table partition in a range partitioned
    table."""

    def __init__(self, from_values: Any, to_values: Any) -> None:
        """Stores the lower/upper bounds this partition covers."""
        self.from_values = from_values
        self.to_values = to_values

    def deconstruct(self) -> dict:
        """Deconstructs this partition into a dict of attributes/fields."""
        attrs = super().deconstruct()
        attrs["from_values"] = self.from_values
        attrs["to_values"] = self.to_values
        return attrs

    def create(
        self,
        model: PostgresPartitionedModel,
        schema_editor: PostgresSchemaEditor,
        comment: Optional[str] = None,
    ) -> None:
        """Creates this partition in the database."""
        schema_editor.add_range_partition(
            model=model,
            name=self.name(),
            from_values=self.from_values,
            to_values=self.to_values,
            comment=comment,
        )

    def delete(
        self,
        model: PostgresPartitionedModel,
        schema_editor: PostgresSchemaEditor,
    ) -> None:
        """Deletes this partition from the database."""
        schema_editor.delete_partition(model, self.name())
django-postgres-extra-2.0.4/psqlextra/partitioning/range_strategy.py 0000664 0000000 0000000 00000000417 14175513017 0026074 0 ustar 00root root 0000000 0000000 from .strategy import PostgresPartitioningStrategy
class PostgresRangePartitioningStrategy(PostgresPartitioningStrategy):
    """Base class for implementing a partitioning strategy for a range
    partitioned table."""
__all__ = ["PostgresRangePartitioningStrategy"]
django-postgres-extra-2.0.4/psqlextra/partitioning/shorthands.py 0000664 0000000 0000000 00000003522 14175513017 0025233 0 ustar 00root root 0000000 0000000 from typing import Optional
from dateutil.relativedelta import relativedelta
from psqlextra.models import PostgresPartitionedModel
from .config import PostgresPartitioningConfig
from .current_time_strategy import PostgresCurrentTimePartitioningStrategy
from .time_partition_size import PostgresTimePartitionSize
def partition_by_current_time(
    model: PostgresPartitionedModel,
    count: int,
    years: Optional[int] = None,
    months: Optional[int] = None,
    weeks: Optional[int] = None,
    days: Optional[int] = None,
    max_age: Optional[relativedelta] = None,
) -> PostgresPartitioningConfig:
    """Short-hand for generating a partitioning config that partitions the
    specified model by time.

    One specifies one of the `years`, `months`, `weeks`
    or `days` parameter to indicate the size of each
    partition. These parameters cannot be combined.

    Arguments:
        model:
            The partitioned model to build a config for.

        count:
            The amount of partitions to create ahead of
            the current date/time.

        years:
            The amount of years each partition should contain.

        months:
            The amount of months each partition should contain.

        weeks:
            The amount of weeks each partition should contain.

        days:
            The amount of days each partition should contain.

        max_age:
            The maximum age of a partition (calculated from the
            start of the partition).

            Partitions older than this are deleted when running
            a delete/cleanup run.
    """
    size = PostgresTimePartitionSize(
        years=years, months=months, weeks=weeks, days=days
    )

    return PostgresPartitioningConfig(
        model=model,
        strategy=PostgresCurrentTimePartitioningStrategy(
            size=size, count=count, max_age=max_age
        ),
    )


# fixed: was misspelled as `__all_`, leaving the export list undeclared
__all__ = ["partition_by_current_time"]
django-postgres-extra-2.0.4/psqlextra/partitioning/strategy.py 0000664 0000000 0000000 00000001136 14175513017 0024717 0 ustar 00root root 0000000 0000000 from abc import abstractmethod
from typing import Generator
from .partition import PostgresPartition
class PostgresPartitioningStrategy:
    """Base class for implementing a partitioning strategy for a partitioned
    table."""

    # NOTE(review): @abstractmethod is inert here because the class does not
    # use an ABC metaclass; subclasses are not forced to implement these.
    @abstractmethod
    def to_create(
        self,
    ) -> Generator[PostgresPartition, None, None]:
        """Generates a list of partitions to be created."""

    @abstractmethod
    def to_delete(
        self,
    ) -> Generator[PostgresPartition, None, None]:
        """Generates a list of partitions to be deleted."""
__all__ = ["PostgresPartitioningStrategy"]
django-postgres-extra-2.0.4/psqlextra/partitioning/time_partition.py 0000664 0000000 0000000 00000003000 14175513017 0026074 0 ustar 00root root 0000000 0000000 from datetime import datetime
from .error import PostgresPartitioningError
from .range_partition import PostgresRangePartition
from .time_partition_size import (
PostgresTimePartitionSize,
PostgresTimePartitionUnit,
)
class PostgresTimePartition(PostgresRangePartition):
    """Time-based range table partition.

    :see:PostgresTimePartitioningStrategy for more info.
    """

    # strftime pattern used to derive the partition name per unit
    _unit_name_format = {
        PostgresTimePartitionUnit.YEARS: "%Y",
        PostgresTimePartitionUnit.MONTHS: "%Y_%b",
        PostgresTimePartitionUnit.WEEKS: "%Y_week_%W",
        PostgresTimePartitionUnit.DAYS: "%Y_%b_%d",
    }

    def __init__(
        self, size: PostgresTimePartitionSize, start_datetime: datetime
    ) -> None:
        """Initializes a partition covering [start_datetime, start + size)."""
        end_datetime = start_datetime + size.as_delta()

        super().__init__(
            from_values=start_datetime.strftime("%Y-%m-%d"),
            to_values=end_datetime.strftime("%Y-%m-%d"),
        )

        self.size = size
        self.start_datetime = start_datetime
        self.end_datetime = end_datetime

    def name(self) -> str:
        """Derives this partition's name from its start date and unit."""
        fmt = self._unit_name_format.get(self.size.unit)
        if fmt is None:
            raise PostgresPartitioningError("Unknown size/unit")

        return self.start_datetime.strftime(fmt).lower()

    def deconstruct(self) -> dict:
        """Serializes this partition's configuration, including its size."""
        fields = dict(super().deconstruct())
        fields["size_unit"] = self.size.unit.value
        fields["size_value"] = self.size.value
        return fields
__all__ = ["PostgresTimePartition"]
django-postgres-extra-2.0.4/psqlextra/partitioning/time_partition_size.py 0000664 0000000 0000000 00000005557 14175513017 0027151 0 ustar 00root root 0000000 0000000 import enum
from datetime import date, datetime
from typing import Optional, Union
from dateutil.relativedelta import relativedelta
from .error import PostgresPartitioningError
class PostgresTimePartitionUnit(enum.Enum):
    """Units in which the size of a time-based range partition can be
    expressed."""

    YEARS = "years"
    MONTHS = "months"
    WEEKS = "weeks"
    DAYS = "days"
class PostgresTimePartitionSize:
    """Size of a time-based range partition table."""

    unit: PostgresTimePartitionUnit
    value: int

    def __init__(
        self,
        years: Optional[int] = None,
        months: Optional[int] = None,
        weeks: Optional[int] = None,
        days: Optional[int] = None,
    ) -> None:
        """Initializes the size from exactly one unit argument.

        Arguments:
            years/months/weeks/days:
                The size of each partition, expressed in exactly one
                of these units. These parameters cannot be combined.

        Raises:
            PostgresPartitioningError:
                When no unit, or more than one unit, is specified.
        """
        sizes = [years, months, weeks, days]

        if not any(sizes):
            raise PostgresPartitioningError("Partition cannot be 0 in size.")

        if len([size for size in sizes if size and size > 0]) > 1:
            # fix: message previously read "on size unit"
            raise PostgresPartitioningError(
                "Partition can only have one size unit."
            )

        if years:
            self.unit = PostgresTimePartitionUnit.YEARS
            self.value = years
        elif months:
            self.unit = PostgresTimePartitionUnit.MONTHS
            self.value = months
        elif weeks:
            self.unit = PostgresTimePartitionUnit.WEEKS
            self.value = weeks
        elif days:
            self.unit = PostgresTimePartitionUnit.DAYS
            self.value = days
        else:
            # defensive; unreachable given the any(sizes) check above
            raise PostgresPartitioningError(
                "Unsupported time partitioning unit"
            )

    def as_delta(self) -> relativedelta:
        """Converts this size into a relativedelta to add to a start date."""
        if self.unit == PostgresTimePartitionUnit.YEARS:
            return relativedelta(years=self.value)

        if self.unit == PostgresTimePartitionUnit.MONTHS:
            return relativedelta(months=self.value)

        if self.unit == PostgresTimePartitionUnit.WEEKS:
            return relativedelta(weeks=self.value)

        if self.unit == PostgresTimePartitionUnit.DAYS:
            return relativedelta(days=self.value)

        raise PostgresPartitioningError(
            "Unsupported time partitioning unit: %s" % self.unit
        )

    def start(self, dt: datetime) -> datetime:
        """Aligns the given date/time to the start of a partition boundary.

        Years snap to Jan 1st, months to the 1st, weeks to Monday;
        days (and any other unit) keep the date. Time-of-day is
        always discarded.
        """
        if self.unit == PostgresTimePartitionUnit.YEARS:
            return self._ensure_datetime(dt.replace(month=1, day=1))

        if self.unit == PostgresTimePartitionUnit.MONTHS:
            return self._ensure_datetime(dt.replace(day=1))

        if self.unit == PostgresTimePartitionUnit.WEEKS:
            # weekday() is 0 for Monday, so this rewinds to Monday
            return self._ensure_datetime(dt - relativedelta(days=dt.weekday()))

        return self._ensure_datetime(dt)

    @staticmethod
    def _ensure_datetime(dt: Union[date, datetime]) -> datetime:
        # normalize date -> datetime and strip any time-of-day component
        return datetime(year=dt.year, month=dt.month, day=dt.day)

    def __repr__(self) -> str:
        return "PostgresTimePartitionSize<%s, %s>" % (self.unit, self.value)
__all__ = ["PostgresTimePartitionUnit", "PostgresTimePartitionSize"]
django-postgres-extra-2.0.4/psqlextra/partitioning/time_strategy.py 0000664 0000000 0000000 00000001264 14175513017 0025737 0 ustar 00root root 0000000 0000000 from datetime import datetime
from typing import Optional
from dateutil.relativedelta import relativedelta
from .current_time_strategy import PostgresCurrentTimePartitioningStrategy
from .time_partition_size import PostgresTimePartitionSize
class PostgresTimePartitioningStrategy(PostgresCurrentTimePartitioningStrategy):
    """Time-based partitioning strategy anchored at a fixed start date/time
    instead of the current date/time."""

    def __init__(
        self,
        start_datetime: datetime,
        size: PostgresTimePartitionSize,
        count: int,
        max_age: Optional[relativedelta] = None,
    ) -> None:
        """Initializes the strategy.

        Arguments:
            start_datetime:
                Fixed point in time to start partitioning from.

            size:
                Size of each partition.

            count:
                Number of partitions to create ahead.

            max_age:
                Maximum age of a partition before it is deleted.
        """
        super().__init__(size, count, max_age)
        self.start_datetime = start_datetime

    def get_start_datetime(self) -> datetime:
        # overrides the parent's "now"-based anchor with the fixed one
        return self.start_datetime
django-postgres-extra-2.0.4/psqlextra/query.py 0000664 0000000 0000000 00000047020 14175513017 0021515 0 ustar 00root root 0000000 0000000 from collections import OrderedDict
from itertools import chain
from typing import Dict, Iterable, List, Optional, Tuple, Union
from django.core.exceptions import SuspiciousOperation
from django.db import connections, models, router
from django.db.models import Expression, Q
from django.db.models.fields import NOT_PROVIDED
from .sql import PostgresInsertQuery, PostgresQuery
from .types import ConflictAction
# target of an ON CONFLICT clause: a list of column names, or tuples of
# strings (NOTE(review): tuples presumably address composite keys such as
# hstore (column, key) pairs — confirm against the compiler)
ConflictTarget = List[Union[str, Tuple[str]]]
class PostgresQuerySet(models.QuerySet):
    """Adds support for PostgreSQL specifics."""

    def __init__(self, model=None, query=None, using=None, hints=None):
        """Initializes a new instance of :see:PostgresQuerySet."""

        super().__init__(model, query, using, hints)

        self.query = query or PostgresQuery(self.model)

        # conflict-handling state, configured through .on_conflict(..)
        self.conflict_target = None
        self.conflict_action = None
        self.conflict_update_condition = None
        self.index_predicate = None

    def annotate(self, **annotations):
        """Custom version of the standard annotate function that allows using
        field names as annotated fields.

        Normally, the annotate function doesn't allow you to use the
        name of an existing field on the model as the alias name. This
        version of the function does allow that.

        This is done by temporarily renaming the fields in order to avoid the
        check for conflicts that the base class does. We rename all fields
        instead of the ones that already exist because the annotations are
        stored in an OrderedDict. Renaming only the conflicts will mess up
        the order.
        """
        fields = {field.name: field for field in self.model._meta.get_fields()}

        new_annotations = OrderedDict()
        renames = {}

        for name, value in annotations.items():
            if name in fields:
                new_name = "%s_new" % name
                new_annotations[new_name] = value
                renames[new_name] = name
            else:
                new_annotations[name] = value

        # run the base class's annotate function
        result = super().annotate(**new_annotations)

        # rename the annotations back to as specified
        result.rename_annotations(**renames)

        return result

    def rename_annotations(self, **annotations):
        """Renames the aliases for the specified annotations:

            .annotate(myfield=F('somestuf__myfield'))
            .rename_annotations(myfield='field')

        Arguments:
            annotations:
                The annotations to rename. Mapping the
                old name to the new name.
        """
        self.query.rename_annotations(annotations)
        return self

    def on_conflict(
        self,
        fields: ConflictTarget,
        action: ConflictAction,
        index_predicate: Optional[Union[Expression, Q, str]] = None,
        update_condition: Optional[Union[Expression, Q, str]] = None,
    ):
        """Sets the action to take when conflicts arise when attempting to
        insert/create a new row.

        Arguments:
            fields:
                The fields the conflicts can occur in.

            action:
                The action to take when the conflict occurs.

            index_predicate:
                The index predicate to satisfy an arbiter partial index
                (i.e. what partial index to use for checking conflicts)

            update_condition:
                Only update if this SQL expression evaluates to true.
        """
        self.conflict_target = fields
        self.conflict_action = action
        self.conflict_update_condition = update_condition
        self.index_predicate = index_predicate
        return self

    def bulk_insert(
        self,
        rows: List[dict],
        return_model: bool = False,
        using: Optional[str] = None,
    ):
        """Creates multiple new records in the database.

        This allows specifying custom conflict behavior using .on_conflict().
        If no special behavior was specified, this uses the normal Django
        bulk_create(..).

        Arguments:
            rows:
                An iterable of dictionaries, where each dictionary
                describes the fields to insert.

            return_model (default: False):
                If model instances should be returned rather than
                just dicts.

            using:
                Name of the database connection to use for
                this query.

        Returns:
            A list of either the dicts of the rows inserted, including the pk
            or the models of the rows inserted with defaults for any fields
            not specified.
        """
        if not self.conflict_target and not self.conflict_action:
            # no special action required, use the standard Django bulk_create(..)
            return super().bulk_create(
                [self.model(**fields) for fields in rows]
            )

        deduped_rows = rows

        # when we do a ConflictAction.NOTHING, we are actually
        # doing a ON CONFLICT DO UPDATE with a trick to avoid
        # touching conflicting rows... however, ON CONFLICT UPDATE
        # barfs when you specify the exact same row twice:
        #
        # > "cannot affect row a second time"
        #
        # we filter out the duplicates here to make sure we maintain
        # the same behaviour as the real ON CONFLICT DO NOTHING
        if self.conflict_action == ConflictAction.NOTHING:
            deduped_rows = []
            for row in rows:
                if row in deduped_rows:
                    continue

                deduped_rows.append(row)

        compiler = self._build_insert_compiler(deduped_rows, using=using)
        objs = compiler.execute_sql(return_id=not return_model)
        if return_model:
            return [
                self._create_model_instance(dict(row, **obj), compiler.using)
                for row, obj in zip(deduped_rows, objs)
            ]

        return [dict(row, **obj) for row, obj in zip(deduped_rows, objs)]

    def insert(self, using: Optional[str] = None, **fields):
        """Creates a new record in the database.

        This allows specifying custom conflict behavior using .on_conflict().
        If no special behavior was specified, this uses the normal Django
        create(..).

        Arguments:
            fields:
                The fields of the row to create.

            using:
                The name of the database connection
                to use for this query.

        Returns:
            The primary key of the record that was created.
        """
        if self.conflict_target or self.conflict_action:
            compiler = self._build_insert_compiler([fields], using=using)
            rows = compiler.execute_sql(return_id=True)

            pk_field_name = self.model._meta.pk.name

            # `not rows` already covers the empty case; the previous
            # `len(rows) == 0` check was redundant
            if not rows:
                return None

            return rows[0][pk_field_name]

        # no special action required, use the standard Django create(..)
        return super().create(**fields).pk

    def insert_and_get(self, using: Optional[str] = None, **fields):
        """Creates a new record in the database and then gets the entire row.

        This allows specifying custom conflict behavior using .on_conflict().
        If no special behavior was specified, this uses the normal Django
        create(..).

        Arguments:
            fields:
                The fields of the row to create.

            using:
                The name of the database connection
                to use for this query.

        Returns:
            The model instance representing the row that was created.
        """
        if not self.conflict_target and not self.conflict_action:
            # no special action required, use the standard Django create(..)
            return super().create(**fields)

        compiler = self._build_insert_compiler([fields], using=using)
        rows = compiler.execute_sql(return_id=False)

        if not rows:
            return None

        columns = rows[0]

        # get a list of columns that are officially part of the model and
        # preserve the fact that the attribute name
        # might be different than the database column name
        model_columns = {}
        for field in self.model._meta.local_concrete_fields:
            model_columns[field.column] = field.attname

        # strip out any columns/fields returned by the db that
        # are not present in the model
        model_init_fields = {}
        for column_name, column_value in columns.items():
            try:
                model_init_fields[model_columns[column_name]] = column_value
            except KeyError:
                pass

        return self._create_model_instance(model_init_fields, compiler.using)

    def upsert(
        self,
        conflict_target: ConflictTarget,
        fields: dict,
        index_predicate: Optional[Union[Expression, Q, str]] = None,
        using: Optional[str] = None,
        update_condition: Optional[Union[Expression, Q, str]] = None,
    ) -> int:
        """Creates a new record or updates the existing one with the specified
        data.

        Arguments:
            conflict_target:
                Fields to pass into the ON CONFLICT clause.

            fields:
                Fields to insert/update.

            index_predicate:
                The index predicate to satisfy an arbiter partial index
                (i.e. what partial index to use for checking conflicts)

            using:
                The name of the database connection to
                use for this query.

            update_condition:
                Only update if this SQL expression evaluates to true.

        Returns:
            The primary key of the row that was created/updated.
        """
        self.on_conflict(
            conflict_target,
            ConflictAction.UPDATE,
            index_predicate=index_predicate,
            update_condition=update_condition,
        )
        return self.insert(**fields, using=using)

    def upsert_and_get(
        self,
        conflict_target: ConflictTarget,
        fields: dict,
        index_predicate: Optional[Union[Expression, Q, str]] = None,
        using: Optional[str] = None,
        update_condition: Optional[Union[Expression, Q, str]] = None,
    ):
        """Creates a new record or updates the existing one with the specified
        data and then gets the row.

        Arguments:
            conflict_target:
                Fields to pass into the ON CONFLICT clause.

            fields:
                Fields to insert/update.

            index_predicate:
                The index predicate to satisfy an arbiter partial index
                (i.e. what partial index to use for checking conflicts)

            using:
                The name of the database connection to
                use for this query.

            update_condition:
                Only update if this SQL expression evaluates to true.

        Returns:
            The model instance representing the row
            that was created/updated.
        """
        self.on_conflict(
            conflict_target,
            ConflictAction.UPDATE,
            index_predicate=index_predicate,
            update_condition=update_condition,
        )
        return self.insert_and_get(**fields, using=using)

    def bulk_upsert(
        self,
        conflict_target: ConflictTarget,
        rows: Iterable[Dict],
        index_predicate: Optional[Union[Expression, Q, str]] = None,
        return_model: bool = False,
        using: Optional[str] = None,
        update_condition: Optional[Union[Expression, Q, str]] = None,
    ):
        """Creates a set of new records or updates the existing ones with the
        specified data.

        Arguments:
            conflict_target:
                Fields to pass into the ON CONFLICT clause.

            rows:
                Rows to upsert.

            index_predicate:
                The index predicate to satisfy an arbiter partial index
                (i.e. what partial index to use for checking conflicts)

            return_model (default: False):
                If model instances should be returned rather than
                just dicts.

            using:
                The name of the database connection to use
                for this query.

            update_condition:
                Only update if this SQL expression evaluates to true.

        Returns:
            A list of either the dicts of the rows upserted, including the pk
            or the models of the rows upserted.
        """
        if rows is None:
            return []

        # fix: materialize `rows` exactly once; the previous emptiness
        # check iterated the input, which exhausted one-shot generators
        # and then silently upserted nothing
        rows = list(rows)
        if not rows:
            return []

        self.on_conflict(
            conflict_target,
            ConflictAction.UPDATE,
            index_predicate=index_predicate,
            update_condition=update_condition,
        )
        return self.bulk_insert(rows, return_model, using=using)

    def _create_model_instance(
        self, field_values: dict, using: str, apply_converters: bool = True
    ):
        """Creates a new instance of the model with the specified field
        values.

        Use this after the row was inserted into the database. The new
        instance will be marked as "saved".
        """
        converted_field_values = field_values.copy()

        if apply_converters:
            connection = connections[using]

            for field in self.model._meta.local_concrete_fields:
                if field.attname not in converted_field_values:
                    continue

                # converters can be defined on the field, or by
                # the database back-end we're using
                field_column = field.get_col(self.model._meta.db_table)
                converters = field.get_db_converters(
                    connection
                ) + connection.ops.get_db_converters(field_column)

                for converter in converters:
                    converted_field_values[field.attname] = converter(
                        converted_field_values[field.attname],
                        field_column,
                        connection,
                    )

        instance = self.model(**converted_field_values)
        instance._state.db = using
        instance._state.adding = False

        return instance

    def _build_insert_compiler(
        self, rows: Iterable[Dict], using: Optional[str] = None
    ):
        """Builds the SQL compiler for a insert query.

        Arguments:
            rows:
                An iterable of dictionaries, where each entry
                describes a record to insert.

            using:
                The name of the database connection to use
                for this query.

        Returns:
            The SQL compiler for the insert.
        """
        # ask the db router which connection to use
        using = (
            using or self._db or router.db_for_write(self.model, **self._hints)
        )

        # create model objects, we also have to detect cases
        # such as:
        #   [dict(first_name='swen'), dict(fist_name='swen', last_name='kooij')]
        # we need to be certain that each row specifies the exact same
        # amount of fields/columns
        objs = []
        rows_iter = iter(rows)

        try:
            first_row = next(rows_iter)
        except StopIteration:
            # fix: raise a descriptive error instead of leaking a bare
            # StopIteration, which surrounding loops/generators would
            # silently swallow
            raise SuspiciousOperation(
                "Cannot build an insert query without any rows."
            ) from None

        field_count = len(first_row)
        for index, row in enumerate(chain([first_row], rows_iter)):
            if field_count != len(row):
                raise SuspiciousOperation(
                    (
                        "In bulk upserts, you cannot have rows with different field "
                        "configurations. Row {0} has a different field config than "
                        "the first row."
                    ).format(index)
                )

            objs.append(
                self._create_model_instance(row, using, apply_converters=False)
            )

        # get the fields to be used during update/insert
        insert_fields, update_fields = self._get_upsert_fields(first_row)

        # build a normal insert query
        query = PostgresInsertQuery(self.model)
        query.conflict_action = self.conflict_action
        query.conflict_target = self.conflict_target
        query.conflict_update_condition = self.conflict_update_condition
        query.index_predicate = self.index_predicate
        query.values(objs, insert_fields, update_fields)

        compiler = query.get_compiler(using)
        return compiler

    def _is_magical_field(self, model_instance, field, is_insert: bool):
        """Verifies whether this field is gonna modify something on its own.

        "Magical" means that a field modifies the field value
        during the pre_save.

        Arguments:
            model_instance:
                The model instance the field is defined on.

            field:
                The field to get of whether the field is
                magical.

            is_insert:
                Pretend whether this is an insert?

        Returns:
            True when this field modifies something.
        """
        # does this field modify something upon insert?
        old_value = getattr(model_instance, field.name, None)
        field.pre_save(model_instance, is_insert)
        new_value = getattr(model_instance, field.name, None)

        return old_value != new_value

    def _get_upsert_fields(self, kwargs):
        """Gets the fields to use in an upsert.

        This is some nice magic. We'll split the fields into
        a group of "insert fields" and "update fields":

            INSERT INTO bla ("val1", "val2") ON CONFLICT DO UPDATE SET val1 = EXCLUDED.val1
                     ^^^^^^^^^^^^^^                            ^^^^^^^^^^^^^^^^^^^^
                     insert_fields                             update_fields

        Often, fields appear in both lists. But, for example,
        a :see:DateTime field with `auto_now_add=True` set, will
        only appear in "insert_fields", since it won't be set
        on existing rows.

        Other than that, the user specifies a list of fields
        in the upsert() call. That might not be all fields. The
        user could decide to leave out optional fields. If we
        end up doing an update, we don't want to overwrite
        those non-specified fields.

        We cannot just take the list of fields the user
        specifies, because as mentioned, some fields
        make modifications to the model on their own.

        We'll have to detect which fields make modifications
        and include them in the list of insert/update fields.
        """
        model_instance = self.model(**kwargs)
        insert_fields = []
        update_fields = []

        for field in model_instance._meta.local_concrete_fields:
            has_default = field.default != NOT_PROVIDED
            if field.name in kwargs or field.column in kwargs:
                insert_fields.append(field)
                update_fields.append(field)
                continue
            elif has_default:
                insert_fields.append(field)
                continue

            # special handling for 'pk' which always refers to
            # the primary key, so if the user specifies `pk`
            # instead of a concrete field, we have to handle that
            if field.primary_key is True and "pk" in kwargs:
                insert_fields.append(field)
                update_fields.append(field)
                continue

            if self._is_magical_field(model_instance, field, is_insert=True):
                insert_fields.append(field)

            if self._is_magical_field(model_instance, field, is_insert=False):
                update_fields.append(field)

        return insert_fields, update_fields
django-postgres-extra-2.0.4/psqlextra/sql.py 0000664 0000000 0000000 00000015472 14175513017 0021155 0 ustar 00root root 0000000 0000000 from collections import OrderedDict
from typing import List, Optional, Tuple
import django
from django.core.exceptions import SuspiciousOperation
from django.db import connections, models
from django.db.models import sql
from django.db.models.constants import LOOKUP_SEP
from .compiler import PostgresInsertCompiler, PostgresUpdateCompiler
from .expressions import HStoreColumn
from .fields import HStoreField
from .types import ConflictAction
class PostgresQuery(sql.Query):
    """Query with PostgreSQL-specific extensions: hstore key selection in
    .values()/.values_list() and renaming of annotation aliases."""

    def chain(self, klass=None):
        """Chains this query to another.

        We override this so that we can make sure our subclassed query
        classes are used.
        """
        if klass == sql.UpdateQuery:
            return super().chain(PostgresUpdateQuery)

        if klass == sql.InsertQuery:
            return super().chain(PostgresInsertQuery)

        return super().chain(klass)

    def rename_annotations(self, annotations) -> None:
        """Renames the aliases for the specified annotations:

            .annotate(myfield=F('somestuf__myfield'))
            .rename_annotations(myfield='field')

        Arguments:
            annotations:
                The annotations to rename. Mapping the
                old name to the new name.
        """
        # safety check only, make sure there are no renames
        # left that cannot be mapped back to the original name
        for old_name, new_name in annotations.items():
            annotation = self.annotations.get(old_name)
            if not annotation:
                raise SuspiciousOperation(
                    (
                        'Cannot rename annotation "{old_name}" to "{new_name}", because there'
                        ' is no annotation named "{old_name}".'
                    ).format(old_name=old_name, new_name=new_name)
                )

        # rebuild the annotations according to the original order
        new_annotations = OrderedDict()
        for old_name, annotation in self.annotations.items():
            new_name = annotations.get(old_name)
            new_annotations[new_name or old_name] = annotation
            # keep the select mask in sync so renamed annotations
            # are still selected
            if new_name and self.annotation_select_mask:
                self.annotation_select_mask.discard(old_name)
                self.annotation_select_mask.add(new_name)

        self.annotations.clear()
        self.annotations.update(new_annotations)

    def add_fields(self, field_names: List[str], *args, **kwargs) -> None:
        """Adds the given (model) fields to the select set.

        The field names are added in the order specified. This overrides
        the base class's add_fields method. This is called by the
        .values() or .values_list() method of the query set. It
        instructs the ORM to only select certain values. A lot of
        processing is necessary because it can be used to easily do
        joins. For example, `my_fk__name` pulls in the `name` field in
        foreign key `my_fk`. In our case, we want to be able to do
        `title__en`, where `title` is a HStoreField and `en` a key. This
        doesn't really involve a join. We iterate over the specified
        field names and filter out the ones that refer to HStoreField
        and compile it into an expression which is added to the list of
        to be selected fields using `self.add_select`.
        """
        # django knows how to do all of this natively from v2.1
        # see: https://github.com/django/django/commit/20bab2cf9d02a5c6477d8aac066a635986e0d3f3
        if django.VERSION >= (2, 1):
            return super().add_fields(field_names, *args, **kwargs)

        select = []
        field_names_without_hstore = []

        for name in field_names:
            parts = name.split(LOOKUP_SEP)

            # it cannot be a special hstore thing if there's no __ in it
            if len(parts) > 1:
                column_name, hstore_key = parts[:2]
                is_hstore, field = self._is_hstore_field(column_name)
                if is_hstore:
                    select.append(
                        HStoreColumn(
                            # NOTE(review): `self.model.name` looks suspicious —
                            # Django model classes expose `__name__`, not
                            # `name`. Only reachable on Django < 2.1 with a
                            # falsy db_table, so it may never trigger; confirm.
                            self.model._meta.db_table or self.model.name,
                            field,
                            hstore_key,
                        )
                    )
                    continue

            field_names_without_hstore.append(name)

        super().add_fields(field_names_without_hstore, *args, **kwargs)

        if len(select) > 0:
            self.set_select(self.select + tuple(select))

    def _is_hstore_field(
        self, field_name: str
    ) -> Tuple[bool, Optional[models.Field]]:
        """Gets whether the field with the specified name is a HStoreField.

        Returns:
            A tuple of a boolean indicating whether the field
            with the specified name is a HStoreField, and the
            field instance (or None when no field matched).
        """
        field_instance = None
        # match on either the attribute name or the db column name
        for field in self.model._meta.local_concrete_fields:
            if field.name == field_name or field.column == field_name:
                field_instance = field
                break

        return isinstance(field_instance, HStoreField), field_instance
class PostgresInsertQuery(sql.InsertQuery):
    """Insert query using PostgreSQL."""

    def __init__(self, *args, **kwargs):
        """Initializes a new instance of :see:PostgresInsertQuery."""

        super().__init__(*args, **kwargs)

        # conflict-handling state consumed by the compiler
        self.conflict_target = []
        self.conflict_action = ConflictAction.UPDATE
        self.conflict_update_condition = None
        self.index_predicate = None

        self.update_fields = []

    def values(
        self,
        objs: List,
        insert_fields: List,
        update_fields: Optional[List] = None,
    ):
        """Sets the values to be used in this query.

        Insert fields are fields that are definitely
        going to be inserted, and if an existing row
        is found, are going to be overwritten with the
        specified value.

        Update fields are fields that should be overwritten
        in case an update takes place rather than an insert.
        If we're dealing with a INSERT, these will not be used.

        Arguments:
            objs:
                The objects to apply this query to.

            insert_fields:
                The fields to use in the INSERT statement

            update_fields:
                The fields to only use in the UPDATE statement.
        """
        self.insert_values(insert_fields, objs, raw=False)

        # fix: the default used to be a shared mutable `[]`, which could
        # leak mutations across query instances; fall back to a fresh list
        self.update_fields = update_fields if update_fields is not None else []

    def get_compiler(self, using=None, connection=None):
        # resolve the connection from the alias when one is given
        if using:
            connection = connections[using]
        return PostgresInsertCompiler(self, connection, using)
class PostgresUpdateQuery(sql.UpdateQuery):
    """Update query using PostgreSQL."""

    def get_compiler(self, using=None, connection=None):
        """Returns the PostgreSQL-specific update compiler, resolving the
        connection from the alias when one is given."""
        conn = connections[using] if using else connection
        return PostgresUpdateCompiler(self, conn, using)
django-postgres-extra-2.0.4/psqlextra/type_assertions.py 0000664 0000000 0000000 00000001376 14175513017 0023607 0 ustar 00root root 0000000 0000000 from collections.abc import Iterable
from typing import Any
from django.db.models.query import QuerySet
def is_query_set(value: Any) -> bool:
    """Gets whether the specified value is a Django :see:QuerySet
    instance."""
    return isinstance(value, QuerySet)
def is_sql(value: Any) -> bool:
    """Gets whether the specified value could be a raw SQL query.

    Any string qualifies; no parsing or validation is performed.
    """
    return isinstance(value, str)
def is_sql_with_params(value: Any) -> bool:
    """Gets whether the specified value is a tuple of a SQL query (as a string)
    and a tuple of bind parameters."""
    if not isinstance(value, tuple) or len(value) != 2:
        return False

    query, params = value
    if not isinstance(query, str):
        return False

    # strings/bytes are iterable but do not count as parameter collections
    if isinstance(params, (str, bytes, bytearray)):
        return False

    return isinstance(params, Iterable)
django-postgres-extra-2.0.4/psqlextra/types.py 0000664 0000000 0000000 00000001552 14175513017 0021514 0 ustar 00root root 0000000 0000000 from enum import Enum
from typing import Any, Dict, List, Tuple, Union
# a raw SQL query, as a plain string
SQL = str

# a raw SQL query together with its bind parameters,
# either positional (tuple) or named (dict)
SQLWithParams = Tuple[str, Union[Tuple[Any, ...], Dict[str, Any]]]
class StrEnum(str, Enum):
    """Enum whose members are also plain strings, so they can be compared
    against and rendered as their string values."""

    @classmethod
    def all(cls) -> List["StrEnum"]:
        """Returns every member of this enum, in definition order."""
        return list(cls)

    @classmethod
    def values(cls) -> List[str]:
        """Returns the raw string value of every member."""
        return [member.value for member in cls]

    def __str__(self) -> str:
        return str(self.value)
class ConflictAction(Enum):
    """Possible actions to take on a conflict."""

    NOTHING = "NOTHING"
    UPDATE = "UPDATE"

    @classmethod
    def all(cls) -> List["ConflictAction"]:
        """Returns every available conflict action, in definition order."""
        return list(cls)
class PostgresPartitioningMethod(StrEnum):
    """Methods of partitioning supported by PostgreSQL 11.x native support for
    table partitioning."""

    RANGE = "range"
    LIST = "list"
    HASH = "hash"
django-postgres-extra-2.0.4/psqlextra/util.py 0000664 0000000 0000000 00000000614 14175513017 0021323 0 ustar 00root root 0000000 0000000 from contextlib import contextmanager
from .manager import PostgresManager
@contextmanager
def postgres_manager(model):
    """Temporarily provides a :see:PostgresManager bound to the specified
    model.

    Arguments:
        model:
            The model or model instance to use this on.
    """
    bound_manager = PostgresManager()
    bound_manager.model = model
    yield bound_manager
django-postgres-extra-2.0.4/pyproject.toml 0000664 0000000 0000000 00000000164 14175513017 0020665 0 ustar 00root root 0000000 0000000 [tool.black]
line-length = 80
exclude = '''
(
/(
| .env
| env
| venv
| tests/snapshots
)/
)
'''
django-postgres-extra-2.0.4/pytest-benchmark.ini 0000664 0000000 0000000 00000000120 14175513017 0021722 0 ustar 00root root 0000000 0000000 [pytest]
DJANGO_SETTINGS_MODULE=settings
testpaths=tests
addopts=-m "benchmark"
django-postgres-extra-2.0.4/pytest.ini 0000664 0000000 0000000 00000000221 14175513017 0017774 0 ustar 00root root 0000000 0000000 [pytest]
DJANGO_SETTINGS_MODULE=settings
testpaths=tests
addopts=-m "not benchmark"
junit_family=legacy
filterwarnings =
ignore::UserWarning
django-postgres-extra-2.0.4/requirements-all.txt 0000664 0000000 0000000 00000000052 14175513017 0021777 0 ustar 00root root 0000000 0000000 -e .
-e .[test]
-e .[analysis]
-e .[docs]
django-postgres-extra-2.0.4/settings.py 0000664 0000000 0000000 00000000710 14175513017 0020160 0 ustar 00root root 0000000 0000000 import dj_database_url
# test-suite settings; debugging stays on
DEBUG = True
TEMPLATE_DEBUG = True

# throwaway key, only used by the tests
SECRET_KEY = 'this is my secret key' # NOQA

TEST_RUNNER = 'django.test.runner.DiscoverRunner'

# connection comes from the DATABASE_URL environment variable,
# falling back to a local 'psqlextra' database
DATABASES = {
    'default': dj_database_url.config(default='postgres:///psqlextra'),
}

# force the custom psqlextra backend regardless of what the URL said
DATABASES['default']['ENGINE'] = 'psqlextra.backend'

LANGUAGE_CODE = 'en'
LANGUAGES = (
    ('en', 'English'),
    ('ro', 'Romanian'),
    ('nl', 'Dutch')
)

INSTALLED_APPS = (
    'psqlextra',
    'tests',
)
django-postgres-extra-2.0.4/setup.cfg 0000664 0000000 0000000 00000000455 14175513017 0017575 0 ustar 00root root 0000000 0000000 [flake8]
ignore = E252,E501,W503
exclude = env,.tox,.git,config/settings,*/migrations/*,*/static/CACHE/*,docs,node_modules
[isort]
line_length=80
multi_line_output=3
lines_between_types=1
include_trailing_comma=True
known_third_party=pytest,freezegun
float_to_top=true
skip_glob=tests/snapshots/*.py
django-postgres-extra-2.0.4/setup.py 0000664 0000000 0000000 00000012767 14175513017 0017477 0 ustar 00root root 0000000 0000000 import distutils.cmd
import os
import subprocess
from setuptools import find_packages, setup
class BaseCommand(distutils.cmd.Command):
    """Minimal distutils command base; subclasses only implement run()."""

    # distutils requires this attribute even for commands with no options
    user_options = []

    def initialize_options(self):
        # required distutils hook; nothing to initialize
        pass

    def finalize_options(self):
        # required distutils hook; nothing to finalize
        pass
def create_command(text, commands):
    """Creates a custom setup.py command.

    Arguments:
        text:
            Description of the command.

        commands:
            A list of argument vectors; each is executed as a
            subprocess, in order, when the command runs.
    """

    class CustomCommand(BaseCommand):
        description = text

        def run(self):
            # abort on the first failing command (check_call raises)
            for cmd in commands:
                subprocess.check_call(cmd)

    return CustomCommand
# Read the long description from README.md, dropping everything up to and
# including the closing "h1>" line so the HTML header banner does not end
# up in the PyPI description.
with open(
    os.path.join(os.path.dirname(__file__), "README.md"), encoding="utf-8"
) as readme:
    README = readme.read().split("h1>\n", 2)[1]
# Package definition plus a set of custom setup.py commands (lint, format,
# sort_imports, test, ...) used as the project's task runner.
setup(
    name="django-postgres-extra",
    version="2.0.4",
    packages=find_packages(exclude=["tests"]),
    include_package_data=True,
    license="MIT License",
    description="Bringing all of PostgreSQL's awesomeness to Django.",
    long_description=README,
    long_description_content_type="text/markdown",
    url="https://github.com/SectorLabs/django-postgres-extra",
    author="Sector Labs",
    author_email="open-source@sectorlabs.ro",
    keywords=["django", "postgres", "extra", "hstore", "ltree"],
    classifiers=[
        "Environment :: Web Environment",
        "Framework :: Django",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
        "Programming Language :: Python",
        "Programming Language :: Python :: 3.6",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: 3.9",
        "Topic :: Internet :: WWW/HTTP",
        "Topic :: Internet :: WWW/HTTP :: Dynamic Content",
    ],
    python_requires=">=3.6",
    install_requires=[
        "Django>=2.0",
        "python-dateutil>=2.8.0,<=3.0.0",
        "ansimarkup>=1.4.0,<=2.0.0",
    ],
    extras_require={
        # Backport of dataclasses for Python 3.6 only.
        ':python_version <= "3.6"': ["dataclasses"],
        "docs": ["Sphinx==2.2.0", "sphinx-rtd-theme==0.4.3", "docutils<0.18"],
        "test": [
            "psycopg2>=2.8.4,<3.0.0",
            "dj-database-url==0.5.0",
            "pytest==6.2.5",
            "pytest-benchmark==3.4.1",
            "pytest-django==4.4.0",
            "pytest-cov==3.0.0",
            "tox==3.24.4",
            "freezegun==1.1.0",
            "coveralls==3.3.0",
            "snapshottest==0.6.0",
        ],
        "analysis": [
            "black==21.10b0",
            "flake8==4.0.1",
            "autoflake==1.4",
            "autopep8==1.6.0",
            "isort==5.10.0",
            "docformatter==1.4",
        ],
    },
    # Developer task-runner commands, e.g. `python setup.py fix`.
    cmdclass={
        "lint": create_command(
            "Lints the code", [["flake8", "setup.py", "psqlextra", "tests"]]
        ),
        "lint_fix": create_command(
            "Lints the code",
            [
                [
                    "autoflake",
                    "--remove-all",
                    "-i",
                    "-r",
                    "setup.py",
                    "psqlextra",
                    "tests",
                ],
                ["autopep8", "-i", "-r", "setup.py", "psqlextra", "tests"],
            ],
        ),
        "format": create_command(
            "Formats the code", [["black", "setup.py", "psqlextra", "tests"]]
        ),
        "format_verify": create_command(
            "Checks if the code is auto-formatted",
            [["black", "--check", "setup.py", "psqlextra", "tests"]],
        ),
        "format_docstrings": create_command(
            "Auto-formats doc strings", [["docformatter", "-r", "-i", "."]]
        ),
        "format_docstrings_verify": create_command(
            "Verifies that doc strings are properly formatted",
            [["docformatter", "-r", "-c", "."]],
        ),
        "sort_imports": create_command(
            "Automatically sorts imports",
            [
                ["isort", "setup.py"],
                ["isort", "psqlextra"],
                ["isort", "tests"],
            ],
        ),
        "sort_imports_verify": create_command(
            "Verifies all imports are properly sorted.",
            [
                ["isort", "-c", "setup.py"],
                ["isort", "-c", "psqlextra"],
                ["isort", "-c", "tests"],
            ],
        ),
        "fix": create_command(
            "Automatically format code and fix linting errors",
            [
                ["python", "setup.py", "format"],
                ["python", "setup.py", "format_docstrings"],
                ["python", "setup.py", "sort_imports"],
                ["python", "setup.py", "lint_fix"],
                ["python", "setup.py", "lint"],
            ],
        ),
        "verify": create_command(
            "Verifies whether the code is auto-formatted and has no linting errors",
            [
                ["python", "setup.py", "format_verify"],
                ["python", "setup.py", "format_docstrings_verify"],
                ["python", "setup.py", "sort_imports_verify"],
                ["python", "setup.py", "lint"],
            ],
        ),
        "test": create_command(
            "Runs all the tests",
            [
                [
                    "pytest",
                    "--cov=psqlextra",
                    "--cov-report=term",
                    "--cov-report=xml:reports/xml",
                    "--cov-report=html:reports/html",
                    "--junitxml=reports/junit/tests.xml",
                    "--reuse-db",
                ]
            ],
        ),
    },
)
django-postgres-extra-2.0.4/tests/ 0000775 0000000 0000000 00000000000 14175513017 0017112 5 ustar 00root root 0000000 0000000 django-postgres-extra-2.0.4/tests/__init__.py 0000664 0000000 0000000 00000000000 14175513017 0021211 0 ustar 00root root 0000000 0000000 django-postgres-extra-2.0.4/tests/benchmarks/ 0000775 0000000 0000000 00000000000 14175513017 0021227 5 ustar 00root root 0000000 0000000 django-postgres-extra-2.0.4/tests/benchmarks/__init__.py 0000664 0000000 0000000 00000000000 14175513017 0023326 0 ustar 00root root 0000000 0000000 django-postgres-extra-2.0.4/tests/benchmarks/test_insert_nothing.py 0000664 0000000 0000000 00000002646 14175513017 0025702 0 ustar 00root root 0000000 0000000 import uuid
import pytest
from django.db import models, transaction
from django.db.utils import IntegrityError
from psqlextra.query import ConflictAction
from ..fake_model import get_fake_model
@pytest.mark.benchmark()
def test_insert_nothing_traditional(benchmark):
    """Benchmarks a conflict-ignoring insert done the traditional way:
    try to insert, catch the IntegrityError and fetch the existing row."""

    model = get_fake_model(
        {"field": models.CharField(max_length=255, unique=True)}
    )

    # Pre-insert the row so every benchmark iteration hits the conflict path.
    random_value = str(uuid.uuid4())[:8]
    model.objects.create(field=random_value)

    def _traditional_insert(model, random_value):
        """Performs a concurrency safe insert the traditional way."""
        try:
            with transaction.atomic():
                return model.objects.create(field=random_value)
        except IntegrityError:
            return model.objects.filter(field=random_value).first()

    benchmark(_traditional_insert, model, random_value)
@pytest.mark.benchmark()
def test_insert_nothing_native(benchmark):
    """Benchmarks a conflict-ignoring insert using PostgreSQL's native ``ON
    CONFLICT DO NOTHING`` support."""

    model = get_fake_model(
        {"field": models.CharField(max_length=255, unique=True)}
    )

    # Pre-insert the row so every benchmark iteration hits the conflict path.
    random_value = str(uuid.uuid4())[:8]
    model.objects.create(field=random_value)

    def _native_insert(model, random_value):
        """Performs a concurrency safe insert using the native PostgreSQL
        conflict resolution."""
        return model.objects.on_conflict(
            ["field"], ConflictAction.NOTHING
        ).insert_and_get(field=random_value)

    benchmark(_native_insert, model, random_value)
django-postgres-extra-2.0.4/tests/benchmarks/test_upsert.py 0000664 0000000 0000000 00000002602 14175513017 0024162 0 ustar 00root root 0000000 0000000 import uuid
import pytest
from django.db import models, transaction
from django.db.utils import IntegrityError
from ..fake_model import get_fake_model
@pytest.mark.benchmark()
def test_upsert_traditional(benchmark):
    """Benchmarks an upsert done the traditional way: try to insert, and on
    conflict fall back to an UPDATE plus a re-fetch."""

    model = get_fake_model(
        {"field": models.CharField(max_length=255, unique=True)}
    )

    # Pre-insert the row so every benchmark iteration takes the update path.
    random_value = str(uuid.uuid4())[:8]
    model.objects.create(field=random_value)

    def _traditional_upsert(model, random_value):
        """Performs a concurrency safe upsert the traditional way."""
        try:
            with transaction.atomic():
                return model.objects.create(field=random_value)
        except IntegrityError:
            model.objects.update(field=random_value)
            return model.objects.get(field=random_value)

    benchmark(_traditional_upsert, model, random_value)
@pytest.mark.benchmark()
def test_upsert_native(benchmark):
    """Benchmarks an upsert using PostgreSQL's native ``ON CONFLICT DO
    UPDATE`` support via :meth:`upsert_and_get`."""

    model = get_fake_model(
        {"field": models.CharField(max_length=255, unique=True)}
    )

    # Pre-insert the row so every benchmark iteration takes the update path.
    random_value = str(uuid.uuid4())[:8]
    model.objects.create(field=random_value)

    def _native_upsert(model, random_value):
        """Performs a concurrency safe upsert using the native PostgreSQL
        upsert."""
        return model.objects.upsert_and_get(
            conflict_target=["field"], fields=dict(field=random_value)
        )

    benchmark(_native_upsert, model, random_value)
django-postgres-extra-2.0.4/tests/benchmarks/test_upsert_bulk.py 0000664 0000000 0000000 00000003343 14175513017 0025202 0 ustar 00root root 0000000 0000000 import uuid
import pytest
from django.db import models
from psqlextra.query import ConflictAction
from ..fake_model import get_fake_model
ROW_COUNT = 10000
@pytest.mark.benchmark()
def test_upsert_bulk_naive(benchmark):
    """Benchmarks upserting many rows naively: one ``ON CONFLICT`` insert
    statement per row (contrast with :func:`test_upsert_bulk_native`, which
    uses a single bulk statement)."""

    model = get_fake_model(
        {"field": models.CharField(max_length=255, unique=True)}
    )

    # Pre-create all rows so every upsert hits the conflict/update path.
    rows = []
    random_values = []
    for i in range(0, ROW_COUNT):
        random_value = str(uuid.uuid4())
        random_values.append(random_value)
        rows.append(model(field=random_value))

    model.objects.bulk_create(rows)

    # Renamed from `_native_upsert` and its docstring corrected: this is the
    # naive variant, issuing a separate statement per row.
    def _naive_upsert(model, random_values):
        """Upserts the rows one-by-one, one statement per row."""
        rows = [dict(field=random_value) for random_value in random_values]
        for row in rows:
            model.objects.on_conflict(["field"], ConflictAction.UPDATE).insert(
                **row
            )

    benchmark(_naive_upsert, model, random_values)
@pytest.mark.benchmark()
def test_upsert_bulk_native(benchmark):
    """Benchmarks upserting many rows with a single native bulk ``ON
    CONFLICT DO UPDATE`` statement."""

    model = get_fake_model(
        {"field": models.CharField(max_length=255, unique=True)}
    )

    # Pre-create all rows so every upsert hits the conflict/update path.
    rows = []
    random_values = []
    for i in range(0, ROW_COUNT):
        random_value = str(uuid.uuid4())
        random_values.append(random_value)
        rows.append(model(field=random_value))

    model.objects.bulk_create(rows)

    def _native_upsert(model, random_values):
        """Performs a concurrency safe upsert using the native PostgreSQL
        upsert."""
        rows = [dict(field=random_value) for random_value in random_values]
        model.objects.on_conflict(["field"], ConflictAction.UPDATE).bulk_insert(
            rows
        )

    benchmark(_native_upsert, model, random_values)
django-postgres-extra-2.0.4/tests/conftest.py 0000664 0000000 0000000 00000002543 14175513017 0021315 0 ustar 00root root 0000000 0000000 import pytest
from django.contrib.postgres.signals import register_type_handlers
from django.db import connection
from .fake_model import define_fake_app
@pytest.fixture(scope="function", autouse=True)
def database_access(db):
    """Automatically enable database access for all tests.

    Depending on pytest-django's ``db`` fixture unblocks database access;
    also makes sure the ``hstore`` extension exists since many tests rely
    on it.
    """

    # enable the hstore extension on our database because
    # our tests rely on it...
    with connection.schema_editor() as schema_editor:
        schema_editor.execute("CREATE EXTENSION IF NOT EXISTS hstore")
        # Re-register psycopg2 type handlers so hstore values are adapted
        # on this (possibly fresh) connection.
        register_type_handlers(schema_editor.connection)
@pytest.fixture
def fake_app():
    """Creates a fake Django app and deletes it at the end of the test."""

    with define_fake_app() as fake_app:
        yield fake_app
@pytest.fixture
def postgres_server_version(db) -> int:
    """Gets the PostgreSQL server version as reported by the driver (e.g.
    ``110003`` for 11.3).

    Uses a context-managed cursor so it is closed again, consistent with
    the rest of the test suite; the original leaked the cursor.
    """

    with connection.cursor() as cursor:
        return cursor.connection.server_version
@pytest.fixture(autouse=True)
def _apply_postgres_version_marker(request, postgres_server_version):
    """Skip tests based on Postgres server version number marker condition.

    Tests marked ``@pytest.mark.postgres_version(lt=N)`` are skipped when
    the connected server reports a version number lower than ``N``.
    """

    marker = request.node.get_closest_marker("postgres_version")
    if not marker:
        return

    # Only the `lt` (less-than) condition is supported currently.
    lt = marker.kwargs.get("lt")
    if lt and postgres_server_version < lt:
        pytest.skip(
            f"Server version is {postgres_server_version}, the test needs {lt} or newer."
        )
django-postgres-extra-2.0.4/tests/db_introspection.py 0000664 0000000 0000000 00000002451 14175513017 0023033 0 ustar 00root root 0000000 0000000 """Thin wrappers over functions in connection.introspection that don't require
creating a cursor.
This makes test code less verbose and easier to read/write.
"""
from django.db import connection
def table_names(include_views: bool = True):
    """Returns a flat list of the tables in the default database."""

    with connection.cursor() as cursor:
        return connection.introspection.table_names(cursor, include_views)
def get_partitioned_table(table_name: str):
    """Returns the definition of a partitioned table in the default
    database."""

    with connection.cursor() as cursor:
        return connection.introspection.get_partitioned_table(
            cursor, table_name
        )
def get_partitions(table_name: str):
    """Returns the list of partitions belonging to the specified
    partitioned table in the default database."""

    with connection.cursor() as cursor:
        return connection.introspection.get_partitions(cursor, table_name)
def get_constraints(table_name: str):
    """Returns the complete list of constraints and indexes defined on the
    specified table."""

    with connection.cursor() as cursor:
        return connection.introspection.get_constraints(cursor, table_name)
django-postgres-extra-2.0.4/tests/fake_model.py 0000664 0000000 0000000 00000006457 14175513017 0021566 0 ustar 00root root 0000000 0000000 import os
import sys
import uuid
from contextlib import contextmanager
from django.apps import AppConfig, apps
from django.db import connection
from psqlextra.models import (
PostgresMaterializedViewModel,
PostgresModel,
PostgresPartitionedModel,
PostgresViewModel,
)
def define_fake_model(
    fields=None, model_base=PostgresModel, meta_options=None, **attributes
):
    """Defines a fake model (but does not create it in the database).

    Arguments:
        fields:
            Optional dict of field name -> field instance.
        model_base:
            Base class for the generated model.
        meta_options:
            Optional dict of Meta options (``app_label`` etc).
        attributes:
            Extra class attributes (e.g. a ``PartitioningMeta`` class).

    Returns:
        The dynamically created model class, registered in the app
        registry under its (randomly generated) name.
    """

    # Fixed: was a mutable default argument (`meta_options={}`).
    meta_options = meta_options or {}

    # Random 8-char, title-cased name so repeated calls never clash.
    name = str(uuid.uuid4()).replace("-", "")[:8].title()

    attributes = {
        "app_label": meta_options.get("app_label") or "tests",
        "__module__": __name__,
        "__name__": name,
        "Meta": type("Meta", (object,), meta_options),
        **attributes,
    }

    if fields:
        attributes.update(fields)

    model = type(name, (model_base,), attributes)

    # NOTE(review): registered under the title-cased name; Django itself
    # keys this dict by lower-cased model name — works for these tests,
    # but verify before relying on registry look-ups.
    apps.app_configs[attributes["app_label"]].models[name] = model

    return model
def define_fake_view_model(
    fields=None, view_options=None, meta_options=None, model_base=PostgresViewModel
):
    """Defines a fake view model (without creating it in the database).

    Arguments:
        fields:
            Optional dict of field name -> field instance.
        view_options:
            Optional dict of ``ViewMeta`` options (e.g. the view query).
        meta_options:
            Optional dict of Meta options.
        model_base:
            Base class for the generated model.
    """

    # Fixed: mutable default arguments replaced with None sentinels.
    model = define_fake_model(
        fields=fields,
        model_base=model_base,
        meta_options=meta_options or {},
        ViewMeta=type("ViewMeta", (object,), view_options or {}),
    )

    return model
def define_fake_materialized_view_model(
    fields=None,
    view_options=None,
    meta_options=None,
    model_base=PostgresMaterializedViewModel,
):
    """Defines a fake materialized view model (without creating it in the
    database).

    Arguments:
        fields:
            Optional dict of field name -> field instance.
        view_options:
            Optional dict of ``ViewMeta`` options (e.g. the view query).
        meta_options:
            Optional dict of Meta options.
        model_base:
            Base class for the generated model.
    """

    # Fixed: mutable default arguments replaced with None sentinels.
    model = define_fake_model(
        fields=fields,
        model_base=model_base,
        meta_options=meta_options or {},
        ViewMeta=type("ViewMeta", (object,), view_options or {}),
    )

    return model
def define_fake_partitioned_model(
    fields=None, partitioning_options=None, meta_options=None
):
    """Defines a fake partitioned model (without creating it in the
    database).

    Arguments:
        fields:
            Optional dict of field name -> field instance.
        partitioning_options:
            Optional dict of ``PartitioningMeta`` options
            (method, key, ...).
        meta_options:
            Optional dict of Meta options.
    """

    # Fixed: mutable default arguments replaced with None sentinels.
    model = define_fake_model(
        fields=fields,
        model_base=PostgresPartitionedModel,
        meta_options=meta_options or {},
        PartitioningMeta=type(
            "PartitioningMeta", (object,), partitioning_options or {}
        ),
    )

    return model
def get_fake_partitioned_model(
    fields=None, partitioning_options=None, meta_options=None
):
    """Defines a fake partitioned model and creates it in the database."""

    # Fixed: mutable default arguments replaced with None sentinels; the
    # options are normalized here so the callee always receives dicts.
    model = define_fake_partitioned_model(
        fields, partitioning_options or {}, meta_options or {}
    )

    with connection.schema_editor() as schema_editor:
        schema_editor.create_model(model)

    return model
def get_fake_model(fields=None, model_base=PostgresModel, meta_options=None):
    """Defines a fake model and creates it in the database."""

    # Fixed: mutable default argument replaced with a None sentinel; the
    # options are normalized here so the callee always receives a dict.
    model = define_fake_model(fields, model_base, meta_options or {})

    with connection.schema_editor() as schema_editor:
        schema_editor.create_model(model)

    return model
@contextmanager
def define_fake_app():
    """Creates and registers a fake Django app, unregistering it again when
    the context exits."""

    name = "app_" + str(uuid.uuid4()).replace("-", "")[:8]

    app_config_cls = type(
        name + "Config",
        (AppConfig,),
        {"name": name, "path": os.path.dirname(__file__)},
    )

    app_config = app_config_cls(name, "")
    app_config.apps = apps
    app_config.models = {}

    apps.app_configs[name] = app_config
    # NOTE(review): a plain dict (not a module object) is placed in
    # sys.modules here — appears to be just enough to satisfy importers
    # during the test; confirm before reusing this pattern.
    sys.modules[name] = {}

    try:
        yield app_config
    finally:
        # Always clean up so one test's fake app cannot leak into another.
        del apps.app_configs[name]
        del sys.modules[name]
django-postgres-extra-2.0.4/tests/migrations.py 0000664 0000000 0000000 00000016513 14175513017 0021646 0 ustar 00root root 0000000 0000000 from contextlib import contextmanager
from typing import List
from unittest import mock
from django.apps import apps
from django.db import connection, migrations
from django.db.migrations.autodetector import MigrationAutodetector
from django.db.migrations.executor import MigrationExecutor
from django.db.migrations.loader import MigrationLoader
from django.db.migrations.questioner import NonInteractiveMigrationQuestioner
from django.db.migrations.state import ProjectState
from psqlextra.backend.schema import PostgresSchemaEditor
from .fake_model import define_fake_model
@contextmanager
def filtered_schema_editor(*filters: List[str]):
    """Gets a schema editor, but filters executed SQL statements based on the
    specified text filters.

    Yields a dict that is filled in *after* the with-block exits: it maps
    each filter string to the list of ``execute`` mock calls whose SQL
    contained that string.

    Arguments:
        filters:
            List of strings to filter SQL
            statements on.
    """

    with connection.schema_editor() as schema_editor:
        # Wrap (not replace) execute so statements still run while calls
        # are being recorded.
        wrapper_for = schema_editor.execute
        with mock.patch.object(
            PostgresSchemaEditor, "execute", wraps=wrapper_for
        ) as execute:
            filter_results = {}
            yield filter_results

            for filter_text in filters:
                filter_results[filter_text] = [
                    call for call in execute.mock_calls if filter_text in str(call)
                ]
def apply_migration(operations, state=None, backwards: bool = False):
    """Executes the specified migration operations through a
    :see:MigrationExecutor on the default connection.

    Arguments:
        operations:
            The migration operations to execute.

        state:
            The project state to use during the
            migrations. Defaults to the state derived
            from the current app registry.

        backwards:
            Whether to apply the operations
            in reverse (backwards).

    Returns:
        The ad-hoc Migration instance that was applied.
    """

    state = state or migrations.state.ProjectState.from_apps(apps)

    # Build a throw-away Migration subclass carrying the given operations.
    class Migration(migrations.Migration):
        pass

    Migration.operations = operations

    migration = Migration("migration", "tests")

    executor = MigrationExecutor(connection)

    if not backwards:
        executor.apply_migration(state, migration)
    else:
        executor.unapply_migration(state, migration)

    return migration
def make_migration(app_label="tests", from_state=None, to_state=None):
    """Generates migrations based on the specified app's state.

    Runs the migration autodetector between ``from_state`` (defaults to
    the state recorded on disk) and ``to_state`` (defaults to the state
    of the live app registry) and returns the first generated migration
    for ``app_label``, or ``None`` when there are no changes.
    """

    app_labels = [app_label]

    loader = MigrationLoader(None, ignore_no_migrations=True)
    loader.check_consistent_history(connection)

    questioner = NonInteractiveMigrationQuestioner(
        specified_apps=app_labels, dry_run=False
    )

    autodetector = MigrationAutodetector(
        from_state or loader.project_state(),
        to_state or ProjectState.from_apps(apps),
        questioner,
    )

    changes = autodetector.changes(
        graph=loader.graph,
        trim_to_apps=app_labels or None,
        convert_apps=app_labels or None,
        migration_name="test",
    )

    changes_for_app = changes.get(app_label)
    if not changes_for_app or len(changes_for_app) == 0:
        return None

    return changes_for_app[0]
@contextmanager
def create_drop_model(field, filters: List[str]):
    """Creates and drops a model with the specified field, capturing the
    SQL that was executed.

    Arguments:
        field:
            The field to include on the
            model to create and drop.

        filters:
            List of strings to filter
            SQL statements on.
    """

    model = define_fake_model({"title": field})

    with filtered_schema_editor(*filters) as calls:
        apply_migration(
            [
                migrations.CreateModel(
                    model.__name__, fields=[("title", field.clone())]
                ),
                migrations.DeleteModel(model.__name__),
            ]
        )

    # `calls` is only populated once the filtered_schema_editor block exits.
    yield calls
@contextmanager
def alter_db_table(field, filters: List[str]):
    """Creates a model with the specified field and then renames the database
    table, capturing the SQL that was executed for the rename.

    Arguments:
        field:
            The field to include into the
            model.

        filters:
            List of strings to filter
            SQL statements on.
    """

    model = define_fake_model()

    state = migrations.state.ProjectState.from_apps(apps)

    # Set-up: create the model first, outside the captured section.
    apply_migration(
        [
            migrations.CreateModel(
                model.__name__, fields=[("title", field.clone())]
            )
        ],
        state,
    )

    with filtered_schema_editor(*filters) as calls:
        apply_migration(
            [migrations.AlterModelTable(model.__name__, "NewTableName")], state
        )

    # `calls` is only populated once the filtered_schema_editor block exits.
    yield calls
@contextmanager
def add_field(field, filters: List[str]):
    """Adds the specified field to a model, capturing the SQL that was
    executed for the AddField operation.

    Arguments:
        field:
            The field to add to a model.

        filters:
            List of strings to filter
            SQL statements on.
    """

    model = define_fake_model()
    state = migrations.state.ProjectState.from_apps(apps)

    # Set-up: create the (empty) model first, outside the captured section.
    apply_migration([migrations.CreateModel(model.__name__, fields=[])], state)

    with filtered_schema_editor(*filters) as calls:
        apply_migration(
            [migrations.AddField(model.__name__, "title", field)], state
        )

    # `calls` is only populated once the filtered_schema_editor block exits.
    yield calls
@contextmanager
def remove_field(field, filters: List[str]):
    """Removes the specified field from a model, capturing the SQL that was
    executed for the RemoveField operation.

    Arguments:
        field:
            The field to remove from a model.

        filters:
            List of strings to filter
            SQL statements on.
    """

    model = define_fake_model({"title": field})
    state = migrations.state.ProjectState.from_apps(apps)

    # Set-up: create the model with the field, outside the captured section.
    apply_migration(
        [
            migrations.CreateModel(
                model.__name__, fields=[("title", field.clone())]
            )
        ],
        state,
    )

    with filtered_schema_editor(*filters) as calls:
        apply_migration(
            [migrations.RemoveField(model.__name__, "title")], state
        )

    # `calls` is only populated once the filtered_schema_editor block exits.
    yield calls
@contextmanager
def alter_field(old_field, new_field, filters: List[str]):
    """Alters a field from one state to the other, capturing the SQL that
    was executed for the AlterField operation.

    Arguments:
        old_field:
            The field before altering it.

        new_field:
            The field after altering it.

        filters:
            List of strings to filter
            SQL statements on.
    """

    model = define_fake_model({"title": old_field})
    state = migrations.state.ProjectState.from_apps(apps)

    # Set-up: create the model with the old field, outside the capture.
    apply_migration(
        [
            migrations.CreateModel(
                model.__name__, fields=[("title", old_field.clone())]
            )
        ],
        state,
    )

    with filtered_schema_editor(*filters) as calls:
        apply_migration(
            [migrations.AlterField(model.__name__, "title", new_field)], state
        )

    # `calls` is only populated once the filtered_schema_editor block exits.
    yield calls
@contextmanager
def rename_field(field, filters: List[str]):
    """Renames a field from one name to the other, capturing the SQL that
    was executed for the RenameField operation.

    Arguments:
        field:
            Field to be renamed.

        filters:
            List of strings to filter
            SQL statements on.
    """

    model = define_fake_model({"title": field})
    state = migrations.state.ProjectState.from_apps(apps)

    # Set-up: create the model with the field, outside the captured section.
    apply_migration(
        [
            migrations.CreateModel(
                model.__name__, fields=[("title", field.clone())]
            )
        ],
        state,
    )

    with filtered_schema_editor(*filters) as calls:
        apply_migration(
            [migrations.RenameField(model.__name__, "title", "newtitle")], state
        )

    # `calls` is only populated once the filtered_schema_editor block exits.
    yield calls
django-postgres-extra-2.0.4/tests/snapshots/ 0000775 0000000 0000000 00000000000 14175513017 0021134 5 ustar 00root root 0000000 0000000 django-postgres-extra-2.0.4/tests/snapshots/__init__.py 0000664 0000000 0000000 00000000000 14175513017 0023233 0 ustar 00root root 0000000 0000000 django-postgres-extra-2.0.4/tests/snapshots/snap_test_management_command_partition.py 0000664 0000000 0000000 00000006756 14175513017 0031507 0 ustar 00root root 0000000 0000000 # -*- coding: utf-8 -*-
# snapshottest: v1 - https://goo.gl/zC4yUc
from __future__ import unicode_literals
from snapshottest import GenericRepr, Snapshot
snapshots = Snapshot()
snapshots['test_management_command_partition_auto_confirm[--yes] 1'] = GenericRepr("CaptureResult(out='test:\\n - tobedeleted\\n + tobecreated\\n\\n1 partitions will be deleted\\n1 partitions will be created\\nOperations applied.\\n', err='')")
snapshots['test_management_command_partition_auto_confirm[-y] 1'] = GenericRepr("CaptureResult(out='test:\\n - tobedeleted\\n + tobecreated\\n\\n1 partitions will be deleted\\n1 partitions will be created\\nOperations applied.\\n', err='')")
snapshots['test_management_command_partition_confirm_no[NO] 1'] = GenericRepr("CaptureResult(out='test:\\n - tobedeleted\\n + tobecreated\\n\\n1 partitions will be deleted\\n1 partitions will be created\\nDo you want to proceed? (y/N) Operation aborted.\\n', err='')")
snapshots['test_management_command_partition_confirm_no[N] 1'] = GenericRepr("CaptureResult(out='test:\\n - tobedeleted\\n + tobecreated\\n\\n1 partitions will be deleted\\n1 partitions will be created\\nDo you want to proceed? (y/N) Operation aborted.\\n', err='')")
snapshots['test_management_command_partition_confirm_no[No] 1'] = GenericRepr("CaptureResult(out='test:\\n - tobedeleted\\n + tobecreated\\n\\n1 partitions will be deleted\\n1 partitions will be created\\nDo you want to proceed? (y/N) Operation aborted.\\n', err='')")
snapshots['test_management_command_partition_confirm_no[n] 1'] = GenericRepr("CaptureResult(out='test:\\n - tobedeleted\\n + tobecreated\\n\\n1 partitions will be deleted\\n1 partitions will be created\\nDo you want to proceed? (y/N) Operation aborted.\\n', err='')")
snapshots['test_management_command_partition_confirm_no[no] 1'] = GenericRepr("CaptureResult(out='test:\\n - tobedeleted\\n + tobecreated\\n\\n1 partitions will be deleted\\n1 partitions will be created\\nDo you want to proceed? (y/N) Operation aborted.\\n', err='')")
snapshots['test_management_command_partition_confirm_yes[YES] 1'] = GenericRepr("CaptureResult(out='test:\\n - tobedeleted\\n + tobecreated\\n\\n1 partitions will be deleted\\n1 partitions will be created\\nDo you want to proceed? (y/N) Operations applied.\\n', err='')")
snapshots['test_management_command_partition_confirm_yes[Y] 1'] = GenericRepr("CaptureResult(out='test:\\n - tobedeleted\\n + tobecreated\\n\\n1 partitions will be deleted\\n1 partitions will be created\\nDo you want to proceed? (y/N) Operations applied.\\n', err='')")
snapshots['test_management_command_partition_confirm_yes[y] 1'] = GenericRepr("CaptureResult(out='test:\\n - tobedeleted\\n + tobecreated\\n\\n1 partitions will be deleted\\n1 partitions will be created\\nDo you want to proceed? (y/N) Operations applied.\\n', err='')")
snapshots['test_management_command_partition_confirm_yes[yes] 1'] = GenericRepr("CaptureResult(out='test:\\n - tobedeleted\\n + tobecreated\\n\\n1 partitions will be deleted\\n1 partitions will be created\\nDo you want to proceed? (y/N) Operations applied.\\n', err='')")
snapshots['test_management_command_partition_dry_run[--dry] 1'] = GenericRepr("CaptureResult(out='test:\\n - tobedeleted\\n + tobecreated\\n\\n1 partitions will be deleted\\n1 partitions will be created\\n', err='')")
snapshots['test_management_command_partition_dry_run[-d] 1'] = GenericRepr("CaptureResult(out='test:\\n - tobedeleted\\n + tobecreated\\n\\n1 partitions will be deleted\\n1 partitions will be created\\n', err='')")
django-postgres-extra-2.0.4/tests/test_case_insensitive_unique_index.py 0000664 0000000 0000000 00000004055 14175513017 0026637 0 ustar 00root root 0000000 0000000 import pytest
from django.db import IntegrityError, connection, models
from django.db.migrations import AddIndex, CreateModel
from psqlextra.indexes import CaseInsensitiveUniqueIndex
from psqlextra.models import PostgresModel
from .fake_model import get_fake_model
from .migrations import apply_migration, filtered_schema_editor
def test_ciui_migrations():
    """Tests whether migrations for case insensitive unique indexes are
    being created as expected: the generated SQL must wrap every column in
    LOWER()."""

    index_1 = CaseInsensitiveUniqueIndex(
        fields=["name", "other_name"], name="index1"
    )

    ops = [
        CreateModel(
            name="mymodel",
            fields=[
                ("name", models.CharField(max_length=255)),
                ("other_name", models.CharField(max_length=255)),
            ],
        ),
        AddIndex(model_name="mymodel", index=index_1),
    ]

    with filtered_schema_editor("CREATE UNIQUE INDEX") as calls:
        apply_migration(ops)

    # First positional argument of the first captured execute() call is
    # the SQL statement.
    sql = str([call[0] for _, call, _ in calls["CREATE UNIQUE INDEX"]][0])
    expected_sql = 'CREATE UNIQUE INDEX "index1" ON "tests_mymodel" (LOWER("name"), LOWER("other_name"))'
    assert sql == expected_sql
def test_ciui():
    """Tests whether the case insensitive unique index works as expected:
    values differing only in case must violate the constraint."""

    index_1 = CaseInsensitiveUniqueIndex(fields=["name"], name="index1")

    model = get_fake_model(
        {"name": models.CharField(max_length=255)}, PostgresModel
    )

    with connection.schema_editor() as schema_editor:
        schema_editor.add_index(model, index_1)

    model.objects.create(name="henk")

    # "Henk" only differs in case from "henk" -> must conflict.
    with pytest.raises(IntegrityError):
        model.objects.create(name="Henk")
def test_ciui_on_conflict():
    """Tests whether fields with a :see:CaseInsensitiveUniqueIndex can be
    used as a conflict target."""

    index_1 = CaseInsensitiveUniqueIndex(fields=["name"], name="index1")

    model = get_fake_model(
        {"name": models.CharField(max_length=255)},
        PostgresModel,
        {"indexes": [index_1]},
    )

    # Must not raise; the index should be usable as an ON CONFLICT target.
    model.objects.upsert(conflict_target=["name"], fields=dict(name="henk"))
django-postgres-extra-2.0.4/tests/test_conditional_unique_index.py 0000664 0000000 0000000 00000011203 14175513017 0025600 0 ustar 00root root 0000000 0000000 import pytest
from django.db import IntegrityError, models, transaction
from django.db.migrations import AddIndex, CreateModel
from psqlextra.indexes import ConditionalUniqueIndex
from .fake_model import get_fake_model
from .migrations import apply_migration, filtered_schema_editor
def test_cui_deconstruct():
    """Tests whether the :see:ConditionalUniqueIndex's deconstruct() method
    works properly, i.e. round-trips all constructor kwargs."""

    original_kwargs = dict(
        condition="field IS NULL", name="great_index", fields=["field", "build"]
    )
    _, _, new_kwargs = ConditionalUniqueIndex(**original_kwargs).deconstruct()

    for key, value in original_kwargs.items():
        assert new_kwargs[key] == value
def test_cui_migrations():
    """Tests whether the migrations are properly generated and executed:
    each conditional unique index must produce a partial unique index with
    the expected WHERE clause."""

    index_1 = ConditionalUniqueIndex(
        fields=["name", "other_name"],
        condition='"name" IS NOT NULL',
        name="index1",
    )

    index_2 = ConditionalUniqueIndex(
        fields=["other_name"], condition='"name" IS NULL', name="index2"
    )

    ops = [
        CreateModel(
            name="mymodel",
            fields=[
                ("id", models.IntegerField(primary_key=True)),
                ("name", models.CharField(max_length=255, null=True)),
                ("other_name", models.CharField(max_length=255)),
            ],
            options={
                # "indexes": [index_1, index_2],
            },
        ),
        AddIndex(model_name="mymodel", index=index_1),
        AddIndex(model_name="mymodel", index=index_2),
    ]

    with filtered_schema_editor("CREATE UNIQUE INDEX") as calls:
        apply_migration(ops)

    # Extract the SQL (first positional argument) of each captured call.
    calls = [call[0] for _, call, _ in calls["CREATE UNIQUE INDEX"]]

    db_table = "tests_mymodel"
    query = 'CREATE UNIQUE INDEX "index1" ON "{0}" ("name", "other_name") WHERE "name" IS NOT NULL'
    assert str(calls[0]) == query.format(db_table)

    query = 'CREATE UNIQUE INDEX "index2" ON "{0}" ("other_name") WHERE "name" IS NULL'
    assert str(calls[1]) == query.format(db_table)
def test_cui_upserting():
    """Tests upserting respects the :see:ConditionalUniqueIndex rules.

    Two partial unique indexes partition the uniqueness rules on whether
    ``b`` is NULL; upserts targeting either index must update the matching
    row instead of inserting a duplicate.
    """

    model = get_fake_model(
        fields={
            "a": models.IntegerField(),
            "b": models.IntegerField(null=True),
            "c": models.IntegerField(),
        },
        meta_options={
            "indexes": [
                ConditionalUniqueIndex(
                    fields=["a", "b"], condition='"b" IS NOT NULL'
                ),
                ConditionalUniqueIndex(fields=["a"], condition='"b" IS NULL'),
            ]
        },
    )

    # First upsert against the b-IS-NULL index inserts a new row.
    model.objects.upsert(
        conflict_target=["a"],
        index_predicate='"b" IS NULL',
        fields=dict(a=1, c=1),
    )
    assert model.objects.all().count() == 1
    assert model.objects.filter(a=1, c=1).count() == 1

    # Second upsert with the same key updates that row (c: 1 -> 2).
    model.objects.upsert(
        conflict_target=["a"],
        index_predicate='"b" IS NULL',
        fields=dict(a=1, c=2),
    )
    assert model.objects.all().count() == 1
    assert model.objects.filter(a=1, c=1).count() == 0
    assert model.objects.filter(a=1, c=2).count() == 1

    # Upsert against the b-IS-NOT-NULL index inserts a separate row.
    model.objects.upsert(
        conflict_target=["a", "b"],
        index_predicate='"b" IS NOT NULL',
        fields=dict(a=1, b=1, c=1),
    )
    assert model.objects.all().count() == 2
    assert model.objects.filter(a=1, c=2).count() == 1
    assert model.objects.filter(a=1, b=1, c=1).count() == 1

    # And a repeated upsert with that key updates it (c: 1 -> 2).
    model.objects.upsert(
        conflict_target=["a", "b"],
        index_predicate='"b" IS NOT NULL',
        fields=dict(a=1, b=1, c=2),
    )
    assert model.objects.all().count() == 2
    assert model.objects.filter(a=1, c=1).count() == 0
    assert model.objects.filter(a=1, b=1, c=2).count() == 1
def test_cui_inserting():
    """Tests inserting respects the :see:ConditionalUniqueIndex rules.

    Plain inserts must raise IntegrityError exactly when they collide with
    one of the two partial unique indexes.
    """

    model = get_fake_model(
        fields={
            "a": models.IntegerField(),
            "b": models.IntegerField(null=True),
            "c": models.IntegerField(),
        },
        meta_options={
            "indexes": [
                ConditionalUniqueIndex(
                    fields=["a", "b"], condition='"b" IS NOT NULL'
                ),
                ConditionalUniqueIndex(fields=["a"], condition='"b" IS NULL'),
            ]
        },
    )

    model.objects.create(a=1, c=1)

    # Same `a` with b NULL -> conflicts with the b-IS-NULL index.
    with transaction.atomic():
        with pytest.raises(IntegrityError):
            model.objects.create(a=1, c=2)

    model.objects.create(a=2, c=1)
    model.objects.create(a=1, b=1, c=1)

    # Same (a, b) with b NOT NULL -> conflicts with the other index.
    with transaction.atomic():
        with pytest.raises(IntegrityError):
            model.objects.create(a=1, b=1, c=2)

    model.objects.create(a=1, b=2, c=1)
django-postgres-extra-2.0.4/tests/test_db_backend.py 0000664 0000000 0000000 00000000540 14175513017 0022556 0 ustar 00root root 0000000 0000000 from django.db import connection
def test_db_backend_hstore_extension_enabled():
    """Tests whether the `hstore` extension was enabled automatically by
    the psqlextra database backend."""

    with connection.cursor() as cursor:
        cursor.execute(
            ("SELECT count(*) FROM pg_extension " "WHERE extname = 'hstore'")
        )
        # Exactly one row in pg_extension means the extension is installed.
        assert cursor.fetchone()[0] == 1
django-postgres-extra-2.0.4/tests/test_hstore_autodetect.py 0000664 0000000 0000000 00000005171 14175513017 0024254 0 ustar 00root root 0000000 0000000 from django.db import migrations
from django.db.migrations.autodetector import MigrationAutodetector
from django.db.migrations.state import ProjectState
from psqlextra.fields import HStoreField
def _make_project_state(model_states):
    """Shortcut to build a :see:ProjectState from a list of predefined
    model states (each state is cloned before being added)."""

    state = ProjectState()
    for model_state in model_states:
        state.add_model(model_state.clone())
    return state
def _detect_changes(before_states, after_states):
    """Uses the migration autodetector to detect changes between the
    specified before/after lists of model states.

    Returns the autodetector's raw changes dict (app label -> migrations).
    """

    return MigrationAutodetector(
        _make_project_state(before_states), _make_project_state(after_states)
    )._detect_changes()
def _assert_autodetector(changes, expected):
"""Asserts whether the results of the auto detector are as expected."""
assert "tests" in changes
assert len("tests") > 0
operations = changes["tests"][0].operations
for i, expected_operation in enumerate(expected):
real_operation = operations[i]
_, _, real_args, real_kwargs = real_operation.field.deconstruct()
(
_,
_,
expected_args,
expected_kwargs,
) = expected_operation.field.deconstruct()
assert real_args == expected_args
assert real_kwargs == expected_kwargs
def test_hstore_autodetect_uniqueness():
    """Tests whether changes in the `uniqueness` option are properly detected
    by the auto detector."""
    # Same model before/after; only the field's `uniqueness` changes,
    # which should be detected as a single AlterField.
    before = [
        migrations.state.ModelState(
            "tests", "Model1", [("title", HStoreField())]
        )
    ]
    after = [
        migrations.state.ModelState(
            "tests", "Model1", [("title", HStoreField(uniqueness=["en"]))]
        )
    ]
    changes = _detect_changes(before, after)
    _assert_autodetector(
        changes,
        [
            migrations.AlterField(
                "Model1", "title", HStoreField(uniqueness=["en"])
            )
        ],
    )
def test_hstore_autodetect_required():
    """Tests whether changes in the `required` option are properly detected by
    the auto detector."""
    # Same model before/after; only the field's `required` changes,
    # which should be detected as a single AlterField.
    before = [
        migrations.state.ModelState(
            "tests", "Model1", [("title", HStoreField())]
        )
    ]
    after = [
        migrations.state.ModelState(
            "tests", "Model1", [("title", HStoreField(required=["en"]))]
        )
    ]
    changes = _detect_changes(before, after)
    _assert_autodetector(
        changes,
        [
            migrations.AlterField(
                "Model1", "title", HStoreField(required=["en"])
            )
        ],
    )
django-postgres-extra-2.0.4/tests/test_hstore_field.py 0000664 0000000 0000000 00000001743 14175513017 0023177 0 ustar 00root root 0000000 0000000 import pytest
from psqlextra.fields import HStoreField
def test_hstore_field_deconstruct():
    """Tests whether the :see:HStoreField's deconstruct() method works
    properly."""
    kwargs = {"uniqueness": ["beer", "other"], "required": []}
    field = HStoreField(**kwargs)
    _, _, _, deconstructed_kwargs = field.deconstruct()
    # Every constructor kwarg must round-trip through deconstruct().
    for name, expected in kwargs.items():
        assert deconstructed_kwargs[name] == expected
@pytest.mark.parametrize(
    "input,output",
    [
        # dict values are stringified; None is preserved as NULL
        (dict(key1=1, key2=2), dict(key1="1", key2="2")),
        (dict(key1="1", key2="2"), dict(key1="1", key2="2")),
        (
            dict(key1=1, key2=None, key3="3"),
            dict(key1="1", key2=None, key3="3"),
        ),
        # list elements are stringified as well
        ([1, 2, 3], ["1", "2", "3"]),
        (["1", "2", "3"], ["1", "2", "3"]),
    ],
)
def test_hstore_field_get_prep_value(input, output):
    """Tests whether the :see:HStoreField's get_prep_value method works
    properly."""
    assert HStoreField().get_prep_value(input) == output
django-postgres-extra-2.0.4/tests/test_hstore_required.py 0000664 0000000 0000000 00000010320 14175513017 0023723 0 ustar 00root root 0000000 0000000 import pytest
from django.db.utils import IntegrityError
from psqlextra.fields import HStoreField
from . import migrations
from .fake_model import get_fake_model
def test_hstore_required_migration_create_drop_model():
    """Tests whether constraints are properly created and dropped when creating
    and dropping a model."""
    required = ["beer", "cookies"]
    # intercept ADD/DROP CONSTRAINT statements during model create/drop
    test = migrations.create_drop_model(
        HStoreField(required=required), ["ADD CONSTRAINT", "DROP CONSTRAINT"]
    )
    with test as calls:
        # one constraint per required hstore key
        assert len(calls["ADD CONSTRAINT"]) == len(required)
        assert len(calls["DROP CONSTRAINT"]) == len(required)
def test_hstore_required_migration_alter_db_table():
    """Tests whether constraints are renamed properly when renaming the
    database table."""
    test = migrations.alter_db_table(
        HStoreField(required=["beer", "cookie"]),
        ["RENAME CONSTRAINT", "ADD CONSTRAINT", "DROP CONSTRAINT"],
    )
    with test as calls:
        # constraints must be renamed in place, never re-created
        assert len(calls["RENAME CONSTRAINT"]) == 2
        assert len(calls.get("ADD CONSTRAINT", [])) == 0
        assert len(calls.get("DROP CONSTRAINT", [])) == 0
def test_hstore_required_add_field():
    """Tests whether adding a field properly creates the constraints."""
    test = migrations.add_field(
        HStoreField(required=["beer"]), ["ADD CONSTRAINT", "DROP CONSTRAINT"]
    )
    with test as calls:
        # one required key -> exactly one new constraint, none dropped
        assert len(calls.get("ADD CONSTRAINT", [])) == 1
        assert len(calls.get("DROP CONSTRAINT", [])) == 0
def test_hstore_required_remove_field():
    """Tests whether removing a field properly removes the constraint."""
    test = migrations.remove_field(
        HStoreField(required=["beer"]), ["ADD CONSTRAINT", "DROP CONSTRAINT"]
    )
    with test as calls:
        # removing the field should only drop the existing constraint
        assert len(calls.get("ADD CONSTRAINT", [])) == 0
        assert len(calls.get("DROP CONSTRAINT", [])) == 1
def test_hstore_required_alter_field_nothing():
    """Tests whether no constraints are dropped when not changing anything in
    the required."""
    test = migrations.alter_field(
        HStoreField(required=["beer"]),
        HStoreField(required=["beer"]),
        ["ADD CONSTRAINT", "DROP CONSTRAINT"],
    )
    with test as calls:
        # identical `required` -> no constraint churn at all
        assert len(calls.get("ADD CONSTRAINT", [])) == 0
        assert len(calls.get("DROP CONSTRAINT", [])) == 0
def test_hstore_required_alter_field_add():
    """Tests whether only one constraint is created when adding another key to
    the required."""
    test = migrations.alter_field(
        HStoreField(required=["beer"]),
        HStoreField(required=["beer", "beer1"]),
        ["ADD CONSTRAINT", "DROP CONSTRAINT"],
    )
    with test as calls:
        # only the newly required key gets a constraint
        assert len(calls.get("ADD CONSTRAINT", [])) == 1
        assert len(calls.get("DROP CONSTRAINT", [])) == 0
def test_hstore_required_alter_field_remove():
    """Tests whether one constraint is dropped when removing a key from
    required."""
    test = migrations.alter_field(
        HStoreField(required=["beer"]),
        HStoreField(required=[]),
        ["ADD CONSTRAINT", "DROP CONSTRAINT"],
    )
    with test as calls:
        # only the no-longer-required key's constraint is dropped
        assert len(calls.get("ADD CONSTRAINT", [])) == 0
        assert len(calls.get("DROP CONSTRAINT", [])) == 1
def test_hstore_required_rename_field():
    """Tests whether renaming a field doesn't cause the constraint to be re-
    created."""
    test = migrations.rename_field(
        HStoreField(required=["beer", "cookies"]),
        ["RENAME CONSTRAINT", "ADD CONSTRAINT", "DROP CONSTRAINT"],
    )
    with test as calls:
        # constraints must be renamed (one per key), not dropped/re-added
        assert len(calls.get("RENAME CONSTRAINT", [])) == 2
        assert len(calls.get("ADD CONSTRAINT", [])) == 0
        assert len(calls.get("DROP CONSTRAINT", [])) == 0
def test_hstore_required_required_enforcement():
    """Tests whether the constraints are actually properly enforced."""
    model = get_fake_model({"title": HStoreField(required=["en"])})
    # missing the required 'en' key must be rejected by the database
    with pytest.raises(IntegrityError):
        model.objects.create(title={"ar": "hello"})
def test_hstore_required_no_required():
    """Tests whether setting `required` to False causes no requiredness
    constraints to be added."""
    model = get_fake_model({"title": HStoreField(required=False)})
    # any keys should be accepted; no constraint exists to violate
    model.objects.create(title={"ar": "hello"})
django-postgres-extra-2.0.4/tests/test_hstore_unique.py 0000664 0000000 0000000 00000013110 14175513017 0023411 0 ustar 00root root 0000000 0000000 import pytest
from django.db import transaction
from django.db.utils import IntegrityError
from psqlextra.fields import HStoreField
from . import migrations
from .fake_model import get_fake_model
def test_hstore_unique_migration_create_drop_model():
    """Tests whether indexes are properly created and dropped when creating and
    dropping a model."""
    uniqueness = ["beer", "cookies"]
    # intercept CREATE UNIQUE / DROP INDEX statements during create/drop
    test = migrations.create_drop_model(
        HStoreField(uniqueness=uniqueness), ["CREATE UNIQUE", "DROP INDEX"]
    )
    with test as calls:
        # one unique index per hstore key listed in `uniqueness`
        assert len(calls["CREATE UNIQUE"]) == len(uniqueness)
        assert len(calls["DROP INDEX"]) == len(uniqueness)
def test_hstore_unique_migration_alter_db_table():
    """Tests whether indexes are renamed properly when renaming the database
    table."""
    # Bug fix: the intercept list previously captured "CREATE INDEX"
    # while the assertion below checked "CREATE UNIQUE", making that
    # assertion vacuously true. Capture "CREATE UNIQUE" (the statement
    # these indexes are created with elsewhere in this file) so the
    # zero-count assertion is meaningful.
    test = migrations.alter_db_table(
        HStoreField(uniqueness=["beer", "cookie"]),
        ["RENAME TO", "CREATE UNIQUE", "DROP INDEX"],
    )
    with test as calls:
        # 1 rename for table, 2 for hstore keys
        assert len(calls["RENAME TO"]) == 3
        # indexes must be renamed in place, never dropped/re-created
        assert len(calls.get("CREATE UNIQUE", [])) == 0
        assert len(calls.get("DROP INDEX", [])) == 0
def test_hstore_unique_add_field():
    """Tests whether adding a field properly creates the indexes."""
    test = migrations.add_field(
        HStoreField(uniqueness=["beer"]), ["CREATE UNIQUE", "DROP INDEX"]
    )
    with test as calls:
        # one unique key -> exactly one new index, none dropped
        assert len(calls.get("CREATE UNIQUE", [])) == 1
        assert len(calls.get("DROP INDEX", [])) == 0
def test_hstore_unique_remove_field():
    """Tests whether removing a field properly removes the index."""
    test = migrations.remove_field(
        HStoreField(uniqueness=["beer"]), ["CREATE UNIQUE", "DROP INDEX"]
    )
    with test as calls:
        # removing the field should only drop the existing index
        assert len(calls.get("CREATE UNIQUE", [])) == 0
        assert len(calls.get("DROP INDEX", [])) == 1
def test_hstore_unique_alter_field_nothing():
    """Tests whether no indexes are dropped when not changing anything in the
    uniqueness."""
    test = migrations.alter_field(
        HStoreField(uniqueness=["beer"]),
        HStoreField(uniqueness=["beer"]),
        ["CREATE UNIQUE", "DROP INDEX"],
    )
    with test as calls:
        # identical `uniqueness` -> no index churn at all
        assert len(calls.get("CREATE UNIQUE", [])) == 0
        assert len(calls.get("DROP INDEX", [])) == 0
def test_hstore_unique_alter_field_add():
    """Tests whether only one index is created when adding another key to the
    uniqueness."""
    test = migrations.alter_field(
        HStoreField(uniqueness=["beer"]),
        HStoreField(uniqueness=["beer", "beer1"]),
        ["CREATE UNIQUE", "DROP INDEX"],
    )
    with test as calls:
        # only the newly unique key gets an index
        assert len(calls.get("CREATE UNIQUE", [])) == 1
        assert len(calls.get("DROP INDEX", [])) == 0
def test_hstore_unique_alter_field_remove():
    """Tests whether one index is dropped when removing a key from
    uniqueness."""
    test = migrations.alter_field(
        HStoreField(uniqueness=["beer"]),
        HStoreField(uniqueness=[]),
        ["CREATE UNIQUE", "DROP INDEX"],
    )
    with test as calls:
        # only the no-longer-unique key's index is dropped
        assert len(calls.get("CREATE UNIQUE", [])) == 0
        assert len(calls.get("DROP INDEX", [])) == 1
def test_hstore_unique_alter_field_add_together():
    """Tests whether exactly one index is created when adding a "unique
    together" (tuple) entry to the uniqueness."""
    test = migrations.alter_field(
        HStoreField(uniqueness=["beer"]),
        HStoreField(uniqueness=["beer", ("beer1", "beer2")]),
        ["CREATE UNIQUE", "DROP INDEX"],
    )
    with test as calls:
        # the tuple entry results in a single composite unique index
        assert len(calls.get("CREATE UNIQUE", [])) == 1
        assert len(calls.get("DROP INDEX", [])) == 0
def test_hstore_unique_alter_field_remove_together():
    """Tests whether exactly one index is dropped when removing a "unique
    together" (tuple) entry from the uniqueness."""
    test = migrations.alter_field(
        HStoreField(uniqueness=[("beer1", "beer2")]),
        HStoreField(uniqueness=[]),
        ["CREATE UNIQUE", "DROP INDEX"],
    )
    with test as calls:
        # the composite index is dropped as a single unit
        assert len(calls.get("CREATE UNIQUE", [])) == 0
        assert len(calls.get("DROP INDEX", [])) == 1
def test_hstore_unique_rename_field():
    """Tests whether renaming a field doesn't cause the index to be re-
    created."""
    # Bug fix: the intercept list previously captured "CREATE INDEX"
    # while the assertion below checked "CREATE UNIQUE", making that
    # assertion vacuously true. Capture "CREATE UNIQUE" so the
    # zero-count assertion actually verifies no index is re-created.
    test = migrations.rename_field(
        HStoreField(uniqueness=["beer", "cookies"]),
        ["RENAME TO", "CREATE UNIQUE", "DROP INDEX"],
    )
    with test as calls:
        # one rename per unique hstore key
        assert len(calls.get("RENAME TO", [])) == 2
        assert len(calls.get("CREATE UNIQUE", [])) == 0
        assert len(calls.get("DROP INDEX", [])) == 0
def test_hstore_unique_enforcement():
    """Tests whether the constraints are actually properly enforced."""
    model = get_fake_model({"title": HStoreField(uniqueness=["en"])})
    # should pass, table is empty and 'ar' does not have to be unique
    model.objects.create(title={"en": "unique", "ar": "notunique"})
    model.objects.create(title={"en": "elseunique", "ar": "notunique"})
    # this should fail, key 'en' must be unique
    with transaction.atomic():
        with pytest.raises(IntegrityError):
            model.objects.create(title={"en": "unique", "ar": "notunique"})
def test_hstore_unique_enforcement_together():
    """Tests whether unique_together style constraints are enforced
    properly."""
    model = get_fake_model({"title": HStoreField(uniqueness=[("en", "ar")])})
    model.objects.create(title={"en": "unique", "ar": "notunique"})
    # exact duplicate of the (en, ar) pair must be rejected
    with transaction.atomic():
        with pytest.raises(IntegrityError):
            model.objects.create(title={"en": "unique", "ar": "notunique"})
    # changing either half of the pair makes the row acceptable again
    model.objects.create(title={"en": "notunique", "ar": "unique"})
django-postgres-extra-2.0.4/tests/test_insert.py 0000664 0000000 0000000 00000007327 14175513017 0022040 0 ustar 00root root 0000000 0000000 from django.db import models
from psqlextra.query import ConflictAction
from .fake_model import get_fake_model
def test_insert():
    """Tests whether the manager's custom insert works and returns the
    primary key of the newly inserted row."""
    model = get_fake_model(
        {"cookies": models.CharField(max_length=255, null=True)}
    )
    pk = model.objects.all().insert(cookies="some-cookies")
    assert pk is not None
    # the returned pk must match the row that was actually written
    obj1 = model.objects.get()
    assert obj1.pk == pk
    assert obj1.cookies == "some-cookies"
def test_insert_explicit_pk():
    """Tests whether inserts works when the primary key is explicitly
    specified."""
    model = get_fake_model(
        {
            "name": models.CharField(max_length=255, primary_key=True),
            "cookies": models.CharField(max_length=255, null=True),
        }
    )
    pk = model.objects.all().insert(name="the-object", cookies="some-cookies")
    # the caller-provided primary key must be returned unchanged
    assert pk == "the-object"
    obj1 = model.objects.get()
    assert obj1.pk == "the-object"
    assert obj1.name == "the-object"
    assert obj1.cookies == "some-cookies"
def test_insert_on_conflict():
    """Tests whether inserts works when a conflict is anticipated."""
    model = get_fake_model(
        {
            "name": models.CharField(max_length=255, unique=True),
            "cookies": models.CharField(max_length=255, null=True),
        }
    )
    # Fix: the conflict target was written as [("pk")], which is just a
    # parenthesized string, not a one-element tuple. Use a plain list of
    # column names to say what is meant.
    pk = model.objects.on_conflict(["pk"], ConflictAction.NOTHING).insert(
        name="the-object", cookies="some-cookies"
    )
    assert pk is not None
    obj1 = model.objects.get()
    assert obj1.pk == pk
    assert obj1.name == "the-object"
    assert obj1.cookies == "some-cookies"
def test_insert_on_conflict_explicit_pk():
    """Tests whether inserts works when a conflict is anticipated and the
    primary key is explicitly specified."""
    model = get_fake_model(
        {
            "name": models.CharField(max_length=255, primary_key=True),
            "cookies": models.CharField(max_length=255, null=True),
        }
    )
    # Fix: the conflict target was written as [("name")], which is just a
    # parenthesized string, not a one-element tuple. Use a plain list of
    # column names to say what is meant.
    pk = model.objects.on_conflict(["name"], ConflictAction.NOTHING).insert(
        name="the-object", cookies="some-cookies"
    )
    # the caller-provided primary key must be returned unchanged
    assert pk == "the-object"
    obj1 = model.objects.get()
    assert obj1.pk == "the-object"
    assert obj1.name == "the-object"
    assert obj1.cookies == "some-cookies"
def test_insert_with_different_column_name():
    """Tests whether insert_and_get works when a field's database column
    name (`db_column`) differs from the field name."""
    model = get_fake_model(
        {
            "name": models.CharField(max_length=255, primary_key=True),
            # field is named `cookies` but stored in column `brownies`
            "cookies": models.CharField(
                max_length=255, null=True, db_column="brownies"
            ),
        }
    )
    cookie_string = "these-are-brownies"
    results = model.objects.on_conflict(
        ["name"], ConflictAction.NOTHING
    ).insert_and_get(name="the-object", cookies=cookie_string)
    # the returned instance must map the column value back to the field
    assert results is not None
    assert results.cookies == cookie_string
    obj1 = model.objects.get()
    assert obj1.cookies == cookie_string
def test_insert_many_to_many():
    """Tests whether adding a rows to a m2m works after using insert_and_get.
    The model returned by `insert_and_get` must be configured in a
    special way. Just creating a instance of the model is not enough to
    be able to add m2m rows.
    """
    model1 = get_fake_model({"name": models.TextField(primary_key=True)})
    model2 = get_fake_model(
        {
            "name": models.TextField(primary_key=True),
            "model1s": models.ManyToManyField(model1),
        }
    )
    row2 = model2.objects.on_conflict(
        ["name"], ConflictAction.UPDATE
    ).insert_and_get(name="swen")
    row1 = model1.objects.create(name="booh")
    # adding to the m2m would raise if the returned instance were not
    # properly set up with its model state
    row2.model1s.add(row1)
    row2.save()
django-postgres-extra-2.0.4/tests/test_make_migrations.py 0000664 0000000 0000000 00000020566 14175513017 0023705 0 ustar 00root root 0000000 0000000 import django
import pytest
from django.apps import apps
from django.db import models
from django.db.migrations import AddField, AlterField, RemoveField
from django.db.migrations.state import ProjectState
from psqlextra.backend.migrations import operations, postgres_patched_migrations
from psqlextra.models import (
PostgresMaterializedViewModel,
PostgresPartitionedModel,
PostgresViewModel,
)
from psqlextra.types import PostgresPartitioningMethod
from .fake_model import (
define_fake_materialized_view_model,
define_fake_model,
define_fake_partitioned_model,
define_fake_view_model,
get_fake_model,
)
from .migrations import apply_migration, make_migration
@pytest.mark.parametrize(
    "model_config",
    [
        # one config per supported partitioning method
        dict(
            fields={"category": models.TextField()},
            partitioning_options=dict(
                method=PostgresPartitioningMethod.LIST, key="category"
            ),
        ),
        dict(
            fields={"timestamp": models.DateTimeField()},
            partitioning_options=dict(
                method=PostgresPartitioningMethod.RANGE, key="timestamp"
            ),
        ),
        dict(
            fields={"artist_id": models.IntegerField()},
            partitioning_options=dict(
                method=PostgresPartitioningMethod.HASH, key="artist_id"
            ),
        ),
    ],
)
@postgres_patched_migrations()
def test_make_migration_create_partitioned_model(fake_app, model_config):
    """Tests whether the right operations are generated when creating a new
    partitioned model."""
    model = define_fake_partitioned_model(
        **model_config, meta_options=dict(app_label=fake_app.name)
    )
    migration = make_migration(fake_app.name)
    ops = migration.operations
    method = model_config["partitioning_options"]["method"]
    if method == PostgresPartitioningMethod.HASH:
        # should have one operation to create the partitioned model
        # and no default partition
        assert len(ops) == 1
        assert isinstance(ops[0], operations.PostgresCreatePartitionedModel)
    else:
        # should have one operation to create the partitioned model
        # and one more to add a default partition
        assert len(ops) == 2
        assert isinstance(ops[0], operations.PostgresCreatePartitionedModel)
        assert isinstance(ops[1], operations.PostgresAddDefaultPartition)
        # make sure the default partition is named "default"
        assert ops[1].model_name == model.__name__
        assert ops[1].name == "default"
    # make sure the base is set correctly
    assert len(ops[0].bases) == 1
    assert issubclass(ops[0].bases[0], PostgresPartitionedModel)
    # make sure the partitioning options got copied correctly
    assert ops[0].partitioning_options == model_config["partitioning_options"]
@postgres_patched_migrations()
def test_make_migration_create_view_model(fake_app):
    """Tests whether the right operations are generated when creating a new
    view model."""
    # the view needs a real table to select from
    underlying_model = get_fake_model({"name": models.TextField()})
    model = define_fake_view_model(
        fields={"name": models.TextField()},
        view_options=dict(query=underlying_model.objects.all()),
        meta_options=dict(app_label=fake_app.name),
    )
    migration = make_migration(model._meta.app_label)
    ops = migration.operations
    assert len(ops) == 1
    assert isinstance(ops[0], operations.PostgresCreateViewModel)
    # make sure the base is set correctly
    assert len(ops[0].bases) == 1
    assert issubclass(ops[0].bases[0], PostgresViewModel)
    # make sure the view options got copied correctly
    assert ops[0].view_options == model._view_meta.original_attrs
@postgres_patched_migrations()
def test_make_migration_create_materialized_view_model(fake_app):
    """Tests whether the right operations are generated when creating a new
    materialized view model."""
    # the materialized view needs a real table to select from
    underlying_model = get_fake_model({"name": models.TextField()})
    model = define_fake_materialized_view_model(
        fields={"name": models.TextField()},
        view_options=dict(query=underlying_model.objects.all()),
        meta_options=dict(app_label=fake_app.name),
    )
    migration = make_migration(model._meta.app_label)
    ops = migration.operations
    assert len(ops) == 1
    assert isinstance(ops[0], operations.PostgresCreateMaterializedViewModel)
    # make sure the base is set correctly
    assert len(ops[0].bases) == 1
    assert issubclass(ops[0].bases[0], PostgresMaterializedViewModel)
    # make sure the view options got copied correctly
    assert ops[0].view_options == model._view_meta.original_attrs
@pytest.mark.parametrize(
    "define_view_model",
    [define_fake_materialized_view_model, define_fake_view_model],
)
@postgres_patched_migrations()
def test_make_migration_field_operations_view_models(
    fake_app, define_view_model
):
    """Tests whether field operations against a (materialized) view are always
    wrapped in the :see:ApplyState operation so that they don't actually get
    applied to the database, yet Django applies to them to the project state.
    This is important because you can't actually alter/add or delete
    fields from a (materialized) view.
    """
    underlying_model = get_fake_model(
        {"first_name": models.TextField(), "last_name": models.TextField()},
        meta_options=dict(app_label=fake_app.name),
    )
    model = define_view_model(
        fields={"first_name": models.TextField()},
        view_options=dict(query=underlying_model.objects.all()),
        meta_options=dict(app_label=fake_app.name),
    )
    # snapshot the state before the view model exists so we can
    # migrate back to it at the end
    state_1 = ProjectState.from_apps(apps)
    migration = make_migration(model._meta.app_label)
    apply_migration(migration.operations, state_1)
    # add a field to the materialized view
    last_name_field = models.TextField(null=True)
    last_name_field.contribute_to_class(model, "last_name")
    migration = make_migration(model._meta.app_label, from_state=state_1)
    assert len(migration.operations) == 1
    assert isinstance(migration.operations[0], operations.ApplyState)
    assert isinstance(migration.operations[0].state_operation, AddField)
    # alter the field on the materialized view
    state_2 = ProjectState.from_apps(apps)
    last_name_field = models.TextField(null=True, blank=True)
    last_name_field.contribute_to_class(model, "last_name")
    migration = make_migration(model._meta.app_label, from_state=state_2)
    assert len(migration.operations) == 1
    assert isinstance(migration.operations[0], operations.ApplyState)
    assert isinstance(migration.operations[0].state_operation, AlterField)
    # remove the field from the materialized view
    migration = make_migration(
        model._meta.app_label,
        from_state=ProjectState.from_apps(apps),
        to_state=state_1,
    )
    assert isinstance(migration.operations[0], operations.ApplyState)
    assert isinstance(migration.operations[0].state_operation, RemoveField)
@pytest.mark.skipif(
    django.VERSION < (2, 2),
    reason="Django < 2.2 doesn't implement left-to-right migration optimizations",
)
@pytest.mark.parametrize("method", PostgresPartitioningMethod.all())
@postgres_patched_migrations()
def test_autodetect_fk_issue(fake_app, method):
    """Test whether Django can perform ForeignKey optimization.
    Fixes https://github.com/SectorLabs/django-postgres-extra/issues/123 for Django >= 2.2
    """
    meta_options = {"app_label": fake_app.name}
    partitioning_options = {"method": method, "key": "artist_id"}
    artist_model_fields = {"name": models.TextField()}
    Artist = define_fake_model(artist_model_fields, meta_options=meta_options)
    # capture the state with only Artist so the new partitioned model
    # (with its FK to Artist) is what gets autodetected
    from_state = ProjectState.from_apps(apps)
    album_model_fields = {
        "name": models.TextField(),
        # FK referenced by name, which exercises the autodetector's
        # dependency resolution / operation optimization
        "artist": models.ForeignKey(
            to=Artist.__name__, on_delete=models.CASCADE
        ),
    }
    define_fake_partitioned_model(
        album_model_fields,
        partitioning_options=partitioning_options,
        meta_options=meta_options,
    )
    migration = make_migration(fake_app.name, from_state=from_state)
    ops = migration.operations
    if method == PostgresPartitioningMethod.HASH:
        # hash partitioning has no default partition
        assert len(ops) == 1
        assert isinstance(ops[0], operations.PostgresCreatePartitionedModel)
    else:
        assert len(ops) == 2
        assert isinstance(ops[0], operations.PostgresCreatePartitionedModel)
        assert isinstance(ops[1], operations.PostgresAddDefaultPartition)
django-postgres-extra-2.0.4/tests/test_management_command_partition.py 0000664 0000000 0000000 00000013464 14175513017 0026436 0 ustar 00root root 0000000 0000000 import argparse
from unittest.mock import MagicMock, create_autospec, patch
import pytest
from django.db import models
from django.test import override_settings
from psqlextra.backend.introspection import (
PostgresIntrospectedPartitionTable,
PostgresIntrospectedPartitonedTable,
)
from psqlextra.management.commands.pgpartition import Command
from psqlextra.partitioning import PostgresPartitioningManager
from psqlextra.partitioning.config import PostgresPartitioningConfig
from psqlextra.partitioning.partition import PostgresPartition
from psqlextra.partitioning.strategy import PostgresPartitioningStrategy
from .fake_model import define_fake_partitioned_model
@pytest.fixture
def fake_strategy():
    # Mocked partitioning strategy that always reports exactly one
    # partition to create ("tobecreated") and one to delete
    # ("tobedeleted"), so tests can assert on create/delete calls.
    strategy = create_autospec(PostgresPartitioningStrategy)
    strategy.createable_partition = create_autospec(PostgresPartition)
    strategy.createable_partition.name = MagicMock(return_value="tobecreated")
    strategy.to_create = MagicMock(return_value=[strategy.createable_partition])
    strategy.deleteable_partition = create_autospec(PostgresPartition)
    strategy.deleteable_partition.name = MagicMock(return_value="tobedeleted")
    strategy.to_delete = MagicMock(return_value=[strategy.deleteable_partition])
    return strategy
@pytest.fixture
def fake_model(fake_strategy):
    # Partitioned model whose introspection is patched so the command
    # sees one existing (deletable) auto-created partition.
    model = define_fake_partitioned_model(
        {"timestamp": models.DateTimeField()}, {"key": ["timestamp"]}
    )
    # consistent model name so snapshot tests work
    model.__name__ = "test"
    # we have to trick the system into thinking the model/table
    # actually exists with one partition (so we can simulate deletions)
    deleteable_partition_name = fake_strategy.deleteable_partition.name()
    mocked_partitioned_table = PostgresIntrospectedPartitonedTable(
        name=model._meta.db_table,
        method=model._partitioning_meta.method,
        key=model._partitioning_meta.key,
        partitions=[
            PostgresIntrospectedPartitionTable(
                name=deleteable_partition_name,
                full_name=f"{model._meta.db_table}_{deleteable_partition_name}",
                comment="psqlextra_auto_partitioned",
            )
        ],
    )
    introspection_package = "psqlextra.backend.introspection"
    introspection_class = f"{introspection_package}.PostgresIntrospection"
    get_partitioned_table_path = f"{introspection_class}.get_partitioned_table"
    # patch stays active for the duration of the test via yield
    with patch(get_partitioned_table_path) as mock:
        mock.return_value = mocked_partitioned_table
        yield model
@pytest.fixture
def fake_partitioning_manager(fake_model, fake_strategy):
    # Install a partitioning manager for the fake model so the
    # management command under test picks it up from settings.
    manager = PostgresPartitioningManager(
        [PostgresPartitioningConfig(fake_model, fake_strategy)]
    )
    with override_settings(PSQLEXTRA_PARTITIONING_MANAGER=manager):
        yield manager
@pytest.fixture
def run(capsys):
    # Helper that invokes the pgpartition management command with the
    # given CLI arguments and returns the captured stdout/stderr.
    def _run(*args):
        parser = argparse.ArgumentParser()
        command = Command()
        command.add_arguments(parser)
        command.handle(**vars(parser.parse_args(args)))
        return capsys.readouterr()
    return _run
@pytest.mark.parametrize("args", ["-d", "--dry"])
def test_management_command_partition_dry_run(
    args, snapshot, run, fake_model, fake_partitioning_manager
):
    """Tests whether the --dry option actually makes it a dry run and does not
    create/delete partitions."""
    config = fake_partitioning_manager.find_config_for_model(fake_model)
    # output is snapshot-tested; a dry run must touch nothing
    snapshot.assert_match(run(args))
    config.strategy.createable_partition.create.assert_not_called()
    config.strategy.createable_partition.delete.assert_not_called()
    config.strategy.deleteable_partition.create.assert_not_called()
    config.strategy.deleteable_partition.delete.assert_not_called()
@pytest.mark.parametrize("args", ["-y", "--yes"])
def test_management_command_partition_auto_confirm(
    args, snapshot, run, fake_model, fake_partitioning_manager
):
    """Tests whether the --yes option makes it not ask for confirmation before
    creating/deleting partitions."""
    config = fake_partitioning_manager.find_config_for_model(fake_model)
    snapshot.assert_match(run(args))
    # with --yes, the create and delete actions run unprompted
    config.strategy.createable_partition.create.assert_called_once()
    config.strategy.createable_partition.delete.assert_not_called()
    config.strategy.deleteable_partition.create.assert_not_called()
    config.strategy.deleteable_partition.delete.assert_called_once()
@pytest.mark.parametrize("answer", ["y", "Y", "yes", "YES"])
def test_management_command_partition_confirm_yes(
    answer, monkeypatch, snapshot, run, fake_model, fake_partitioning_manager
):
    """Tests whether answering affirmatively at the interactive confirmation
    prompt causes partitions to be created/deleted."""
    config = fake_partitioning_manager.find_config_for_model(fake_model)
    # simulate the user typing the affirmative answer at the prompt
    monkeypatch.setattr("builtins.input", lambda _: answer)
    snapshot.assert_match(run())
    config.strategy.createable_partition.create.assert_called_once()
    config.strategy.createable_partition.delete.assert_not_called()
    config.strategy.deleteable_partition.create.assert_not_called()
    config.strategy.deleteable_partition.delete.assert_called_once()
@pytest.mark.parametrize("answer", ["n", "N", "no", "No", "NO"])
def test_management_command_partition_confirm_no(
    answer, monkeypatch, snapshot, run, fake_model, fake_partitioning_manager
):
    """Tests whether answering negatively at the interactive confirmation
    prompt prevents partitions from being created/deleted."""
    config = fake_partitioning_manager.find_config_for_model(fake_model)
    # simulate the user declining at the prompt
    monkeypatch.setattr("builtins.input", lambda _: answer)
    snapshot.assert_match(run())
    config.strategy.createable_partition.create.assert_not_called()
    config.strategy.createable_partition.delete.assert_not_called()
    config.strategy.deleteable_partition.create.assert_not_called()
    config.strategy.deleteable_partition.delete.assert_not_called()
django-postgres-extra-2.0.4/tests/test_manager.py 0000664 0000000 0000000 00000006007 14175513017 0022140 0 ustar 00root root 0000000 0000000 import pytest
from django.core.exceptions import ImproperlyConfigured
from django.db import models
from django.test import override_settings
from psqlextra.manager import PostgresManager
from psqlextra.models import PostgresModel
from .fake_model import get_fake_model
@pytest.mark.parametrize(
    "databases",
    [
        # psqlextra backend on the default connection
        {"default": {"ENGINE": "psqlextra.backend"}},
        # psqlextra backend on a non-default connection only
        {
            "default": {"ENGINE": "django.db.backends.postgresql"},
            "other": {"ENGINE": "psqlextra.backend"},
        },
        # psqlextra backend on multiple connections
        {
            "default": {"ENGINE": "psqlextra.backend"},
            "other": {"ENGINE": "psqlextra.backend"},
        },
    ],
)
def test_manager_backend_set(databases):
    """Tests that creating a new instance of :see:PostgresManager succeseeds
    without any errors if one or more databases are configured with
    `psqlextra.backend` as its ENGINE."""
    with override_settings(DATABASES=databases):
        assert PostgresManager()
def test_manager_backend_not_set():
    """Tests whether creating a new instance of
    :see:PostgresManager fails if no database
    has `psqlextra.backend` configured
    as its ENGINE."""
    with override_settings(
        DATABASES={"default": {"ENGINE": "django.db.backends.postgresql"}}
    ):
        # the manager refuses to work without the custom backend
        with pytest.raises(ImproperlyConfigured):
            PostgresManager()
def test_manager_truncate():
    """Tests whether truncating a table works."""
    model = get_fake_model({"name": models.CharField(max_length=255)})
    model.objects.create(name="henk1")
    model.objects.create(name="henk2")
    assert model.objects.count() == 2
    # TRUNCATE should remove all rows at once
    model.objects.truncate()
    assert model.objects.count() == 0
@pytest.mark.django_db(transaction=True)
def test_manager_truncate_cascade():
    """Tests whether truncating a table with cascade works."""
    model_1 = get_fake_model({"name": models.CharField(max_length=255)})
    # model_2 references model_1, so truncating model_1 requires CASCADE
    model_2 = get_fake_model(
        {
            "name": models.CharField(max_length=255),
            "model_1": models.ForeignKey(
                model_1, on_delete=models.CASCADE, null=True
            ),
        }
    )
    obj_1 = model_1.objects.create(name="henk1")
    model_2.objects.create(name="henk1", model_1_id=obj_1.id)
    assert model_1.objects.count() == 1
    assert model_2.objects.count() == 1
    # cascade truncates the referencing table as well
    model_1.objects.truncate(cascade=True)
    assert model_1.objects.count() == 0
    assert model_2.objects.count() == 0
def test_manager_truncate_quote_name():
    """Tests whether the truncate statement properly quotes the table name."""
    model = get_fake_model(
        {"name": models.CharField(max_length=255)},
        PostgresModel,
        {
            # without quoting, table names are always
            # lower-case, using a capital case table
            # name requires quoting to work
            "db_table": "MyTable"
        },
    )
    model.objects.create(name="henk1")
    model.objects.create(name="henk2")
    assert model.objects.count() == 2
    # would raise "relation mytable does not exist" without quoting
    model.objects.truncate()
    assert model.objects.count() == 0
django-postgres-extra-2.0.4/tests/test_manager_context.py 0000664 0000000 0000000 00000001177 14175513017 0023707 0 ustar 00root root 0000000 0000000 from django.db import models
from psqlextra.util import postgres_manager
from .fake_model import get_fake_model
def test_manager_context():
    """Tests whether the :see:postgres_manager context manager can be used to
    get access to :see:PostgresManager on a model that does not use it directly
    or inherits from :see:PostgresModel."""
    # plain Django model, no PostgresManager/PostgresModel involved
    model = get_fake_model(
        {"myfield": models.CharField(max_length=255, unique=True)}, models.Model
    )
    with postgres_manager(model) as manager:
        # the temporary manager exposes the psqlextra API (upsert, ...)
        manager.upsert(conflict_target=["myfield"], fields=dict(myfield="beer"))
        assert manager.first().myfield == "beer"
django-postgres-extra-2.0.4/tests/test_materialized_view_model.py 0000664 0000000 0000000 00000002062 14175513017 0025407 0 ustar 00root root 0000000 0000000 from django.db import connection, models
from psqlextra.backend.schema import PostgresSchemaEditor
from .fake_model import define_fake_materialized_view_model, get_fake_model
def test_materialized_view_model_refresh():
    """Tests whether a materialized view can be refreshed."""
    underlying_model = get_fake_model({"name": models.TextField()})
    model = define_fake_materialized_view_model(
        {"name": models.TextField()},
        {"query": underlying_model.objects.filter(name="test1")},
    )
    underlying_model.objects.create(name="test1")
    underlying_model.objects.create(name="test2")
    schema_editor = PostgresSchemaEditor(connection)
    schema_editor.create_materialized_view_model(model)
    # materialized view should only show records with name="test1"
    objs = list(model.objects.all())
    assert len(objs) == 1
    assert objs[0].name == "test1"
    # create another record with "test1" and refresh; the new row only
    # becomes visible through the view after the refresh
    underlying_model.objects.create(name="test1")
    model.refresh()
    objs = list(model.objects.all())
    assert len(objs) == 2
django-postgres-extra-2.0.4/tests/test_migration_operations.py 0000664 0000000 0000000 00000020326 14175513017 0024762 0 ustar 00root root 0000000 0000000 import pytest
from django.apps import apps
from django.db import connection, migrations, models
from psqlextra.backend.migrations import operations
from psqlextra.manager import PostgresManager
from psqlextra.models import PostgresPartitionedModel
from psqlextra.types import PostgresPartitioningMethod
from . import db_introspection
from .migrations import apply_migration
def _partitioned_table_exists(op: operations.PostgresCreatePartitionedModel):
    """Checks whether the partitioned table created by the given operation
    exists with the expected partitioning method and key."""
    table = db_introspection.get_partitioned_table(f"tests_{op.name}")
    if not table:
        return False
    options = op.partitioning_options
    method_matches = table.method == options["method"]
    key_matches = table.key == options["key"]
    return method_matches and key_matches
def _partition_exists(model_op, op):
    """Checks whether the partitioned model and partition operations were
    successfully applied."""
    table_name = f"tests_{model_op.name}"
    table = db_introspection.get_partitioned_table(table_name)
    if not table:
        return False
    # the partition's full name is the parent table name plus the
    # partition name, separated by an underscore
    expected_name = f"{table_name}_{op.name}"
    return any(
        partition.full_name == expected_name
        for partition in table.partitions
    )
@pytest.fixture
def create_model():
    """Factory for creating a :see:PostgresCreatePartitionedModel operation."""

    def _create_model(method):
        # each partitioning method requires one extra field that acts
        # as the partitioning key
        key_fields = {
            PostgresPartitioningMethod.RANGE: (
                "timestamp",
                models.DateTimeField(),
            ),
            PostgresPartitioningMethod.LIST: (
                "category",
                models.TextField(),
            ),
            PostgresPartitioningMethod.HASH: (
                "artist_id",
                models.IntegerField(),
            ),
        }
        try:
            key_name, key_field = key_fields[method]
        except KeyError:
            raise NotImplementedError
        fields = [("name", models.TextField()), (key_name, key_field)]
        return operations.PostgresCreatePartitionedModel(
            "test",
            fields=fields,
            bases=(PostgresPartitionedModel,),
            managers=[("objects", PostgresManager())],
            partitioning_options={"method": method, "key": [key_name]},
        )

    return _create_model
@pytest.mark.postgres_version(lt=110000)
@pytest.mark.parametrize("method", PostgresPartitioningMethod.all())
def test_migration_operations_create_partitioned_table(method, create_model):
    """Tests whether the see :PostgresCreatePartitionedModel operation works as
    expected in a migration."""
    create_operation = create_model(method)
    state = migrations.state.ProjectState.from_apps(apps)
    # migrate forwards, is the table there?
    apply_migration([create_operation], state)
    assert _partitioned_table_exists(create_operation)
    # migrate backwards, is the table gone?
    apply_migration([create_operation], state=state, backwards=True)
    assert not _partitioned_table_exists(create_operation)
@pytest.mark.postgres_version(lt=110000)
@pytest.mark.parametrize("method", PostgresPartitioningMethod.all())
def test_migration_operations_delete_partitioned_table(method, create_model):
    """Tests whether the see :PostgresDeletePartitionedModel operation works as
    expected in a migration."""
    create_operation = create_model(method)
    delete_operation = operations.PostgresDeletePartitionedModel(
        create_operation.name
    )
    state = migrations.state.ProjectState.from_apps(apps)
    # migrate forwards, create model
    apply_migration([create_operation], state)
    assert _partitioned_table_exists(create_operation)
    # record intermediate state, the state we'll
    # migrate backwards to
    intm_state = state.clone()
    # migrate forwards, delete model
    apply_migration([delete_operation], state)
    assert not _partitioned_table_exists(create_operation)
    # migrate backwards, undelete model (re-creates the table from the
    # recorded intermediate state)
    delete_operation.database_backwards(
        "tests", connection.schema_editor(), state, intm_state
    )
    assert _partitioned_table_exists(create_operation)
@pytest.mark.postgres_version(lt=110000)
@pytest.mark.parametrize(
    "method,add_partition_operation",
    [
        (
            PostgresPartitioningMethod.LIST,
            operations.PostgresAddDefaultPartition(
                model_name="test", name="pt1"
            ),
        ),
        (
            PostgresPartitioningMethod.RANGE,
            operations.PostgresAddRangePartition(
                model_name="test",
                name="pt1",
                from_values="2019-01-01",
                to_values="2019-02-01",
            ),
        ),
        (
            PostgresPartitioningMethod.LIST,
            operations.PostgresAddListPartition(
                model_name="test", name="pt1", values=["car", "boat"]
            ),
        ),
        (
            PostgresPartitioningMethod.HASH,
            operations.PostgresAddHashPartition(
                model_name="test", name="pt1", modulus=3, remainder=0
            ),
        ),
    ],
)
def test_migration_operations_add_partition(
    method, add_partition_operation, create_model
):
    """Tests whether adding partitions and then rolling them back works as
    expected."""
    create_operation = create_model(method)
    state = migrations.state.ProjectState.from_apps(apps)
    # migrate forwards: create the partitioned table and one partition
    apply_migration([create_operation, add_partition_operation], state)
    assert _partition_exists(create_operation, add_partition_operation)
    # rollback: both the partition and the table should be gone
    apply_migration(
        [create_operation, add_partition_operation], state, backwards=True
    )
    assert not _partition_exists(create_operation, add_partition_operation)
@pytest.mark.postgres_version(lt=110000)
@pytest.mark.parametrize(
    "method,add_partition_operation,delete_partition_operation",
    [
        (
            PostgresPartitioningMethod.LIST,
            operations.PostgresAddDefaultPartition(
                model_name="test", name="pt1"
            ),
            operations.PostgresDeleteDefaultPartition(
                model_name="test", name="pt1"
            ),
        ),
        (
            PostgresPartitioningMethod.RANGE,
            operations.PostgresAddRangePartition(
                model_name="test",
                name="pt1",
                from_values="2019-01-01",
                to_values="2019-02-01",
            ),
            operations.PostgresDeleteRangePartition(
                model_name="test", name="pt1"
            ),
        ),
        (
            PostgresPartitioningMethod.LIST,
            operations.PostgresAddListPartition(
                model_name="test", name="pt1", values=["car", "boat"]
            ),
            operations.PostgresDeleteListPartition(
                model_name="test", name="pt1"
            ),
        ),
        (
            PostgresPartitioningMethod.HASH,
            operations.PostgresAddHashPartition(
                model_name="test", name="pt1", modulus=3, remainder=0
            ),
            operations.PostgresDeleteHashPartition(
                model_name="test", name="pt1"
            ),
        ),
    ],
)
def test_migration_operations_add_delete_partition(
    method, add_partition_operation, delete_partition_operation, create_model
):
    """Tests whether adding partitions and then removing them works as
    expected."""
    create_operation = create_model(method)
    state = migrations.state.ProjectState.from_apps(apps)
    # migrate forwards, create model and partition
    apply_migration([create_operation, add_partition_operation], state)
    assert _partition_exists(create_operation, add_partition_operation)
    # record intermediate state, the state we'll
    # migrate backwards to
    intm_state = state.clone()
    # migrate forwards, delete the partition
    apply_migration([delete_partition_operation], state)
    assert not _partition_exists(create_operation, add_partition_operation)
    # migrate backwards, undelete the partition (re-creates it from the
    # recorded intermediate state)
    delete_partition_operation.database_backwards(
        "tests", connection.schema_editor(), state, intm_state
    )
    assert _partition_exists(create_operation, add_partition_operation)
django-postgres-extra-2.0.4/tests/test_on_conflict.py 0000664 0000000 0000000 00000032027 14175513017 0023024 0 ustar 00root root 0000000 0000000 import django
import pytest
from django.core.exceptions import SuspiciousOperation
from django.db import connection, models
from django.utils import timezone
from psqlextra.fields import HStoreField
from psqlextra.models import PostgresModel
from psqlextra.query import ConflictAction
from .fake_model import get_fake_model
@pytest.mark.parametrize("conflict_action", ConflictAction.all())
def test_on_conflict(conflict_action):
    """Tests whether simple inserts work correctly."""
    model = get_fake_model(
        {
            "title": HStoreField(uniqueness=["key1"]),
            "cookies": models.CharField(max_length=255, null=True),
        }
    )
    obj = model.objects.on_conflict(
        [("title", "key1")], conflict_action
    ).insert_and_get(title={"key1": "beer"}, cookies="cheers")
    # second insert conflicts on the hstore key; regardless of the
    # conflict action, it must not create a second row
    model.objects.on_conflict(
        [("title", "key1")], conflict_action
    ).insert_and_get(title={"key1": "beer"})
    assert model.objects.count() == 1
    # make sure the data is actually in the db
    obj.refresh_from_db()
    assert obj.title["key1"] == "beer"
    assert obj.cookies == "cheers"
def test_on_conflict_auto_fields():
    """Asserts that fields that automatically add something to the model
    automatically still work properly when upserting."""
    model = get_fake_model(
        {
            "title": models.CharField(max_length=255, unique=True),
            "date_added": models.DateTimeField(auto_now_add=True),
            "date_updated": models.DateTimeField(auto_now=True),
        }
    )
    obj1 = model.objects.on_conflict(
        ["title"], ConflictAction.UPDATE
    ).insert_and_get(title="beer")
    obj2 = model.objects.on_conflict(
        ["title"], ConflictAction.UPDATE
    ).insert_and_get(title="beer")
    obj2.refresh_from_db()
    assert obj1.date_added
    assert obj2.date_added
    assert obj1.date_updated
    assert obj2.date_updated
    # the second upsert updated the same row...
    assert obj1.id == obj2.id
    assert obj1.title == obj2.title
    # ...keeping auto_now_add intact but bumping auto_now
    assert obj1.date_added == obj2.date_added
    assert obj1.date_updated != obj2.date_updated
@pytest.mark.parametrize("conflict_action", ConflictAction.all())
def test_on_conflict_foreign_key(conflict_action):
    """Asserts that models with foreign key relationships can safely be
    inserted."""
    model1 = get_fake_model(
        {"name": models.CharField(max_length=255, unique=True)}
    )
    model2 = get_fake_model(
        {
            "name": models.CharField(max_length=255, unique=True),
            "model1": models.ForeignKey(model1, on_delete=models.CASCADE),
        }
    )
    model1_row = model1.objects.on_conflict(
        ["name"], conflict_action
    ).insert_and_get(name="item1")
    # insert by id, that should work
    model2_row = model2.objects.on_conflict(
        ["name"], conflict_action
    ).insert_and_get(name="item1", model1_id=model1_row.id)
    model2_row = model2.objects.get(name="item1")
    assert model2_row.name == "item1"
    assert model2_row.model1.id == model1_row.id
    # insert by object, that should also work
    model2_row = model2.objects.on_conflict(
        ["name"], conflict_action
    ).insert_and_get(name="item2", model1=model1_row)
    model2_row.refresh_from_db()
    assert model2_row.name == "item2"
    assert model2_row.model1.id == model1_row.id
def test_on_conflict_partial_get():
    """Asserts that when doing an insert_and_get with only part of the columns
    on the model, all fields are returned properly."""
    model = get_fake_model(
        {
            "title": models.CharField(max_length=140, unique=True),
            "purpose": models.CharField(max_length=10, null=True),
            "created_at": models.DateTimeField(auto_now_add=True),
            "updated_at": models.DateTimeField(auto_now=True),
        }
    )
    obj1 = model.objects.on_conflict(
        ["title"], ConflictAction.UPDATE
    ).insert_and_get(title="beer", purpose="for-sale")
    # second upsert only specifies `title`; the other columns must
    # survive the conflict-update untouched
    obj2 = model.objects.on_conflict(
        ["title"], ConflictAction.UPDATE
    ).insert_and_get(title="beer")
    obj2.refresh_from_db()
    assert obj2.title == obj1.title
    assert obj2.purpose == obj1.purpose
    # fixed: the original asserted `obj2.created_at == obj2.created_at`,
    # a tautology; the intent is that auto_now_add is NOT re-applied
    assert obj2.created_at == obj1.created_at
    # auto_now must have bumped the update timestamp on the second upsert
    assert obj1.updated_at != obj2.updated_at
@pytest.mark.parametrize("conflict_action", ConflictAction.all())
def test_on_conflict_invalid_target(conflict_action):
    """Tests whether specifying an invalid value for `conflict_target` raises
    an error."""
    model = get_fake_model(
        {"title": models.CharField(max_length=140, unique=True)}
    )
    # a column that does not exist on the model
    with pytest.raises(SuspiciousOperation):
        (
            model.objects.on_conflict(["cookie"], conflict_action).insert(
                title="beer"
            )
        )
    # a conflict target that is not a string at all
    with pytest.raises(SuspiciousOperation):
        (
            model.objects.on_conflict([None], conflict_action).insert(
                title="beer"
            )
        )
@pytest.mark.parametrize("conflict_action", ConflictAction.all())
def test_on_conflict_outdated_model(conflict_action):
    """Tests whether insert properly handles fields that are in the database
    but not on the model.
    This happens if somebody manually modified the database
    to add a column that is not present in the model.
    This should be handled properly by ignoring the column
    returned by the database.
    """
    model = get_fake_model(
        {"title": models.CharField(max_length=140, unique=True)}
    )
    # manually create the column that is not on the model
    with connection.cursor() as cursor:
        cursor.execute(
            (
                "ALTER TABLE {table} " "ADD COLUMN beer character varying(50);"
            ).format(table=model._meta.db_table)
        )
    # without proper handling, this would fail with a TypeError
    (
        model.objects.on_conflict(["title"], conflict_action).insert_and_get(
            title="beer"
        )
    )
@pytest.mark.parametrize("conflict_action", ConflictAction.all())
def test_on_conflict_custom_column_names(conflict_action):
    """Asserts that models with custom column names (models where the column
    and field name are different) work properly."""
    model = get_fake_model(
        {
            # field names differ from the underlying column names
            "title": models.CharField(
                max_length=140, unique=True, db_column="beer"
            ),
            "description": models.CharField(max_length=255, db_column="desc"),
        }
    )
    # conflict target is given by field name; it must be translated
    # to the actual column name in the generated SQL
    (
        model.objects.on_conflict(["title"], conflict_action).insert(
            title="yeey", description="great thing"
        )
    )
def test_on_conflict_unique_together():
    """Asserts that inserts on models with a unique_together works properly."""
    model = get_fake_model(
        {
            "first_name": models.CharField(max_length=140),
            "last_name": models.CharField(max_length=255),
        },
        PostgresModel,
        {"unique_together": ("first_name", "last_name")},
    )
    id1 = model.objects.on_conflict(
        ["first_name", "last_name"], ConflictAction.UPDATE
    ).insert(first_name="swen", last_name="kooij")
    id2 = model.objects.on_conflict(
        ["first_name", "last_name"], ConflictAction.UPDATE
    ).insert(first_name="swen", last_name="kooij")
    # second insert conflicted on the composite constraint and
    # updated the existing row instead of creating a new one
    assert id1 == id2
def test_on_conflict_unique_together_fk():
    """Asserts that inserts on models with a unique_together and a foreign key
    relationship works properly."""
    model = get_fake_model({"name": models.CharField(max_length=140)})
    model2 = get_fake_model(
        {
            "model1": models.ForeignKey(model, on_delete=models.CASCADE),
            "model2": models.ForeignKey(model, on_delete=models.CASCADE),
        },
        PostgresModel,
        {"unique_together": ("model1", "model2")},
    )
    id1 = model.objects.create(name="one").id
    id2 = model.objects.create(name="two").id
    assert id1 != id2
    # conflict target uses the `_id` column names of the FK fields
    id3 = model2.objects.on_conflict(
        ["model1_id", "model2_id"], ConflictAction.UPDATE
    ).insert(model1_id=id1, model2_id=id2)
    id4 = model2.objects.on_conflict(
        ["model1_id", "model2_id"], ConflictAction.UPDATE
    ).insert(model1_id=id1, model2_id=id2)
    # the second insert updated the existing row
    assert id3 == id4
def test_on_conflict_pk_conflict_target():
    """Tests whether `on_conflict` properly accepts the 'pk' as a conflict
    target, which should resolve into the primary key of a model."""
    model = get_fake_model({"name": models.CharField(max_length=255)})
    obj1 = model.objects.on_conflict(
        ["pk"], ConflictAction.UPDATE
    ).insert_and_get(pk=0, name="beer")
    obj2 = model.objects.on_conflict(
        ["pk"], ConflictAction.UPDATE
    ).insert_and_get(pk=0, name="beer")
    assert obj1.name == "beer"
    assert obj2.name == "beer"
    # both upserts hit the same explicitly-chosen primary key
    assert obj1.id == obj2.id
    assert obj1.id == 0
    assert obj2.id == 0
def test_on_conflict_default_value():
    """Tests whether setting a default for a field and not specifying it
    explicitly when upserting properly causes the default value to be used."""
    model = get_fake_model(
        {"title": models.CharField(max_length=255, default="great")}
    )
    # `title` omitted: the field default must be applied on insert
    obj1 = model.objects.on_conflict(
        ["id"], ConflictAction.UPDATE
    ).insert_and_get(id=0)
    assert obj1.title == "great"
    # conflicting upsert, again without `title`: default still holds
    obj2 = model.objects.on_conflict(
        ["id"], ConflictAction.UPDATE
    ).insert_and_get(id=0)
    assert obj1.id == obj2.id
    assert obj2.title == "great"
def test_on_conflict_default_value_no_overwrite():
    """Tests whether setting a default for a field, inserting a non-default
    value and then trying to update it without specifying that field doesn't
    result in it being overwritten."""
    model = get_fake_model(
        {"title": models.CharField(max_length=255, default="great")}
    )
    obj1 = model.objects.on_conflict(
        ["id"], ConflictAction.UPDATE
    ).insert_and_get(id=0, title="mytitle")
    assert obj1.title == "mytitle"
    # conflicting upsert without `title`; the default must NOT clobber
    # the explicitly inserted value
    obj2 = model.objects.on_conflict(
        ["id"], ConflictAction.UPDATE
    ).insert_and_get(id=0)
    assert obj1.id == obj2.id
    assert obj2.title == "mytitle"
def test_on_conflict_bulk():
    """Tests whether using `on_conflict` with `bulk_insert` properly works."""
    model = get_fake_model(
        {"title": models.CharField(max_length=255, unique=True)}
    )
    rows = [
        dict(title="this is my title"),
        dict(title="this is another title"),
        dict(title="and another one"),
    ]
    (
        model.objects.on_conflict(["title"], ConflictAction.UPDATE).bulk_insert(
            rows
        )
    )
    assert model.objects.all().count() == len(rows)
    # order by primary key so the comparison against the input rows does
    # not depend on the database's unspecified default row ordering;
    # ids are assigned in insertion order, so this matches `rows`
    for index, obj in enumerate(list(model.objects.order_by("id"))):
        assert obj.title == rows[index]["title"]
def test_bulk_return():
    """Tests if primary keys are properly returned from 'bulk_insert'."""
    model = get_fake_model(
        {
            "id": models.BigAutoField(primary_key=True),
            "name": models.CharField(max_length=255, unique=True),
        }
    )
    rows = [dict(name="John Smith"), dict(name="Jane Doe")]
    objs = model.objects.on_conflict(
        ["name"], ConflictAction.UPDATE
    ).bulk_insert(rows)
    # fresh table, so the sequence assigns ids 1 and 2 in row order
    for index, obj in enumerate(objs, 1):
        assert obj["id"] == index
    # Add objects again, update should return the same ids
    # as we're just updating.
    objs = model.objects.on_conflict(
        ["name"], ConflictAction.UPDATE
    ).bulk_insert(rows)
    for index, obj in enumerate(objs, 1):
        assert obj["id"] == index
@pytest.mark.parametrize("conflict_action", ConflictAction.all())
def test_bulk_return_models(conflict_action):
    """Tests whether models are returned instead of dictionaries when
    specifying the return_model=True argument."""
    model = get_fake_model(
        {
            "id": models.BigAutoField(primary_key=True),
            "name": models.CharField(max_length=255, unique=True),
        }
    )
    rows = [dict(name="John Smith"), dict(name="Jane Doe")]
    objs = model.objects.on_conflict(["name"], conflict_action).bulk_insert(
        rows, return_model=True
    )
    # fresh table, so ids 1 and 2 are assigned in row order
    for index, obj in enumerate(objs, 1):
        assert isinstance(obj, model)
        assert obj.id == index
@pytest.mark.skipif(
    django.VERSION < (3, 1),
    reason="Django < 3.1 doesn't implement JSONField",
)
@pytest.mark.parametrize("conflict_action", ConflictAction.all())
def test_bulk_return_models_converters(conflict_action):
    """Tests whether converters are properly applied when using
    return_model=True."""
    model = get_fake_model(
        {
            "name": models.TextField(unique=True),
            "data": models.JSONField(unique=True),
            "updated_at": models.DateTimeField(),
        }
    )
    now = timezone.now()
    # one row passes the timestamp as an ISO string, the other as a
    # datetime; field converters must normalize both to datetime
    rows = [
        dict(name="John Smith", data={"a": 1}, updated_at=now.isoformat()),
        dict(name="Jane Doe", data={"b": 2}, updated_at=now),
    ]
    objs = model.objects.on_conflict(["name"], conflict_action).bulk_insert(
        rows, return_model=True
    )
    for index, (obj, row) in enumerate(zip(objs, rows), 1):
        assert isinstance(obj, model)
        assert obj.id == index
        assert obj.name == row["name"]
        assert obj.data == row["data"]
        assert obj.updated_at == now
django-postgres-extra-2.0.4/tests/test_on_conflict_nothing.py 0000664 0000000 0000000 00000012473 14175513017 0024555 0 ustar 00root root 0000000 0000000 import pytest
from django.db import models
from psqlextra.fields import HStoreField
from psqlextra.query import ConflictAction
from .fake_model import get_fake_model
def test_on_conflict_nothing():
    """Tests whether simple insert NOTHING works correctly."""
    model = get_fake_model(
        {
            "title": HStoreField(uniqueness=["key1"]),
            "cookies": models.CharField(max_length=255, null=True),
        }
    )
    # row does not conflict, new row should be created
    obj1 = model.objects.on_conflict(
        [("title", "key1")], ConflictAction.NOTHING
    ).insert_and_get(title={"key1": "beer"}, cookies="cheers")
    obj1.refresh_from_db()
    assert obj1.title["key1"] == "beer"
    assert obj1.cookies == "cheers"
    # row conflicts, no new row should be created
    obj2 = model.objects.on_conflict(
        [("title", "key1")], ConflictAction.NOTHING
    ).insert_and_get(title={"key1": "beer"}, cookies="choco")
    # DO NOTHING returns nothing on conflict
    assert not obj2
    # assert that the 'cookies' field didn't change
    obj1.refresh_from_db()
    assert obj1.title["key1"] == "beer"
    assert obj1.cookies == "cheers"
    assert model.objects.count() == 1
def test_on_conflict_nothing_foreign_primary_key():
    """Tests whether simple insert NOTHING works correctly when the primary key
    of a field is a foreign key with a custom name."""
    referenced_model = get_fake_model({})
    model = get_fake_model(
        {
            # the one-to-one field doubles as this model's primary key
            "parent": models.OneToOneField(
                referenced_model, primary_key=True, on_delete=models.CASCADE
            ),
            "cookies": models.CharField(max_length=255),
        }
    )
    referenced_obj = referenced_model.objects.create()
    # row does not conflict, new row should be created
    obj1 = model.objects.on_conflict(
        ["parent_id"], ConflictAction.NOTHING
    ).insert_and_get(parent_id=referenced_obj.pk, cookies="cheers")
    obj1.refresh_from_db()
    assert obj1.parent == referenced_obj
    assert obj1.cookies == "cheers"
    # row conflicts, no new row should be created
    obj2 = model.objects.on_conflict(
        ["parent_id"], ConflictAction.NOTHING
    ).insert_and_get(parent_id=referenced_obj.pk, cookies="choco")
    assert not obj2
    # the existing row must be completely untouched
    obj1.refresh_from_db()
    assert obj1.cookies == "cheers"
    assert model.objects.count() == 1
def test_on_conflict_nothing_foreign_key_by_object():
    """Tests whether simple insert NOTHING works correctly when the potentially
    conflicting field is a foreign key specified as an object."""
    other_model = get_fake_model({})
    model = get_fake_model(
        {
            "other": models.OneToOneField(
                other_model, on_delete=models.CASCADE
            ),
            "data": models.CharField(max_length=255),
        }
    )
    other_obj = other_model.objects.create()
    # row does not conflict, new row should be created
    obj1 = model.objects.on_conflict(
        ["other"], ConflictAction.NOTHING
    ).insert_and_get(other=other_obj, data="some data")
    assert obj1.other == other_obj
    assert obj1.data == "some data"
    obj1.refresh_from_db()
    assert obj1.other == other_obj
    assert obj1.data == "some data"
    # passing an object of the wrong model for the FK must be rejected
    with pytest.raises(ValueError):
        (
            model.objects.on_conflict(
                ["other"], ConflictAction.NOTHING
            ).insert_and_get(other=obj1)
        )
    # row conflicts, no new row should be created
    obj2 = model.objects.on_conflict(
        ["other"], ConflictAction.NOTHING
    ).insert_and_get(other=other_obj, data="different data")
    assert not obj2
    # the existing row must be completely untouched
    obj1.refresh_from_db()
    assert model.objects.count() == 1
    assert obj1.other == other_obj
    assert obj1.data == "some data"
def test_on_conflict_nothing_foreign_key_by_id():
    """Tests whether simple insert NOTHING works correctly when the potentially
    conflicting field is a foreign key specified as an id."""
    other_model = get_fake_model({})
    model = get_fake_model(
        {
            "other": models.OneToOneField(
                other_model, on_delete=models.CASCADE
            ),
            "data": models.CharField(max_length=255),
        }
    )
    other_obj = other_model.objects.create()
    # row does not conflict, new row should be created
    obj1 = model.objects.on_conflict(
        ["other_id"], ConflictAction.NOTHING
    ).insert_and_get(other_id=other_obj.pk, data="some data")
    assert obj1.other == other_obj
    assert obj1.data == "some data"
    obj1.refresh_from_db()
    assert obj1.other == other_obj
    assert obj1.data == "some data"
    # row conflicts, no new row should be created
    obj2 = model.objects.on_conflict(
        ["other_id"], ConflictAction.NOTHING
    ).insert_and_get(other_id=other_obj.pk, data="different data")
    assert not obj2
    assert model.objects.count() == 1
    # the existing row must be completely untouched
    obj1.refresh_from_db()
    assert obj1.other == other_obj
    assert obj1.data == "some data"
def test_on_conflict_nothing_duplicate_rows():
    """Tests whether duplicate rows are filtered out when doing a insert
    NOTHING and no error is raised when the list of rows contains
    duplicates."""
    model = get_fake_model({"amount": models.IntegerField(unique=True)})
    # both rows have the same unique value; the duplicate must be
    # dropped silently instead of raising an IntegrityError
    rows = [dict(amount=1), dict(amount=1)]
    (
        model.objects.on_conflict(
            ["amount"], ConflictAction.NOTHING
        ).bulk_insert(rows)
    )
django-postgres-extra-2.0.4/tests/test_on_conflict_update.py 0000664 0000000 0000000 00000007512 14175513017 0024367 0 ustar 00root root 0000000 0000000 import pytest
from django.db import models
from psqlextra.fields import HStoreField
from psqlextra.query import ConflictAction
from .fake_model import get_fake_model
def test_on_conflict_update():
    """Tests whether simple upserts works correctly."""
    model = get_fake_model(
        {
            "title": HStoreField(uniqueness=["key1"]),
            "cookies": models.CharField(max_length=255, null=True),
        }
    )
    obj1 = model.objects.on_conflict(
        [("title", "key1")], ConflictAction.UPDATE
    ).insert_and_get(title={"key1": "beer"}, cookies="cheers")
    obj1.refresh_from_db()
    assert obj1.title["key1"] == "beer"
    assert obj1.cookies == "cheers"
    # conflicting upsert: the existing row must be updated in place
    obj2 = model.objects.on_conflict(
        [("title", "key1")], ConflictAction.UPDATE
    ).insert_and_get(title={"key1": "beer"}, cookies="choco")
    obj1.refresh_from_db()
    obj2.refresh_from_db()
    # assert both objects are the same
    assert obj1.id == obj2.id
    assert obj1.title["key1"] == "beer"
    assert obj1.cookies == "choco"
    assert obj2.title["key1"] == "beer"
    assert obj2.cookies == "choco"
def test_on_conflict_update_foreign_key_by_object():
    """Tests whether simple upsert works correctly when the conflicting field
    is a foreign key specified as an object."""
    other_model = get_fake_model({})
    model = get_fake_model(
        {
            "other": models.OneToOneField(
                other_model, on_delete=models.CASCADE
            ),
            "data": models.CharField(max_length=255),
        }
    )
    other_obj = other_model.objects.create()
    obj1 = model.objects.on_conflict(
        ["other"], ConflictAction.UPDATE
    ).insert_and_get(other=other_obj, data="some data")
    assert obj1.other == other_obj
    assert obj1.data == "some data"
    obj1.refresh_from_db()
    assert obj1.other == other_obj
    assert obj1.data == "some data"
    # passing an object of the wrong model for the FK must be rejected
    with pytest.raises(ValueError):
        (
            model.objects.on_conflict(
                ["other"], ConflictAction.UPDATE
            ).insert_and_get(other=obj1)
        )
    # conflicting upsert: the existing row is updated in place
    obj2 = model.objects.on_conflict(
        ["other"], ConflictAction.UPDATE
    ).insert_and_get(other=other_obj, data="different data")
    assert obj2.other == other_obj
    assert obj2.data == "different data"
    obj1.refresh_from_db()
    obj2.refresh_from_db()
    # assert that the 'other' field didn't change
    assert obj1.id == obj2.id
    assert obj1.other == other_obj
    assert obj2.other == other_obj
    assert obj1.data == "different data"
    assert obj2.data == "different data"
def test_on_conflict_update_foreign_key_by_id():
    """Tests whether simple upsert works correctly when the conflicting field
    is a foreign key specified as an id."""
    other_model = get_fake_model({})
    model = get_fake_model(
        {
            "other": models.OneToOneField(
                other_model, on_delete=models.CASCADE
            ),
            "data": models.CharField(max_length=255),
        }
    )
    other_obj = other_model.objects.create()
    obj1 = model.objects.on_conflict(
        ["other_id"], ConflictAction.UPDATE
    ).insert_and_get(other_id=other_obj.pk, data="some data")
    assert obj1.other == other_obj
    assert obj1.data == "some data"
    obj1.refresh_from_db()
    assert obj1.other == other_obj
    assert obj1.data == "some data"
    # conflicting upsert: the existing row is updated in place
    obj2 = model.objects.on_conflict(
        ["other_id"], ConflictAction.UPDATE
    ).insert_and_get(other_id=other_obj.pk, data="different data")
    assert obj2.other == other_obj
    assert obj2.data == "different data"
    obj1.refresh_from_db()
    obj2.refresh_from_db()
    # assert that the 'other' field didn't change
    assert obj1.id == obj2.id
    assert obj1.other == other_obj
    assert obj2.other == other_obj
    assert obj1.data == "different data"
    assert obj2.data == "different data"
django-postgres-extra-2.0.4/tests/test_partitioned_model.py 0000664 0000000 0000000 00000004240 14175513017 0024225 0 ustar 00root root 0000000 0000000 from psqlextra.models import PostgresPartitionedModel
from psqlextra.types import PostgresPartitioningMethod
from .fake_model import define_fake_partitioned_model
def test_partitioned_model_abstract():
    """Tests whether :see:PostgresPartitionedModel is abstract."""
    meta = PostgresPartitionedModel._meta
    assert meta.abstract
def test_partitioning_model_options_meta():
    """Tests whether the `_partitioning_meta` attribute is available on the
    class itself (created by the meta class) and not just created when the
    model is instantiated."""
    partitioning_meta = PostgresPartitionedModel._partitioning_meta
    assert partitioning_meta
def test_partitioned_model_default_options():
    """Tests whether the default partitioning options are set as expected on
    :see:PostgresPartitionedModel."""
    model = define_fake_partitioned_model()
    meta = model._partitioning_meta
    # by default, models are range-partitioned with an empty key
    assert meta.method == PostgresPartitioningMethod.RANGE
    assert meta.key == []
def test_partitioned_model_method_option():
    """Tests whether the `method` partitioning option is properly copied onto
    the options object."""
    options = dict(method=PostgresPartitioningMethod.LIST)
    model = define_fake_partitioned_model(partitioning_options=options)
    assert model._partitioning_meta.method == PostgresPartitioningMethod.LIST
def test_partitioned_model_method_option_none():
    """Tests whether setting the `method` partitioning option to None results
    in the default being set."""
    options = dict(method=None)
    model = define_fake_partitioned_model(partitioning_options=options)
    assert model._partitioning_meta.method == PostgresPartitioningMethod.RANGE
def test_partitioned_model_key_option():
    """Tests whether the `key` partitioning option is properly copied onto the
    options object."""
    options = dict(key=["timestamp"])
    model = define_fake_partitioned_model(partitioning_options=options)
    assert model._partitioning_meta.key == ["timestamp"]
def test_partitioned_model_key_option_none():
    """Tests whether setting the `key` partitioning option to None results in
    the default being set."""
    options = dict(key=None)
    model = define_fake_partitioned_model(partitioning_options=options)
    assert model._partitioning_meta.key == []
django-postgres-extra-2.0.4/tests/test_partitioned_model_state.py 0000664 0000000 0000000 00000006550 14175513017 0025433 0 ustar 00root root 0000000 0000000 import uuid
import pytest
from django.apps import apps
from django.db import models
from psqlextra.backend.migrations.state import (
PostgresPartitionedModelState,
PostgresPartitionState,
)
from psqlextra.manager import PostgresManager
from psqlextra.models import PostgresPartitionedModel
from psqlextra.types import PostgresPartitioningMethod
from .fake_model import define_fake_partitioned_model
@pytest.fixture
def model():
    """Fake model that is list-partitioned on its `category` column."""
    fields = {"name": models.TextField(), "category": models.TextField()}
    partitioning_options = {
        "method": PostgresPartitioningMethod.LIST,
        "key": ["category"],
    }
    model = define_fake_partitioned_model(fields, partitioning_options)
    return model
def test_partitioned_model_state_copies():
    """Tests whether cloning the model state properly copies all the options.
    If it does not copy them, bad things can happen as the state is
    mutated to build up migration state.
    """
    options = dict(method=PostgresPartitioningMethod.RANGE, key=["timestamp"])
    state = PostgresPartitionedModelState(
        app_label="tests",
        name=str(uuid.uuid4()),
        fields=[],
        options=None,
        partitioning_options=options,
        bases=(PostgresPartitionedModel,),
    )
    # identity check: the state must hold a copy, not our dict
    assert options is not state.partitioning_options
def test_partitioned_model_state_from_model(model):
    """Tests whether creating state from an existing model works as
    expected."""
    state = PostgresPartitionedModelState.from_model(model)
    # a freshly derived state starts with no partitions recorded
    assert state.partitions == {}
    assert (
        state.partitioning_options["method"] == model._partitioning_meta.method
    )
    assert state.partitioning_options["key"] == model._partitioning_meta.key
def test_partitioned_model_clone(model):
    """Tests whether cloning the state actually clones the partitioning
    options.
    If its not a copy, but a reference instead, bad things can happen as
    the options are mutated to build up migration state.
    """
    state = PostgresPartitionedModelState.from_model(model)
    state.partitions = {
        "pt1": PostgresPartitionState(
            app_label="tests", model_name="tests", name="pt1"
        )
    }
    state_copy = state.clone()
    # identity checks: the clone must own fresh copies of both mappings
    assert state.partitions is not state_copy.partitions
    assert state.partitioning_options is not state_copy.partitioning_options
def test_partitioned_model_render(model):
    """Tests whether the state can be rendered into a valid model class."""

    partitioning_options = dict(
        method=PostgresPartitioningMethod.RANGE, key=["timestamp"]
    )

    state = PostgresPartitionedModelState(
        app_label="tests",
        name=str(uuid.uuid4()),
        fields=[("name", models.TextField())],
        options=None,
        partitioning_options=partitioning_options,
        bases=(PostgresPartitionedModel,),
        managers=[("cookie", PostgresManager())],
    )

    rendered = state.render(apps)

    # the rendered class keeps the base, fields, managers and
    # partitioning configuration from the state
    assert issubclass(rendered, PostgresPartitionedModel)
    assert rendered.name
    assert isinstance(rendered.objects, PostgresManager)
    assert isinstance(rendered.cookie, PostgresManager)
    assert rendered.__name__ == state.name
    assert rendered._meta.apps == apps
    assert rendered._meta.app_label == "tests"
    assert rendered._partitioning_meta.method == partitioning_options["method"]
    assert rendered._partitioning_meta.key == partitioning_options["key"]
django-postgres-extra-2.0.4/tests/test_partitioning_manager.py 0000664 0000000 0000000 00000004557 14175513017 0024737 0 ustar 00root root 0000000 0000000 import pytest
from django.db import models
from psqlextra.partitioning import (
PostgresPartitioningError,
PostgresPartitioningManager,
partition_by_current_time,
)
from .fake_model import define_fake_partitioned_model, get_fake_model
def test_partitioning_manager_duplicate_model():
    """Tests whether it is not possible to have more than one partitioning
    config per model."""

    partitioned_model = define_fake_partitioned_model(
        {"timestamp": models.DateTimeField()}, {"key": ["timestamp"]}
    )

    # two configs for the same model must be rejected
    with pytest.raises(PostgresPartitioningError):
        PostgresPartitioningManager(
            [
                partition_by_current_time(partitioned_model, years=1, count=3),
                partition_by_current_time(partitioned_model, years=1, count=3),
            ]
        )
def test_partitioning_manager_find_config_for_model():
    """Tests that finding a partitioning config by the model works as
    expected."""

    first_model = define_fake_partitioned_model(
        {"timestamp": models.DateTimeField()}, {"key": ["timestamp"]}
    )
    first_config = partition_by_current_time(first_model, years=1, count=3)

    second_model = define_fake_partitioned_model(
        {"timestamp": models.DateTimeField()}, {"key": ["timestamp"]}
    )
    second_config = partition_by_current_time(second_model, months=1, count=2)

    manager = PostgresPartitioningManager([first_config, second_config])

    # each model resolves to exactly its own config
    assert manager.find_config_for_model(first_model) == first_config
    assert manager.find_config_for_model(second_model) == second_config
def test_partitioning_manager_plan_not_partitioned_model():
    """Tests that the auto partitioner does not try to auto partition for non-
    partitioned models/tables."""

    unpartitioned_model = get_fake_model({"timestamp": models.DateTimeField()})

    with pytest.raises(PostgresPartitioningError):
        config = partition_by_current_time(
            unpartitioned_model, months=1, count=2
        )
        PostgresPartitioningManager([config]).plan()
def test_partitioning_manager_plan_non_existent_model():
    """Tests that the auto partitioner does not try to partition for non-
    existent partitioned tables."""

    # the model is declared but its table is never created in the database
    missing_table_model = define_fake_partitioned_model(
        {"timestamp": models.DateTimeField()}, {"key": ["timestamp"]}
    )

    with pytest.raises(PostgresPartitioningError):
        config = partition_by_current_time(
            missing_table_model, months=1, count=2
        )
        PostgresPartitioningManager([config]).plan()
django-postgres-extra-2.0.4/tests/test_partitioning_time.py 0000664 0000000 0000000 00000037172 14175513017 0024262 0 ustar 00root root 0000000 0000000 import datetime
import freezegun
import pytest
from dateutil.relativedelta import relativedelta
from django.db import connection, models, transaction
from django.db.utils import IntegrityError
from psqlextra.partitioning import (
PostgresPartitioningError,
PostgresPartitioningManager,
partition_by_current_time,
)
from . import db_introspection
from .fake_model import define_fake_partitioned_model
def _get_partitioned_table(model):
    """Introspect and return the partitioned table backing ``model``."""
    table_name = model._meta.db_table
    return db_introspection.get_partitioned_table(table_name)
@pytest.mark.postgres_version(lt=110000)
def test_partitioning_time_yearly_apply():
    """Tests whether automatically creating new partitions ahead yearly works
    as expected."""

    model = define_fake_partitioned_model(
        {"timestamp": models.DateTimeField()}, {"key": ["timestamp"]}
    )

    schema_editor = connection.schema_editor()
    schema_editor.create_partitioned_model(model)

    # create partitions for the next 2 years (including the current one)
    with freezegun.freeze_time("2019-1-1"):
        manager = PostgresPartitioningManager(
            [partition_by_current_time(model, years=1, count=2)]
        )
        manager.plan().apply()

    table = _get_partitioned_table(model)
    assert len(table.partitions) == 2
    assert table.partitions[0].name == "2019"
    assert table.partitions[1].name == "2020"

    # re-running near the end of the year with count=3 should only
    # create the one partition (2021) that does not exist yet
    with freezegun.freeze_time("2019-12-30"):
        manager = PostgresPartitioningManager(
            [partition_by_current_time(model, years=1, count=3)]
        )
        manager.plan().apply()

    table = _get_partitioned_table(model)
    assert len(table.partitions) == 3
    assert table.partitions[0].name == "2019"
    assert table.partitions[1].name == "2020"
    assert table.partitions[2].name == "2021"
@pytest.mark.postgres_version(lt=110000)
def test_partitioning_time_monthly_apply():
    """Tests whether automatically creating new partitions ahead monthly works
    as expected."""

    model = define_fake_partitioned_model(
        {"timestamp": models.DateTimeField()}, {"key": ["timestamp"]}
    )

    schema_editor = connection.schema_editor()
    schema_editor.create_partitioned_model(model)

    # create partitions for the next 12 months (including the current)
    with freezegun.freeze_time("2019-1-30"):
        manager = PostgresPartitioningManager(
            [partition_by_current_time(model, months=1, count=12)]
        )
        manager.plan().apply()

    table = _get_partitioned_table(model)
    assert len(table.partitions) == 12
    assert table.partitions[0].name == "2019_jan"
    assert table.partitions[1].name == "2019_feb"
    assert table.partitions[2].name == "2019_mar"
    assert table.partitions[3].name == "2019_apr"
    assert table.partitions[4].name == "2019_may"
    assert table.partitions[5].name == "2019_jun"
    assert table.partitions[6].name == "2019_jul"
    assert table.partitions[7].name == "2019_aug"
    assert table.partitions[8].name == "2019_sep"
    assert table.partitions[9].name == "2019_oct"
    assert table.partitions[10].name == "2019_nov"
    assert table.partitions[11].name == "2019_dec"

    # re-running it with 13, should just create one additional partition
    with freezegun.freeze_time("2019-1-30"):
        manager = PostgresPartitioningManager(
            [partition_by_current_time(model, months=1, count=13)]
        )
        manager.plan().apply()

    table = _get_partitioned_table(model)
    assert len(table.partitions) == 13
    assert table.partitions[12].name == "2020_jan"

    # it's november now, we only want to create 4 partitions ahead,
    # so only one new partition should be created for february 2020
    with freezegun.freeze_time("2019-11-1"):
        manager = PostgresPartitioningManager(
            [partition_by_current_time(model, months=1, count=4)]
        )
        manager.plan().apply()

    table = _get_partitioned_table(model)
    assert len(table.partitions) == 14
    assert table.partitions[13].name == "2020_feb"
@pytest.mark.postgres_version(lt=110000)
def test_partitioning_time_weekly_apply():
    """Tests whether automatically creating new partitions ahead weekly works
    as expected."""

    model = define_fake_partitioned_model(
        {"timestamp": models.DateTimeField()}, {"key": ["timestamp"]}
    )

    schema_editor = connection.schema_editor()
    schema_editor.create_partitioned_model(model)

    # create partitions for the next 4 weeks (including the current)
    with freezegun.freeze_time("2019-1-23"):
        manager = PostgresPartitioningManager(
            [partition_by_current_time(model, weeks=1, count=4)]
        )
        manager.plan().apply()

    table = _get_partitioned_table(model)
    assert len(table.partitions) == 4
    assert table.partitions[0].name == "2019_week_03"
    assert table.partitions[1].name == "2019_week_04"
    assert table.partitions[2].name == "2019_week_05"
    assert table.partitions[3].name == "2019_week_06"

    # re-running it with 5, should just create one additional partition
    with freezegun.freeze_time("2019-1-23"):
        manager = PostgresPartitioningManager(
            [partition_by_current_time(model, weeks=1, count=5)]
        )
        manager.plan().apply()

    table = _get_partitioned_table(model)
    assert len(table.partitions) == 5
    assert table.partitions[4].name == "2019_week_07"

    # it's june now, we want to partition two weeks ahead
    # note: weeks 8..21 were never visited, so no partitions exist
    # for them; only the two weeks ahead are created
    with freezegun.freeze_time("2019-06-03"):
        manager = PostgresPartitioningManager(
            [partition_by_current_time(model, weeks=1, count=2)]
        )
        manager.plan().apply()

    table = _get_partitioned_table(model)
    assert len(table.partitions) == 7
    assert table.partitions[5].name == "2019_week_22"
    assert table.partitions[6].name == "2019_week_23"
@pytest.mark.postgres_version(lt=110000)
def test_partitioning_time_daily_apply():
    """Tests whether automatically creating new partitions ahead daily works as
    expected."""

    model = define_fake_partitioned_model(
        {"timestamp": models.DateTimeField()}, {"key": ["timestamp"]}
    )

    schema_editor = connection.schema_editor()
    schema_editor.create_partitioned_model(model)

    # create partitions for the next 4 days (including the current)
    with freezegun.freeze_time("2019-1-23"):
        manager = PostgresPartitioningManager(
            [partition_by_current_time(model, days=1, count=4)]
        )
        manager.plan().apply()

    table = _get_partitioned_table(model)
    assert len(table.partitions) == 4
    assert table.partitions[0].name == "2019_jan_23"
    assert table.partitions[1].name == "2019_jan_24"
    assert table.partitions[2].name == "2019_jan_25"
    assert table.partitions[3].name == "2019_jan_26"

    # re-running it with 5, should just create one additional partition
    with freezegun.freeze_time("2019-1-23"):
        manager = PostgresPartitioningManager(
            [partition_by_current_time(model, days=1, count=5)]
        )
        manager.plan().apply()

    table = _get_partitioned_table(model)
    assert len(table.partitions) == 5
    assert table.partitions[4].name == "2019_jan_27"

    # it's june now, we want to partition two days ahead
    # note: the days in between were never visited, so no partitions
    # exist for them; only the two days ahead are created
    with freezegun.freeze_time("2019-06-03"):
        manager = PostgresPartitioningManager(
            [partition_by_current_time(model, days=1, count=2)]
        )
        manager.plan().apply()

    table = _get_partitioned_table(model)
    assert len(table.partitions) == 7
    assert table.partitions[5].name == "2019_jun_03"
    assert table.partitions[6].name == "2019_jun_04"
@pytest.mark.postgres_version(lt=110000)
def test_partitioning_time_monthly_apply_insert():
    """Tests whether automatically created monthly partitions line up
    perfectly."""

    model = define_fake_partitioned_model(
        {"timestamp": models.DateTimeField()}, {"key": ["timestamp"]}
    )

    schema_editor = connection.schema_editor()
    schema_editor.create_partitioned_model(model)

    # create partitions covering january and february 2019
    with freezegun.freeze_time("2019-1-1"):
        manager = PostgresPartitioningManager(
            [partition_by_current_time(model, months=1, count=2)]
        )
        manager.plan().apply()

    # rows falling inside the two created partitions insert fine
    model.objects.create(timestamp=datetime.date(2019, 1, 1))
    model.objects.create(timestamp=datetime.date(2019, 1, 31))
    model.objects.create(timestamp=datetime.date(2019, 2, 28))

    # march is not covered by any partition yet, so inserting fails
    with transaction.atomic():
        with pytest.raises(IntegrityError):
            model.objects.create(timestamp=datetime.date(2019, 3, 1))
            model.objects.create(timestamp=datetime.date(2019, 3, 2))

    # extend by one more month (march); the same inserts now succeed
    with freezegun.freeze_time("2019-1-1"):
        manager = PostgresPartitioningManager(
            [partition_by_current_time(model, months=1, count=3)]
        )
        manager.plan().apply()

    model.objects.create(timestamp=datetime.date(2019, 3, 1))
    model.objects.create(timestamp=datetime.date(2019, 3, 2))
@pytest.mark.postgres_version(lt=110000)
def test_partitioning_time_weekly_apply_insert():
    """Tests whether automatically created weekly partitions line up
    perfectly."""

    model = define_fake_partitioned_model(
        {"timestamp": models.DateTimeField()}, {"key": ["timestamp"]}
    )

    schema_editor = connection.schema_editor()
    schema_editor.create_partitioned_model(model)

    # that's a monday
    with freezegun.freeze_time("2019-1-08"):
        manager = PostgresPartitioningManager(
            [partition_by_current_time(model, weeks=1, count=2)]
        )
        manager.plan().apply()

    table = _get_partitioned_table(model)
    assert len(table.partitions) == 2

    # rows falling inside the two created weeks insert fine
    model.objects.create(timestamp=datetime.date(2019, 1, 7))
    model.objects.create(timestamp=datetime.date(2019, 1, 14))
    model.objects.create(timestamp=datetime.date(2019, 1, 20))

    # the third week is not covered by any partition, so inserting fails
    with transaction.atomic():
        with pytest.raises(IntegrityError):
            model.objects.create(timestamp=datetime.date(2019, 1, 21))
            model.objects.create(timestamp=datetime.date(2019, 1, 22))

    # extend by one more week; the same inserts now succeed
    with freezegun.freeze_time("2019-1-07"):
        manager = PostgresPartitioningManager(
            [partition_by_current_time(model, weeks=1, count=3)]
        )
        manager.plan().apply()

    model.objects.create(timestamp=datetime.date(2019, 1, 21))
    model.objects.create(timestamp=datetime.date(2019, 1, 22))
@pytest.mark.postgres_version(lt=110000)
def test_partitioning_time_daily_apply_insert():
    """Tests whether automatically created daily partitions line up
    perfectly."""

    model = define_fake_partitioned_model(
        {"timestamp": models.DateTimeField()}, {"key": ["timestamp"]}
    )

    schema_editor = connection.schema_editor()
    schema_editor.create_partitioned_model(model)

    # that's a monday
    with freezegun.freeze_time("2019-1-07"):
        manager = PostgresPartitioningManager(
            [partition_by_current_time(model, days=1, count=2)]
        )
        manager.plan().apply()

    table = _get_partitioned_table(model)
    assert len(table.partitions) == 2

    # rows falling inside the two created days insert fine
    model.objects.create(timestamp=datetime.date(2019, 1, 7))
    model.objects.create(timestamp=datetime.date(2019, 1, 8))

    # the following days are not covered yet, so inserting fails
    with transaction.atomic():
        with pytest.raises(IntegrityError):
            model.objects.create(timestamp=datetime.date(2019, 1, 9))
            model.objects.create(timestamp=datetime.date(2019, 1, 10))

    # extend to four days; the same inserts now succeed
    with freezegun.freeze_time("2019-1-07"):
        manager = PostgresPartitioningManager(
            [partition_by_current_time(model, days=1, count=4)]
        )
        manager.plan().apply()

    model.objects.create(timestamp=datetime.date(2019, 1, 9))
    model.objects.create(timestamp=datetime.date(2019, 1, 10))
@pytest.mark.postgres_version(lt=110000)
@pytest.mark.parametrize(
    "kwargs,partition_names",
    [
        (dict(days=2), ["2019_jan_01", "2019_jan_03"]),
        (dict(weeks=2), ["2018_week_53", "2019_week_02"]),
        (dict(months=2), ["2019_jan", "2019_mar"]),
        (dict(years=2), ["2019", "2021"]),
    ],
)
def test_partitioning_time_multiple(kwargs, partition_names):
    """Tests that partitions spanning multiple units of time are created and
    named after their starting point."""

    model = define_fake_partitioned_model(
        {"timestamp": models.DateTimeField()}, {"key": ["timestamp"]}
    )

    editor = connection.schema_editor()
    editor.create_partitioned_model(model)

    with freezegun.freeze_time("2019-1-1"):
        config = partition_by_current_time(model, **kwargs, count=2)
        PostgresPartitioningManager([config]).plan().apply()

    table = _get_partitioned_table(model)
    assert len(table.partitions) == 2
    assert [partition.name for partition in table.partitions] == partition_names
@pytest.mark.postgres_version(lt=110000)
@pytest.mark.parametrize(
    "kwargs,timepoints",
    [
        (
            dict(years=1, max_age=relativedelta(years=2)),
            [("2019-1-1", 6), ("2020-1-1", 6), ("2021-1-1", 5)],
        ),
        (
            dict(months=1, max_age=relativedelta(months=1)),
            [
                ("2019-1-1", 6),
                ("2019-2-1", 5),
                ("2019-2-28", 5),
                ("2019-3-1", 4),
            ],
        ),
        (
            dict(days=7, max_age=relativedelta(weeks=1)),
            [
                ("2019-1-1", 6),
                ("2019-1-4", 6),
                ("2019-1-8", 5),
                ("2019-1-15", 4),
                ("2019-1-16", 4),
            ],
        ),
    ],
)
def test_partitioning_time_delete(kwargs, timepoints):
    """Tests whether partitions older than the specified max_age are
    automatically deleted.

    Arguments:
        kwargs: Extra options passed to :see:partition_by_current_time
            (partition size and max_age).
        timepoints: Pairs of (date, expected partition count); the dates
            are visited in order and the number of remaining partitions
            is checked at each one.
    """

    model = define_fake_partitioned_model(
        {"timestamp": models.DateTimeField()}, {"key": ["timestamp"]}
    )

    schema_editor = connection.schema_editor()
    schema_editor.create_partitioned_model(model)

    manager = PostgresPartitioningManager(
        [partition_by_current_time(model=model, count=6, **kwargs)]
    )

    # create all six partitions at the first timepoint
    with freezegun.freeze_time(timepoints[0][0]):
        manager.plan().apply()

    # as time advances, partitions older than max_age are dropped;
    # skip_create=True ensures the deletion pass never creates new ones
    for dt, expected_partition_count in timepoints:
        with freezegun.freeze_time(dt):
            manager.plan(skip_create=True).apply()

        table = _get_partitioned_table(model)
        assert len(table.partitions) == expected_partition_count
@pytest.mark.postgres_version(lt=110000)
def test_partitioning_time_delete_ignore_manual():
    """Tests whether partitions that were created manually are ignored.

    Partitions created automatically have a special comment attached to
    them. Only partitions with this special comments would be deleted.
    """

    model = define_fake_partitioned_model(
        {"timestamp": models.DateTimeField()}, {"key": ["timestamp"]}
    )

    editor = connection.schema_editor()
    editor.create_partitioned_model(model)

    manager = PostgresPartitioningManager(
        [partition_by_current_time(model, count=2, months=1)]
    )

    # manually added partition: carries no auto-partitioning comment
    editor.add_range_partition(
        model, "2019_jan", from_values="2019-1-1", to_values="2019-2-1"
    )

    # a year later the partition is well past max age, yet the manual
    # partition must survive the deletion pass
    with freezegun.freeze_time("2020-1-1"):
        manager.plan(skip_create=True).apply()

    table = _get_partitioned_table(model)
    assert len(table.partitions) == 1
def test_partitioning_time_no_size():
    """Tests whether an error is raised when no size (days, weeks, months or
    years) for the partitions is specified."""

    partitioned_model = define_fake_partitioned_model(
        {"timestamp": models.DateTimeField()}, {"key": ["timestamp"]}
    )

    # only a count, no unit of time -> invalid
    with pytest.raises(PostgresPartitioningError):
        partition_by_current_time(partitioned_model, count=1)
def test_partitioning_time_multiple_sizes():
    """Tests whether an error is raised when multiple sizes for a partition are
    specified."""

    partitioned_model = define_fake_partitioned_model(
        {"timestamp": models.DateTimeField()}, {"key": ["timestamp"]}
    )

    # both weeks and months -> ambiguous, invalid
    with pytest.raises(PostgresPartitioningError):
        partition_by_current_time(
            partitioned_model, weeks=1, months=2, count=1
        )
django-postgres-extra-2.0.4/tests/test_query.py 0000664 0000000 0000000 00000007507 14175513017 0021701 0 ustar 00root root 0000000 0000000 from django.db import models
from django.db.models import Case, F, Q, Value, When
from psqlextra.expressions import HStoreRef
from psqlextra.fields import HStoreField
from .fake_model import get_fake_model
def test_query_annotate_hstore_key_ref():
    """Tests whether annotating using a :see:HStoreRef expression works
    correctly.

    This allows you to select an individual hstore key.
    """

    referenced_model = get_fake_model({"title": HStoreField()})
    referencing_model = get_fake_model(
        {"fk": models.ForeignKey(referenced_model, on_delete=models.CASCADE)}
    )

    referenced = referenced_model.objects.create(
        title={"en": "english", "ar": "arabic"}
    )
    referencing_model.objects.create(fk=referenced)

    row = (
        referencing_model.objects.annotate(
            english_title=HStoreRef("fk__title", "en")
        )
        .values("english_title")
        .first()
    )

    assert row["english_title"] == "english"
def test_query_annotate_rename():
    """Tests whether field names can be overwritten with a annotated field."""

    fake_model = get_fake_model({"title": models.CharField(max_length=12)})
    fake_model.objects.create(title="swen")

    # annotating under the same name as the field must still work
    instance = fake_model.objects.annotate(title=F("title")).first()
    assert instance.title == "swen"
def test_query_annotate_rename_chain():
    """Tests whether annotations are behaving correctly after a QuerySet
    chain."""

    fake_model = get_fake_model(
        {
            "name": models.CharField(max_length=10),
            "value": models.IntegerField(),
        }
    )
    fake_model.objects.create(name="test", value=23)

    rows = fake_model.objects.values("name").annotate(value=F("value"))[:1]
    first_row = rows[0]

    assert "value" in first_row
    assert first_row["value"] == 23
def test_query_annotate_rename_order():
    """Tests whether annotation order is preserved after a rename."""

    fake_model = get_fake_model(
        {
            "name": models.CharField(max_length=10),
            "value": models.IntegerField(),
        }
    )

    queryset = fake_model.objects.annotate(
        value=F("value"), value_2=F("value")
    )
    annotation_names = list(queryset.query.annotations.keys())
    assert annotation_names == ["value", "value_2"]
def test_query_annotate_in_expression():
    """Tests whether annotations can be used in expressions."""

    fake_model = get_fake_model({"name": models.CharField(max_length=10)})
    fake_model.objects.create(name="henk")

    # the Case expression references the "real_name" annotation
    henk_check = Case(
        When(Q(real_name="henk"), then=Value("really henk")),
        default=Value("definitely not henk"),
        output_field=models.CharField(),
    )

    row = fake_model.objects.annotate(
        real_name=F("name"),
        is_he_henk=henk_check,
    ).first()

    assert row.real_name == "henk"
    assert row.is_he_henk == "really henk"
def test_query_hstore_value_update_f_ref():
    """Tests whether F(..) expressions can be used in hstore values when
    performing update queries."""

    fake_model = get_fake_model(
        {"name": models.CharField(max_length=255), "name_new": HStoreField()}
    )
    fake_model.objects.create(name="waqas", name_new=dict(en="swen"))

    # the hstore value should be replaced with the "name" column's value
    fake_model.objects.update(name_new=dict(en=models.F("name")))

    updated = fake_model.objects.all().first()
    assert updated.name_new.get("en") == "waqas"
def test_query_hstore_value_update_cast():
    """Tests whether values in a HStore field are automatically cast to strings
    when doing updates."""

    fake_model = get_fake_model({"title": HStoreField()})
    fake_model.objects.create(title=dict(en="test"))

    # update with an integer value; it should come back as the string "2"
    fake_model.objects.update(title=dict(en=2))

    updated = fake_model.objects.all().first()
    assert updated.title.get("en") == "2"
def test_query_hstore_value_update_escape():
    """Tests whether values in a HStore field are properly escaped using
    prepared statement values."""

    fake_model = get_fake_model({"title": HStoreField()})
    fake_model.objects.create(title=dict(en="test"))

    # quotes in the value must round-trip unchanged
    fake_model.objects.update(title=dict(en="console.log('test')"))

    updated = fake_model.objects.all().first()
    assert updated.title.get("en") == "console.log('test')"
django-postgres-extra-2.0.4/tests/test_query_values.py 0000664 0000000 0000000 00000004352 14175513017 0023253 0 ustar 00root root 0000000 0000000 import django
import pytest
from django.db import models
from psqlextra.fields import HStoreField
from .fake_model import get_fake_model
@pytest.fixture
def model():
    """Fake model with a single :see:HStoreField named "title"."""

    return get_fake_model({"title": HStoreField()})
@pytest.fixture
def modelobj(model):
    """Single row of data for the test model."""

    return model.objects.create(title={"en": "english", "ar": "arabic"})
def test_query_values_hstore(model, modelobj):
    """Tests that selecting an entire hstore column through .values() returns
    the full dictionary."""

    rows = list(model.objects.values("title"))
    assert rows[0]["title"] == modelobj.title
def test_query_values_hstore_key(model, modelobj):
    """Tests whether selecting a single key from a :see:HStoreField using the
    query set's .values() works properly."""

    rows = list(model.objects.values("title__en", "title__ar"))
    first_row = rows[0]

    assert first_row["title__en"] == modelobj.title["en"]
    assert first_row["title__ar"] == modelobj.title["ar"]
def test_query_values_list_hstore_key(model, modelobj):
    """Tests that selecting a single key from a :see:HStoreField using the
    query set's .values_list() works properly."""

    rows = list(model.objects.values_list("title__en", "title__ar"))
    en_value, ar_value = rows[0]

    assert en_value == modelobj.title["en"]
    assert ar_value == modelobj.title["ar"]
@pytest.mark.skipif(
    django.VERSION < (2, 1), reason="requires django 2.1 or newer"
)
def test_query_values_hstore_key_through_fk():
    """Tests whether selecting a single key from a :see:HStoreField using the
    query set's .values() works properly when there's a foreign key
    relationship involved."""

    # this starting working in django 2.1
    # see: https://github.com/django/django/commit/20bab2cf9d02a5c6477d8aac066a635986e0d3f3
    target_model = get_fake_model({"name": HStoreField()})
    source_model = get_fake_model(
        {"fk": models.ForeignKey(target_model, on_delete=models.CASCADE)}
    )

    target = target_model.objects.create(
        name={"en": "swen", "ar": "arabic swen"}
    )
    source_model.objects.create(fk=target)

    row = list(source_model.objects.values("fk__name__ar"))[0]
    assert row["fk__name__ar"] == target.name["ar"]
django-postgres-extra-2.0.4/tests/test_schema_editor_partitioning.py 0000664 0000000 0000000 00000020171 14175513017 0026121 0 ustar 00root root 0000000 0000000 import pytest
from django.core.exceptions import ImproperlyConfigured
from django.db import connection, models
from psqlextra.backend.schema import PostgresSchemaEditor
from psqlextra.types import PostgresPartitioningMethod
from . import db_introspection
from .fake_model import define_fake_partitioned_model
@pytest.mark.postgres_version(lt=110000)
def test_schema_editor_create_delete_partitioned_model_range():
    """Tests whether creating a partitioned model and adding a range partition
    to it using the :see:PostgresSchemaEditor works."""

    method = PostgresPartitioningMethod.RANGE
    key = ["timestamp"]

    model = define_fake_partitioned_model(
        {"name": models.TextField(), "timestamp": models.DateTimeField()},
        {"method": method, "key": key},
    )

    schema_editor = PostgresSchemaEditor(connection)
    schema_editor.create_partitioned_model(model)
    schema_editor.add_range_partition(model, "pt1", "2019-01-01", "2019-02-01")

    # introspect the created table and verify the partitioning set-up
    table = db_introspection.get_partitioned_table(model._meta.db_table)
    assert table.name == model._meta.db_table
    assert table.method == method
    assert table.key == key
    assert table.partitions[0].full_name == model._meta.db_table + "_pt1"

    # deleting the model should remove the table and all its partitions
    schema_editor.delete_partitioned_model(model)

    table = db_introspection.get_partitioned_table(model._meta.db_table)
    assert not table

    partitions = db_introspection.get_partitions(model._meta.db_table)
    assert len(partitions) == 0
@pytest.mark.postgres_version(lt=110000)
def test_schema_editor_create_delete_partitioned_model_list():
    """Tests whether creating a partitioned model and adding a list partition
    to it using the :see:PostgresSchemaEditor works."""

    method = PostgresPartitioningMethod.LIST
    key = ["category"]

    model = define_fake_partitioned_model(
        {"name": models.TextField(), "category": models.TextField()},
        {"method": method, "key": key},
    )

    schema_editor = PostgresSchemaEditor(connection)
    schema_editor.create_partitioned_model(model)
    schema_editor.add_list_partition(model, "pt1", ["car", "boat"])

    # introspect the created table and verify the partitioning set-up
    table = db_introspection.get_partitioned_table(model._meta.db_table)
    assert table.name == model._meta.db_table
    assert table.method == method
    assert table.key == key
    assert table.partitions[0].full_name == model._meta.db_table + "_pt1"

    # deleting the model should remove the table and all its partitions
    schema_editor.delete_partitioned_model(model)

    table = db_introspection.get_partitioned_table(model._meta.db_table)
    assert not table

    partitions = db_introspection.get_partitions(model._meta.db_table)
    assert len(partitions) == 0
@pytest.mark.postgres_version(lt=110000)
def test_schema_editor_create_delete_partitioned_model_default():
    """Tests whether creating a partitioned model and adding a default
    partition to it using the :see:PostgresSchemaEditor works."""

    method = PostgresPartitioningMethod.LIST
    key = ["category"]

    model = define_fake_partitioned_model(
        {"name": models.TextField(), "category": models.TextField()},
        {"method": method, "key": key},
    )

    schema_editor = PostgresSchemaEditor(connection)
    schema_editor.create_partitioned_model(model)
    schema_editor.add_default_partition(model, "default")

    # introspect the created table and verify the partitioning set-up
    table = db_introspection.get_partitioned_table(model._meta.db_table)
    assert table.name == model._meta.db_table
    assert table.method == method
    assert table.key == key
    assert table.partitions[0].full_name == model._meta.db_table + "_default"

    # deleting the model should remove the table and all its partitions
    schema_editor.delete_partitioned_model(model)

    table = db_introspection.get_partitioned_table(model._meta.db_table)
    assert not table

    partitions = db_introspection.get_partitions(model._meta.db_table)
    assert len(partitions) == 0
@pytest.mark.postgres_version(lt=110000)
def test_schema_editor_create_partitioned_model_no_method():
    """Tests whether its possible to create a partitioned model without
    explicitly setting a partitioning method.

    The default is "range" so setting one explicitely should not be
    needed.
    """

    model = define_fake_partitioned_model(
        {"name": models.TextField(), "timestamp": models.DateTimeField()},
        {"key": ["timestamp"]},
    )

    editor = PostgresSchemaEditor(connection)
    editor.create_partitioned_model(model)

    # with no explicit method, the table must default to range partitioning
    table = db_introspection.get_partitioned_table(model._meta.db_table)
    assert table.method == PostgresPartitioningMethod.RANGE
    assert len(table.partitions) == 0
def test_schema_editor_create_partitioned_model_no_key():
    """Tests whether trying to create a partitioned model without a
    partitioning key raises :see:ImproperlyConfigured as its not possible to
    create a partitioned model without one and we cannot have a sane
    default."""

    partitioned_model = define_fake_partitioned_model(
        {"name": models.TextField(), "timestamp": models.DateTimeField()},
        {"method": PostgresPartitioningMethod.RANGE},
    )

    editor = PostgresSchemaEditor(connection)

    with pytest.raises(ImproperlyConfigured):
        editor.create_partitioned_model(partitioned_model)
@pytest.mark.postgres_version(lt=110000)
def test_schema_editor_add_range_partition():
    """Tests whether adding a range partition works."""

    model = define_fake_partitioned_model(
        {"name": models.TextField(), "timestamp": models.DateTimeField()},
        {"key": ["timestamp"]},
    )

    schema_editor = PostgresSchemaEditor(connection)
    schema_editor.create_partitioned_model(model)

    schema_editor.add_range_partition(
        model,
        name="mypartition",
        from_values="2019-1-1",
        to_values="2019-2-1",
        comment="test",
    )

    # verify the partition exists, is named "<table>_<partition name>"
    # and carries the specified comment
    table = db_introspection.get_partitioned_table(model._meta.db_table)
    assert len(table.partitions) == 1
    assert table.partitions[0].name == "mypartition"
    assert (
        table.partitions[0].full_name == f"{model._meta.db_table}_mypartition"
    )
    assert table.partitions[0].comment == "test"

    # deleting the partition leaves the parent table intact
    schema_editor.delete_partition(model, "mypartition")

    table = db_introspection.get_partitioned_table(model._meta.db_table)
    assert len(table.partitions) == 0
@pytest.mark.postgres_version(lt=110000)
def test_schema_editor_add_list_partition():
    """Tests whether adding a list partition works."""

    model = define_fake_partitioned_model(
        {"name": models.TextField()},
        {"method": PostgresPartitioningMethod.LIST, "key": ["name"]},
    )

    schema_editor = PostgresSchemaEditor(connection)
    schema_editor.create_partitioned_model(model)

    schema_editor.add_list_partition(
        model, name="mypartition", values=["1"], comment="test"
    )

    # verify the partition exists, is named "<table>_<partition name>"
    # and carries the specified comment
    table = db_introspection.get_partitioned_table(model._meta.db_table)
    assert len(table.partitions) == 1
    assert table.partitions[0].name == "mypartition"
    assert (
        table.partitions[0].full_name == f"{model._meta.db_table}_mypartition"
    )
    assert table.partitions[0].comment == "test"

    # deleting the partition leaves the parent table intact
    schema_editor.delete_partition(model, "mypartition")

    table = db_introspection.get_partitioned_table(model._meta.db_table)
    assert len(table.partitions) == 0
@pytest.mark.postgres_version(lt=110000)
@pytest.mark.parametrize(
    "method,key",
    [
        (PostgresPartitioningMethod.RANGE, ["timestamp"]),
        (PostgresPartitioningMethod.LIST, ["name"]),
    ],
)
def test_schema_editor_add_default_partition(method, key):
    """Tests whether adding a default partition works for both range and list
    partitioned models."""

    model = define_fake_partitioned_model(
        {"name": models.TextField(), "timestamp": models.DateTimeField()},
        {"method": method, "key": key},
    )

    schema_editor = PostgresSchemaEditor(connection)
    schema_editor.create_partitioned_model(model)

    schema_editor.add_default_partition(
        model, name="mypartition", comment="test"
    )

    # verify the partition exists, is named "<table>_<partition name>"
    # and carries the specified comment
    table = db_introspection.get_partitioned_table(model._meta.db_table)
    assert len(table.partitions) == 1
    assert table.partitions[0].name == "mypartition"
    assert (
        table.partitions[0].full_name == f"{model._meta.db_table}_mypartition"
    )
    assert table.partitions[0].comment == "test"

    # deleting the partition leaves the parent table intact
    schema_editor.delete_partition(model, "mypartition")

    table = db_introspection.get_partitioned_table(model._meta.db_table)
    assert len(table.partitions) == 0
django-postgres-extra-2.0.4/tests/test_schema_editor_view.py 0000664 0000000 0000000 00000011146 14175513017 0024366 0 ustar 00root root 0000000 0000000 from django.db import connection, models
from psqlextra.backend.schema import PostgresSchemaEditor
from . import db_introspection
from .fake_model import (
define_fake_materialized_view_model,
define_fake_view_model,
get_fake_model,
)
def test_schema_editor_create_delete_view():
    """Tests whether creating and then deleting a view using the schema editor
    works as expected."""

    underlying_model = get_fake_model({"name": models.TextField()})

    model = define_fake_view_model(
        {"name": models.TextField()},
        {"query": underlying_model.objects.filter(name="test1")},
    )

    underlying_model.objects.create(name="test1")
    underlying_model.objects.create(name="test2")

    schema_editor = PostgresSchemaEditor(connection)
    schema_editor.create_view_model(model)

    # view should only show records with name="test1"
    objs = list(model.objects.all())
    assert len(objs) == 1
    assert objs[0].name == "test1"

    # create another record, view should have it right away
    underlying_model.objects.create(name="test1")
    assert model.objects.count() == 2

    # delete the view
    schema_editor.delete_view_model(model)

    # make sure it was actually deleted
    assert model._meta.db_table not in db_introspection.table_names(True)
def test_schema_editor_replace_view():
    """Tests whether creating a view and then replacing it with another one
    (thus changing the backing query) works as expected."""
    underlying_model = get_fake_model({"name": models.TextField()})
    model = define_fake_view_model(
        {"name": models.TextField()},
        {"query": underlying_model.objects.filter(name="test1")},
    )
    underlying_model.objects.create(name="test1")
    underlying_model.objects.create(name="test2")
    schema_editor = PostgresSchemaEditor(connection)
    schema_editor.create_view_model(model)
    # initial backing query filters on name="test1", so only one of
    # the two underlying rows is visible through the view
    objs = list(model.objects.all())
    assert len(objs) == 1
    assert objs[0].name == "test1"
    # swap the backing query to filter on name="test2" instead
    model._view_meta.query = underlying_model.objects.filter(
        name="test2"
    ).query.sql_with_params()
    schema_editor.replace_view_model(model)
    # the replaced view should now expose the other row
    objs = list(model.objects.all())
    assert len(objs) == 1
    assert objs[0].name == "test2"
def test_schema_editor_create_delete_materialized_view():
    """Tests whether creating and then deleting a materialized view using the
    schema editor works as expected."""
    source_model = get_fake_model({"name": models.TextField()})
    view_model = define_fake_materialized_view_model(
        {"name": models.TextField()},
        {"query": source_model.objects.filter(name="test1")},
    )
    for row_name in ("test1", "test2"):
        source_model.objects.create(name=row_name)
    editor = PostgresSchemaEditor(connection)
    editor.create_materialized_view_model(view_model)
    # only the row matching the backing query's name="test1" filter
    # should be visible through the materialized view
    visible = list(view_model.objects.all())
    assert len(visible) == 1
    assert visible[0].name == "test1"
    # dropping the materialized view should remove its relation
    editor.delete_materialized_view_model(view_model)
    assert view_model._meta.db_table not in db_introspection.table_names(True)
def test_schema_editor_replace_materialized_view():
    """Tests whether creating a materialized view and then replacing it with
    another one (thus changing the backing query) works as expected."""
    underlying_model = get_fake_model({"name": models.TextField()})
    model = define_fake_materialized_view_model(
        {"name": models.TextField()},
        {"query": underlying_model.objects.filter(name="test1")},
        {"indexes": [models.Index(fields=["name"])]},
    )
    underlying_model.objects.create(name="test1")
    underlying_model.objects.create(name="test2")
    schema_editor = PostgresSchemaEditor(connection)
    schema_editor.create_materialized_view_model(model)
    # create the declared indexes so we can verify they survive a replace
    for index in model._meta.indexes:
        schema_editor.add_index(model, index)
    # snapshot constraints/indexes before replacing the view
    constraints_before = db_introspection.get_constraints(model._meta.db_table)
    objs = list(model.objects.all())
    assert len(objs) == 1
    assert objs[0].name == "test1"
    # change the backing query to filter on name="test2" instead
    model._view_meta.query = underlying_model.objects.filter(
        name="test2"
    ).query.sql_with_params()
    schema_editor.replace_materialized_view_model(model)
    objs = list(model.objects.all())
    assert len(objs) == 1
    assert objs[0].name == "test2"
    # make sure all indexes/constraints still exists because
    # replacing a materialized view involves re-creating it
    constraints_after = db_introspection.get_constraints(model._meta.db_table)
    assert constraints_after == constraints_before
django-postgres-extra-2.0.4/tests/test_unique_index.py 0000664 0000000 0000000 00000001704 14175513017 0023222 0 ustar 00root root 0000000 0000000 from django.db import models
from django.db.migrations import AddIndex, CreateModel
from psqlextra.indexes import UniqueIndex
from .migrations import apply_migration, filtered_schema_editor
def test_unique_index_migrations():
    """Tests that a UniqueIndex added through an AddIndex migration operation
    results in a ``CREATE UNIQUE INDEX`` statement being executed."""
    index = UniqueIndex(fields=["name", "other_name"], name="index1")
    ops = [
        CreateModel(
            name="mymodel",
            fields=[
                ("name", models.TextField()),
                ("other_name", models.TextField()),
            ],
            options={
                # "indexes": [index],
            },
        ),
        AddIndex(model_name="mymodel", index=index),
    ]
    # capture only the SQL statements matching the given prefix
    with filtered_schema_editor("CREATE UNIQUE INDEX") as calls:
        apply_migration(ops)
    # extract the SQL string from each captured (args, kwargs) call record
    calls = [call[0] for _, call, _ in calls["CREATE UNIQUE INDEX"]]
    db_table = "tests_mymodel"
    query = 'CREATE UNIQUE INDEX "index1" ON "{0}" ("name", "other_name")'
    assert str(calls[0]) == query.format(db_table)
django-postgres-extra-2.0.4/tests/test_upsert.py 0000664 0000000 0000000 00000021116 14175513017 0022046 0 ustar 00root root 0000000 0000000 import django
import pytest
from django.db import models
from django.db.models import Q
from django.db.models.expressions import CombinedExpression, Value
from psqlextra.expressions import ExcludedCol
from psqlextra.fields import HStoreField
from .fake_model import get_fake_model
def test_upsert():
    """Tests whether simple upserts works correctly."""
    model = get_fake_model(
        {
            "title": HStoreField(uniqueness=["key1"]),
            "cookies": models.CharField(max_length=255, null=True),
        }
    )
    # the first upsert inserts a brand-new row
    first = model.objects.upsert_and_get(
        conflict_target=[("title", "key1")],
        fields=dict(title={"key1": "beer"}, cookies="cheers"),
    )
    first.refresh_from_db()
    assert first.title["key1"] == "beer"
    assert first.cookies == "cheers"
    # the second upsert conflicts on title->key1 and updates the row
    second = model.objects.upsert_and_get(
        conflict_target=[("title", "key1")],
        fields=dict(title={"key1": "beer"}, cookies="choco"),
    )
    first.refresh_from_db()
    second.refresh_from_db()
    # both handles must point at one and the same, updated row
    assert first.id == second.id
    for obj in (first, second):
        assert obj.title["key1"] == "beer"
        assert obj.cookies == "choco"
def test_upsert_explicit_pk():
    """Tests whether upserts work when the primary key is explicitly
    specified.

    The first upsert inserts the row; the second conflicts on the
    primary key and updates the ``cookies`` column instead.
    """
    model = get_fake_model(
        {
            "name": models.CharField(max_length=255, primary_key=True),
            "cookies": models.CharField(max_length=255, null=True),
        }
    )
    # NOTE: conflict_target was previously written as [("name")] -- the
    # parentheses were a no-op that made the element look like a tuple;
    # a plain string is what was meant and behaves identically.
    obj1 = model.objects.upsert_and_get(
        conflict_target=["name"],
        fields=dict(name="the-object", cookies="first-cheers"),
    )
    obj1.refresh_from_db()
    assert obj1.name == "the-object"
    assert obj1.cookies == "first-cheers"
    obj2 = model.objects.upsert_and_get(
        conflict_target=["name"],
        fields=dict(name="the-object", cookies="second-boo"),
    )
    obj1.refresh_from_db()
    obj2.refresh_from_db()
    # assert both objects are the same
    assert obj1.pk == obj2.pk
    assert obj1.name == "the-object"
    assert obj1.cookies == "second-boo"
    assert obj2.name == "the-object"
    assert obj2.cookies == "second-boo"
def test_upsert_with_update_condition():
    """Tests that an expression can be used as an upsert update condition."""
    model = get_fake_model(
        {
            "name": models.TextField(unique=True),
            "priority": models.IntegerField(),
            "active": models.BooleanField(),
        }
    )
    obj1 = model.objects.create(name="joe", priority=1, active=False)
    # condition compares the stored "active" (False) against the
    # incoming/excluded "active" (True), so it does not hold
    # should not return anything because no rows were affected
    assert not model.objects.upsert(
        conflict_target=["name"],
        update_condition=CombinedExpression(
            model._meta.get_field("active").get_col(model._meta.db_table),
            "=",
            ExcludedCol("active"),
        ),
        fields=dict(name="joe", priority=2, active=True),
    )
    obj1.refresh_from_db()
    assert obj1.priority == 1
    assert not obj1.active
    # stored "active" is False, so comparing it against Value(False) holds
    # should return something because one row was affected
    obj1_pk = model.objects.upsert(
        conflict_target=["name"],
        update_condition=CombinedExpression(
            model._meta.get_field("active").get_col(model._meta.db_table),
            "=",
            Value(False),
        ),
        fields=dict(name="joe", priority=2, active=True),
    )
    obj1.refresh_from_db()
    assert obj1.pk == obj1_pk
    assert obj1.priority == 2
    assert obj1.active
@pytest.mark.skipif(
    django.VERSION < (3, 1), reason="requires django 3.1 or newer"
)
def test_upsert_with_update_condition_with_q_object():
    """Tests that :see:Q objects can be used as an upsert update condition."""
    model = get_fake_model(
        {
            "name": models.TextField(unique=True),
            "priority": models.IntegerField(),
            "active": models.BooleanField(),
        }
    )
    obj1 = model.objects.create(name="joe", priority=1, active=False)
    # condition compares the stored "active" (False) against the
    # incoming/excluded "active" (True), so it does not hold
    # should not return anything because no rows were affected
    assert not model.objects.upsert(
        conflict_target=["name"],
        update_condition=Q(active=ExcludedCol("active")),
        fields=dict(name="joe", priority=2, active=True),
    )
    obj1.refresh_from_db()
    assert obj1.priority == 1
    assert not obj1.active
    # stored "active" is False, so Q(active=Value(False)) holds
    # should return something because one row was affected
    obj1_pk = model.objects.upsert(
        conflict_target=["name"],
        update_condition=Q(active=Value(False)),
        fields=dict(name="joe", priority=2, active=True),
    )
    obj1.refresh_from_db()
    assert obj1.pk == obj1_pk
    assert obj1.priority == 2
    assert obj1.active
def test_upsert_and_get_applies_converters():
    """Tests that converters are properly applied when using upsert_and_get."""

    class MyCustomField(models.TextField):
        # from_db_value is the converter hook invoked when a value is
        # read back from the database
        def from_db_value(self, value, expression, connection):
            return value.replace("hello", "bye")

    model = get_fake_model({"title": MyCustomField(unique=True)})
    result = model.objects.upsert_and_get(
        conflict_target=["title"], fields=dict(title="hello")
    )
    # the stored "hello" must come back converted into "bye"
    assert result.title == "bye"
def test_upsert_bulk():
    """Tests whether bulk_upsert works properly."""
    model = get_fake_model(
        {
            "first_name": models.CharField(
                max_length=255, null=True, unique=True
            ),
            "last_name": models.CharField(max_length=255, null=True),
        }
    )
    # the initial batch inserts both rows
    model.objects.bulk_upsert(
        conflict_target=["first_name"],
        rows=[
            dict(first_name="Swen", last_name="Kooij"),
            dict(first_name="Henk", last_name="Test"),
        ],
    )
    swen = model.objects.get(first_name="Swen")
    henk = model.objects.get(first_name="Henk")
    # the second batch conflicts on first_name and swaps the last names
    model.objects.bulk_upsert(
        conflict_target=["first_name"],
        rows=[
            dict(first_name="Swen", last_name="Test"),
            dict(first_name="Henk", last_name="Kooij"),
        ],
    )
    swen.refresh_from_db()
    assert swen.last_name == "Test"
    henk.refresh_from_db()
    assert henk.last_name == "Kooij"
def test_upsert_bulk_no_rows():
    """Tests whether bulk_upsert doesn't crash when specifying no rows or a
    falsy value."""
    model = get_fake_model(
        {"name": models.CharField(max_length=255, null=True, unique=True)}
    )
    # both an empty list and None should be treated as a silent no-op
    model.objects.bulk_upsert(conflict_target=["name"], rows=[])
    model.objects.bulk_upsert(conflict_target=["name"], rows=None)
def test_bulk_upsert_return_models():
    """Tests whether models are returned instead of dictionaries when
    specifying the return_model=True argument."""
    model = get_fake_model(
        {
            "id": models.BigAutoField(primary_key=True),
            "name": models.CharField(max_length=255, unique=True),
        }
    )
    returned = model.objects.bulk_upsert(
        conflict_target=["name"],
        rows=[dict(name="John Smith"), dict(name="Jane Doe")],
        return_model=True,
    )
    # ids are assigned sequentially starting at 1
    for expected_id, instance in enumerate(returned, start=1):
        assert isinstance(instance, model)
        assert instance.id == expected_id
def test_bulk_upsert_accepts_getitem_iterable():
    """Tests whether an iterable only implementing the __getitem__ method works
    correctly."""
    class GetItemIterable:
        # legacy iteration protocol: no __iter__, only __getitem__
        def __init__(self, items):
            self.items = items
        def __getitem__(self, key):
            return self.items[key]
    model = get_fake_model(
        {
            "id": models.BigAutoField(primary_key=True),
            "name": models.CharField(max_length=255, unique=True),
        }
    )
    rows = GetItemIterable([dict(name="John Smith"), dict(name="Jane Doe")])
    objs = model.objects.bulk_upsert(
        conflict_target=["name"], rows=rows, return_model=True
    )
    # ids are assigned sequentially starting at 1
    for index, obj in enumerate(objs, 1):
        assert isinstance(obj, model)
        assert obj.id == index
def test_bulk_upsert_accepts_iter_iterable():
    """Tests whether an iterable only implementing the __iter__ method works
    correctly."""

    class IterIterable:
        # wraps a list, exposing nothing but the __iter__ protocol
        def __init__(self, items):
            self.items = items

        def __iter__(self):
            return iter(self.items)

    model = get_fake_model(
        {
            "id": models.BigAutoField(primary_key=True),
            "name": models.CharField(max_length=255, unique=True),
        }
    )
    results = model.objects.bulk_upsert(
        conflict_target=["name"],
        rows=IterIterable([dict(name="John Smith"), dict(name="Jane Doe")]),
        return_model=True,
    )
    # ids are assigned sequentially starting at 1
    for expected_id, instance in enumerate(results, start=1):
        assert isinstance(instance, model)
        assert instance.id == expected_id
django-postgres-extra-2.0.4/tests/test_view_models.py 0000664 0000000 0000000 00000006565 14175513017 0023054 0 ustar 00root root 0000000 0000000 import pytest
from django.core.exceptions import ImproperlyConfigured
from django.db import models
from psqlextra.models import PostgresMaterializedViewModel, PostgresViewModel
from .fake_model import define_fake_model, define_fake_view_model
@pytest.mark.parametrize(
    "model_base", [PostgresViewModel, PostgresMaterializedViewModel]
)
def test_view_model_meta_query_set(model_base):
    """Tests whether you can set a :see:QuerySet to be used as the underlying
    query for a view."""
    model = define_fake_model({"name": models.TextField()})
    view_model = define_fake_view_model(
        {"name": models.TextField()},
        model_base=model_base,
        view_options={"query": model.objects.all()},
    )
    # the queryset should have been compiled down to raw SQL with an
    # empty tuple of bind parameters
    expected_sql = 'SELECT "{0}"."id", "{0}"."name" FROM "{0}"'.format(
        model._meta.db_table
    )
    assert view_model._view_meta.query == (expected_sql, tuple())
@pytest.mark.parametrize(
    "model_base", [PostgresViewModel, PostgresMaterializedViewModel]
)
@pytest.mark.parametrize("bind_params", [("test",), ["test"]])
def test_view_model_meta_sql_with_params(model_base, bind_params):
    """Tests whether you can set a raw SQL query with a tuple of bind params as
    the underlying query for a view."""
    model = define_fake_model({"name": models.TextField()})
    # interpolate the table name while leaving a literal %s placeholder
    # in place for the bind parameter
    raw_sql = "select * from %s where name = %s" % (model._meta.db_table, "%s")
    query_spec = (raw_sql, bind_params)
    view_model = define_fake_view_model(
        {"name": models.TextField()},
        model_base=model_base,
        view_options={"query": query_spec},
    )
    # the (sql, params) pair must be stored verbatim
    assert view_model._view_meta.query == query_spec
@pytest.mark.parametrize(
    "model_base", [PostgresViewModel, PostgresMaterializedViewModel]
)
def test_view_model_meta_sql_with_named_params(model_base):
    """Tests whether you can set a raw SQL query with a dict of named bind
    params as the underlying query for a view."""
    model = define_fake_model({"name": models.TextField()})
    # named-parameter style: %(name)s placeholder bound via a dict
    sql = "select * from " + model._meta.db_table + " where name = %(name)s"
    sql_with_params = (sql, dict(name="test"))
    view_model = define_fake_view_model(
        {"name": models.TextField()},
        model_base=model_base,
        view_options={"query": sql_with_params},
    )
    # the (sql, params) pair must be stored verbatim
    assert view_model._view_meta.query == sql_with_params
@pytest.mark.parametrize(
    "model_base", [PostgresViewModel, PostgresMaterializedViewModel]
)
def test_view_model_meta_sql(model_base):
    """Tests whether you can set a raw SQL query without any params."""
    raw_query = "select 1"
    view_model = define_fake_view_model(
        {"name": models.TextField()},
        model_base=model_base,
        view_options={"query": raw_query},
    )
    # a bare SQL string is normalized into a (sql, ()) pair
    assert view_model._view_meta.query == (raw_query, tuple())
@pytest.mark.parametrize(
    "model_base", [PostgresViewModel, PostgresMaterializedViewModel]
)
@pytest.mark.parametrize(
    "view_query",
    [
        dict(a=1),
        tuple("test"),
        ("test", None),
        (None, None),
        (1, 2),
        ("select 1", ("a", "b"), "onetoomay"),
    ],
)
def test_view_model_meta_bad_query(model_base, view_query):
    """Tests whether a bad view query configuration raises an error."""
    # each parametrized value is malformed in some way: wrong container
    # type, non-string SQL, invalid params, or too many tuple elements
    with pytest.raises(ImproperlyConfigured):
        define_fake_view_model(
            {"name": models.TextField()},
            model_base=model_base,
            view_options={"query": view_query},
        )
django-postgres-extra-2.0.4/tox.ini 0000664 0000000 0000000 00000000650 14175513017 0017264 0 ustar 00root root 0000000 0000000 [tox]
envlist = py36-dj{20,21,22,30,31,32}, py37-dj{20,21,22,30,31,32}, py38-dj{20,21,22,30,31,32}, py39-dj{21,22,30,31,32}, py310-dj{21,22,30,31,32}
[testenv]
deps =
dj20: Django~=2.0.0
dj21: Django~=2.1.0
dj22: Django~=2.2.0
dj30: Django~=3.0.0
dj31: Django~=3.1.0
dj32: Django~=3.2.0
.[test]
setenv =
DJANGO_SETTINGS_MODULE=settings
passenv = DATABASE_URL
commands = python setup.py test