pax_global_header00006660000000000000000000000064144262023710014513gustar00rootroot0000000000000052 comment=1e863899c661e6c32a6563648d7312d88b57958c django-celery-results-2.5.1/000077500000000000000000000000001442620237100157425ustar00rootroot00000000000000django-celery-results-2.5.1/.bumpversion.cfg000066400000000000000000000005451442620237100210560ustar00rootroot00000000000000[bumpversion] current_version = 2.5.1 commit = True tag = True parse = (?P\d+)\.(?P\d+)\.(?P\d+)(?P[a-z]+)? serialize = {major}.{minor}.{patch}{releaselevel} {major}.{minor}.{patch} [bumpversion:file:django_celery_results/__init__.py] [bumpversion:file:docs/includes/introduction.txt] [bumpversion:file:README.rst] django-celery-results-2.5.1/.cookiecutterrc000066400000000000000000000016721442620237100207760ustar00rootroot00000000000000# This file exists so you can easily regenerate your project. # # `cookiepatcher` is a convenient shim around `cookiecutter` # for regenerating projects (it will generate a .cookiecutterrc # automatically for any template). To use it: # # pip install cookiepatcher # cookiepatcher gh:ionelmc/cookiecutter-pylibrary project-path # # See: # https://pypi.python.org/pypi/cookiecutter # # Alternatively, you can run: # # cookiecutter --overwrite-if-exists --config-file=project-path/.cookiecutterrc gh:ionelmc/cookiecutter-pylibrary default_context: email: 'ask@celeryproject.org' full_name: 'Ask Solem' github_username: 'celery' project_name: 'django-celery-results' project_short_description: 'Celery result backends using Django' project_slug: 'django-celery-results' version: '1.0.0' year: '2016' django-celery-results-2.5.1/.coveragerc000066400000000000000000000002251442620237100200620ustar00rootroot00000000000000[run] branch = 1 cover_pylib = 0 include = *django_celery_results/* omit = t/* [report] omit = */python?.?/* */site-packages/* */pypy/* django-celery-results-2.5.1/.editorconfig000066400000000000000000000003171442620237100204200ustar00rootroot00000000000000# https://editorconfig.org/ root = true [*] indent_style = space indent_size = 4 trim_trailing_whitespace = true insert_final_newline = true charset = utf-8 end_of_line = lf [Makefile] indent_style = tab django-celery-results-2.5.1/.github/000077500000000000000000000000001442620237100173025ustar00rootroot00000000000000django-celery-results-2.5.1/.github/workflows/000077500000000000000000000000001442620237100213375ustar00rootroot00000000000000django-celery-results-2.5.1/.github/workflows/codeql-analysis.yml000066400000000000000000000044421442620237100251560ustar00rootroot00000000000000# For most projects, this workflow file will not need changing; you simply need # to commit it to your repository. # # You may wish to alter this file to override the set of languages analyzed, # or to provide custom queries or build logic. # # ******** NOTE ******** # We have attempted to detect the languages in your repository. Please check # the `language` matrix defined below to confirm you have the correct set of # supported CodeQL languages. 
# name: "CodeQL" on: push: branches: [ master ] pull_request: # The branches below must be a subset of the branches above branches: [ master ] schedule: - cron: '38 23 * * 3' jobs: analyze: name: Analyze runs-on: ubuntu-latest permissions: actions: read contents: read security-events: write strategy: fail-fast: false matrix: language: [ 'python' ] # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ] # Learn more about CodeQL language support at https://git.io/codeql-language-support steps: - name: Checkout repository uses: actions/checkout@v2 # Initializes the CodeQL tools for scanning. - name: Initialize CodeQL uses: github/codeql-action/init@v1 with: languages: ${{ matrix.language }} # If you wish to specify custom queries, you can do so here or in a config file. # By default, queries listed here will override any specified in a config file. # Prefix the list here with "+" to use these queries and those in the config file. # queries: ./path/to/local/query, your-org/your-repo/queries@main # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). # If this step fails, then you should remove it and run the build manually (see below) - name: Autobuild uses: github/codeql-action/autobuild@v1 # ℹ️ Command-line programs to run using the OS shell. # 📚 https://git.io/JvXDl # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines # and modify them (or add more) to build your code if your project # uses a compiled language #- run: | # make bootstrap # make release - name: Perform CodeQL Analysis uses: github/codeql-action/analyze@v1 django-celery-results-2.5.1/.github/workflows/test.yaml000066400000000000000000000050461442620237100232070ustar00rootroot00000000000000name: Test on: pull_request: branches: [ "main" ] push: branches: [ "main" ] jobs: matrix: name: Unittest Matrix runs-on: ubuntu-latest strategy: matrix: python-version: [ 3.7, 3.8, 3.9, "3.10", "3.11", "pypy3.9" ] django: [ 32, 40, 41, 42] exclude: - python-version: 3.7 django: 40 - python-version: 3.7 django: 41 - python-version: "3.11" django: 32 - python-version: "3.11" django: 40 - python-version: 3.7 django: 42 services: postgres: image: postgres # Set health checks to wait until postgres has started options: >- --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 ports: # Maps tcp port 5432 on service container to the host - 5432:5432 env: # Docker image requires a password to be set POSTGRES_PASSWORD: "postgres" steps: - uses: actions/checkout@v3 - uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} - run: pip install tox - run: tox -v -- -v env: TOXENV: py-django${{ matrix.django }} rest: name: Integration/Coverage/Docs/Codestyle runs-on: ubuntu-latest strategy: matrix: toxenv: [ flake8, pydocstyle, cov, integration ] services: postgres: image: postgres options: >- --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 ports: - 5432:5432 env: POSTGRES_PASSWORD: "postgres" steps: - uses: actions/checkout@v3 - uses: actions/setup-python@v4 with: python-version: 3.9 - run: pip install tox - run: tox -v -- -v env: TOXENV: ${{ matrix.toxenv }} django-celery-results-2.5.1/.gitignore000066400000000000000000000004531442620237100177340ustar00rootroot00000000000000.DS_Store *.pyc *$py.class *~ .*.sw[pon] *.mo dist/ *.egg-info *.egg *.egg/ build/ .build/ _build/ pip-log.txt .directory erl_crash.dump *.db Documentation/ .tox/ .ropeproject/ .project .pydevproject .idea/ 
.coverage celery/tests/cover/ .ve* cover/ .vagrant/ *.sqlite3 .cache/ htmlcov/ coverage.xml django-celery-results-2.5.1/.pre-commit-config.yaml000066400000000000000000000011211442620237100222160ustar00rootroot00000000000000repos: - repo: https://github.com/asottile/pyupgrade rev: v3.3.2 hooks: - id: pyupgrade args: ["--py36-plus"] - repo: https://github.com/PyCQA/flake8 rev: 6.0.0 hooks: - id: flake8 - repo: https://github.com/asottile/yesqa rev: v1.4.0 hooks: - id: yesqa - repo: https://github.com/pre-commit/pre-commit-hooks rev: v4.4.0 hooks: - id: check-merge-conflict - id: check-toml - id: check-yaml - id: mixed-line-ending - repo: https://github.com/pycqa/isort rev: 5.12.0 hooks: - id: isort django-celery-results-2.5.1/.readthedocs.yaml000066400000000000000000000002441442620237100211710ustar00rootroot00000000000000version: 2 build: os: ubuntu-20.04 tools: python: "3.8" sphinx: configuration: docs/conf.py python: install: - requirements: requirements/docs.txt django-celery-results-2.5.1/AUTHORS000066400000000000000000000061661442620237100170230ustar00rootroot00000000000000========= AUTHORS ========= :order: sorted Aaron Ross Adam Endicott Alex Stapleton Alvaro Vega Andrew Frankel Andrew Watts Andrii Kostenko Anton Novosyolov Ask Solem Asif Saif Uddin Augusto Becciu Ben Firshman Brad Jasper Brett Gibson Brian Rosner Charlie DeTar Christopher Grebs Dan LaMotte Darjus Loktevic David Fischer David Ziegler Diego Andres Sanabria Martin Dmitriy Krasilnikov Donald Stufft Eldon Stegall Eugene Nagornyi Felix Berger Glenn Washburn Gnrhxni Greg Taylor Grégoire Cachet Hari Idan Zalzberg Ionel Maries Cristian Jannis Leidel Jason Baker Jay States Jeff Balogh Jeff Fischer Jeffrey Hu Jens Alm Jerzy Kozera Jesper Noehr John Andrews John Watson Jonas Haag Jonatan Heyman Josh Drake José Moreira Jude Nagurney Justin Quick Keith Perkins Kirill Panshin Mark Hellewell Mark Lavin Mark Stover Maxim Bodyansky Michael Elsdoerfer Michael van Tellingen Mikhail Korobov Mos Wenzy Olivier Tabone Patrick Altman Piotr Bulinski Piotr Sikora Reza Lotun Rockallite Wulf Roger Barnes Roman Imankulov Rune Halvorsen Sam Cooke Scott Rubin Sean Creeley Serj Zavadsky Simon Charette Spencer Ellinor Theo Spears Timo Sugliani Vincent Driessen Vitaly Babiy Vladislav Poluhin Weipin Xia Wes Turner Wes Winham Williams Mendez WoLpH dongweiming zeez Eduardo Oliveira django-celery-results-2.5.1/Changelog000066400000000000000000000117501442620237100175600ustar00rootroot00000000000000.. _changelog: ================ Change history ================ .. _version-2.5.1: 2.5.1 ===== :release-date: 2023-05-08 8:15 P.M. UTC+6:00 :release-by: Asif Saif Uddin - Revert "feat: raw delete expired instead of Queryset.delete (#235)" partially. .. _version-2.5.0: 2.5.0 ===== :release-date: 2023-03-13 5:45 P.M. UTC+6:00 :release-by: Asif Saif Uddin - try possible fix to avoid a oracle regression (#325). - Added periodic_task_name to admin fieldset for parity with list view. - Only update the ChordCounter.count field when saving. - Meta injection (#366). .. _version-2.4.0: 2.4.0 ===== :release-date: 2022-06-29 4:30 P.M. UTC+6:00 :release-by: Asif Saif Uddin - Fix [#315] Save args, kwargs and other extended props only when result_extended config is set to True. - Fix atomic transaction not routing to the the correct DB (#324). - Drop django 2.2 from matrix .. _version-2.3.1: 2.3.1 ===== :release-date: 2022-04-17 12:50 P.M. UTC+6:00 :release-by: Asif Saif Uddin - Remove hard dependency on psycopg2. 
- Fix #296 Stop producing a universal wheel, python 2 is unspported. - fix: The description content type for setuptools needs to be rst to markdown. .. _version-2.3.0: 2.3.0 ===== :release-date: 2022-03-01 1:45 p.m. UTC+6:00 :release-by: Asif Saif Uddin - Fix default_app_config deprecation (#221) - Use string values for django-cache keys #230 (#242) - feat: raw delete expired instead of Queryset.delete (#235) - Fix ``pydoc.ErrorDuringImport`` problem in django_celery_results url - Russian language support (#255) - Add Simplified Chinese translation strings. - Minor code clean up - feat: add periodic_task_name (#261) - Update CI with django 4.0 (#272) - Add translation of the messages to brazilian portuguese (#278) - Fix properties default value (#281) - Work around Oracle migration instability - Fix field size for MySQL (#285) - Update python & pypy min version (#291) - bum min pytest versions .. _version-2.2.0: 2.2.0 ===== :release-date: 2021-07-02 11:00 a.m. UTC+6:00 :release-by: Asif Saif Uddin - add new urls with nouns first structure (#216) - Remove duplicate indexes - fix group status view return data, add tests for it (#215) - typo fix (#218) - Use the DJANGO_CELERY_RESULTS_TASK_ID_MAX_LENGTH for group_id/task_id - Minor code clean up .. _version-2.1.0: 2.1.0 ===== :release-date: 2021-06-14 09:00 p.m. UTC+6:00 :release-by: Asif Saif Uddin - Don't raise an error when ChordCounter is not found - add default_auto_field to apps.py - Use the provided chord_size when available - Match apply_chord call signature to Celery 5.1 - Add support for GroupResult (#161) - Minor code clean up .. _version-2.0.1: 2.0.1 ===== :release-date: 2021-01-19 07:30 p.m. UTC+6:00 :release-by: - Fix backward compatibility in DatabaseBackend._store_result function - Fix 'args' and 'kwargs' propiertes of AsyncResult for DatabaseBackend - Fix task_args/task_kwargs in task_protocol=1 - Test refactors - Add task_args and task_kwargs to admins searchable fields (#182) .. _version-2.0.0: 2.0.0 ===== :release-date: :release-by: - Add Spanish translations (#134) - Add support for Django 3.0 and 3.1 (#145, #163) - Add support for Celery 5 (#163) - Drop support for Django < 2.2 (#147, #152) - Drop support for Python < 3.6 (#146, #147, #152) - Add Chord syncronisation from the database (#144) - Encode `task_args` and `task_kwargs` of `TaskResult` using `json.dumps` instead of using `str` (#78) .. _version-1.1.2: 1.1.2 ===== :release-date: 2019-06-06 00:00 a.m. UTC+6:00 :release-by: Asif Saif Uddin - Fixed few regressions .. _version-1.1.0: 1.1.0 ===== :release-date: 2019-05-21 17:00 p.m. UTC+6:00 :release-by: Asif Saif Uddin - Django 2.2+. - Drop python 3.4 and django 2.0 - Support specifying the database to use for the store_result method (#63) - Fix MySQL8 system variable tx_isolation issue (#84) .. _version-1.0.4: 1.0.4 ===== :release-date: 2018-11-12 19:00 p.m. UTC+2:00 :release-by: Omer Katz 1.0.3 is broken. Use 1.0.4 - Revert renaming label as it is a breaking change. .. _version-1.0.3: 1.0.3 ===== :release-date: 2018-11-12 18:00 p.m. UTC+2:00 :release-by: Omer Katz - Revert renaming label as it is a breaking change. .. _version-1.0.2: 1.0.2 ===== :release-date: 2018-11-12 18:00 p.m. UTC+2:00 :release-by: Omer Katz - Store task name, args, kwargs as part of the task results in database. Contributed by :github_user: `wardal`. - Admin screen changes - task name filter, search on task_name, task_id, status. Contributed by :github_user: `jaylynch`. - Added default_app_config. - Added missing migration. 
- Fix MySQL max length issue. - Drop support for Django<1.11. .. _version-1.0.1: 1.0.1 ===== :release-date: 2016-11-07 02:00 p.m. PST :release-by: Ask Solem - Migrations were not being installed as part of the distribution (Issue #4). - Now includes simple task result admin interface. Contributed by :github_user:`zeezdev`. - Now depends on Celery 4.0.0. .. _version-1.0.0: 1.0.0 ===== :release-date: 2016-09-08 03:19 p.m. PDT :release-by: Ask Solem - Initial release django-celery-results-2.5.1/LICENSE000066400000000000000000000052051442620237100167510ustar00rootroot00000000000000Copyright (c) 2017-2032 Asif Saif Uddin. All Rights Reserved. Copyright (c) 2015-2016 Ask Solem. All Rights Reserved. Copyright (c) 2012-2014 GoPivotal, Inc. All Rights Reserved. Copyright (c) 2009-2012 Ask Solem. All Rights Reserved. django-celery-results is licensed under The BSD License (3 Clause, also known as the new BSD license). The license is an OSI approved Open Source license and is GPL-compatible(1). The license text can also be found here: https://opensource.org/license/bsd-3-clause/ License ======= Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of Ask Solem nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL Ask Solem OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. Documentation License ===================== The documentation portion of django-celery-results (the rendered contents of the "docs" directory of a software distribution or checkout) is supplied under the "Creative Commons Attribution-ShareAlike 4.0 International" (CC BY-SA 4.0) License as described by https://creativecommons.org/licenses/by-sa/4.0/ Footnotes ========= (1) A GPL-compatible license makes it possible to combine django-celery-results with other software that is released under the GPL, it does not mean that we're distributing django-celery-results under the GPL license. The BSD license, unlike the GPL, let you distribute a modified version without making your changes open source. 
django-celery-results-2.5.1/MANIFEST.in000066400000000000000000000006351442620237100175040ustar00rootroot00000000000000include Changelog include LICENSE include README.rst include MANIFEST.in include setup.cfg include setup.py include manage.py recursive-include docs * recursive-include extra/* recursive-include examples * recursive-include requirements *.txt *.rst recursive-include t *.py recursive-include django_celery_results *.py *.po recursive-exclude * __pycache__ recursive-exclude * *.py[co] recursive-exclude * .*.sw* django-celery-results-2.5.1/Makefile000066400000000000000000000067521442620237100174140ustar00rootroot00000000000000PROJ=django_celery_results PGPIDENT="Celery Security Team" PYTHON=python3 PYTEST=pytest GIT=git TOX=tox ICONV=iconv FLAKE8=flake8 PYDOCSTYLE=pydocstyle SPHINX2RST=sphinx2rst TESTDIR=t SPHINX_DIR=docs/ SPHINX_BUILDDIR="${SPHINX_DIR}/_build" README=README.rst README_SRC="docs/templates/readme.txt" CONTRIBUTING=CONTRIBUTING.rst CONTRIBUTING_SRC="docs/contributing.rst" SPHINX_HTMLDIR="${SPHINX_BUILDDIR}/html" DOCUMENTATION=Documentation all: help help: @echo "docs - Build documentation." @echo "test-all - Run tests for all supported python versions." @echo "distcheck ---------- - Check distribution for problems." @echo " test - Run unittests using current python." @echo " lint ------------ - Check codebase for problems." @echo " apicheck - Check API reference coverage." @echo " configcheck - Check configuration reference coverage." @echo " readmecheck - Check README.rst encoding." @echo " contribcheck - Check CONTRIBUTING.rst encoding" @echo " flakes -------- - Check code for syntax and style errors." @echo " flakecheck - Run flake8 on the source code." @echo " pep257check - Run pydocstyle on the source code." @echo "readme - Regenerate README.rst file." @echo "contrib - Regenerate CONTRIBUTING.rst file" @echo "clean-dist --------- - Clean all distribution build artifacts." @echo " clean-git-force - Remove all uncomitted files." @echo " clean ------------ - Non-destructive clean" @echo " clean-pyc - Remove .pyc/__pycache__ files" @echo " clean-docs - Remove documentation build artifacts." @echo " clean-build - Remove setup artifacts." @echo "bump - Bump patch version number." @echo "bump-minor - Bump minor version number." @echo "bump-major - Bump major version number." @echo "release - Make PyPI release." clean: clean-docs clean-pyc clean-build clean-dist: clean clean-git-force bump: bumpversion patch bump-minor: bumpversion minor bump-major: bumpversion major release: python setup.py register sdist bdist_wheel upload --sign --identity="$(PGPIDENT)" Documentation: (cd "$(SPHINX_DIR)"; $(MAKE) html) mv "$(SPHINX_HTMLDIR)" $(DOCUMENTATION) docs: Documentation clean-docs: -rm -rf "$(SPHINX_BUILDDIR)" lint: flakecheck apicheck configcheck readmecheck apicheck: (cd "$(SPHINX_DIR)"; $(MAKE) apicheck) configcheck: true flakecheck: $(FLAKE8) "$(PROJ)" "$(TESTDIR)" flakediag: -$(MAKE) flakecheck pep257check: $(PYDOCSTYLE) "$(PROJ)" flakes: flakediag pep257check clean-readme: -rm -f $(README) readmecheck: $(ICONV) -f ascii -t ascii $(README) >/dev/null $(README): $(SPHINX2RST) "$(README_SRC)" --ascii > $@ readme: clean-readme $(README) readmecheck clean-contrib: -rm -f "$(CONTRIBUTING)" $(CONTRIBUTING): $(SPHINX2RST) "$(CONTRIBUTING_SRC)" > $@ contrib: clean-contrib $(CONTRIBUTING) clean-pyc: -find . -type f -a \( -name "*.pyc" -o -name "*$$py.class" \) | xargs rm -find . 
-type d -name "__pycache__" | xargs rm -r removepyc: clean-pyc clean-build: rm -rf build/ dist/ .eggs/ *.egg-info/ .tox/ .coverage cover/ clean-git: $(GIT) clean -xdn clean-git-force: $(GIT) clean -xdf test-all: clean-pyc $(TOX) test: $(PYTHON) setup.py test cov: covbuild (cd $(TESTDIR); pytest -x --cov=django_celery_results --cov-report=html) build: $(PYTHON) setup.py sdist bdist_wheel distcheck: lint test clean dist: readme contrib clean-dist build django-celery-results-2.5.1/README.rst000066400000000000000000000104651442620237100174370ustar00rootroot00000000000000===================================================================== Celery Result Backends using the Django ORM/Cache framework. ===================================================================== |build-status| |coverage| |license| |wheel| |pyversion| |pyimp| :Version: 2.5.1 :Web: https://django-celery-results.readthedocs.io/ :Download: https://pypi.python.org/pypi/django-celery-results :Source: https://github.com/celery/django-celery-results :Keywords: django, celery, database, results About ===== This extension enables you to store Celery task results using the Django ORM. It defines a single model (``django_celery_results.models.TaskResult``) used to store task results, and you can query this database table like any other Django model. Installing ========== The installation instructions for this extension is available from the `Celery documentation`_ .. _`Celery documentation`: https://docs.celeryproject.org/en/latest/django/first-steps-with-django.html#django-celery-results-using-the-django-orm-cache-as-a-result-backend .. _installation: Installation ============ You can install django-celery-results either via the Python Package Index (PyPI) or from source. To install using `pip`,:: $ pip install -U django-celery-results .. _installing-from-source: Downloading and installing from source -------------------------------------- Download the latest version of django-celery-results from https://pypi.python.org/pypi/django-celery-results You can install it by doing the following,:: $ tar xvfz django-celery-results-0.0.0.tar.gz $ cd django-celery-results-0.0.0 $ python setup.py build # python setup.py install The last command must be executed as a privileged user if you are not currently using a virtualenv. .. _installing-from-git: Using the development version ----------------------------- With pip ~~~~~~~~ You can install the latest snapshot of django-celery-results using the following pip command:: $ pip install https://github.com/celery/django-celery-results/zipball/master#egg=django-celery-results Issues with mysql ----------------- If you want to run ``django-celery-results`` with MySQL, you might run into some issues. One such issue is when you try to run ``python manage.py migrate django_celery_results``, you might get the following error:: django.db.utils.OperationalError: (1071, 'Specified key was too long; max key length is 767 bytes') To get around this issue, you can set:: DJANGO_CELERY_RESULTS_TASK_ID_MAX_LENGTH=191 (or any other value if any other db other than MySQL is causing similar issues.) max_length of **191** seems to work for MySQL. .. |build-status| image:: https://secure.travis-ci.org/celery/django-celery-results.svg?branch=master :alt: Build status :target: https://travis-ci.org/celery/django-celery-results .. |coverage| image:: https://codecov.io/github/celery/django-celery-results/coverage.svg?branch=master :target: https://codecov.io/github/celery/django-celery-results?branch=master .. 
|license| image:: https://img.shields.io/pypi/l/django-celery-results.svg :alt: BSD License :target: https://opensource.org/licenses/BSD-3-Clause .. |wheel| image:: https://img.shields.io/pypi/wheel/django-celery-results.svg :alt: django-celery-results can be installed via wheel :target: https://pypi.python.org/pypi/django-celery-results/ .. |pyversion| image:: https://img.shields.io/pypi/pyversions/django-celery-results.svg :alt: Supported Python versions. :target: https://pypi.python.org/pypi/django-celery-results/ .. |pyimp| image:: https://img.shields.io/pypi/implementation/django-celery-results.svg :alt: Support Python implementations. :target: https://pypi.python.org/pypi/django-celery-results/ django-celery-results for enterprise ------------------------------------ Available as part of the Tidelift Subscription. The maintainer of django-celery-results and thousands of other packages are working with Tidelift to deliver commercial support and maintenance for the open source packages you use to build your applications. Save time, reduce risk, and improve code health, while paying the maintainer of the exact packages you use. `Learn more. `_ django-celery-results-2.5.1/conftest.py000066400000000000000000000016041442620237100201420ustar00rootroot00000000000000import pytest def pytest_addoption(parser): parser.addoption( '-B', '--run-benchmarks', action='store_true', default=False, help='run benchmarks', ) def pytest_runtest_setup(item): """ Skip tests marked benchmark unless --run-benchmark is given to pytest """ run_benchmarks = item.config.getoption('--run-benchmarks') is_benchmark = any(item.iter_markers(name="benchmark")) if is_benchmark: if run_benchmarks: return pytest.skip( 'need --run-benchmarks to run benchmarks' ) def pytest_collection_modifyitems(items): """ Add the "benchmark" mark to tests that start with "benchmark_". """ for item in items: test_class_name = item.cls.__name__ if test_class_name.startswith("benchmark_"): item.add_marker(pytest.mark.benchmark) django-celery-results-2.5.1/django_celery_results/000077500000000000000000000000001442620237100223305ustar00rootroot00000000000000django-celery-results-2.5.1/django_celery_results/__init__.py000066400000000000000000000017571442620237100244530ustar00rootroot00000000000000"""Celery result backends for Django.""" # :copyright: (c) 2016, Ask Solem. # :copyright: (c) 2017-2033, Asif Saif Uddin. # All rights reserved. # :license: BSD (3 Clause), see LICENSE for more details. import re from collections import namedtuple import django __version__ = '2.5.1' __author__ = 'Asif Saif Uddin, Ask Solem' __contact__ = 'auvipy@gmail.com' __homepage__ = 'https://github.com/celery/django-celery-results' __docformat__ = 'restructuredtext' # -eof meta- version_info_t = namedtuple('version_info_t', ( 'major', 'minor', 'micro', 'releaselevel', 'serial', )) # bumpversion can only search for {current_version} # so we have to parse the version here. 
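# Illustrative note: with __version__ = '2.5.1' the match below yields
# ('2', '5', '1', None), so VERSION becomes
# version_info_t(major=2, minor=5, micro=1, releaselevel='', serial='').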
_temp = re.match( r'(\d+)\.(\d+).(\d+)(.+)?', __version__).groups() VERSION = version_info = version_info_t( int(_temp[0]), int(_temp[1]), int(_temp[2]), _temp[3] or '', '') del _temp del re __all__ = [] if django.VERSION < (3, 2): default_app_config = 'django_celery_results.apps.CeleryResultConfig' django-celery-results-2.5.1/django_celery_results/admin.py000066400000000000000000000044721442620237100240010ustar00rootroot00000000000000"""Result Task Admin interface.""" from django.conf import settings from django.contrib import admin from django.utils.translation import gettext_lazy as _ try: ALLOW_EDITS = settings.DJANGO_CELERY_RESULTS['ALLOW_EDITS'] except (AttributeError, KeyError): ALLOW_EDITS = False pass from .models import GroupResult, TaskResult class TaskResultAdmin(admin.ModelAdmin): """Admin-interface for results of tasks.""" model = TaskResult date_hierarchy = 'date_done' list_display = ('task_id', 'periodic_task_name', 'task_name', 'date_done', 'status', 'worker') list_filter = ('status', 'date_done', 'periodic_task_name', 'task_name', 'worker') readonly_fields = ('date_created', 'date_done', 'result', 'meta') search_fields = ('task_name', 'task_id', 'status', 'task_args', 'task_kwargs') fieldsets = ( (None, { 'fields': ( 'task_id', 'task_name', 'periodic_task_name', 'status', 'worker', 'content_type', 'content_encoding', ), 'classes': ('extrapretty', 'wide') }), (_('Parameters'), { 'fields': ( 'task_args', 'task_kwargs', ), 'classes': ('extrapretty', 'wide') }), (_('Result'), { 'fields': ( 'result', 'date_created', 'date_done', 'traceback', 'meta', ), 'classes': ('extrapretty', 'wide') }), ) def get_readonly_fields(self, request, obj=None): if ALLOW_EDITS: return self.readonly_fields else: return list({ field.name for field in self.opts.local_fields }) admin.site.register(TaskResult, TaskResultAdmin) class GroupResultAdmin(admin.ModelAdmin): """Admin-interface for results of grouped tasks.""" model = GroupResult date_hierarchy = 'date_done' list_display = ('group_id', 'date_done') list_filter = ('date_done',) readonly_fields = ('date_created', 'date_done', 'result') search_fields = ('group_id',) admin.site.register(GroupResult, GroupResultAdmin) django-celery-results-2.5.1/django_celery_results/apps.py000066400000000000000000000006541442620237100236520ustar00rootroot00000000000000"""Application configuration.""" from django.apps import AppConfig from django.utils.translation import gettext_lazy as _ __all__ = ['CeleryResultConfig'] class CeleryResultConfig(AppConfig): """Default configuration for the django_celery_results app.""" name = 'django_celery_results' label = 'django_celery_results' verbose_name = _('Celery Results') default_auto_field = 'django.db.models.AutoField' django-celery-results-2.5.1/django_celery_results/backends/000077500000000000000000000000001442620237100241025ustar00rootroot00000000000000django-celery-results-2.5.1/django_celery_results/backends/__init__.py000066400000000000000000000001651442620237100262150ustar00rootroot00000000000000from .cache import CacheBackend from .database import DatabaseBackend __all__ = ['CacheBackend', 'DatabaseBackend'] django-celery-results-2.5.1/django_celery_results/backends/cache.py000066400000000000000000000021511442620237100255160ustar00rootroot00000000000000"""Celery cache backend using the Django Cache Framework.""" from celery.backends.base import KeyValueStoreBackend from django.core.cache import cache as default_cache from django.core.cache import caches from kombu.utils.encoding import bytes_to_str class 
CacheBackend(KeyValueStoreBackend): """Backend using the Django cache framework to store task metadata.""" def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) # Must make sure backend doesn't convert exceptions to dict. self.serializer = 'pickle' def get(self, key): key = bytes_to_str(key) return self.cache_backend.get(key) def set(self, key, value): key = bytes_to_str(key) self.cache_backend.set(key, value, self.expires) def delete(self, key): key = bytes_to_str(key) self.cache_backend.delete(key) def encode(self, data): return data def decode(self, data): return data @property def cache_backend(self): backend = self.app.conf.cache_backend return caches[backend] if backend else default_cache django-celery-results-2.5.1/django_celery_results/backends/database.py000066400000000000000000000256551442620237100262350ustar00rootroot00000000000000import binascii import json from celery import maybe_signature from celery.backends.base import BaseDictBackend, get_current_task from celery.exceptions import ChordError from celery.result import GroupResult, allow_join_result, result_from_tuple from celery.utils.log import get_logger from celery.utils.serialization import b64decode, b64encode from django.db import connection, transaction from django.db.utils import InterfaceError from kombu.exceptions import DecodeError from ..models import ChordCounter from ..models import GroupResult as GroupResultModel from ..models import TaskResult EXCEPTIONS_TO_CATCH = (InterfaceError,) try: from psycopg2 import InterfaceError as Psycopg2InterfaceError EXCEPTIONS_TO_CATCH += (Psycopg2InterfaceError,) except ImportError: pass logger = get_logger(__name__) class DatabaseBackend(BaseDictBackend): """The Django database backend, using models to store task state.""" TaskModel = TaskResult GroupModel = GroupResultModel subpolling_interval = 0.5 def exception_safe_to_retry(self, exc): """Check if an exception is safe to retry. Backends have to overload this method with correct predicates dealing with their exceptions. By default no exception is safe to retry, it's up to backend implementation to define which exceptions are safe. For Celery / django-celery-results, retry Django / Psycopg2 InterfaceErrors, like "Connection already closed", with new connection. Set result_backend_always_retry to True in order to enable retries. 
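        A minimal sketch of enabling this (assuming the Celery app reads its
        configuration from Django settings with the conventional ``CELERY``
        namespace; the setting names below follow from that assumption)::

            # settings.py
            CELERY_RESULT_BACKEND_ALWAYS_RETRY = True
            CELERY_RESULT_BACKEND_MAX_RETRIES = 10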
""" for exc_type in EXCEPTIONS_TO_CATCH: if isinstance(exc, exc_type): # Only called if InterfaceError occurs and always_retry is True connection.close() return True return False def _get_extended_properties(self, request, traceback): extended_props = { 'periodic_task_name': None, 'task_args': None, 'task_kwargs': None, 'task_name': None, 'traceback': None, 'worker': None, } if request and self.app.conf.find_value_for_key('extended', 'result'): if getattr(request, 'argsrepr', None) is not None: # task protocol 2 task_args = request.argsrepr else: # task protocol 1 task_args = getattr(request, 'args', None) if getattr(request, 'kwargsrepr', None) is not None: # task protocol 2 task_kwargs = request.kwargsrepr else: # task protocol 1 task_kwargs = getattr(request, 'kwargs', None) # Encode input arguments if task_args is not None: _, _, task_args = self.encode_content(task_args) if task_kwargs is not None: _, _, task_kwargs = self.encode_content(task_kwargs) properties = getattr(request, 'properties', {}) or {} periodic_task_name = properties.get('periodic_task_name', None) extended_props.update({ 'periodic_task_name': periodic_task_name, 'task_args': task_args, 'task_kwargs': task_kwargs, 'task_name': getattr(request, 'task', None), 'traceback': traceback, 'worker': getattr(request, 'hostname', None), }) return extended_props def _get_meta_from_request(self, request=None): """ Use the request or get_current_task to evaluate the `meta` attribute. With this, is possible to assign arbitrary data in request.meta to be retrieve and stored on the TaskResult. """ request = request or getattr(get_current_task(), "request", None) return getattr(request, "meta", {}) def _store_result( self, task_id, result, status, traceback=None, request=None, using=None ): """Store return value and status of an executed task.""" content_type, content_encoding, result = self.encode_content(result) meta = { **self._get_meta_from_request(request), "children": self.current_task_children(request), } _, _, encoded_meta = self.encode_content( meta, ) task_props = { 'content_encoding': content_encoding, 'content_type': content_type, 'meta': encoded_meta, 'result': result, 'status': status, 'task_id': task_id, 'traceback': traceback, 'using': using, } task_props.update( self._get_extended_properties(request, traceback) ) self.TaskModel._default_manager.store_result(**task_props) return result def _get_task_meta_for(self, task_id): """Get task metadata for a task by id.""" obj = self.TaskModel._default_manager.get_task(task_id) res = obj.as_dict() meta = self.decode_content(obj, res.pop('meta', None)) or {} result = self.decode_content(obj, res.get('result')) task_args = res.get('task_args') task_kwargs = res.get('task_kwargs') try: task_args = self.decode_content(obj, task_args) task_kwargs = self.decode_content(obj, task_kwargs) except (DecodeError, binascii.Error): pass # the right names are args/kwargs, not task_args/task_kwargs, # keep both for backward compatibility res.update( meta, result=result, task_args=task_args, task_kwargs=task_kwargs, args=task_args, kwargs=task_kwargs, ) return self.meta_from_decoded(res) def encode_content(self, data): content_type, content_encoding, content = self._encode(data) if content_encoding == 'binary': content = b64encode(content) return content_type, content_encoding, content def decode_content(self, obj, content): if content: if obj.content_encoding == 'binary': content = b64decode(content) return self.decode(content) def _forget(self, task_id): try: 
self.TaskModel._default_manager.get(task_id=task_id).delete() except self.TaskModel.DoesNotExist: pass def cleanup(self): """Delete expired metadata.""" self.TaskModel._default_manager.delete_expired(self.expires) self.GroupModel._default_manager.delete_expired(self.expires) def _restore_group(self, group_id): """return result value for a group by id.""" group_result = self.GroupModel._default_manager.get_group(group_id) if group_result: res = group_result.as_dict() decoded_result = self.decode_content(group_result, res["result"]) res["result"] = None if decoded_result: res["result"] = result_from_tuple(decoded_result, app=self.app) return res def _save_group(self, group_id, group_result): """Store return value of group""" content_type, content_encoding, result = self.encode_content( group_result.as_tuple() ) self.GroupModel._default_manager.store_group_result( content_type, content_encoding, group_id, result ) return group_result def _delete_group(self, group_id): try: self.GroupModel._default_manager.get_group(group_id).delete() except self.TaskModel.DoesNotExist: pass def apply_chord(self, header_result_args, body, **kwargs): """Add a ChordCounter with the expected number of results""" if not isinstance(header_result_args, GroupResult): # Celery 5.1 provides the GroupResult args header_result = self.app.GroupResult(*header_result_args) else: # celery <5.1 will pass a GroupResult object header_result = header_result_args results = [r.as_tuple() for r in header_result] chord_size = body.get("chord_size", None) or len(results) data = json.dumps(results) ChordCounter.objects.create( group_id=header_result.id, sub_tasks=data, count=chord_size ) def on_chord_part_return(self, request, state, result, **kwargs): """Called on finishing each part of a Chord header""" tid, gid = request.id, request.group if not gid or not tid: return call_callback = False with transaction.atomic(): # We need to know if `count` hits 0. # wrap the update in a transaction # with a `select_for_update` lock to prevent race conditions. 
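            # Each finished header task decrements ChordCounter.count; only
            # the task that brings the count to zero deletes the counter and
            # triggers the chord callback further down.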
# SELECT FOR UPDATE is not supported on all databases try: chord_counter = ( ChordCounter.objects.select_for_update() .get(group_id=gid) ) except ChordCounter.DoesNotExist: logger.warning("Can't find ChordCounter for Group %s", gid) return chord_counter.count -= 1 if chord_counter.count != 0: chord_counter.save(update_fields=["count"]) else: # Last task in the chord header has finished call_callback = True chord_counter.delete() if call_callback: deps = chord_counter.group_result(app=self.app) if deps.ready(): callback = maybe_signature(request.chord, app=self.app) trigger_callback( app=self.app, callback=callback, group_result=deps ) def trigger_callback(app, callback, group_result): """Add the callback to the queue or mark the callback as failed Implementation borrowed from `celery.app.builtins.unlock_chord` """ if group_result.supports_native_join: j = group_result.join_native else: j = group_result.join try: with allow_join_result(): ret = j(timeout=app.conf.result_chord_join_timeout, propagate=True) except Exception as exc: # pylint: disable=broad-except try: culprit = next(group_result._failed_join_report()) reason = f"Dependency {culprit.id} raised {exc!r}" except StopIteration: reason = repr(exc) logger.exception("Chord %r raised: %r", group_result.id, exc) app.backend.chord_error_from_stack(callback, ChordError(reason)) else: try: callback.delay(ret) except Exception as exc: # pylint: disable=broad-except logger.exception("Chord %r raised: %r", group_result.id, exc) app.backend.chord_error_from_stack( callback, exc=ChordError(f"Callback error: {exc!r}") ) django-celery-results-2.5.1/django_celery_results/locale/000077500000000000000000000000001442620237100235675ustar00rootroot00000000000000django-celery-results-2.5.1/django_celery_results/locale/es/000077500000000000000000000000001442620237100241765ustar00rootroot00000000000000django-celery-results-2.5.1/django_celery_results/locale/es/LC_MESSAGES/000077500000000000000000000000001442620237100257635ustar00rootroot00000000000000django-celery-results-2.5.1/django_celery_results/locale/es/LC_MESSAGES/django.po000066400000000000000000000104351442620237100275700ustar00rootroot00000000000000# Spanish translation strings for django-celery-results. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as django-celery-results. # , 2020. 
# #, fuzzy msgid "" msgstr "" "Project-Id-Version:\n" "Report-Msgid-Bugs-To: \n" "POT-Creation-Date: 2020-02-26 18:34+0100\n" "PO-Revision-Date: 2020-02-26 20:25-0015\n" "Last-Translator: \n" "Language-Team: LANGUAGE \n" "Language: es\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: 8bit\n" "Plural-Forms: nplurals=2; plural=(n != 1);\n" #: django_celery_results/admin.py:39 msgid "Parameters" msgstr "Parámetros" #: django_celery_results/admin.py:46 msgid "Result" msgstr "Resultado" #: django_celery_results/apps.py:15 msgid "Celery Results" msgstr "Resultados Celery" #: django_celery_results/models.py:28 msgid "Task ID" msgstr "ID de Tarea" #: django_celery_results/models.py:29 msgid "Celery ID for the Task that was run" msgstr "ID de Celery para la tarea que fue ejecutada" #: django_celery_results/models.py:32 msgid "Task Name" msgstr "Nombre de Tarea" #: django_celery_results/models.py:33 msgid "Name of the Task which was run" msgstr "Nombre de la Tarea que fue ejecutada" #: django_celery_results/models.py:36 msgid "Task Positional Arguments" msgstr "Argumentos posicionales de la Tarea" #: django_celery_results/models.py:37 msgid "JSON representation of the positional arguments used with the task" msgstr "Representación JSON de los argumentos posicionales usados en la tarea" #: django_celery_results/models.py:41 msgid "Task Named Arguments" msgstr "Argumentos opcionales de la tarea" #: django_celery_results/models.py:42 msgid "JSON representation of the named arguments used with the task" msgstr "Representación JSON de los argumentos opcionales usados en la tarea" #: django_celery_results/models.py:47 msgid "Task State" msgstr "Estado de la Tarea" #: django_celery_results/models.py:48 msgid "Current state of the task being run" msgstr "Estado actual en el que se encuentra la tarea en ejecución" #: django_celery_results/models.py:51 msgid "Worker" msgstr "Worker" #: django_celery_results/models.py:51 msgid "Worker that executes the task" msgstr "Worker que ejecuta la tarea" #: django_celery_results/models.py:55 msgid "Result Content Type" msgstr "Content Type del resultado" #: django_celery_results/models.py:56 msgid "Content type of the result data" msgstr "Atributo Content type de los datos del resultado" #: django_celery_results/models.py:59 msgid "Result Encoding" msgstr "Codificación del resultado" #: django_celery_results/models.py:60 msgid "The encoding used to save the task result data" msgstr "La codificación usada para guardar los datos del resultado" #: django_celery_results/models.py:63 msgid "Result Data" msgstr "Datos del resultado" #: django_celery_results/models.py:64 msgid "" "The data returned by the task. Use content_encoding and content_type fields" " to read." msgstr "" "Datos devueltos por la tarea. Usa los campos content_encoding y content_type" " para leerlos." 
#: django_celery_results/models.py:68 msgid "Created DateTime" msgstr "Fecha de creación" #: django_celery_results/models.py:69 msgid "Datetime field when the task result was created in UTC" msgstr "Fecha de creación de la tarea en UTC" #: django_celery_results/models.py:72 msgid "Completed DateTime" msgstr "Fecha de terminación" #: django_celery_results/models.py:73 msgid "Datetime field when the task was completed in UTC" msgstr "Fecha de completitud de la tarea en UTC" #: django_celery_results/models.py:76 msgid "Traceback" msgstr "Traceback" #: django_celery_results/models.py:77 msgid "Text of the traceback if the task generated one" msgstr "Texto del traceback si la tarea generó uno" #: django_celery_results/models.py:80 msgid "Task Meta Information" msgstr "Metadatos de la tarea" #: django_celery_results/models.py:81 msgid "" "JSON meta information about the task, such as information on child tasks" msgstr "" "Metainformación sobre la tarea en formato JSON, como la información de las " "tareas hijas" #: django_celery_results/models.py:91 msgid "task result" msgstr "resultado de la tarea" #: django_celery_results/models.py:92 msgid "task results" msgstr "resultados de tareas" django-celery-results-2.5.1/django_celery_results/locale/pt_BR/000077500000000000000000000000001442620237100245755ustar00rootroot00000000000000django-celery-results-2.5.1/django_celery_results/locale/pt_BR/LC_MESSAGES/000077500000000000000000000000001442620237100263625ustar00rootroot00000000000000django-celery-results-2.5.1/django_celery_results/locale/pt_BR/LC_MESSAGES/django.po000066400000000000000000000120711442620237100301650ustar00rootroot00000000000000# Brazilian portuguese translation strings for django-celery-results. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as django-celery-results. # Eduardo Oliveira , 2022. 
# #, fuzzy msgid "" msgstr "" "Project-Id-Version: \n" "Report-Msgid-Bugs-To: \n" "POT-Creation-Date: 2022-01-04 19:52-0300\n" "PO-Revision-Date: 2022-01-04 19:52-0300\n" "Last-Translator: Eduardo Oliveira \n" "Language-Team: LANGUAGE \n" "Language: pt_BR\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: 8bit\n" "Plural-Forms: nplurals=2; plural=(n > 1);\n" #: admin.py:40 msgid "Parameters" msgstr "Parâmetros" #: admin.py:47 msgid "Result" msgstr "Resultado" #: apps.py:14 msgid "Celery Results" msgstr "Resultados do celery" #: models.py:28 msgid "Task ID" msgstr "Id da tarefa" #: models.py:29 msgid "Celery ID for the Task that was run" msgstr "Id do celery em que a tarefa foi executada" #: models.py:32 msgid "Periodic Task Name" msgstr "Nome da tarefa periódica" #: models.py:33 msgid "Name of the Periodic Task which was run" msgstr "Nome da tarefa periódica que foi executada" #: models.py:36 msgid "Task Name" msgstr "Nome da tarefa" #: models.py:37 msgid "Name of the Task which was run" msgstr "Nome da tarefa que foi executada" #: models.py:40 msgid "Task Positional Arguments" msgstr "Argumentos posicionais da tarefa" #: models.py:41 msgid "JSON representation of the positional arguments used with the task" msgstr "Representação JSON dos argumentos posicionais usados pela tarefa" #: models.py:45 msgid "Task Named Arguments" msgstr "Argumentos nomeados da tarefa" #: models.py:46 msgid "JSON representation of the named arguments used with the task" msgstr "Representação JSON dos argumentos nomeados usados pela tarefa" #: models.py:51 msgid "Task State" msgstr "Status da tarefa" #: models.py:52 msgid "Current state of the task being run" msgstr "Status atual da tarefa em execução" #: models.py:55 msgid "Worker" msgstr "Worker" #: models.py:55 msgid "Worker that executes the task" msgstr "Worker que executa a tarefa" #: models.py:59 models.py:190 msgid "Result Content Type" msgstr "Tipo de conteúdo do resultado" #: models.py:60 models.py:191 msgid "Content type of the result data" msgstr "Tipo de conteúdo dos dados do resultado" #: models.py:63 models.py:195 msgid "Result Encoding" msgstr "Codificação do resultado" #: models.py:64 models.py:196 msgid "The encoding used to save the task result data" msgstr "A codificação usada para salvar os dados de resultado da tarefa" #: models.py:67 models.py:200 msgid "Result Data" msgstr "Dados do resultado" #: models.py:68 models.py:201 msgid "" "The data returned by the task. Use content_encoding and content_type fields " "to read." msgstr "Os dados retornados pela tarefa. Use os campos content_encoding e content_type para ler." 
#: models.py:72 models.py:180 msgid "Created DateTime" msgstr "Data/Horário de criação" #: models.py:73 msgid "Datetime field when the task result was created in UTC" msgstr "Data/Horário em que o resultado da tarefa foi criado (em UTC)" #: models.py:76 models.py:185 msgid "Completed DateTime" msgstr "Data/Horário em que foi concluída" #: models.py:77 msgid "Datetime field when the task was completed in UTC" msgstr "Data/Horário em que a tarefa foi concluída (em UTC)" #: models.py:80 msgid "Traceback" msgstr "Traceback" #: models.py:81 msgid "Text of the traceback if the task generated one" msgstr "Texto de traceback se a tarefa gerou um" #: models.py:84 msgid "Task Meta Information" msgstr "Meta informação da tarefa" #: models.py:85 msgid "" "JSON meta information about the task, such as information on child tasks" msgstr "Meta informação JSON sobre a tarefa, como informações sobre as subtarefas" #: models.py:95 msgid "task result" msgstr "resultado da tarefa" #: models.py:96 msgid "task results" msgstr "resultados das tarefas" #: models.py:133 models.py:175 msgid "Group ID" msgstr "Id do grupo" #: models.py:134 msgid "Celery ID for the Chord header group" msgstr "Id do celery para o grupo de cabeçalho Chord" #: models.py:138 msgid "" "JSON serialized list of task result tuples. use .group_result() to decode" msgstr "lista de tuplas de resultados de tarefas serializadas como JSON. Use .group_result() para decodificar" #: models.py:144 msgid "Starts at len(chord header) and decrements after each task is finished" msgstr "Começa em len(chord header) e decaí após o término de cada tarefa" #: models.py:176 msgid "Celery ID for the Group that was run" msgstr "Id do celery para o grupo que foi executado" #: models.py:181 msgid "Datetime field when the group result was created in UTC" msgstr "Data/Horário em que o resultado do grupo foi criado (em UTC)" #: models.py:186 msgid "Datetime field when the group was completed in UTC" msgstr "Data/Horário em que o grupo foi concluída (em UTC)" #: models.py:221 msgid "group result" msgstr "resultado do grupo" #: models.py:222 msgid "group results" msgstr "resultados dos grupos" django-celery-results-2.5.1/django_celery_results/locale/ru/000077500000000000000000000000001442620237100242155ustar00rootroot00000000000000django-celery-results-2.5.1/django_celery_results/locale/ru/LC_MESSAGES/000077500000000000000000000000001442620237100260025ustar00rootroot00000000000000django-celery-results-2.5.1/django_celery_results/locale/ru/LC_MESSAGES/django.po000066400000000000000000000150721442620237100276110ustar00rootroot00000000000000# Russian translation strings for django-celery-results. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the PACKAGE package. # ILDAR MINNAKHMETOV , 2021. 
# #, fuzzy msgid "" msgstr "" "Project-Id-Version: \n" "Report-Msgid-Bugs-To: \n" "POT-Creation-Date: 2021-11-09 19:16+0000\n" "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" "Last-Translator: ILDAR MINNAKHMETOV \n" "Language-Team: LANGUAGE \n" "Language: ru\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: 8bit\n" "Plural-Forms: nplurals=2; plural=(n != 1);\n" #: django_celery_results/admin.py:38 msgid "Parameters" msgstr "Параметры" #: django_celery_results/admin.py:45 msgid "Result" msgstr "Результаты" #: django_celery_results/apps.py:14 msgid "Celery Results" msgstr "Результаты Celery" #: django_celery_results/models.py:28 msgid "Task ID" msgstr "ID Задачи" #: django_celery_results/models.py:29 msgid "Celery ID for the Task that was run" msgstr "Celery ID задачи" #: django_celery_results/models.py:32 msgid "Task Name" msgstr "Название задачи" #: django_celery_results/models.py:33 msgid "Name of the Task which was run" msgstr "Название задачи которая была запущена" #: django_celery_results/models.py:36 msgid "Task Positional Arguments" msgstr "Аргументы задачи" #: django_celery_results/models.py:37 msgid "JSON representation of the positional arguments used with the task" msgstr "JSON с позиционными аргументами задачи (*args)" #: django_celery_results/models.py:41 msgid "Task Named Arguments" msgstr "Именнованные аргументы задачи" #: django_celery_results/models.py:42 msgid "JSON representation of the named arguments used with the task" msgstr "JSON с именованными аргументами задачи (**kwargs)" #: django_celery_results/models.py:47 msgid "Task State" msgstr "Статус задачи" #: django_celery_results/models.py:48 msgid "Current state of the task being run" msgstr "Текущеий статус запущенной задачи" #: django_celery_results/models.py:51 msgid "Worker" msgstr "Воркер" #: django_celery_results/models.py:51 msgid "Worker that executes the task" msgstr "Воркер который выполняет задачу" #: django_celery_results/models.py:55 django_celery_results/models.py:186 msgid "Result Content Type" msgstr "Тип контента результата" #: django_celery_results/models.py:56 django_celery_results/models.py:187 msgid "Content type of the result data" msgstr "Тип контента данных результата" #: django_celery_results/models.py:59 django_celery_results/models.py:191 msgid "Result Encoding" msgstr "Кодировка результата" #: django_celery_results/models.py:60 django_celery_results/models.py:192 msgid "The encoding used to save the task result data" msgstr "Кодировка использованная для сохранения данных результата" #: django_celery_results/models.py:63 django_celery_results/models.py:196 msgid "Result Data" msgstr "Данные результата" #: django_celery_results/models.py:64 django_celery_results/models.py:197 msgid "" "The data returned by the task. Use content_encoding and content_type fields " "to read." msgstr "Данные, которые вернула задача. Используйте content_encoding и content_type для чтения." 
#: django_celery_results/models.py:68 django_celery_results/models.py:176 msgid "Created DateTime" msgstr "Дата и время создания" #: django_celery_results/models.py:69 msgid "Datetime field when the task result was created in UTC" msgstr "Дата и время когда результат был создан (UTC)" #: django_celery_results/models.py:72 django_celery_results/models.py:181 msgid "Completed DateTime" msgstr "Дата и время завершения" #: django_celery_results/models.py:73 msgid "Datetime field when the task was completed in UTC" msgstr "Дата и время когда задача была завершена (UTC)" #: django_celery_results/models.py:76 msgid "Traceback" msgstr "Traceback" #: django_celery_results/models.py:77 msgid "Text of the traceback if the task generated one" msgstr "Текст traceback, если есть" #: django_celery_results/models.py:80 msgid "Task Meta Information" msgstr "Метаинформация задачи" #: django_celery_results/models.py:81 msgid "" "JSON meta information about the task, such as information on child tasks" msgstr "" "JSON мета-информация о задаче, к примеру о дочерних задачах" #: django_celery_results/models.py:91 msgid "task result" msgstr "результат задачи" #: django_celery_results/models.py:92 msgid "task results" msgstr "результаты задач" #: django_celery_results/models.py:129 django_celery_results/models.py:171 msgid "Group ID" msgstr "ID группы" #: django_celery_results/models.py:130 msgid "Celery ID for the Chord header group" msgstr "Celery ID для заголовка группы" #: django_celery_results/models.py:134 msgid "" "JSON serialized list of task result tuples. use .group_result() to decode" msgstr "" "JSON-список кортежей результата. Используйте .group_result() для декодирования" #: django_celery_results/models.py:140 msgid "Starts at len(chord header) and decrements after each task is finished" msgstr "Начинается в len(chord header) и уменьшается после каждого завершенного Здаания" #: django_celery_results/models.py:172 msgid "Celery ID for the Group that was run" msgstr "Celery ID для группы которая была запущена" #: django_celery_results/models.py:177 msgid "Datetime field when the group result was created in UTC" msgstr "Дата и время если результат группы был создан (UTC)" #: django_celery_results/models.py:182 msgid "Datetime field when the group was completed in UTC" msgstr "Дата и время, когда группа была завершена (UTC)" #: django_celery_results/models.py:217 msgid "group result" msgstr "результат группы" #: django_celery_results/models.py:218 msgid "group results" msgstr "результаты групп" django-celery-results-2.5.1/django_celery_results/locale/zh_Hans/000077500000000000000000000000001442620237100251615ustar00rootroot00000000000000django-celery-results-2.5.1/django_celery_results/locale/zh_Hans/LC_MESSAGES/000077500000000000000000000000001442620237100267465ustar00rootroot00000000000000django-celery-results-2.5.1/django_celery_results/locale/zh_Hans/LC_MESSAGES/django.po000066400000000000000000000122741442620237100305560ustar00rootroot00000000000000# Simplified Chinese translation strings for django-celery-results. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as django-celery-results. # , 2021. 
# #, fuzzy msgid "" msgstr "" "Project-Id-Version:\n" "Report-Msgid-Bugs-To:\n" "POT-Creation-Date: 2021-11-20 22:00+0800\n" "PO-Revision-Date: 2021-11-20 23:00+0800\n" "Last-Translator: ifmos \n" "Language-Team: LANGUAGE \n" "Language: zh-hans\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: 8bit\n" "Plural-Forms: nplurals=2; plural=(n != 1);\n" #: django_celery_results/admin.py:39 msgid "Parameters" msgstr "参数" #: django_celery_results/admin.py:46 msgid "Result" msgstr "结果" #: django_celery_results/apps.py:15 msgid "Celery Results" msgstr "Celery 结果" #: django_celery_results/models.py:28 msgid "Task ID" msgstr "任务 ID" #: django_celery_results/models.py:29 msgid "Celery ID for the Task that was run" msgstr "已运行任务 Celery ID" #: django_celery_results/models.py:32 msgid "Task Name" msgstr "任务名称" #: django_celery_results/models.py:33 msgid "Name of the Task which was run" msgstr "已运行任务名称" #: django_celery_results/models.py:36 msgid "Task Positional Arguments" msgstr "任务位置参数" #: django_celery_results/models.py:37 msgid "JSON representation of the positional arguments used with the task" msgstr "该任务位置参数的 JSON 字符串" #: django_celery_results/models.py:41 msgid "Task Named Arguments" msgstr "任务具名参数" #: django_celery_results/models.py:42 msgid "JSON representation of the named arguments used with the task" msgstr "该任务具名参数的 JSON 字符串" #: django_celery_results/models.py:47 msgid "Task State" msgstr "任务状态" #: django_celery_results/models.py:48 msgid "Current state of the task being run" msgstr "运行中任务的当前状态" #: django_celery_results/models.py:51 msgid "Worker" msgstr "Worker" #: django_celery_results/models.py:51 msgid "Worker that executes the task" msgstr "执行该任务的 Worker" #: django_celery_results/models.py:55 msgid "Result Content Type" msgstr "结果内容类型" #: django_celery_results/models.py:56 msgid "Content type of the result data" msgstr "结果数据的内容类型" #: django_celery_results/models.py:59 msgid "Result Encoding" msgstr "结果编码格式" #: django_celery_results/models.py:60 msgid "The encoding used to save the task result data" msgstr "保存结果数据的编码格式" #: django_celery_results/models.py:63 msgid "Result Data" msgstr "结果数据" #: django_celery_results/models.py:64 msgid "" "The data returned by the task. Use content_encoding and content_type fields" " to read." 
msgstr "该任务返回数据,根据 content_encoding 和 content_type 字段读取。" #: django_celery_results/models.py:68 msgid "Created DateTime" msgstr "创建时间" #: django_celery_results/models.py:69 msgid "Datetime field when the task result was created in UTC" msgstr "UTC格式的任务创建时间字段" #: django_celery_results/models.py:72 msgid "Completed DateTime" msgstr "完成时间" #: django_celery_results/models.py:73 msgid "Datetime field when the task was completed in UTC" msgstr "UTC格式的任务完成时间字段" #: django_celery_results/models.py:76 msgid "Traceback" msgstr "Traceback" #: django_celery_results/models.py:77 msgid "Text of the traceback if the task generated one" msgstr "任务生成报错时的 traceback 文本" #: django_celery_results/models.py:80 msgid "Task Meta Information" msgstr "任务元信息" #: django_celery_results/models.py:81 msgid "" "JSON meta information about the task, such as information on child tasks" msgstr "关于该任务的 JSON 元信息,如子任务的信息" #: django_celery_results/models.py:91 msgid "task result" msgstr "任务结果" #: django_celery_results/models.py:92 msgid "task results" msgstr "任务结果" #: django_celery_results/models.py:129 django_celery_results/models.py:171 msgid "Group ID" msgstr "分组 ID" #: django_celery_results/models.py:130 msgid "Celery ID for the Chord header group" msgstr "Chord header 分组的 Celery ID" #: django_celery_results/models.py:134 msgid "" "JSON serialized list of task result tuples. use .group_result() to decode" msgstr "" "任务结果元组的 JSON 序列化列表。使用 .group_result() 进行解码" #: django_celery_results/models.py:140 msgid "Starts at len(chord header) and decrements after each task is finished" msgstr "在 len(chord header) 处开始并且会在每个任务结束后递减" #: django_celery_results/models.py:172 msgid "Celery ID for the Group that was run" msgstr "已运行分组的 Celery ID" #: django_celery_results/models.py:177 msgid "Datetime field when the group result was created in UTC" msgstr "分组结果创建时的 UTC 格式 datetime 字段" #: django_celery_results/models.py:182 msgid "Datetime field when the group was completed in UTC" msgstr "分组结果完成时的 UTC 格式 datetime 字段" #: django_celery_results/models.py:217 msgid "group result" msgstr "分组结果" #: django_celery_results/models.py:218 msgid "group results" msgstr "分组结果" django-celery-results-2.5.1/django_celery_results/managers.py000066400000000000000000000171661442620237100245120ustar00rootroot00000000000000"""Model managers.""" import warnings from functools import wraps from itertools import count from celery.utils.time import maybe_timedelta from django.conf import settings from django.db import connections, models, router, transaction from .utils import now W_ISOLATION_REP = """ Polling results with transaction isolation level 'repeatable-read' within the same transaction may give outdated results. Be sure to commit the transaction for each poll iteration. """ class TxIsolationWarning(UserWarning): """Warning emitted if the transaction isolation level is suboptimal.""" def transaction_retry(max_retries=1): """Decorate a function to retry database operations. For functions doing database operations, adding retrying if the operation fails. Keyword Arguments: max_retries (int): Maximum number of retries. Default one retry. """ def _outer(fun): @wraps(fun) def _inner(*args, **kwargs): _max_retries = kwargs.pop('exception_retry_count', max_retries) for retries in count(0): try: return fun(*args, **kwargs) except Exception: # pragma: no cover # Depending on the database backend used we can experience # various exceptions. E.g. 
psycopg2 raises an exception # if some operation breaks the transaction, so saving # the task result won't be possible until we rollback # the transaction. if retries >= _max_retries: raise return _inner return _outer class ResultManager(models.Manager): """Generic manager for celery results.""" def warn_if_repeatable_read(self): if 'mysql' in self.current_engine().lower(): cursor = self.connection_for_read().cursor() # MariaDB and MySQL since 8.0 have different transaction isolation # variables: the former has tx_isolation, while the latter has # transaction_isolation if cursor.execute("SHOW VARIABLES WHERE variable_name IN " "('tx_isolation', 'transaction_isolation');"): isolation = cursor.fetchone()[1] if isolation == 'REPEATABLE-READ': warnings.warn(TxIsolationWarning(W_ISOLATION_REP.strip())) def connection_for_write(self): return connections[router.db_for_write(self.model)] def connection_for_read(self): return connections[self.db] def current_engine(self): try: return settings.DATABASES[self.db]['ENGINE'] except AttributeError: return settings.DATABASE_ENGINE def get_all_expired(self, expires): """Get all expired results.""" return self.filter(date_done__lt=now() - maybe_timedelta(expires)) def delete_expired(self, expires): """Delete all expired results.""" with transaction.atomic(using=self.db): self.get_all_expired(expires).delete() class TaskResultManager(ResultManager): """Manager for :class:`~.models.TaskResult` models.""" _last_id = None def get_task(self, task_id): """Get result for task by ``task_id``. Keyword Arguments: exception_retry_count (int): How many times to retry by transaction rollback on exception. This could happen in a race condition if another worker is trying to create the same task. The default is to retry once. """ try: return self.get(task_id=task_id) except self.model.DoesNotExist: if self._last_id == task_id: self.warn_if_repeatable_read() self._last_id = task_id return self.model(task_id=task_id) @transaction_retry(max_retries=2) def store_result(self, content_type, content_encoding, task_id, result, status, traceback=None, meta=None, periodic_task_name=None, task_name=None, task_args=None, task_kwargs=None, worker=None, using=None): """Store the result and status of a task. Arguments: content_type (str): Mime-type of result and meta content. content_encoding (str): Type of encoding (e.g. binary/utf-8). task_id (str): Id of task. periodic_task_name (str): Celery Periodic task name. task_name (str): Celery task name. task_args (str): Task arguments. task_kwargs (str): Task kwargs. result (str): The serialized return value of the task, or an exception instance raised by the task. status (str): Task status. See :mod:`celery.states` for a list of possible status values. worker (str): Worker that executes the task. using (str): Django database connection to use. traceback (str): The traceback string taken at the point of exception (only passed if the task failed). meta (str): Serialized result meta data (this contains e.g. children). Keyword Arguments: exception_retry_count (int): How many times to retry by transaction rollback on exception. This could happen in a race condition if another worker is trying to create the same task. The default is to retry twice. 
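        Example:
            Illustrative sketch only; the task id, payload and worker
            name below are made-up values, not part of the documented
            interface beyond the arguments described above::

                TaskResult.objects.store_result(
                    content_type='application/json',
                    content_encoding='utf-8',
                    task_id='some-task-uuid',
                    result='"42"',
                    status='SUCCESS',
                    worker='celery@worker-1',
                )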
""" fields = { 'status': status, 'result': result, 'traceback': traceback, 'meta': meta, 'content_encoding': content_encoding, 'content_type': content_type, 'periodic_task_name': periodic_task_name, 'task_name': task_name, 'task_args': task_args, 'task_kwargs': task_kwargs, 'worker': worker } obj, created = self.using(using).get_or_create(task_id=task_id, defaults=fields) if not created: for k, v in fields.items(): setattr(obj, k, v) obj.save(using=using) return obj class GroupResultManager(ResultManager): """Manager for :class:`~.models.GroupResult` models.""" _last_id = None def get_group(self, group_id): """Get result for group by ``group_id``. Keyword Arguments: exception_retry_count (int): How many times to retry by transaction rollback on exception. This could happen in a race condition if another worker is trying to create the same task. The default is to retry once. """ try: return self.get(group_id=group_id) except self.model.DoesNotExist: if self._last_id == group_id: self.warn_if_repeatable_read() self._last_id = group_id return self.model(group_id=group_id) @transaction_retry(max_retries=2) def store_group_result(self, content_type, content_encoding, group_id, result, using=None): fields = { 'result': result, 'content_encoding': content_encoding, 'content_type': content_type, } if not using: using = self.db obj, created = self.using(using).get_or_create(group_id=group_id, defaults=fields) if not created: for k, v in fields.items(): setattr(obj, k, v) obj.save(using=self.db) return obj django-celery-results-2.5.1/django_celery_results/migrations/000077500000000000000000000000001442620237100245045ustar00rootroot00000000000000django-celery-results-2.5.1/django_celery_results/migrations/0001_initial.py000066400000000000000000000050311442620237100271460ustar00rootroot00000000000000from django.conf import settings from django.db import migrations, models class Migration(migrations.Migration): initial = True dependencies = [ ] operations = [ migrations.CreateModel( name='TaskResult', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('task_id', models.CharField( max_length=getattr( settings, 'DJANGO_CELERY_RESULTS_TASK_ID_MAX_LENGTH', 255 ), unique=True, verbose_name='task id' )), ('status', models.CharField(choices=[('FAILURE', 'FAILURE'), ('PENDING', 'PENDING'), ('RECEIVED', 'RECEIVED'), ('RETRY', 'RETRY'), ('REVOKED', 'REVOKED'), ('STARTED', 'STARTED'), ('SUCCESS', 'SUCCESS')], default='PENDING', max_length=50, verbose_name='state')), ('content_type', models.CharField( max_length=128, verbose_name='content type')), ('content_encoding', models.CharField( max_length=64, verbose_name='content encoding')), ('result', models.TextField(default=None, editable=False, null=True)), ('date_done', models.DateTimeField( auto_now=True, verbose_name='done at')), ('traceback', models.TextField( blank=True, null=True, verbose_name='traceback')), ('hidden', models.BooleanField( db_index=True, default=False, editable=False)), ('meta', models.TextField(default=None, editable=False, null=True)), ], options={ 'verbose_name': 'task result', 'verbose_name_plural': 'task results', }, ), ] django-celery-results-2.5.1/django_celery_results/migrations/0002_add_task_name_args_kwargs.py000066400000000000000000000015501442620237100326640ustar00rootroot00000000000000# Generated by Django 1.9.1 on 2017-10-26 16:06 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('django_celery_results', '0001_initial'), ] 
operations = [ migrations.AddField( model_name='taskresult', name='task_args', field=models.TextField(null=True, verbose_name='task arguments'), ), migrations.AddField( model_name='taskresult', name='task_kwargs', field=models.TextField(null=True, verbose_name='task kwargs'), ), migrations.AddField( model_name='taskresult', name='task_name', field=models.CharField(max_length=255, null=True, verbose_name='task name' ), ), ] django-celery-results-2.5.1/django_celery_results/migrations/0003_auto_20181106_1101.py000066400000000000000000000007721442620237100301220ustar00rootroot00000000000000# Generated by Django 2.1 on 2018-11-06 11:01 from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('django_celery_results', '0002_add_task_name_args_kwargs'), ] operations = [ migrations.AlterModelOptions( name='taskresult', options={ 'ordering': ['-date_done'], 'verbose_name': 'task result', 'verbose_name_plural': 'task results' }, ), ] django-celery-results-2.5.1/django_celery_results/migrations/0004_auto_20190516_0412.py000066400000000000000000000102361442620237100301300ustar00rootroot00000000000000# Generated by Django 1.11.20 on 2019-05-16 04:12 # this file is auto-generated so don't do flake8 on it # flake8: noqa from django.conf import settings from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('django_celery_results', '0003_auto_20181106_1101'), ] operations = [ migrations.AlterField( model_name='taskresult', name='content_encoding', field=models.CharField(help_text='The encoding used to save the task result data', max_length=64, verbose_name='Result Encoding'), ), migrations.AlterField( model_name='taskresult', name='content_type', field=models.CharField(help_text='Content type of the result data', max_length=128, verbose_name='Result Content Type'), ), migrations.AlterField( model_name='taskresult', name='date_done', field=models.DateTimeField(auto_now=True, db_index=True, help_text='Datetime field when the task was completed in UTC', verbose_name='Completed DateTime'), ), migrations.AlterField( model_name='taskresult', name='hidden', field=models.BooleanField(db_index=True, default=False, editable=False, help_text='Soft Delete flag that can be used instead of full delete', verbose_name='Hidden'), ), migrations.AlterField( model_name='taskresult', name='meta', field=models.TextField(default=None, editable=False, help_text='JSON meta information about the task, such as information on child tasks', null=True, verbose_name='Task Meta Information'), ), migrations.AlterField( model_name='taskresult', name='result', field=models.TextField(default=None, editable=False, help_text='The data returned by the task. 
Use content_encoding and content_type fields to read.', null=True, verbose_name='Result Data'), ), migrations.AlterField( model_name='taskresult', name='status', field=models.CharField(choices=[('FAILURE', 'FAILURE'), ('PENDING', 'PENDING'), ('RECEIVED', 'RECEIVED'), ('RETRY', 'RETRY'), ('REVOKED', 'REVOKED'), ('STARTED', 'STARTED'), ('SUCCESS', 'SUCCESS')], db_index=True, default='PENDING', help_text='Current state of the task being run', max_length=50, verbose_name='Task State'), ), migrations.AlterField( model_name='taskresult', name='task_args', field=models.TextField(help_text='JSON representation of the positional arguments used with the task', null=True, verbose_name='Task Positional Arguments'), ), migrations.AlterField( model_name='taskresult', name='task_id', field=models.CharField( db_index=True, help_text='Celery ID for the Task that was run', max_length=getattr( settings, 'DJANGO_CELERY_RESULTS_TASK_ID_MAX_LENGTH', 255 ), unique=True, verbose_name='Task ID' ), ), migrations.AlterField( model_name='taskresult', name='task_kwargs', field=models.TextField(help_text='JSON representation of the named arguments used with the task', null=True, verbose_name='Task Named Arguments'), ), migrations.AlterField( model_name='taskresult', name='task_name', field=models.CharField( db_index=True, help_text='Name of the Task which was run', max_length=getattr( settings, 'DJANGO_CELERY_RESULTS_TASK_ID_MAX_LENGTH', 255 ), null=True, verbose_name='Task Name'), ), migrations.AlterField( model_name='taskresult', name='traceback', field=models.TextField(blank=True, help_text='Text of the traceback if the task generated one', null=True, verbose_name='Traceback'), ), ] django-celery-results-2.5.1/django_celery_results/migrations/0005_taskresult_worker.py000066400000000000000000000012601442620237100313130ustar00rootroot00000000000000# Generated by Django 1.11.22 on 2019-07-24 15:38 # this file is auto-generated so don't do flake8 on it # flake8: noqa from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('django_celery_results', '0004_auto_20190516_0412'), ] operations = [ migrations.AddField( model_name='taskresult', name='worker', field=models.CharField(db_index=True, default=None, help_text='Worker that executes the task', max_length=100, null=True, verbose_name='Worker'), ), ] django-celery-results-2.5.1/django_celery_results/migrations/0006_taskresult_date_created.py000066400000000000000000000025671442620237100324220ustar00rootroot00000000000000# Generated by Django 2.2.4 on 2019-08-21 19:53 # this file is auto-generated so don't do flake8 on it # flake8: noqa import django.utils.timezone from django.db import migrations, models def copy_date_done_to_date_created(apps, schema_editor): TaskResult = apps.get_model('django_celery_results', 'taskresult') db_alias = schema_editor.connection.alias TaskResult.objects.using(db_alias).all().update( date_created=models.F('date_done') ) def reverse_copy_date_done_to_date_created(app, schema_editor): # the reverse of 'copy_date_done_to_date_created' is do nothing # because the 'date_created' will be removed. 
pass class Migration(migrations.Migration): dependencies = [ ('django_celery_results', '0005_taskresult_worker'), ] operations = [ migrations.AddField( model_name='taskresult', name='date_created', field=models.DateTimeField( auto_now_add=True, db_index=True, default=django.utils.timezone.now, help_text='Datetime field when the task result was created in UTC', verbose_name='Created DateTime' ), preserve_default=False, ), migrations.RunPython(copy_date_done_to_date_created, reverse_copy_date_done_to_date_created), ] django-celery-results-2.5.1/django_celery_results/migrations/0007_remove_taskresult_hidden.py000066400000000000000000000006471442620237100326240ustar00rootroot00000000000000# Generated by Django 2.2.6 on 2019-10-27 11:29 # this file is auto-generated so don't do flake8 on it # flake8: noqa from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('django_celery_results', '0006_taskresult_date_created'), ] operations = [ migrations.RemoveField( model_name='taskresult', name='hidden', ), ] django-celery-results-2.5.1/django_celery_results/migrations/0008_chordcounter.py000066400000000000000000000025731442620237100302330ustar00rootroot00000000000000# Generated by Django 3.0.6 on 2020-05-12 12:05 from django.conf import settings from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('django_celery_results', '0007_remove_taskresult_hidden'), ] operations = [ migrations.CreateModel( name='ChordCounter', fields=[ ('id', models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('group_id', models.CharField( db_index=True, help_text='Celery ID for the Chord header group', max_length=getattr( settings, 'DJANGO_CELERY_RESULTS_TASK_ID_MAX_LENGTH', 255 ), unique=True, verbose_name='Group ID')), ('sub_tasks', models.TextField( help_text='JSON serialized list of task result tuples. 
' 'use .group_result() to decode')), ('count', models.PositiveIntegerField( help_text='Starts at len(chord header) ' 'and decrements after each task is finished')), ], ), ] django-celery-results-2.5.1/django_celery_results/migrations/0009_groupresult.py000066400000000000000000000175031442620237100301270ustar00rootroot00000000000000# Generated by Django 3.2 on 2021-04-19 14:55 from django.conf import settings from django.db import migrations, models class FakeAddIndex(migrations.AddIndex): """Fake AddIndex to correct for duplicate index added in the original 0009 migration """ def database_forwards(self, *args, **kwargs): """Don't do anything""" def database_backwards(self, *args, **kwargs): """Also don't do anything on reverting this migration The duplicate index will be cleaned up when migrating from the original 0009 to the cleanup 0010 """ class Migration(migrations.Migration): dependencies = [ ('django_celery_results', '0008_chordcounter'), ] operations = [ migrations.CreateModel( name='GroupResult', fields=[ ('id', models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('group_id', models.CharField( help_text='Celery ID for the Group that was run', max_length=getattr( settings, 'DJANGO_CELERY_RESULTS_TASK_ID_MAX_LENGTH', 255 ), unique=True, verbose_name='Group ID')), ('date_created', models.DateTimeField( auto_now_add=True, help_text='Datetime field when the group result ' 'was created in UTC', verbose_name='Created DateTime')), ('date_done', models.DateTimeField( auto_now=True, help_text='Datetime field when the group was ' 'completed in UTC', verbose_name='Completed DateTime')), ('content_type', models.CharField( help_text='Content type of the result data', max_length=128, verbose_name='Result Content Type')), ('content_encoding', models.CharField( help_text='The encoding used to save the task ' 'result data', max_length=64, verbose_name='Result Encoding')), ('result', models.TextField( default=None, editable=False, help_text='The data returned by the task. 
Use ' 'content_encoding and content_type ' 'fields to read.', null=True, verbose_name='Result Data')), ], options={ 'verbose_name': 'group result', 'verbose_name_plural': 'group results', 'ordering': ['-date_done'], }, ), migrations.AlterField( model_name='chordcounter', name='group_id', field=models.CharField( help_text='Celery ID for the Chord header group', max_length=getattr( settings, 'DJANGO_CELERY_RESULTS_TASK_ID_MAX_LENGTH', 255 ), unique=True, verbose_name='Group ID'), ), migrations.AlterField( model_name='taskresult', name='date_created', field=models.DateTimeField( auto_now_add=True, help_text='Datetime field when the task result ' 'was created in UTC', verbose_name='Created DateTime'), ), migrations.AlterField( model_name='taskresult', name='date_done', field=models.DateTimeField( auto_now=True, help_text='Datetime field when the task was completed in UTC', verbose_name='Completed DateTime'), ), migrations.AlterField( model_name='taskresult', name='status', field=models.CharField( choices=[ ('FAILURE', 'FAILURE'), ('PENDING', 'PENDING'), ('RECEIVED', 'RECEIVED'), ('RETRY', 'RETRY'), ('REVOKED', 'REVOKED'), ('STARTED', 'STARTED'), ('SUCCESS', 'SUCCESS')], default='PENDING', help_text='Current state of the task being run', max_length=50, verbose_name='Task State'), ), migrations.AlterField( model_name='taskresult', name='task_id', field=models.CharField( help_text='Celery ID for the Task that was run', max_length=getattr( settings, 'DJANGO_CELERY_RESULTS_TASK_ID_MAX_LENGTH', 255 ), unique=True, verbose_name='Task ID'), ), migrations.AlterField( model_name='taskresult', name='task_name', field=models.CharField( help_text='Name of the Task which was run', max_length=getattr( settings, 'DJANGO_CELERY_RESULTS_TASK_ID_MAX_LENGTH', 255 ), null=True, verbose_name='Task Name'), ), migrations.AlterField( model_name='taskresult', name='worker', field=models.CharField( default=None, help_text='Worker that executes the task', max_length=100, null=True, verbose_name='Worker'), ), FakeAddIndex( model_name='chordcounter', index=models.Index( fields=['group_id'], name='django_cele_group_i_299b0d_idx'), ), FakeAddIndex( model_name='taskresult', index=models.Index( fields=['task_id'], name='django_cele_task_id_7f8fca_idx'), ), migrations.AddIndex( model_name='taskresult', index=models.Index( fields=['task_name'], name='django_cele_task_na_08aec9_idx'), ), migrations.AddIndex( model_name='taskresult', index=models.Index( fields=['status'], name='django_cele_status_9b6201_idx'), ), migrations.AddIndex( model_name='taskresult', index=models.Index( fields=['worker'], name='django_cele_worker_d54dd8_idx'), ), migrations.AddIndex( model_name='taskresult', index=models.Index( fields=['date_created'], name='django_cele_date_cr_f04a50_idx'), ), migrations.AddIndex( model_name='taskresult', index=models.Index( fields=['date_done'], name='django_cele_date_do_f59aad_idx'), ), FakeAddIndex( model_name='groupresult', index=models.Index( fields=['group_id'], name='django_cele_group_i_3cddec_idx'), ), migrations.AddIndex( model_name='groupresult', index=models.Index( fields=['date_created'], name='django_cele_date_cr_bd6c1d_idx'), ), migrations.AddIndex( model_name='groupresult', index=models.Index( fields=['date_done'], name='django_cele_date_do_caae0e_idx'), ), ] django-celery-results-2.5.1/django_celery_results/migrations/0010_remove_duplicate_indices.py000066400000000000000000000025421442620237100325460ustar00rootroot00000000000000""" Migration to amend the 0009 migration released on django_celery_results 
2.1.0 That migration introduced duplicate indexes breaking Oracle support. This migration will remove those indexes (on non-Oracle db's) while in-place changing migration 0009 to not add the duplicates for new installs """ from django.db import DatabaseError, migrations class TryRemoveIndex(migrations.RemoveIndex): """Operation to remove the Index without reintroducing it on reverting the migration """ def database_forwards(self, *args, **kwargs): """Remove the index on the database if it exists""" try: super().database_forwards(*args, **kwargs) except DatabaseError: pass def database_backwards(self, *args, **kwargs): """Don't re-add the index when reverting this migration""" pass class Migration(migrations.Migration): dependencies = [ ('django_celery_results', '0009_groupresult'), ] operations = [ TryRemoveIndex( model_name='chordcounter', name='django_cele_group_i_299b0d_idx', ), TryRemoveIndex( model_name='groupresult', name='django_cele_group_i_3cddec_idx', ), TryRemoveIndex( model_name='taskresult', name='django_cele_task_id_7f8fca_idx', ), ] django-celery-results-2.5.1/django_celery_results/migrations/0011_taskresult_periodic_task_name.py000066400000000000000000000011111442620237100336120ustar00rootroot00000000000000# Generated by Django 3.2.8 on 2021-11-10 08:05 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('django_celery_results', '0010_remove_duplicate_indices'), ] operations = [ migrations.AddField( model_name='taskresult', name='periodic_task_name', field=models.CharField( help_text='Name of the Periodic Task which was run', max_length=255, null=True, verbose_name='Periodic Task Name'), ), ] django-celery-results-2.5.1/django_celery_results/migrations/__init__.py000066400000000000000000000000001442620237100266030ustar00rootroot00000000000000django-celery-results-2.5.1/django_celery_results/models.py000066400000000000000000000176441442620237100242010ustar00rootroot00000000000000"""Database models.""" import json from celery import states from celery.result import GroupResult as CeleryGroupResult from celery.result import result_from_tuple from django.conf import settings from django.db import models from django.utils.translation import gettext_lazy as _ from . 
import managers ALL_STATES = sorted(states.ALL_STATES) TASK_STATE_CHOICES = sorted(zip(ALL_STATES, ALL_STATES)) class TaskResult(models.Model): """Task result/status.""" task_id = models.CharField( max_length=getattr( settings, 'DJANGO_CELERY_RESULTS_TASK_ID_MAX_LENGTH', 255 ), unique=True, verbose_name=_('Task ID'), help_text=_('Celery ID for the Task that was run')) periodic_task_name = models.CharField( null=True, max_length=255, verbose_name=_('Periodic Task Name'), help_text=_('Name of the Periodic Task which was run')) task_name = models.CharField( null=True, max_length=getattr( settings, 'DJANGO_CELERY_RESULTS_TASK_ID_MAX_LENGTH', 255 ), verbose_name=_('Task Name'), help_text=_('Name of the Task which was run')) task_args = models.TextField( null=True, verbose_name=_('Task Positional Arguments'), help_text=_('JSON representation of the positional arguments ' 'used with the task')) task_kwargs = models.TextField( null=True, verbose_name=_('Task Named Arguments'), help_text=_('JSON representation of the named arguments ' 'used with the task')) status = models.CharField( max_length=50, default=states.PENDING, choices=TASK_STATE_CHOICES, verbose_name=_('Task State'), help_text=_('Current state of the task being run')) worker = models.CharField( max_length=100, default=None, null=True, verbose_name=_('Worker'), help_text=_('Worker that executes the task') ) content_type = models.CharField( max_length=128, verbose_name=_('Result Content Type'), help_text=_('Content type of the result data')) content_encoding = models.CharField( max_length=64, verbose_name=_('Result Encoding'), help_text=_('The encoding used to save the task result data')) result = models.TextField( null=True, default=None, editable=False, verbose_name=_('Result Data'), help_text=_('The data returned by the task. 
' 'Use content_encoding and content_type fields to read.')) date_created = models.DateTimeField( auto_now_add=True, verbose_name=_('Created DateTime'), help_text=_('Datetime field when the task result was created in UTC')) date_done = models.DateTimeField( auto_now=True, verbose_name=_('Completed DateTime'), help_text=_('Datetime field when the task was completed in UTC')) traceback = models.TextField( blank=True, null=True, verbose_name=_('Traceback'), help_text=_('Text of the traceback if the task generated one')) meta = models.TextField( null=True, default=None, editable=False, verbose_name=_('Task Meta Information'), help_text=_('JSON meta information about the task, ' 'such as information on child tasks')) objects = managers.TaskResultManager() class Meta: """Table information.""" ordering = ['-date_done'] verbose_name = _('task result') verbose_name_plural = _('task results') # Explicit names to solve https://code.djangoproject.com/ticket/33483 indexes = [ models.Index(fields=['task_name'], name='django_cele_task_na_08aec9_idx'), models.Index(fields=['status'], name='django_cele_status_9b6201_idx'), models.Index(fields=['worker'], name='django_cele_worker_d54dd8_idx'), models.Index(fields=['date_created'], name='django_cele_date_cr_f04a50_idx'), models.Index(fields=['date_done'], name='django_cele_date_do_f59aad_idx'), ] def as_dict(self): return { 'task_id': self.task_id, 'task_name': self.task_name, 'task_args': self.task_args, 'task_kwargs': self.task_kwargs, 'status': self.status, 'result': self.result, 'date_done': self.date_done, 'traceback': self.traceback, 'meta': self.meta, 'worker': self.worker } def __str__(self): return ''.format(self) class ChordCounter(models.Model): """Chord synchronisation.""" group_id = models.CharField( max_length=getattr( settings, "DJANGO_CELERY_RESULTS_TASK_ID_MAX_LENGTH", 255), unique=True, verbose_name=_("Group ID"), help_text=_("Celery ID for the Chord header group"), ) sub_tasks = models.TextField( help_text=_( "JSON serialized list of task result tuples. " "use .group_result() to decode" ) ) count = models.PositiveIntegerField( help_text=_( "Starts at len(chord header) and decrements after each task is " "finished" ) ) def group_result(self, app=None): """Return the :class:`celery.result.GroupResult` of self. Arguments: app (celery.app.base.Celery): app instance to create the :class:`celery.result.GroupResult` with. 
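        Example:
            A minimal illustrative call; ``counter`` is assumed to be a
            previously saved ``ChordCounter`` row and ``current_app`` the
            active Celery application::

                from celery import current_app

                group = counter.group_result(app=current_app)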
""" return CeleryGroupResult( self.group_id, [result_from_tuple(r, app=app) for r in json.loads(self.sub_tasks)], app=app ) class GroupResult(models.Model): """Task Group result/status.""" group_id = models.CharField( max_length=getattr( settings, "DJANGO_CELERY_RESULTS_TASK_ID_MAX_LENGTH", 255 ), unique=True, verbose_name=_("Group ID"), help_text=_("Celery ID for the Group that was run"), ) date_created = models.DateTimeField( auto_now_add=True, verbose_name=_("Created DateTime"), help_text=_("Datetime field when the group result was created in UTC"), ) date_done = models.DateTimeField( auto_now=True, verbose_name=_("Completed DateTime"), help_text=_("Datetime field when the group was completed in UTC"), ) content_type = models.CharField( max_length=128, verbose_name=_("Result Content Type"), help_text=_("Content type of the result data"), ) content_encoding = models.CharField( max_length=64, verbose_name=_("Result Encoding"), help_text=_("The encoding used to save the task result data"), ) result = models.TextField( null=True, default=None, editable=False, verbose_name=_('Result Data'), help_text=_('The data returned by the task. ' 'Use content_encoding and content_type fields to read.')) def as_dict(self): return { 'group_id': self.group_id, 'result': self.result, 'date_done': self.date_done, } def __str__(self): return f'' objects = managers.GroupResultManager() class Meta: """Table information.""" ordering = ['-date_done'] verbose_name = _('group result') verbose_name_plural = _('group results') # Explicit names to solve https://code.djangoproject.com/ticket/33483 indexes = [ models.Index(fields=['date_created'], name='django_cele_date_cr_bd6c1d_idx'), models.Index(fields=['date_done'], name='django_cele_date_do_caae0e_idx'), ] django-celery-results-2.5.1/django_celery_results/urls.py000066400000000000000000000041351442620237100236720ustar00rootroot00000000000000"""URLs defined for celery. * ``/$task_id/done/`` URL to :func:`~celery.views.is_successful`. * ``/$task_id/status/`` URL to :func:`~celery.views.task_status`. """ import warnings from django.conf import settings from django.urls import path, register_converter from . import views class TaskPatternConverter: """Custom path converter for task & group id's. They are slightly different from the built `uuid` """ regex = r'[\w\d\-\.]+' def to_python(self, value): """Convert url to python value.""" return str(value) def to_url(self, value): """Convert python value into url, just a string.""" return value register_converter(TaskPatternConverter, 'task_pattern') urlpatterns = [ path( 'task/done//', views.is_task_successful, name='celery-is_task_successful' ), path( 'task/status//', views.task_status, name='celery-task_status' ), path( 'group/done//', views.is_group_successful, name='celery-is_group_successful' ), path( 'group/status//', views.group_status, name='celery-group_status' ), ] if getattr(settings, 'DJANGO_CELERY_RESULTS_ID_FIRST_URLS', True): warnings.warn( "ID first urls depricated, use noun first urls instead." 
"Will be removed in 2022.", DeprecationWarning ) urlpatterns += [ path( '/done/', views.is_task_successful, name='celery-is_task_successful' ), path( '/status/', views.task_status, name='celery-task_status' ), path( '/group/done/', views.is_group_successful, name='celery-is_group_successful' ), path( '/group/status/', views.group_status, name='celery-group_status' ), ] django-celery-results-2.5.1/django_celery_results/utils.py000066400000000000000000000007351442620237100240470ustar00rootroot00000000000000"""Utilities.""" # -- XXX This module must not use translation as that causes # -- a recursive loader import! from django.conf import settings from django.utils import timezone # see Issue celery/django-celery#222 now_localtime = getattr(timezone, 'template_localtime', timezone.localtime) def now(): """Return the current date and time.""" if getattr(settings, 'USE_TZ', False): return now_localtime(timezone.now()) else: return timezone.now() django-celery-results-2.5.1/django_celery_results/views.py000066400000000000000000000033751442620237100240470ustar00rootroot00000000000000"""Views.""" from celery import states from celery.result import AsyncResult, GroupResult from celery.utils import get_full_cls_name from django.http import JsonResponse from kombu.utils.encoding import safe_repr def is_task_successful(request, task_id): """Return task execution status in JSON format.""" return JsonResponse({'task': { 'id': task_id, 'executed': AsyncResult(task_id).successful(), }}) def task_status(request, task_id): """Return task status and result in JSON format.""" result = AsyncResult(task_id) state, retval = result.state, result.result response_data = {'id': task_id, 'status': state, 'result': retval} if state in states.EXCEPTION_STATES: traceback = result.traceback response_data.update({'result': safe_repr(retval), 'exc': get_full_cls_name(retval.__class__), 'traceback': traceback}) return JsonResponse({'task': response_data}) def is_group_successful(request, group_id): """Return if group was successfull as boolean.""" results = GroupResult.restore(group_id) return JsonResponse({ 'group': { 'id': group_id, 'results': [ {'id': task.id, 'executed': task.successful()} for task in results ] if results else [] } }) def group_status(request, group_id): """Return group id and its async results status & result in JSON format.""" result = GroupResult.restore(group_id) retval = [ {"result": async_result.result, "status": async_result.status} for async_result in result.results ] response_data = {'id': group_id, 'results': retval} return JsonResponse({'group': response_data}) django-celery-results-2.5.1/docs/000077500000000000000000000000001442620237100166725ustar00rootroot00000000000000django-celery-results-2.5.1/docs/Makefile000066400000000000000000000203321442620237100203320ustar00rootroot00000000000000# Makefile for Sphinx documentation # # You can set these variables from the command line. SPHINXOPTS = SPHINXBUILD = sphinx-build PAPER = BUILDDIR = _build # User-friendly check for sphinx-build ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don\'t have Sphinx installed, grab it from https://www.sphinx-doc.org/) endif # Internal variables. 
PAPEROPT_a4 = -D latex_paper_size=a4 PAPEROPT_letter = -D latex_paper_size=letter ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . # the i18n builder cannot share the environment and doctrees with the others I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . .PHONY: help help: @echo "Please use \`make ' where is one of" @echo " html to make standalone HTML files" @echo " dirhtml to make HTML files named index.html in directories" @echo " singlehtml to make a single large HTML file" @echo " pickle to make pickle files" @echo " json to make JSON files" @echo " htmlhelp to make HTML files and a HTML help project" @echo " qthelp to make HTML files and a qthelp project" @echo " applehelp to make an Apple Help Book" @echo " devhelp to make HTML files and a Devhelp project" @echo " epub to make an epub" @echo " epub3 to make an epub3" @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" @echo " latexpdf to make LaTeX files and run them through pdflatex" @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" @echo " text to make text files" @echo " man to make manual pages" @echo " texinfo to make Texinfo files" @echo " info to make Texinfo files and run them through makeinfo" @echo " gettext to make PO message catalogs" @echo " changes to make an overview of all changed/added/deprecated items" @echo " xml to make Docutils-native XML files" @echo " pseudoxml to make pseudoxml-XML files for display purposes" @echo " linkcheck to check all external links for integrity" @echo " doctest to run all doctests embedded in the documentation (if enabled)" @echo " coverage to run coverage check of the documentation (if enabled)" @echo " apicheck to verify that all modules are present in autodoc" @echo " configcheck to verify that all modules are present in autodoc" @echo " spelling to run a spell checker on the documentation" .PHONY: clean clean: rm -rf $(BUILDDIR)/* .PHONY: html html: $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." .PHONY: dirhtml dirhtml: $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." .PHONY: singlehtml singlehtml: $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml @echo @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." .PHONY: pickle pickle: $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle @echo @echo "Build finished; now you can process the pickle files." .PHONY: json json: $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json @echo @echo "Build finished; now you can process the JSON files." .PHONY: htmlhelp htmlhelp: $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp @echo @echo "Build finished; now you can run HTML Help Workshop with the" \ ".hhp project file in $(BUILDDIR)/htmlhelp." .PHONY: qthelp qthelp: $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp @echo @echo "Build finished; now you can run "qcollectiongenerator" with the" \ ".qhcp project file in $(BUILDDIR)/qthelp, like this:" @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/PROJ.qhcp" @echo "To view the help file:" @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/PROJ.qhc" .PHONY: applehelp applehelp: $(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp @echo @echo "Build finished. The help book is in $(BUILDDIR)/applehelp." @echo "N.B. 
You won't be able to view it unless you put it in" \ "~/Library/Documentation/Help or install it in your application" \ "bundle." .PHONY: devhelp devhelp: $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp @echo @echo "Build finished." @echo "To view the help file:" @echo "# mkdir -p $$HOME/.local/share/devhelp/PROJ" @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/PROJ" @echo "# devhelp" .PHONY: epub epub: $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub @echo @echo "Build finished. The epub file is in $(BUILDDIR)/epub." .PHONY: epub3 epub3: $(SPHINXBUILD) -b epub3 $(ALLSPHINXOPTS) $(BUILDDIR)/epub3 @echo @echo "Build finished. The epub3 file is in $(BUILDDIR)/epub3." .PHONY: latex latex: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." @echo "Run \`make' in that directory to run these through (pdf)latex" \ "(use \`make latexpdf' here to do that automatically)." .PHONY: latexpdf latexpdf: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through pdflatex..." $(MAKE) -C $(BUILDDIR)/latex all-pdf @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." .PHONY: latexpdfja latexpdfja: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through platex and dvipdfmx..." $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." .PHONY: text text: $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text @echo @echo "Build finished. The text files are in $(BUILDDIR)/text." .PHONY: man man: $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man @echo @echo "Build finished. The manual pages are in $(BUILDDIR)/man." .PHONY: texinfo texinfo: $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo @echo @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." @echo "Run \`make' in that directory to run these through makeinfo" \ "(use \`make info' here to do that automatically)." .PHONY: info info: $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo @echo "Running Texinfo files through makeinfo..." make -C $(BUILDDIR)/texinfo info @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." .PHONY: gettext gettext: $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/django_celery_results/locale @echo @echo "Build finished. The message catalogs are in $(BUILDDIR)/django_celery_results/locale." .PHONY: changes changes: $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes @echo @echo "The overview file is in $(BUILDDIR)/changes." .PHONY: linkcheck linkcheck: $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck @echo @echo "Link check complete; look for any errors in the above output " \ "or in $(BUILDDIR)/linkcheck/output.txt." .PHONY: doctest doctest: $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest @echo "Testing of doctests in the sources finished, look at the " \ "results in $(BUILDDIR)/doctest/output.txt." .PHONY: coverage coverage: $(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage @echo "Testing of coverage in the sources finished, look at the " \ "results in $(BUILDDIR)/coverage/python.txt." 
.PHONY: apicheck apicheck: $(SPHINXBUILD) -b apicheck $(ALLSPHINXOPTS) $(BUILDDIR)/apicheck .PHONY: configcheck configcheck: $(SPHINXBUILD) -b configcheck $(ALLSPHINXOPTS) $(BUILDDIR)/configcheck .PHONY: spelling spelling: SPELLCHECK=1 $(SPHINXBUILD) -b spelling $(ALLSPHINXOPTS) $(BUILDDIR)/spelling .PHONY: xml xml: $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml @echo @echo "Build finished. The XML files are in $(BUILDDIR)/xml." .PHONY: pseudoxml pseudoxml: $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml @echo @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." django-celery-results-2.5.1/docs/_static/000077500000000000000000000000001442620237100203205ustar00rootroot00000000000000django-celery-results-2.5.1/docs/_static/.keep000066400000000000000000000000001442620237100212330ustar00rootroot00000000000000django-celery-results-2.5.1/docs/_templates/000077500000000000000000000000001442620237100210275ustar00rootroot00000000000000django-celery-results-2.5.1/docs/_templates/.keep000066400000000000000000000000001442620237100217420ustar00rootroot00000000000000django-celery-results-2.5.1/docs/changelog.rst000066400000000000000000000000321442620237100213460ustar00rootroot00000000000000.. include:: ../Changelog django-celery-results-2.5.1/docs/conf.py000066400000000000000000000015451442620237100201760ustar00rootroot00000000000000import os from sphinx_celery import conf globals().update(conf.build_config( 'django_celery_results', __file__, project='django_celery_results', # version_dev='2.0', # version_stable='1.4', canonical_url='https://django-celery-results.readthedocs.io', webdomain='', github_project='celery/django-celery-results', copyright='2009-2022', django_settings='proj.settings', include_intersphinx={'python', 'sphinx', 'django', 'celery'}, path_additions=[os.path.join(os.pardir, 't')], extra_extensions=['sphinx.ext.napoleon'], html_logo='images/logo.png', html_favicon='images/favicon.ico', html_prepend_sidebars=[], apicheck_ignore_modules=[ 'django_celery_results', 'django_celery_results.apps', 'django_celery_results.admin', r'django_celery_results.migrations.*', ], )) django-celery-results-2.5.1/docs/copyright.rst000066400000000000000000000017071442620237100214410ustar00rootroot00000000000000Copyright ========= *django-celery-results User Manual* by Ask Solem .. |copy| unicode:: U+000A9 .. COPYRIGHT SIGN Copyright |copy| 2016, Ask Solem All rights reserved. This material may be copied or distributed only subject to the terms and conditions set forth in the `Creative Commons Attribution-ShareAlike 4.0 International `_ license. You may share and adapt the material, even for commercial purposes, but you must give the original author credit. If you alter, transform, or build upon this work, you may distribute the resulting work only under the same license or a license compatible to this one. .. note:: While the django-celery-results *documentation* is offered under the Creative Commons *Attribution-ShareAlike 4.0 International* license the django-celery-results *software* is offered under the `BSD License (3 Clause) `_ django-celery-results-2.5.1/docs/getting_started.rst000066400000000000000000000035161442620237100226200ustar00rootroot00000000000000Getting started =============== To use :pypi:`django-celery-results` with your project you need to follow these steps: #. Install the :pypi:`django-celery-results` library: .. code-block:: console $ pip install django-celery-results #. 
Add ``django_celery_results`` to ``INSTALLED_APPS`` in your Django project's :file:`settings.py`:: INSTALLED_APPS = ( ..., 'django_celery_results', ) Note that there is no dash in the module name, only underscores. #. Create the Celery database tables by performing the database migrations: .. code-block:: console $ python manage.py migrate django_celery_results #. Configure Celery to use the :pypi:`django-celery-results` backend. Assuming you are using Django's :file:`settings.py` to also configure Celery, add the following settings: .. code-block:: python CELERY_RESULT_BACKEND = 'django-db' For the cache backend you can use: .. code-block:: python CELERY_CACHE_BACKEND = 'django-cache' We can also use the cache defined in the CACHES setting in Django. .. code-block:: python # celery setting. CELERY_CACHE_BACKEND = 'default' # django setting. CACHES = { 'default': { 'BACKEND': 'django.core.cache.backends.db.DatabaseCache', 'LOCATION': 'my_cache_table', } } If you want to include extended information about your tasks, remember to enable the :setting:`result_extended` setting. .. code-block:: python CELERY_RESULT_EXTENDED = True If you want to track the execution duration of your tasks (by comparing `date_created` and `date_done` in TaskResult), enable the :setting:`track_started` setting. .. code-block:: python CELERY_TASK_TRACK_STARTED = True django-celery-results-2.5.1/docs/glossary.rst .. _glossary: Glossary ======== .. glossary:: :sorted: term Description of term django-celery-results-2.5.1/docs/images/favicon.ico [binary PNG image data omitted]
django-celery-results-2.5.1/docs/images/logo.png [binary PNG image data omitted]
django-celery-results-2.5.1/docs/includes/000077500000000000000000000000001442620237100205005ustar00rootroot00000000000000django-celery-results-2.5.1/docs/includes/installation.txt000066400000000000000000000020131442620237100237360ustar00rootroot00000000000000.. _installation: Installation ============ You can install django-celery-results either via the Python Package Index (PyPI) or from source. To install using `pip`,:: $ pip install -U django-celery-results .. _installing-from-source: Downloading and installing from source -------------------------------------- Download the latest version of django-celery-results from https://pypi.python.org/pypi/django-celery-results You can install it by doing the following,:: $ tar xvfz django-celery-results-0.0.0.tar.gz $ cd django-celery-results-0.0.0 $ python setup.py build # python setup.py install The last command must be executed as a privileged user if you are not currently using a virtualenv. .. _installing-from-git: Using the development version ----------------------------- With pip ~~~~~~~~ You can install the latest snapshot of django-celery-results using the following pip command:: $ pip install https://github.com/celery/django-celery-results/zipball/master#egg=django-celery-results django-celery-results-2.5.1/docs/includes/introduction.txt000066400000000000000000000016741442620237100237660ustar00rootroot00000000000000:Version: 2.5.1 :Web: https://django-celery-results.readthedocs.io/ :Download: https://pypi.python.org/pypi/django-celery-results :Source: https://github.com/celery/django-celery-results :Keywords: django, celery, database, results About ===== This extension enables you to store Celery task and group results using the Django ORM. It defines 2 models (:class:`django_celery_results.models.TaskResult` and :class:`django_celery_results.models.GroupResult`) used to store task and group results, and you can query these database tables like any other Django model. If your :pypi:`django-celery-beat` carries ``request["properties"]["periodic_task_name"]``, it will be stored in :attr:`TaskResult.periodic_task_name <django_celery_results.models.TaskResult.periodic_task_name>` to track the periodic task. Installing ========== The installation instructions for this extension are available from the :ref:`Celery documentation <django-celery-results>`. django-celery-results-2.5.1/docs/index.rst000066400000000000000000000010531442620237100205320ustar00rootroot00000000000000======================================================================= django-celery-results - Celery Result Backends for Django ======================================================================= .. include:: includes/introduction.txt Contents ======== .. toctree:: :maxdepth: 1 getting_started injecting_metadata copyright .. toctree:: :maxdepth: 2 reference/index ..
toctree:: :maxdepth: 1 changelog glossary Indices and tables ================== * :ref:`genindex` * :ref:`modindex` * :ref:`search` django-celery-results-2.5.1/docs/injecting_metadata.rst000066400000000000000000000037441442620237100232460ustar00rootroot00000000000000Injecting metadata ================== To save arbitrary data on the field TaskResult.meta, the Celery task request must be manipulated as follows: .. code-block:: python from celery import Celery app = Celery('hello', broker='amqp://guest@localhost//') @app.task(bind=True) def hello(task_instance): task_instance.request.meta = {'some_key': 'some_value'} task_instance.update_state( state='PROGRESS', meta='Task current result' ) # If TaskResult is queried from the DB at this moment it will yield # TaskResult( # result='Task current result', # meta={'some_key': 'some_value'} # some discrepancies apply as I didn't document the json parse and children data # ) return 'hello world' # After the task is completed, if TaskResult is queried from the DB at this moment it will yield # TaskResult( # result='hello world', # meta={'some_key': 'some_value'} # some discrepancies apply as I didn't document the json parse and children data # ) This way, the value of ``task_instance.request.meta`` will be stored on ``TaskResult.meta``. Note that the `meta` arg of the method `update_state` is not really metadata and is not stored on ``TaskResult.meta``. This arg is used to save the CURRENT result of the task, so it is stored on ``TaskResult.result``. It works this way because, while a task is executing, the `TaskResult` really serves as the current task state, holding information temporarily until the task completes. Subsequent calls to `update_state` will update the same `TaskResult`, overwriting what was there previously. Upon completion, the return value of the function is stored in ``TaskResult.result``, overwriting the previous state, and ``TaskResult.status`` is set to 'SUCCESS' (or 'FAILURE'). django-celery-results-2.5.1/docs/make.bat000066400000000000000000000161251442620237100203040ustar00rootroot00000000000000@ECHO OFF REM Command file for Sphinx documentation if "%SPHINXBUILD%" == "" ( set SPHINXBUILD=sphinx-build ) set BUILDDIR=_build set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . set I18NSPHINXOPTS=%SPHINXOPTS% . if NOT "%PAPER%" == "" ( set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% ) if "%1" == "" goto help if "%1" == "help" ( :help echo.Please use `make ^<target^>` where ^<target^> is one of echo. html to make standalone HTML files echo. dirhtml to make HTML files named index.html in directories echo. singlehtml to make a single large HTML file echo. pickle to make pickle files echo. json to make JSON files echo. htmlhelp to make HTML files and a HTML help project echo. qthelp to make HTML files and a qthelp project echo. devhelp to make HTML files and a Devhelp project echo. epub to make an epub echo. epub3 to make an epub3 echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter echo. text to make text files echo. man to make manual pages echo. texinfo to make Texinfo files echo. gettext to make PO message catalogs echo. changes to make an overview over all changed/added/deprecated items echo. xml to make Docutils-native XML files echo. pseudoxml to make pseudoxml-XML files for display purposes echo. linkcheck to check all external links for integrity echo.
doctest to run all doctests embedded in the documentation if enabled echo. coverage to run coverage check of the documentation if enabled goto end ) if "%1" == "clean" ( for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i del /q /s %BUILDDIR%\* goto end ) REM Check if sphinx-build is available and fallback to Python version if any %SPHINXBUILD% 1>NUL 2>NUL if errorlevel 9009 goto sphinx_python goto sphinx_ok :sphinx_python set SPHINXBUILD=python -m sphinx.__init__ %SPHINXBUILD% 2> nul if errorlevel 9009 ( echo. echo.The 'sphinx-build' command was not found. Make sure you have Sphinx echo.installed, then set the SPHINXBUILD environment variable to point echo.to the full path of the 'sphinx-build' executable. Alternatively you echo.may add the Sphinx directory to PATH. echo. echo.If you don't have Sphinx installed, grab it from echo.https://www.sphinx-doc.org/ exit /b 1 ) :sphinx_ok if "%1" == "html" ( %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/html. goto end ) if "%1" == "dirhtml" ( %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. goto end ) if "%1" == "singlehtml" ( %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. goto end ) if "%1" == "pickle" ( %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can process the pickle files. goto end ) if "%1" == "json" ( %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can process the JSON files. goto end ) if "%1" == "htmlhelp" ( %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can run HTML Help Workshop with the ^ .hhp project file in %BUILDDIR%/htmlhelp. goto end ) if "%1" == "qthelp" ( %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can run "qcollectiongenerator" with the ^ .qhcp project file in %BUILDDIR%/qthelp, like this: echo.^> qcollectiongenerator %BUILDDIR%\qthelp\PROJ.qhcp echo.To view the help file: echo.^> assistant -collectionFile %BUILDDIR%\qthelp\PROJ.ghc goto end ) if "%1" == "devhelp" ( %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp if errorlevel 1 exit /b 1 echo. echo.Build finished. goto end ) if "%1" == "epub" ( %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub if errorlevel 1 exit /b 1 echo. echo.Build finished. The epub file is in %BUILDDIR%/epub. goto end ) if "%1" == "epub3" ( %SPHINXBUILD% -b epub3 %ALLSPHINXOPTS% %BUILDDIR%/epub3 if errorlevel 1 exit /b 1 echo. echo.Build finished. The epub3 file is in %BUILDDIR%/epub3. goto end ) if "%1" == "latex" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex if errorlevel 1 exit /b 1 echo. echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. goto end ) if "%1" == "latexpdf" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex cd %BUILDDIR%/latex make all-pdf cd %~dp0 echo. echo.Build finished; the PDF files are in %BUILDDIR%/latex. goto end ) if "%1" == "latexpdfja" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex cd %BUILDDIR%/latex make all-pdf-ja cd %~dp0 echo. echo.Build finished; the PDF files are in %BUILDDIR%/latex. 
goto end ) if "%1" == "text" ( %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text if errorlevel 1 exit /b 1 echo. echo.Build finished. The text files are in %BUILDDIR%/text. goto end ) if "%1" == "man" ( %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man if errorlevel 1 exit /b 1 echo. echo.Build finished. The manual pages are in %BUILDDIR%/man. goto end ) if "%1" == "texinfo" ( %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo if errorlevel 1 exit /b 1 echo. echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. goto end ) if "%1" == "gettext" ( %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/django_celery_results/locale if errorlevel 1 exit /b 1 echo. echo.Build finished. The message catalogs are in %BUILDDIR%/django_celery_results/locale. goto end ) if "%1" == "changes" ( %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes if errorlevel 1 exit /b 1 echo. echo.The overview file is in %BUILDDIR%/changes. goto end ) if "%1" == "linkcheck" ( %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck if errorlevel 1 exit /b 1 echo. echo.Link check complete; look for any errors in the above output ^ or in %BUILDDIR%/linkcheck/output.txt. goto end ) if "%1" == "doctest" ( %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest if errorlevel 1 exit /b 1 echo. echo.Testing of doctests in the sources finished, look at the ^ results in %BUILDDIR%/doctest/output.txt. goto end ) if "%1" == "coverage" ( %SPHINXBUILD% -b coverage %ALLSPHINXOPTS% %BUILDDIR%/coverage if errorlevel 1 exit /b 1 echo. echo.Testing of coverage in the sources finished, look at the ^ results in %BUILDDIR%/coverage/python.txt. goto end ) if "%1" == "xml" ( %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml if errorlevel 1 exit /b 1 echo. echo.Build finished. The XML files are in %BUILDDIR%/xml. goto end ) if "%1" == "pseudoxml" ( %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml if errorlevel 1 exit /b 1 echo. echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml. goto end ) :end django-celery-results-2.5.1/docs/reference/000077500000000000000000000000001442620237100206305ustar00rootroot00000000000000django-celery-results-2.5.1/docs/reference/django_celery_results.backends.cache.rst000066400000000000000000000005011442620237100305570ustar00rootroot00000000000000===================================================== ``django_celery_results.backends.cache`` ===================================================== .. contents:: :local: .. currentmodule:: django_celery_results.backends.cache .. automodule:: django_celery_results.backends.cache :members: :undoc-members: django-celery-results-2.5.1/docs/reference/django_celery_results.backends.database.rst000066400000000000000000000005121442620237100312620ustar00rootroot00000000000000===================================================== ``django_celery_results.backends.database`` ===================================================== .. contents:: :local: .. currentmodule:: django_celery_results.backends.database .. automodule:: django_celery_results.backends.database :members: :undoc-members: django-celery-results-2.5.1/docs/reference/django_celery_results.backends.rst000066400000000000000000000004571442620237100275270ustar00rootroot00000000000000===================================================== ``django_celery_results.backends`` ===================================================== .. contents:: :local: .. currentmodule:: django_celery_results.backends .. 
automodule:: django_celery_results.backends :members: :undoc-members: django-celery-results-2.5.1/docs/reference/django_celery_results.managers.rst000066400000000000000000000004571442620237100275520ustar00rootroot00000000000000===================================================== ``django_celery_results.managers`` ===================================================== .. contents:: :local: .. currentmodule:: django_celery_results.managers .. automodule:: django_celery_results.managers :members: :undoc-members: django-celery-results-2.5.1/docs/reference/django_celery_results.models.rst000066400000000000000000000004511442620237100272320ustar00rootroot00000000000000===================================================== ``django_celery_results.models`` ===================================================== .. contents:: :local: .. currentmodule:: django_celery_results.models .. automodule:: django_celery_results.models :members: :undoc-members: django-celery-results-2.5.1/docs/reference/django_celery_results.utils.rst000066400000000000000000000004461442620237100271130ustar00rootroot00000000000000===================================================== ``django_celery_results.utils`` ===================================================== .. contents:: :local: .. currentmodule:: django_celery_results.utils .. automodule:: django_celery_results.utils :members: :undoc-members: django-celery-results-2.5.1/docs/reference/index.rst000066400000000000000000000005341442620237100224730ustar00rootroot00000000000000.. _apiref: =============== API Reference =============== :Release: |version| :Date: |today| .. toctree:: :maxdepth: 1 django_celery_results.backends django_celery_results.backends.database django_celery_results.backends.cache django_celery_results.models django_celery_results.managers django_celery_results.utils django-celery-results-2.5.1/docs/templates/000077500000000000000000000000001442620237100206705ustar00rootroot00000000000000django-celery-results-2.5.1/docs/templates/readme.txt000066400000000000000000000027071442620237100226740ustar00rootroot00000000000000===================================================================== Celery Result Backends using the Django ORM/Cache framework. ===================================================================== |build-status| |coverage| |license| |wheel| |pyversion| |pyimp| .. include:: ../includes/introduction.txt .. include:: ../includes/installation.txt .. |build-status| image:: https://secure.travis-ci.org/celery/django-celery-results.png?branch=master :alt: Build status :target: https://travis-ci.org/celery/django-celery-results .. |coverage| image:: https://codecov.io/github/celery/django-celery-results/coverage.svg?branch=master :target: https://codecov.io/github/celery/django-celery-results?branch=master .. |license| image:: https://img.shields.io/pypi/l/django-celery-results.svg :alt: BSD License :target: https://opensource.org/licenses/BSD-3-Clause .. |wheel| image:: https://img.shields.io/pypi/wheel/django-celery-results.svg :alt: django-celery-results can be installed via wheel :target: https://pypi.python.org/pypi/django-celery-results/ .. |pyversion| image:: https://img.shields.io/pypi/pyversions/django-celery-results.svg :alt: Supported Python versions. :target: https://pypi.python.org/pypi/django-celery-results/ .. |pyimp| image:: https://img.shields.io/pypi/implementation/django-celery-results.svg :alt: Support Python implementations. 
:target: https://pypi.python.org/pypi/django-celery-results/ django-celery-results-2.5.1/manage.py000077500000000000000000000003721442620237100175510ustar00rootroot00000000000000#!/usr/bin/env python import os import sys if __name__ == '__main__': os.environ.setdefault('DJANGO_SETTINGS_MODULE', 't.proj.settings') from django.core.management import execute_from_command_line execute_from_command_line(sys.argv) django-celery-results-2.5.1/requirements/000077500000000000000000000000001442620237100204655ustar00rootroot00000000000000django-celery-results-2.5.1/requirements/default.txt000066400000000000000000000000421442620237100226460ustar00rootroot00000000000000celery>=5.2.7,<6.0 Django>=3.2.18 django-celery-results-2.5.1/requirements/docs.txt000066400000000000000000000000651442620237100221570ustar00rootroot00000000000000sphinx_celery>=1.1 Django>=3.2.18 celery>=5.2.7,<6.0 django-celery-results-2.5.1/requirements/pkgutils.txt000066400000000000000000000001411442620237100230640ustar00rootroot00000000000000setuptools>=40.8.0 wheel>=0.33.1 flake8>=3.8.3 tox>=2.3.1 sphinx2rst>=1.0 bumpversion pydocstyle django-celery-results-2.5.1/requirements/test-ci.txt000066400000000000000000000001001442620237100225650ustar00rootroot00000000000000pytest-cov codecov importlib-metadata<5.0; python_version<"3.8" django-celery-results-2.5.1/requirements/test-django.txt000066400000000000000000000000401442620237100234370ustar00rootroot00000000000000Django>=3.2,<5.0 psycopg>=3.1.8 django-celery-results-2.5.1/requirements/test-django32.txt000066400000000000000000000000241442620237100236060ustar00rootroot00000000000000django>=3.2.18,<4.0 django-celery-results-2.5.1/requirements/test-django40.txt000066400000000000000000000000241442620237100236050ustar00rootroot00000000000000django>=4.0.10,<4.1 django-celery-results-2.5.1/requirements/test-django41.txt000066400000000000000000000000231442620237100236050ustar00rootroot00000000000000django>=4.1.7,<4.2 django-celery-results-2.5.1/requirements/test-django42.txt000066400000000000000000000001711442620237100236120ustar00rootroot00000000000000django>=4.2,<5.0 psycopg>=3.1.8 # necessary due to https://docs.djangoproject.com/en/4.2/releases/4.2/#psycopg-3-support django-celery-results-2.5.1/requirements/test.txt000066400000000000000000000001221442620237100222000ustar00rootroot00000000000000case>=1.3.1 pytest>=6.2.5 pytest-django>=4.5.2 pytest-benchmark pytz psycopg2cffi django-celery-results-2.5.1/setup.cfg000066400000000000000000000007021442620237100175620ustar00rootroot00000000000000[tool:pytest] testpaths = t/ python_classes = test_* python_files = test_* benchmark_* DJANGO_SETTINGS_MODULE = t.proj.settings markers = benchmark: mark a test as a benchmark [flake8] # classes can be lowercase, arguments and variables can be uppercase # whenever it makes the code more readable. ignore = N806, N802, N801, N803 [pep257] convention=google add-ignore = D102,D104,D203,D105,D213 match-dir = [^migrations] [isort] profile=black django-celery-results-2.5.1/setup.py000066400000000000000000000107151442620237100174600ustar00rootroot00000000000000#!/usr/bin/env python3 import codecs import os import re import sys import setuptools import setuptools.command.test try: import platform _pyimp = platform.python_implementation except (AttributeError, ImportError): def _pyimp(): return 'Python' NAME = 'django_celery_results' E_UNSUPPORTED_PYTHON = f'{NAME} 1.0 requires %s %s or later!' 
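# Interpreter guard: the check below aborts installation on interpreters
# older than Python 3.7, except PyPy, which is accepted from release 7.3 onward.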
PYIMP = _pyimp() PY37_OR_LESS = sys.version_info < (3, 7) PYPY_VERSION = getattr(sys, 'pypy_version_info', None) PYPY73_ATLEAST = PYPY_VERSION and PYPY_VERSION >= (7, 3) if PY37_OR_LESS and not PYPY73_ATLEAST: raise Exception(E_UNSUPPORTED_PYTHON % (PYIMP, '3.7')) # -*- Classifiers -*- classes = """ Development Status :: 5 - Production/Stable License :: OSI Approved :: BSD License Programming Language :: Python Programming Language :: Python :: 3 Programming Language :: Python :: 3.7 Programming Language :: Python :: 3.8 Programming Language :: Python :: 3.9 Programming Language :: Python :: 3.10 Programming Language :: Python :: 3.11 Programming Language :: Python :: Implementation :: CPython Programming Language :: Python :: Implementation :: PyPy Framework :: Django Framework :: Django :: 3.2 Framework :: Django :: 4.0 Framework :: Django :: 4.1 Framework :: Django :: 4.2 Operating System :: OS Independent Topic :: Communications Topic :: System :: Distributed Computing Topic :: Software Development :: Libraries :: Python Modules """ classifiers = [s.strip() for s in classes.split('\n') if s] # -*- Distribution Meta -*- re_meta = re.compile(r'__(\w+?)__\s*=\s*(.*)') re_doc = re.compile(r'^"""(.+?)"""') def add_default(m): attr_name, attr_value = m.groups() return ((attr_name, attr_value.strip("\"'")),) def add_doc(m): return (('doc', m.groups()[0]),) pats = {re_meta: add_default, re_doc: add_doc} here = os.path.abspath(os.path.dirname(__file__)) with open(os.path.join(here, NAME, '__init__.py')) as meta_fh: meta = {} for line in meta_fh: if line.strip() == '# -eof meta-': break for pattern, handler in pats.items(): m = pattern.match(line.strip()) if m: meta.update(handler(m)) # -*- Installation Requires -*- def strip_comments(line): return line.split('#', 1)[0].strip() def _pip_requirement(req): if req.startswith('-r '): _, path = req.split() return reqs(*path.split('/')) return [req] def _reqs(*f): with open(os.path.join(os.getcwd(), 'requirements', *f)) as fp: return [ _pip_requirement(r) for r in (strip_comments(line) for line in fp) if r ] def reqs(*f): return [req for subreq in _reqs(*f) for req in subreq] # -*- Long Description -*- if os.path.exists('README.rst'): long_description = codecs.open('README.rst', 'r', 'utf-8').read() else: long_description = f'See https://pypi.python.org/pypi/{NAME}' # -*- %%% -*- class pytest(setuptools.command.test.test): user_options = [('pytest-args=', 'a', 'Arguments to pass to pytest')] def initialize_options(self): super().initialize_options() self.pytest_args = [] def run_tests(self): import pytest sys.exit(pytest.main(self.pytest_args)) setuptools.setup( name=NAME, packages=setuptools.find_packages(exclude=['ez_setup', 't', 't.*']), version=meta['version'], description=meta['doc'], long_description=long_description, long_description_content_type='text/x-rst', keywords='celery django database result backend', author=meta['author'], author_email=meta['contact'], url=meta['homepage'], project_urls={ 'Documentation': ( 'https://django-celery-results.readthedocs.io/en/latest/' ), 'Changelog': ( 'https://django-celery-results.readthedocs.io/en/latest/' 'changelog.html' ), 'Repository': 'https://github.com/celery/django-celery-results', }, platforms=['any'], license='BSD', classifiers=classifiers, install_requires=reqs('default.txt'), tests_require=reqs('test.txt') + reqs('test-django.txt'), cmdclass={'test': pytest}, entry_points={ 'celery.result_backends': [ 'django-db = django_celery_results.backends:DatabaseBackend', 'django-cache = 
django_celery_results.backends:CacheBackend', ], }, zip_safe=False, include_package_data=True, ) django-celery-results-2.5.1/t/000077500000000000000000000000001442620237100162055ustar00rootroot00000000000000django-celery-results-2.5.1/t/__init__.py000066400000000000000000000000001442620237100203040ustar00rootroot00000000000000django-celery-results-2.5.1/t/conftest.py000066400000000000000000000024521442620237100204070ustar00rootroot00000000000000import pytest # we have to import the pytest plugin fixtures here, # in case user did not do the `python setup.py develop` yet, # that installs the pytest plugin into the setuptools registry. from celery.contrib.pytest import ( celery_app, celery_config, celery_enable_logging, celery_parameters, depends_on_current_app, use_celery_app_trap, ) from celery.contrib.testing.app import TestApp, Trap # Tricks flake8 into silencing redefining fixtures warnings. __all__ = ( 'celery_app', 'celery_enable_logging', 'depends_on_current_app', 'celery_parameters', 'celery_config', 'use_celery_app_trap' ) @pytest.fixture(scope='session', autouse=True) def setup_default_app_trap(): from celery._state import set_default_app set_default_app(Trap()) @pytest.fixture() def app(celery_app): return celery_app @pytest.fixture(autouse=True) def test_cases_shortcuts(request, app, patching): if request.instance: @app.task def add(x, y): return x + y # IMPORTANT: We set an .app attribute for every test case class. request.instance.app = app request.instance.Celery = TestApp request.instance.add = add request.instance.patching = patching yield if request.instance: request.instance.app = None django-celery-results-2.5.1/t/integration/000077500000000000000000000000001442620237100205305ustar00rootroot00000000000000django-celery-results-2.5.1/t/integration/__init__.py000066400000000000000000000000001442620237100226270ustar00rootroot00000000000000django-celery-results-2.5.1/t/integration/benchmark_models.py000066400000000000000000000043201442620237100243760ustar00rootroot00000000000000import time from datetime import timedelta import pytest from celery import uuid from django.test import TransactionTestCase from django_celery_results.models import TaskResult from django_celery_results.utils import now RECORDS_COUNT = 100000 @pytest.fixture() def use_benchmark(request, benchmark): def wrapped(a=10, b=5): return a + b request.cls.benchmark = benchmark @pytest.mark.usefixtures('use_benchmark') @pytest.mark.usefixtures('depends_on_current_app') class benchmark_Models(TransactionTestCase): @pytest.fixture(autouse=True) def setup_app(self, app): self.app = app self.app.conf.result_serializer = 'pickle' self.app.conf.result_backend = ( 'django_celery_results.backends:DatabaseBackend') def create_many_task_result(self, count): start = time.time() draft_results = [TaskResult(task_id=uuid()) for _ in range(count)] drafted = time.time() results = TaskResult.objects.bulk_create(draft_results) done_creating = time.time() print(( 'drafting time: {drafting:.2f}\n' 'bulk_create time: {done:.2f}\n' '------' ).format(drafting=drafted - start, done=done_creating - drafted)) return results def setup_records_to_delete(self): self.create_many_task_result(count=RECORDS_COUNT) mid_point = TaskResult.objects.order_by('id')[int(RECORDS_COUNT / 2)] todelete = TaskResult.objects.filter(id__gte=mid_point.id) todelete.update(date_done=now() - timedelta(days=10)) def test_taskresult_delete_expired(self): start = time.time() self.setup_records_to_delete() after_setup = time.time() self.benchmark.pedantic( 
TaskResult.objects.delete_expired, args=(self.app.conf.result_expires,), iterations=1, rounds=1, ) done = time.time() assert TaskResult.objects.count() == int(RECORDS_COUNT / 2) print(( '------' 'setup time: {setup:.2f}\n' 'bench time: {bench:.2f}\n' ).format(setup=after_setup - start, bench=done - after_setup)) assert self.benchmark.stats.stats.max < 1 django-celery-results-2.5.1/t/proj/000077500000000000000000000000001442620237100171575ustar00rootroot00000000000000django-celery-results-2.5.1/t/proj/__init__.py000066400000000000000000000000561442620237100212710ustar00rootroot00000000000000from .celery import app as celery_app # noqa django-celery-results-2.5.1/t/proj/celery.py000066400000000000000000000004721442620237100210170ustar00rootroot00000000000000import os from celery import Celery os.environ.setdefault('DJANGO_SETTINGS_MODULE', 't.proj.settings') app = Celery('proj') # Using a string here means the worker doesn't have to serialize # the configuration object. app.config_from_object('django.conf:settings', namespace='CELERY') app.autodiscover_tasks() django-celery-results-2.5.1/t/proj/settings.py000066400000000000000000000102461442620237100213740ustar00rootroot00000000000000""" Django settings for Test project. Generated by 'django-admin startproject' using Django 1.9.1. For more information on this file, see https://docs.djangoproject.com/en/1.9/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/1.9/ref/settings/ """ import os import sys # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) sys.path.insert(0, os.path.abspath(os.path.join(BASE_DIR, os.pardir))) # configure psycopg2cffi for psycopg2 compatibility. We must use this package # support pypy. # if not installed, use sqlite as a backup (some tests may fail), # otherwise even makemigrations won't run. try: from psycopg2cffi import compat compat.register() DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql', 'HOST': 'localhost', 'NAME': 'postgres', 'USER': 'postgres', 'PASSWORD': 'postgres', 'OPTIONS': { 'connect_timeout': 1000, } }, 'secondary': { 'ENGINE': 'django.db.backends.postgresql', 'HOST': 'localhost', 'NAME': 'postgres', 'USER': 'postgres', 'PASSWORD': 'postgres', 'OPTIONS': { 'connect_timeout': 1000, }, 'TEST': { 'MIRROR': 'default', }, }, } except ImportError: DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), 'OPTIONS': { 'timeout': 1000, } }, 'secondary': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), 'OPTIONS': { 'timeout': 1000, } }, } # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = 'u($kbs9$irs0)436gbo9%!b&#zyd&70tx!n7!i&fl6qun@z1_l' # SECURITY WARNING: don't run with debug turned on in production! 
DEBUG = True ALLOWED_HOSTS = [] # Application definition INSTALLED_APPS = [ 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'django_celery_results', ] MIDDLEWARE = [ 'django.middleware.security.SecurityMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', ] ROOT_URLCONF = 't.proj.urls' TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }, ] WSGI_APPLICATION = 't.proj.wsgi.application' CACHES = { 'default': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', }, 'dummy': { 'BACKEND': 'django.core.cache.backends.dummy.DummyCache', }, } # Password validation # https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators django_auth = 'django.contrib.auth.password_validation.' AUTH_PASSWORD_VALIDATORS = [ ] # Internationalization # https://docs.djangoproject.com/en/1.9/topics/i18n/ LANGUAGE_CODE = 'en-us' TIME_ZONE = 'UTC' USE_I18N = True USE_L10N = True USE_TZ = True DJANGO_CELERY_RESULTS_TASK_ID_MAX_LENGTH = 191 # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/1.9/howto/static-files/ STATIC_URL = '/static/' django-celery-results-2.5.1/t/proj/urls.py000066400000000000000000000001661442620237100205210ustar00rootroot00000000000000from django.contrib import admin from django.urls import path urlpatterns = [ path('admin/', admin.site.urls), ] django-celery-results-2.5.1/t/proj/wsgi.py000066400000000000000000000006031442620237100205010ustar00rootroot00000000000000""" WSGI config for Test project. It exposes the WSGI callable as a module-level variable named ``application``. 
For more information on this file, see https://docs.djangoproject.com/en/1.9/howto/deployment/wsgi/ """ import os from django.core.wsgi import get_wsgi_application os.environ.setdefault("DJANGO_SETTINGS_MODULE", "t.proj.settings") application = get_wsgi_application() django-celery-results-2.5.1/t/unit/000077500000000000000000000000001442620237100171645ustar00rootroot00000000000000django-celery-results-2.5.1/t/unit/__init__.py000066400000000000000000000000001442620237100212630ustar00rootroot00000000000000django-celery-results-2.5.1/t/unit/backends/000077500000000000000000000000001442620237100207365ustar00rootroot00000000000000django-celery-results-2.5.1/t/unit/backends/__init__.py000066400000000000000000000000001442620237100230350ustar00rootroot00000000000000django-celery-results-2.5.1/t/unit/backends/test_cache.py000066400000000000000000000064261442620237100234220ustar00rootroot00000000000000import sys from datetime import timedelta import pytest from billiard.einfo import ExceptionInfo from celery import result, states, uuid from kombu.utils.encoding import bytes_to_str from django_celery_results.backends.cache import CacheBackend class SomeClass: def __init__(self, data): self.data = data class test_CacheBackend: def setup(self): self.b = CacheBackend(app=self.app) def test_mark_as_done(self): tid = uuid() assert self.b.get_status(tid) == states.PENDING assert self.b.get_result(tid) is None self.b.mark_as_done(tid, 42) assert self.b.get_status(tid) == states.SUCCESS assert self.b.get_result(tid) == 42 def test_forget(self): tid = uuid() self.b.mark_as_done(tid, {'foo': 'bar'}) assert self.b.get_result(tid).get('foo') == 'bar' self.b.forget(tid) assert tid not in self.b._cache assert self.b.get_result(tid) is None @pytest.mark.usefixtures('depends_on_current_app') def test_save_restore_delete_group(self): group_id = uuid() result_ids = [uuid() for i in range(10)] results = list(map(result.AsyncResult, result_ids)) res = result.GroupResult(group_id, results) res.save(backend=self.b) saved = result.GroupResult.restore(group_id, backend=self.b) assert saved.results == results assert saved.id == group_id saved.delete(backend=self.b) assert result.GroupResult.restore(group_id, backend=self.b) is None def test_is_pickled(self): tid2 = uuid() result = {'foo': 'baz', 'bar': SomeClass(12345)} self.b.mark_as_done(tid2, result) # is serialized properly. 
rindb = self.b.get_result(tid2) assert rindb.get('foo') == 'baz' assert rindb.get('bar').data == 12345 def test_convert_key_from_byte_to_str(self): """ Tests that key in byte form passed into cache are succesfully completed """ tid = bytes_to_str(uuid()) assert self.b.get_status(tid) == states.PENDING assert self.b.get_result(tid) is None self.b.mark_as_done(tid, 42) assert self.b.get_status(tid) == states.SUCCESS assert self.b.get_result(tid) == 42 def test_mark_as_failure(self): einfo = None tid3 = uuid() try: raise KeyError('foo') except KeyError as exception: einfo = ExceptionInfo(sys.exc_info()) self.b.mark_as_failure(tid3, exception, traceback=einfo.traceback) assert self.b.get_status(tid3) == states.FAILURE assert isinstance(self.b.get_result(tid3), KeyError) assert self.b.get_traceback(tid3) == einfo.traceback def test_process_cleanup(self): self.b.process_cleanup() def test_set_expires(self): cb1 = CacheBackend(app=self.app, expires=timedelta(seconds=16)) assert cb1.expires == 16 cb2 = CacheBackend(app=self.app, expires=32) assert cb2.expires == 32 class test_custom_CacheBackend: def test_custom_cache_backend(self): self.app.conf.cache_backend = 'dummy' b = CacheBackend(app=self.app) assert ( b.cache_backend.__class__.__module__ == 'django.core.cache.backends.dummy' # noqa ) django-celery-results-2.5.1/t/unit/backends/test_database.py000066400000000000000000000770531442620237100241270ustar00rootroot00000000000000import json import pickle import re from unittest import mock import celery import pytest from celery import states, uuid from celery.app.task import Context from celery.result import AsyncResult, GroupResult from celery.utils.serialization import b64decode from celery.worker.request import Request from celery.worker.strategy import hybrid_to_proto2 from django_celery_results.backends.database import DatabaseBackend from django_celery_results.models import ChordCounter, TaskResult class SomeClass: def __init__(self, data): self.data = data @pytest.mark.django_db() @pytest.mark.usefixtures('depends_on_current_app') class test_DatabaseBackend: @pytest.fixture(autouse=True) def setup_backend(self): self.app.conf.result_serializer = 'json' self.app.conf.result_backend = ( 'django_celery_results.backends:DatabaseBackend') self.app.conf.result_extended = True self.b = DatabaseBackend(app=self.app) def _create_request(self, task_id, name, args, kwargs, argsrepr=None, kwargsrepr=None, task_protocol=2): msg = self.app.amqp.task_protocols[task_protocol]( task_id=task_id, name=name, args=args, kwargs=kwargs, argsrepr=argsrepr, kwargsrepr=kwargsrepr, ) if task_protocol == 1: body, headers, _, _ = hybrid_to_proto2(msg, msg.body) properties = None sent_event = {} else: headers, properties, body, sent_event = msg context = Context( headers=headers, properties=properties, body=body, sent_event=sent_event, ) request = Request(context, decoded=True, task=name) if task_protocol == 1: assert request.argsrepr is None assert request.kwargsrepr is None else: assert request.argsrepr is not None assert request.kwargsrepr is not None return request def test_backend__pickle_serialization__dict_result(self): self.app.conf.result_serializer = 'pickle' self.app.conf.accept_content = {'pickle', 'json'} self.b = DatabaseBackend(app=self.app) tid2 = uuid() request = self._create_request( task_id=tid2, name='my_task', args=['a', 1, SomeClass(67)], kwargs={'c': 6, 'd': 'e', 'f': SomeClass(89)}, ) result = {'foo': 'baz', 'bar': SomeClass(12345)} self.b.mark_as_done(tid2, result, request=request) mindb = 
self.b.get_task_meta(tid2) # check task meta assert mindb.get('result').get('foo') == 'baz' assert mindb.get('result').get('bar').data == 12345 assert len(mindb.get('worker')) > 1 assert mindb.get('task_name') == 'my_task' assert bool(re.match( r"\['a', 1, <.*SomeClass object at .*>\]", mindb.get('task_args') )) assert bool(re.match( r"{'c': 6, 'd': 'e', 'f': <.*SomeClass object at .*>}", mindb.get('task_kwargs') )) # check task_result object tr = TaskResult.objects.get(task_id=tid2) task_args = pickle.loads(b64decode(tr.task_args)) task_kwargs = pickle.loads(b64decode(tr.task_kwargs)) assert task_args == mindb.get('task_args') assert task_kwargs == mindb.get('task_kwargs') # check async_result ar = AsyncResult(tid2) assert ar.args == mindb.get('task_args') assert ar.kwargs == mindb.get('task_kwargs') # check backward compatibility task_kwargs2 = str(request.kwargs) task_args2 = str(request.args) assert tr.task_args != task_args2 assert tr.task_kwargs != task_kwargs2 tr.task_args = task_args2 tr.task_kwargs = task_kwargs2 tr.save() mindb = self.b.get_task_meta(tid2) assert bool(re.match( r"\['a', 1, <.*SomeClass object at .*>\]", mindb.get('task_args') )) assert bool(re.match( r"{'c': 6, 'd': 'e', 'f': <.*SomeClass object at .*>}", mindb.get('task_kwargs') )) ar = AsyncResult(tid2) assert ar.args == mindb.get('task_args') assert ar.kwargs == mindb.get('task_kwargs') tid3 = uuid() try: raise KeyError('foo') except KeyError as exception: self.b.mark_as_failure(tid3, exception) assert self.b.get_status(tid3) == states.FAILURE assert isinstance(self.b.get_result(tid3), KeyError) def test_backend__pickle_serialization__str_result(self): self.app.conf.result_serializer = 'pickle' self.app.conf.accept_content = {'pickle', 'json'} self.b = DatabaseBackend(app=self.app) tid2 = uuid() request = self._create_request( task_id=tid2, name='my_task', args=['a', 1, SomeClass(67)], kwargs={'c': 6, 'd': 'e', 'f': SomeClass(89)}, ) result = 'foo' self.b.mark_as_done(tid2, result, request=request) mindb = self.b.get_task_meta(tid2) # check task meta assert mindb.get('result') == 'foo' assert mindb.get('task_name') == 'my_task' assert len(mindb.get('worker')) > 1 assert bool(re.match( r"\['a', 1, <.*SomeClass object at .*>\]", mindb.get('task_args') )) assert bool(re.match( r"{'c': 6, 'd': 'e', 'f': <.*SomeClass object at .*>}", mindb.get('task_kwargs') )) # check task_result object tr = TaskResult.objects.get(task_id=tid2) task_args = pickle.loads(b64decode(tr.task_args)) task_kwargs = pickle.loads(b64decode(tr.task_kwargs)) assert task_args == mindb.get('task_args') assert task_kwargs == mindb.get('task_kwargs') # check async_result ar = AsyncResult(tid2) assert ar.args == mindb.get('task_args') assert ar.kwargs == mindb.get('task_kwargs') def test_backend__pickle_serialization__bytes_result(self): self.app.conf.result_serializer = 'pickle' self.app.conf.accept_content = {'pickle', 'json'} self.b = DatabaseBackend(app=self.app) tid2 = uuid() request = self._create_request( task_id=tid2, name='my_task', args=['a', 1, SomeClass(67)], kwargs={'c': 6, 'd': 'e', 'f': SomeClass(89)}, ) result = b'foo' self.b.mark_as_done(tid2, result, request=request) mindb = self.b.get_task_meta(tid2) # check task meta assert mindb.get('result') == b'foo' assert mindb.get('task_name') == 'my_task' assert len(mindb.get('worker')) > 1 assert bool(re.match( r"\['a', 1, <.*SomeClass object at .*>\]", mindb.get('task_args') )) assert bool(re.match( r"{'c': 6, 'd': 'e', 'f': <.*SomeClass object at .*>}", mindb.get('task_kwargs') )) # 
check task_result objects tr = TaskResult.objects.get(task_id=tid2) task_args = pickle.loads(b64decode(tr.task_args)) task_kwargs = pickle.loads(b64decode(tr.task_kwargs)) assert task_args == mindb.get('task_args') assert task_kwargs == mindb.get('task_kwargs') # check async_result ar = AsyncResult(tid2) assert ar.args == mindb.get('task_args') assert ar.kwargs == mindb.get('task_kwargs') def test_backend__json_serialization__dict_result(self): self.app.conf.result_serializer = 'json' self.app.conf.accept_content = {'pickle', 'json'} self.b = DatabaseBackend(app=self.app) tid2 = uuid() request = self._create_request( task_id=tid2, name='my_task', args=['a', 1, True], kwargs={'c': 6, 'd': 'e', 'f': False}, ) result = {'foo': 'baz', 'bar': True} self.b.mark_as_done(tid2, result, request=request) mindb = self.b.get_task_meta(tid2) # check task meta assert mindb.get('result').get('foo') == 'baz' assert mindb.get('result').get('bar') is True assert mindb.get('task_name') == 'my_task' assert mindb.get('task_args') == "['a', 1, True]" assert mindb.get('task_kwargs') == "{'c': 6, 'd': 'e', 'f': False}" # check task_result object tr = TaskResult.objects.get(task_id=tid2) assert json.loads(tr.task_args) == "['a', 1, True]" assert json.loads(tr.task_kwargs) == "{'c': 6, 'd': 'e', 'f': False}" # check async_result ar = AsyncResult(tid2) assert ar.args == mindb.get('task_args') assert ar.kwargs == mindb.get('task_kwargs') # check backward compatibility task_kwargs2 = str(request.kwargs) task_args2 = str(request.args) assert tr.task_args != task_args2 assert tr.task_kwargs != task_kwargs2 tr.task_args = task_args2 tr.task_kwargs = task_kwargs2 tr.save() mindb = self.b.get_task_meta(tid2) assert mindb.get('task_args') == "['a', 1, True]" assert mindb.get('task_kwargs') == "{'c': 6, 'd': 'e', 'f': False}" ar = AsyncResult(tid2) assert ar.args == mindb.get('task_args') assert ar.kwargs == mindb.get('task_kwargs') tid3 = uuid() try: raise KeyError('foo') except KeyError as exception: self.b.mark_as_failure(tid3, exception) assert self.b.get_status(tid3) == states.FAILURE assert isinstance(self.b.get_result(tid3), KeyError) def test_backend__json_serialization__str_result(self): self.app.conf.result_serializer = 'json' self.app.conf.accept_content = {'pickle', 'json'} self.b = DatabaseBackend(app=self.app) tid2 = uuid() request = self._create_request( task_id=tid2, name='my_task', args=['a', 1, True], kwargs={'c': 6, 'd': 'e', 'f': False}, ) result = 'foo' self.b.mark_as_done(tid2, result, request=request) mindb = self.b.get_task_meta(tid2) # check task meta assert mindb.get('result') == 'foo' assert mindb.get('task_name') == 'my_task' assert mindb.get('task_args') == "['a', 1, True]" assert mindb.get('task_kwargs') == "{'c': 6, 'd': 'e', 'f': False}" # check task_result object tr = TaskResult.objects.get(task_id=tid2) assert json.loads(tr.task_args) == "['a', 1, True]" assert json.loads(tr.task_kwargs) == "{'c': 6, 'd': 'e', 'f': False}" # check async_result ar = AsyncResult(tid2) assert ar.args == mindb.get('task_args') assert ar.kwargs == mindb.get('task_kwargs') def test_backend__pickle_serialization__dict_result__protocol_1(self): self.app.conf.result_serializer = 'pickle' self.app.conf.accept_content = {'pickle', 'json'} self.b = DatabaseBackend(app=self.app) tid2 = uuid() request = self._create_request( task_id=tid2, name='my_task', args=['a', 1, SomeClass(67)], kwargs={'c': 6, 'd': 'e', 'f': SomeClass(89)}, task_protocol=1, ) result = {'foo': 'baz', 'bar': SomeClass(12345)} self.b.mark_as_done(tid2, 
result, request=request) mindb = self.b.get_task_meta(tid2) # check task meta assert mindb.get('result').get('foo') == 'baz' assert mindb.get('result').get('bar').data == 12345 assert mindb.get('task_name') == 'my_task' assert mindb.get('task_args')[0] == 'a' assert mindb.get('task_args')[1] == 1 assert mindb.get('task_args')[2].data == 67 assert mindb.get('task_kwargs')['c'] == 6 assert mindb.get('task_kwargs')['d'] == 'e' assert mindb.get('task_kwargs')['f'].data == 89 # check task_result object tr = TaskResult.objects.get(task_id=tid2) task_args = pickle.loads(b64decode(tr.task_args)) assert task_args[0] == 'a' assert task_args[1] == 1 assert task_args[2].data == 67 task_kwargs = pickle.loads(b64decode(tr.task_kwargs)) assert task_kwargs['c'] == 6 assert task_kwargs['d'] == 'e' assert task_kwargs['f'].data == 89 tid3 = uuid() try: raise KeyError('foo') except KeyError as exception: self.b.mark_as_failure(tid3, exception) assert self.b.get_status(tid3) == states.FAILURE assert isinstance(self.b.get_result(tid3), KeyError) def test_backend__pickle_serialization__str_result__protocol_1(self): self.app.conf.result_serializer = 'pickle' self.app.conf.accept_content = {'pickle', 'json'} self.b = DatabaseBackend(app=self.app) tid2 = uuid() request = self._create_request( task_id=tid2, name='my_task', args=['a', 1, SomeClass(67)], kwargs={'c': 6, 'd': 'e', 'f': SomeClass(89)}, task_protocol=1, ) result = 'foo' self.b.mark_as_done(tid2, result, request=request) mindb = self.b.get_task_meta(tid2) # check task meta assert mindb.get('result') == 'foo' assert mindb.get('task_name') == 'my_task' assert mindb.get('task_args')[0] == 'a' assert mindb.get('task_args')[1] == 1 assert mindb.get('task_args')[2].data == 67 assert mindb.get('task_kwargs')['c'] == 6 assert mindb.get('task_kwargs')['d'] == 'e' assert mindb.get('task_kwargs')['f'].data == 89 # check task_result object tr = TaskResult.objects.get(task_id=tid2) task_args = pickle.loads(b64decode(tr.task_args)) assert task_args[0] == 'a' assert task_args[1] == 1 assert task_args[2].data == 67 task_kwargs = pickle.loads(b64decode(tr.task_kwargs)) assert task_kwargs['c'] == 6 assert task_kwargs['d'] == 'e' assert task_kwargs['f'].data == 89 def test_backend__pickle_serialization__bytes_result__protocol_1(self): self.app.conf.result_serializer = 'pickle' self.app.conf.accept_content = {'pickle', 'json'} self.b = DatabaseBackend(app=self.app) tid2 = uuid() request = self._create_request( task_id=tid2, name='my_task', args=['a', 1, SomeClass(67)], kwargs={'c': 6, 'd': 'e', 'f': SomeClass(89)}, task_protocol=1, ) result = b'foo' self.b.mark_as_done(tid2, result, request=request) mindb = self.b.get_task_meta(tid2) # check task meta assert mindb.get('result') == b'foo' assert mindb.get('task_name') == 'my_task' assert mindb.get('task_args')[0] == 'a' assert mindb.get('task_args')[1] == 1 assert mindb.get('task_args')[2].data == 67 assert mindb.get('task_kwargs')['c'] == 6 assert mindb.get('task_kwargs')['d'] == 'e' assert mindb.get('task_kwargs')['f'].data == 89 # check task_result object tr = TaskResult.objects.get(task_id=tid2) task_args = pickle.loads(b64decode(tr.task_args)) assert task_args[0] == 'a' assert task_args[1] == 1 assert task_args[2].data == 67 task_kwargs = pickle.loads(b64decode(tr.task_kwargs)) assert task_kwargs['c'] == 6 assert task_kwargs['d'] == 'e' assert task_kwargs['f'].data == 89 def test_backend__json_serialization__dict_result__protocol_1(self): self.app.conf.result_serializer = 'json' self.app.conf.accept_content = {'pickle', 
'json'} self.b = DatabaseBackend(app=self.app) tid2 = uuid() request = self._create_request( task_id=tid2, name='my_task', args=['a', 1, True], kwargs={'c': 6, 'd': 'e', 'f': False}, task_protocol=1, ) result = {'foo': 'baz', 'bar': True} self.b.mark_as_done(tid2, result, request=request) mindb = self.b.get_task_meta(tid2) # check task meta assert mindb.get('result').get('foo') == 'baz' assert mindb.get('result').get('bar') is True assert mindb.get('task_name') == 'my_task' assert mindb.get('task_args') == ['a', 1, True] assert mindb.get('task_kwargs') == {'c': 6, 'd': 'e', 'f': False} # check task_result object tr = TaskResult.objects.get(task_id=tid2) assert json.loads(tr.task_args) == ['a', 1, True] assert json.loads(tr.task_kwargs) == {'c': 6, 'd': 'e', 'f': False} tid3 = uuid() try: raise KeyError('foo') except KeyError as exception: self.b.mark_as_failure(tid3, exception) assert self.b.get_status(tid3) == states.FAILURE assert isinstance(self.b.get_result(tid3), KeyError) def test_backend__json_serialization__str_result__protocol_1(self): self.app.conf.result_serializer = 'json' self.app.conf.accept_content = {'pickle', 'json'} self.b = DatabaseBackend(app=self.app) tid2 = uuid() request = self._create_request( task_id=tid2, name='my_task', args=['a', 1, True], kwargs={'c': 6, 'd': 'e', 'f': False}, task_protocol=1, ) result = 'foo' self.b.mark_as_done(tid2, result, request=request) mindb = self.b.get_task_meta(tid2) # check task meta assert mindb.get('result') == 'foo' assert mindb.get('task_name') == 'my_task' assert mindb.get('task_args') == ['a', 1, True] assert mindb.get('task_kwargs') == {'c': 6, 'd': 'e', 'f': False} # check task_result object tr = TaskResult.objects.get(task_id=tid2) assert json.loads(tr.task_args) == ['a', 1, True] assert json.loads(tr.task_kwargs) == {'c': 6, 'd': 'e', 'f': False} def test_backend__task_result_meta_injection(self): self.app.conf.result_serializer = 'json' self.app.conf.accept_content = {'pickle', 'json'} self.b = DatabaseBackend(app=self.app) tid2 = uuid() request = self._create_request( task_id=tid2, name='my_task', args=[], kwargs={}, task_protocol=1, ) result = None # inject request meta arbitrary data request.meta = { 'key': 'value' } self.b.mark_as_done(tid2, result, request=request) mindb = self.b.get_task_meta(tid2) # check task meta assert mindb.get('result') is None assert mindb.get('task_name') == 'my_task' # check task_result object tr = TaskResult.objects.get(task_id=tid2) assert json.loads(tr.meta) == {'key': 'value', 'children': []} def xxx_backend(self): tid = uuid() assert self.b.get_status(tid) == states.PENDING assert self.b.get_result(tid) is None self.b.mark_as_done(tid, 42) assert self.b.get_status(tid) == states.SUCCESS assert self.b.get_result(tid) == 42 tid2 = uuid() try: raise KeyError('foo') except KeyError as exception: self.b.mark_as_failure(tid2, exception) assert self.b.get_status(tid2) == states.FAILURE assert isinstance(self.b.get_result(tid2), KeyError) def test_forget(self): tid = uuid() self.b.mark_as_done(tid, {'foo': 'bar'}) x = self.app.AsyncResult(tid) assert x.result.get('foo') == 'bar' x.forget() if celery.VERSION[0:3] == (3, 1, 10): # bug in 3.1.10 means result did not clear cache after forget. 
x._cache = None assert x.result is None def test_secrets__pickle_serialization(self): self.app.conf.result_serializer = 'pickle' self.app.conf.accept_content = {'pickle', 'json'} self.b = DatabaseBackend(app=self.app) tid = uuid() request = self._create_request( task_id=tid, name='my_task', args=['a', 1, 'password'], kwargs={'c': 3, 'd': 'e', 'password': 'password'}, argsrepr='argsrepr', kwargsrepr='kwargsrepr', ) result = {'foo': 'baz'} self.b.mark_as_done(tid, result, request=request) mindb = self.b.get_task_meta(tid) # check task meta assert mindb.get('result') == {'foo': 'baz'} assert mindb.get('task_args') == 'argsrepr' assert mindb.get('task_kwargs') == 'kwargsrepr' assert len(mindb.get('worker')) > 1 # check task_result object tr = TaskResult.objects.get(task_id=tid) task_args = pickle.loads(b64decode(tr.task_args)) task_kwargs = pickle.loads(b64decode(tr.task_kwargs)) assert task_args == 'argsrepr' assert task_kwargs == 'kwargsrepr' # check async_result ar = AsyncResult(tid) assert ar.args == mindb.get('task_args') assert ar.kwargs == mindb.get('task_kwargs') def test_secrets__json_serialization(self): self.app.conf.result_serializer = 'json' self.app.conf.accept_content = {'pickle', 'json'} self.b = DatabaseBackend(app=self.app) tid = uuid() request = self._create_request( task_id=tid, name='my_task', args=['a', 1, True], kwargs={'c': 6, 'd': 'e', 'f': False}, argsrepr='argsrepr', kwargsrepr='kwargsrepr', ) result = {'foo': 'baz'} self.b.mark_as_done(tid, result, request=request) mindb = self.b.get_task_meta(tid) # check task meta assert mindb.get('result') == {'foo': 'baz'} assert mindb.get('task_args') == 'argsrepr' assert mindb.get('task_kwargs') == 'kwargsrepr' # check task_result object tr = TaskResult.objects.get(task_id=tid) assert json.loads(tr.task_args) == 'argsrepr' assert json.loads(tr.task_kwargs) == 'kwargsrepr' # check async_result ar = AsyncResult(tid) assert ar.args == mindb.get('task_args') assert ar.kwargs == mindb.get('task_kwargs') def test_secrets__pickle_serialization__protocol_1(self): self.app.conf.result_serializer = 'pickle' self.app.conf.accept_content = {'pickle', 'json'} self.b = DatabaseBackend(app=self.app) tid = uuid() request = self._create_request( task_id=tid, name='my_task', args=['a', 1, SomeClass(67)], kwargs={'c': 6, 'd': 'e', 'f': SomeClass(89)}, argsrepr='argsrepr', kwargsrepr='kwargsrepr', task_protocol=1, ) result = {'foo': 'baz'} self.b.mark_as_done(tid, result, request=request) mindb = self.b.get_task_meta(tid) assert mindb.get('result') == {'foo': 'baz'} assert mindb.get('task_args')[0] == 'a' assert mindb.get('task_args')[1] == 1 assert mindb.get('task_args')[2].data == 67 assert mindb.get('task_kwargs')['c'] == 6 assert mindb.get('task_kwargs')['d'] == 'e' assert mindb.get('task_kwargs')['f'].data == 89 tr = TaskResult.objects.get(task_id=tid) task_args = pickle.loads(b64decode(tr.task_args)) assert task_args[0] == 'a' assert task_args[1] == 1 assert task_args[2].data == 67 task_kwargs = pickle.loads(b64decode(tr.task_kwargs)) assert task_kwargs['c'] == 6 assert task_kwargs['d'] == 'e' assert task_kwargs['f'].data == 89 def test_secrets__json_serialization__protocol_1(self): self.app.conf.result_serializer = 'json' self.app.conf.accept_content = {'pickle', 'json'} self.b = DatabaseBackend(app=self.app) tid = uuid() request = self._create_request( task_id=tid, name='my_task', args=['a', 1, True], kwargs={'c': 6, 'd': 'e', 'f': False}, argsrepr='argsrepr', kwargsrepr='kwargsrepr', task_protocol=1, ) result = {'foo': 'baz'} 
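        # Protocol 1 task messages carry no argsrepr/kwargsrepr, so the real
        # args/kwargs are persisted rather than the sanitized representations.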
self.b.mark_as_done(tid, result, request=request) mindb = self.b.get_task_meta(tid) assert mindb.get('result') == {'foo': 'baz'} assert mindb.get('task_name') == 'my_task' assert mindb.get('task_args') == ['a', 1, True] assert mindb.get('task_kwargs') == {'c': 6, 'd': 'e', 'f': False} tr = TaskResult.objects.get(task_id=tid) assert json.loads(tr.task_args) == ['a', 1, True] assert json.loads(tr.task_kwargs) == {'c': 6, 'd': 'e', 'f': False} def test_apply_chord_header_result_arg(self): """Test if apply_chord can handle Celery <= 5.1 call signature""" gid = uuid() tid1 = uuid() tid2 = uuid() subtasks = [AsyncResult(tid1), AsyncResult(tid2)] group = GroupResult(id=gid, results=subtasks) # Celery < 5.1 self.b.apply_chord(group, self.add.s()) # Celery 5.1 self.b.apply_chord((uuid(), subtasks), self.add.s()) def test_on_chord_part_return(self): """Test if the ChordCounter is properly decremented and the callback is triggered after all chord parts have returned""" gid = uuid() tid1 = uuid() tid2 = uuid() subtasks = [AsyncResult(tid1), AsyncResult(tid2)] group = GroupResult(id=gid, results=subtasks) self.b.apply_chord(group, self.add.s()) chord_counter = ChordCounter.objects.get(group_id=gid) assert chord_counter.count == 2 request = mock.MagicMock() request.id = subtasks[0].id request.group = gid request.task = "my_task" request.args = ["a", 1, "password"] request.kwargs = {"c": 3, "d": "e", "password": "password"} request.argsrepr = "argsrepr" request.kwargsrepr = "kwargsrepr" request.hostname = "celery@ip-0-0-0-0" request.properties = {"periodic_task_name": "my_periodic_task"} request.ignore_result = False result = {"foo": "baz"} self.b.mark_as_done(tid1, result, request=request) chord_counter.refresh_from_db() assert chord_counter.count == 1 self.b.mark_as_done(tid2, result, request=request) with pytest.raises(ChordCounter.DoesNotExist): ChordCounter.objects.get(group_id=gid) request.chord.delay.assert_called_once() def test_on_chord_part_return_counter_not_found(self): """Test if the chord does not raise an error if the ChordCounter is not found Basically this covers the case where a chord was created with a version <2.0.0 and the update was done before the chord was finished """ request = mock.MagicMock() request.id = uuid() request.group = uuid() self.b.on_chord_part_return(request=request, state=None, result=None) def test_callback_failure(self): """Test if a failure in the chord callback is properly handled""" gid = uuid() tid1 = uuid() tid2 = uuid() cid = uuid() subtasks = [AsyncResult(tid1), AsyncResult(tid2)] group = GroupResult(id=gid, results=subtasks) self.b.apply_chord(group, self.add.s()) chord_counter = ChordCounter.objects.get(group_id=gid) assert chord_counter.count == 2 request = mock.MagicMock() request.id = subtasks[0].id request.group = gid request.task = "my_task" request.args = ["a", 1, "password"] request.kwargs = {"c": 3, "d": "e", "password": "password"} request.argsrepr = "argsrepr" request.kwargsrepr = "kwargsrepr" request.hostname = "celery@ip-0-0-0-0" request.properties = {"periodic_task_name": "my_periodic_task"} request.ignore_result = False request.chord.id = cid result = {"foo": "baz"} # Trigger an exception when the callback is triggered request.chord.delay.side_effect = ValueError() self.b.mark_as_done(tid1, result, request=request) chord_counter.refresh_from_db() assert chord_counter.count == 1 self.b.mark_as_done(tid2, result, request=request) with pytest.raises(ChordCounter.DoesNotExist): ChordCounter.objects.get(group_id=gid) 
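        # Even though the callback raised, it should have been invoked exactly
        # once and its failure recorded as a FAILURE TaskResult below.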
request.chord.delay.assert_called_once() assert TaskResult.objects.get(task_id=cid).status == states.FAILURE def test_on_chord_part_return_failure(self): """Test if a failure in one of the chord header tasks is properly handled and the callback was not triggered """ gid = uuid() tid1 = uuid() tid2 = uuid() cid = uuid() subtasks = [AsyncResult(tid1), AsyncResult(tid2)] group = GroupResult(id=gid, results=subtasks) self.b.apply_chord(group, self.add.s()) chord_counter = ChordCounter.objects.get(group_id=gid) assert chord_counter.count == 2 request = mock.MagicMock() request.id = tid1 request.group = gid request.task = "my_task" request.args = ["a", 1, "password"] request.kwargs = {"c": 3, "d": "e", "password": "password"} request.argsrepr = "argsrepr" request.kwargsrepr = "kwargsrepr" request.hostname = "celery@ip-0-0-0-0" request.properties = {"periodic_task_name": "my_periodic_task"} request.chord.id = cid result = {"foo": "baz"} self.b.mark_as_done(tid1, result, request=request) chord_counter.refresh_from_db() assert chord_counter.count == 1 request.id = tid2 self.b.mark_as_failure(tid2, ValueError(), request=request) with pytest.raises(ChordCounter.DoesNotExist): ChordCounter.objects.get(group_id=gid) request.chord.delay.assert_not_called() def test_groupresult_save_restore(self): """Test if we can save and restore a GroupResult""" group_id = uuid() results = [AsyncResult(id=uuid())] group = GroupResult(id=group_id, results=results) group.save(backend=self.b) restored_group = self.b.restore_group(group_id=group_id) assert restored_group == group def test_groupresult_save_restore_nested(self): """Test if we can save and restore a nested GroupResult""" group_id = uuid() async_result = AsyncResult(id=uuid()) nested_results = [AsyncResult(id=uuid()), AsyncResult(id=uuid())] nested_group = GroupResult(id=uuid(), results=nested_results) group = GroupResult(id=group_id, results=[nested_group, async_result]) group.save(backend=self.b) restored_group = self.b.restore_group(group_id=group_id) assert restored_group == group def test_backend_result_extended_is_false(self): self.app.conf.result_extended = False self.b = DatabaseBackend(app=self.app) tid2 = uuid() request = self._create_request( task_id=tid2, name='my_task', args=['a', 1, True], kwargs={'c': 6, 'd': 'e', 'f': False}, ) result = 'foo' self.b.mark_as_done(tid2, result, request=request) mindb = self.b.get_task_meta(tid2) # check meta data assert mindb.get('result') == 'foo' assert mindb.get('task_name') is None assert mindb.get('task_args') is None assert mindb.get('task_kwargs') is None # check task_result object tr = TaskResult.objects.get(task_id=tid2) assert tr.task_args is None assert tr.task_kwargs is None django-celery-results-2.5.1/t/unit/test_migrations.py000066400000000000000000000026231442620237100227540ustar00rootroot00000000000000import os from django.core.management import call_command from django.test import TestCase, override_settings from django_celery_results import migrations as result_migrations class MigrationTests(TestCase): def test_no_duplicate_migration_numbers(self): """Verify no duplicate migration numbers. Migration files with the same number can cause issues with backward migrations, so avoid them. 
""" path = os.path.dirname(result_migrations.__file__) files = [f[:4] for f in os.listdir(path) if f.endswith('.py')] self.assertEqual( len(files), len(set(files)), msg='Detected migration files with the same migration number') def test_models_match_migrations(self): """Make sure that no pending migrations exist for the app. Here just detect if model changes exist that require a migration, and if so we fail. """ call_command( "makemigrations", "django_celery_results", "--check", "--dry-run" ) @override_settings(DEFAULT_AUTO_FIELD='django.db.models.BigAutoField') def test_models_match_migrations_with_changed_default_auto_field(self): """Test with changing default_auto_field. This logic make sure that no pending migrations created even if the user changes the `DEFAULT_AUTO_FIELD`. """ self.test_models_match_migrations() django-celery-results-2.5.1/t/unit/test_models.py000066400000000000000000000214211442620237100220600ustar00rootroot00000000000000from datetime import datetime, timedelta from unittest.mock import patch import pytest from celery import states, uuid from django.db import transaction from django.db.utils import InterfaceError from django.test import TransactionTestCase from django_celery_results.backends import DatabaseBackend from django_celery_results.models import GroupResult, TaskResult from django_celery_results.utils import now @pytest.mark.usefixtures('depends_on_current_app') class test_Models(TransactionTestCase): databases = '__all__' @pytest.fixture(autouse=True) def setup_app(self, app): self.app = app self.app.conf.result_serializer = 'pickle' self.app.conf.result_backend = ( 'django_celery_results.backends:DatabaseBackend') def create_task_result(self): id = uuid() taskmeta, created = TaskResult.objects.get_or_create(task_id=id) return taskmeta def test_taskmeta(self, ctype='application/json', cenc='utf-8'): m1 = self.create_task_result() m2 = self.create_task_result() m3 = self.create_task_result() assert str(m1).startswith('