cachelib-0.13.0/.editorconfig
root = true

[*]
indent_style = space
indent_size = 4
insert_final_newline = true
trim_trailing_whitespace = true
end_of_line = lf
charset = utf-8
max_line_length = 88

[*.{yml,yaml,json,js,css,html}]
indent_size = 2

cachelib-0.13.0/.github/ISSUE_TEMPLATE/bug-report.md
---
name: Bug report
about: Report a bug in CacheLib (not other projects which depend on CacheLib)
---

Environment:

- Python version:
- CacheLib version:

cachelib-0.13.0/.github/ISSUE_TEMPLATE/config.yml
blank_issues_enabled: false
contact_links:
  - name: Questions
    url: https://stackoverflow.com/search?tab=relevance&q=cachelib
    about: Search for and ask questions about your code on Stack Overflow.
  - name: Questions and discussions
    url: https://discord.gg/pallets
    about: Discuss questions about your code on our Discord chat.

cachelib-0.13.0/.github/ISSUE_TEMPLATE/feature-request.md
---
name: Feature request
about: Suggest a new feature for CacheLib
---

cachelib-0.13.0/.github/dependabot.yml
version: 2
updates:
  - package-ecosystem: pip
    directory: "/requirements"
    schedule:
      interval: monthly
      time: "08:00"
    open-pull-requests-limit: 99

cachelib-0.13.0/.github/pull_request_template.md
- fixes #

Checklist:

- [ ] Add tests that demonstrate the correct behavior of the change. Tests should fail without the change.
- [ ] Add or update relevant docs, in the docs folder and in code.
- [ ] Add an entry in `CHANGES.rst` summarizing the change and linking to the issue.
- [ ] Add `.. versionchanged::` entries in any relevant code docs.
- [ ] Run `pre-commit` hooks and fix any issues.
- [ ] Run `pytest` and `tox`, no tests failed.
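These checks can be run locally before opening the PR; a minimal sketch
(the `pytest` and `tox` commands mirror CONTRIBUTING.rst further down in
this archive, while `pre-commit run --all-files` is an assumption, since
the guide itself only shows installing the hooks):

```text
$ pre-commit run --all-files
$ pytest
$ tox
```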
cachelib-0.13.0/.github/workflows/tests.yaml
name: Tests
on:
  push:
    branches:
      - main
      - '*.x'
    paths-ignore:
      - 'docs/**'
      - '*.md'
      - '*.rst'
  pull_request:
    branches:
      - main
      - '*.x'
    paths-ignore:
      - 'docs/**'
      - '*.md'
      - '*.rst'
jobs:
  tests:
    name: ${{ matrix.name }}
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        include:
          - {name: Linux, python: '3.11', os: ubuntu-latest, tox: py311}
          - {name: '3.8', python: '3.8', os: ubuntu-latest, tox: py38}
          - {name: '3.9', python: '3.9', os: ubuntu-latest, tox: py39}
          - {name: '3.10', python: '3.10', os: ubuntu-latest, tox: py310}
          - {name: '3.11', python: '3.11', os: ubuntu-latest, tox: py311}
          - {name: Typing, python: '3.12', os: ubuntu-latest, tox: typing}
    steps:
      - uses: actions/checkout@v2
      - uses: actions/setup-python@v2
        with:
          python-version: ${{ matrix.python }}
      - name: install external dependencies Linux
        if: matrix.os == 'ubuntu-latest'
        run: |
          sudo apt-get update
          sudo apt-get install libmemcached-dev memcached redis-server
      - name: setup dynamodb-local
        uses: rrainn/dynamodb-action@v2.0.1
      - name: Start MongoDB
        uses: wbari/start-mongoDB@v0.2
        with:
          mongoDBVersion: 'latest'
      - name: update pip
        run: |
          pip install -U wheel
          pip install -U setuptools
          python -m pip install -U pip
      - name: get pip cache dir
        id: pip-cache
        run: echo "::set-output name=dir::$(pip cache dir)"
      - name: cache pip
        uses: actions/cache@v2
        with:
          path: ${{ steps.pip-cache.outputs.dir }}
          key: pip|${{ runner.os }}|${{ matrix.python }}|${{ hashFiles('setup.py') }}|${{ hashFiles('requirements/*.txt') }}
      - name: cache mypy
        if: matrix.tox == 'typing'
        uses: actions/cache@v2
        with:
          path: ./.mypy_cache
          key: mypy|${{ matrix.python }}|${{ hashFiles('setup.cfg') }}
      - run: pip install tox
      - run: tox -e ${{ matrix.tox }}

cachelib-0.13.0/.gitignore
# general things to ignore
build/
dist/
*.egg-info/
*.egg
*.eggs
*.py[cod]
__pycache__/
*.so
*~
venv/
env/
.DS_Store
*.swp
docs/_build

# due to using t/nox and pytest
.tox
.cache
.pytest_cache
.coverage
htmlcov/
.xprocess
.vscode
.python-version
/.idea/.gitignore
/.idea/cachelib.iml
/.idea/misc.xml
/.idea/modules.xml
/.idea/inspectionProfiles/profiles_settings.xml
/.idea/vcs.xml

cachelib-0.13.0/.pre-commit-config.yaml
ci:
  autoupdate_schedule: monthly
repos:
  - repo: https://github.com/asottile/pyupgrade
    rev: v3.15.2
    hooks:
      - id: pyupgrade
        args: ["--py37-plus"]
  - repo: https://github.com/asottile/reorder-python-imports
    rev: v3.12.0
    hooks:
      - id: reorder-python-imports
        args: ["--application-directories", "src"]
  - repo: https://github.com/psf/black
    rev: 24.3.0
    hooks:
      - id: black
  - repo: https://github.com/PyCQA/flake8
    rev: 7.0.0
    hooks:
      - id: flake8
        additional_dependencies:
          - flake8-bugbear
          - flake8-implicit-str-concat
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v4.5.0
    hooks:
      - id: fix-byte-order-marker
      - id: trailing-whitespace
      - id: end-of-file-fixer

cachelib-0.13.0/.readthedocs.yaml
version: 2
build:
  os: ubuntu-22.04
  tools:
    python: "3.10"
python:
  install:
    - requirements: requirements/docs.txt
    - method: pip
      path: .
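# A rough local equivalent of the install step above (an assumption
# assembled from CONTRIBUTING.rst and docs/Makefile further down in this
# archive, not something Read the Docs itself runs):
#
#   pip install -r requirements/docs.txt
#   pip install -e .
#   cd docs && make html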
sphinx:
  builder: dirhtml
  fail_on_warning: true

cachelib-0.13.0/CHANGES.rst
Version 0.13.0
--------------

Released 2024-04-13

- The default ``hashlib.md5`` may not be available in FIPS builds. We now
  do not access it at import time on ``FileSystemCache``, so developers
  have time to change the default. ``hashlib.md5`` will be lazy loaded
  when a new default is not provided.


Version 0.12.0
--------------

Released 2024-02-11

- ``RedisCache`` now supports callables as keys.
- Added ``MongoDB`` as a cache backend.


Version 0.11.0
--------------

Released 2024-02-10

- Drop python 3.7 support.
- Add python 3.11 support.


Version 0.10.2
--------------

Released 2023-01-31

- Fix broken release.


Version 0.10.1
--------------

Released 2023-01-22

- Fix logging pollution due to ``DynamoDB`` logging handler.


Version 0.10.0
--------------

Released 2023-01-22

- Improve error message when ``FileSystemCache`` methods are called with
  non-str keys. :pr:`170`
- Added ``DynamoDb`` as a cache backend. :pr:`209`


Version 0.9.0
-------------

Released 2022-06-26

- Add separate internal read/write clients to ``RedisCache`` to improve
  compatibility with flask-caching. :pr:`159`
- Fix bug where cache entries would expire immediately when
  ``RedisCache.add`` was called without timeout. :pr:`157`
- Improve ``FileSystemCache.set`` compatibility with Windows systems.
  :pr:`158`


Version 0.8.0
-------------

Released 2022-06-13

- Remove deprecated ``RedisCache.load_object`` and
  ``RedisCache.dump_object``. :pr:`147`


Version 0.7.0
-------------

Released 2022-05-14

- ``FileSystemCache`` now stores universal expiration timestamps using
  python's ``struct`` module. :pr:`126`
- Drop support for Python 3.6. :pr:`134`


Version 0.6.0
-------------

Released 2022-01-18

- A custom ``hash_method`` may now be provided to ``FileSystemCache`` for
  hashing keys. :pr:`107`
- Fix ``PermissionError`` issue with ``FileSystemCache`` on Windows.
  :pr:`111`


Version 0.5.0
-------------

Released 2021-12-31

- Cache types now have configurable serializers. :pr:`63`


Version 0.4.1
-------------

Released 2021-10-04

- Fix break in ``RedisCache`` when a host object was passed in
  ``RedisCache.host`` instead of a string. :pr:`82`


Version 0.4.0
-------------

Released 2021-10-03

- All cache types now implement the ``BaseCache`` interface both in
  behavior and method return types. Thus, code written for one cache type
  should work with any other cache type. :pr:`71`
- Add type information for static typing tools. :pr:`48`
- ``FileNotFound`` exceptions will not be logged anymore in
  ``FileSystemCache`` methods, in order to avoid polluting application
  log files. :pr:`69`


Version 0.3.0
-------------

Released 2021-08-12

- Optimize ``FileSystemCache`` pruning. :pr:`52`
- Fix a bug in ``FileSystemCache`` where entries would not be removed
  when the total was over the threshold, and the entry count would be
  lost. :pr:`52`
- ``FileSystemCache`` logs system-related exceptions. :pr:`51`
- Removal of expired entries in ``FileSystemCache`` is only triggered if
  the number of entries is over the ``threshold`` when calling ``set``.
  ``get`` and ``has`` still return ``None`` and ``False`` respectively
  for expired entries, but will not remove the files. All removals happen
  at pruning time or explicitly with ``clear`` and ``delete``. :pr:`53`


Version 0.2.0
-------------

Released 2021-06-25

- Support for Python 2 has been dropped. Only Python 3.6 and above are
  supported.
- Fix ``FileSystemCache.set`` incorrectly considering value overrides on existing keys as new cache entries. :issue:`18` - ``SimpleCache`` and ``FileSystemCache`` first remove expired entries, followed by older entries, when cleaning up. :pr:`26` - Fix problem where file count was not being updated in ``FileSystemCache.get`` and ``FileSystemCache.has`` after removals. :issue:`20` - When attempting to access non-existent entries with ``Memcached``, these will now be initialized with a given value ``delta``. :pr:`31` Version 0.1.1 ------------- Released 2020-06-20 - Fix ``FileSystemCache`` on Windows. cachelib-0.13.0/CODE_OF_CONDUCT.md000066400000000000000000000064361460651207200161400ustar00rootroot00000000000000# Contributor Covenant Code of Conduct ## Our Pledge In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, sex characteristics, gender identity and expression, level of experience, education, socio-economic status, nationality, personal appearance, race, religion, or sexual identity and orientation. ## Our Standards Examples of behavior that contributes to creating a positive environment include: * Using welcoming and inclusive language * Being respectful of differing viewpoints and experiences * Gracefully accepting constructive criticism * Focusing on what is best for the community * Showing empathy towards other community members Examples of unacceptable behavior by participants include: * The use of sexualized language or imagery and unwelcome sexual attention or advances * Trolling, insulting/derogatory comments, and personal or political attacks * Public or private harassment * Publishing others' private information, such as a physical or electronic address, without explicit permission * Other conduct which could reasonably be considered inappropriate in a professional setting ## Our Responsibilities Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior. Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful. ## Scope This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers. ## Enforcement Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at report@palletsprojects.com. All complaints will be reviewed and investigated and will result in a response that is deemed necessary and appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately. 
Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership. ## Attribution This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html [homepage]: https://www.contributor-covenant.org For answers to common questions about this code of conduct, see https://www.contributor-covenant.org/faq cachelib-0.13.0/CONTRIBUTING.rst000066400000000000000000000147501460651207200160000ustar00rootroot00000000000000How to contribute to CacheLib ============================= Thank you for considering contributing to CacheLib! Support questions ----------------- Please don't use the issue tracker for this. The issue tracker is a tool to address bugs and feature requests in CacheLib itself. Use one of the following resources for questions about using CacheLib or issues with your own code: - The ``#get-help`` channel on our Discord chat: https://discord.gg/pallets - Ask on `Stack Overflow`_. Search with Google first using: ``site:stackoverflow.com cachelib {search term, exception message, etc.}`` .. _Stack Overflow: https://stackoverflow.com/search?tab=relevance&q=cachelib Reporting issues ---------------- Include the following information in your post: - Describe what you expected to happen. - If possible, include a `minimal reproducible example`_ to help us identify the issue. This also helps check that the issue is not with your own code. - Describe what actually happened. Include the full traceback if there was an exception. - List your Python and CacheLib versions. If possible, check if this issue is already fixed in the latest releases or the latest code in the repository. .. _minimal reproducible example: https://stackoverflow.com/help/minimal-reproducible-example Submitting patches ------------------ If there is not an open issue for what you want to submit, prefer opening one for discussion before working on a PR. You can work on any issue that doesn't have an open PR linked to it or a maintainer assigned to it. These show up in the sidebar. No need to ask if you can work on an issue that interests you. Include the following in your patch: - Use `Black`_ to format your code. This and other tools will run automatically if you install `pre-commit`_ using the instructions below. - Include tests if your patch adds or changes code. Make sure the test fails without your patch. - Update any relevant docs pages and docstrings. Docs pages and docstrings should be wrapped at 72 characters. - Add an entry in ``CHANGES.rst``. Use the same style as other entries. Also include ``.. versionchanged::`` inline changelogs in relevant docstrings. .. _Black: https://black.readthedocs.io .. _pre-commit: https://pre-commit.com First time setup ~~~~~~~~~~~~~~~~ - Download and install the `latest version of git`_. - Configure git with your `username`_ and `email`_. .. code-block:: text $ git config --global user.name 'your name' $ git config --global user.email 'your email' - Make sure you have a `GitHub account`_. - Fork CacheLib to your GitHub account by clicking the `Fork`_ button. - `Clone`_ the main repository locally. .. code-block:: text $ git clone https://github.com/pallets/cachelib $ cd cachelib - Add your fork as a remote to push your work to. Replace ``{username}`` with your username. This names the remote "fork", the default Pallets remote is "origin". .. 
code-block:: text git remote add fork https://github.com/{username}/cachelib - Create a virtualenv. .. tabs:: .. group-tab:: Linux/macOS .. code-block:: text $ python3 -m venv env $ . env/bin/activate .. group-tab:: Windows .. code-block:: text > py -3 -m venv env > env\Scripts\activate - Upgrade pip and setuptools. .. code-block:: text $ python -m pip install --upgrade pip setuptools - Install the development dependencies, then install CacheLib in editable mode. .. code-block:: text $ pip install -r requirements/dev.txt && pip install -e . - Install the pre-commit hooks. .. code-block:: text $ pre-commit install .. _latest version of git: https://git-scm.com/downloads .. _username: https://docs.github.com/en/github/using-git/setting-your-username-in-git .. _email: https://docs.github.com/en/github/setting-up-and-managing-your-github-user-account/setting-your-commit-email-address .. _GitHub account: https://github.com/join .. _Fork: https://github.com/pallets/cachelib/fork .. _Clone: https://docs.github.com/en/github/getting-started-with-github/fork-a-repo#step-2-create-a-local-clone-of-your-fork Start coding ~~~~~~~~~~~~ - Create a branch to identify the issue you would like to work on. If you're submitting a bug or documentation fix, branch off of the latest ".x" branch. .. code-block:: text $ git fetch origin $ git checkout -b your-branch-name origin/main If you're submitting a feature addition or change, branch off of the "main" branch. .. code-block:: text $ git fetch origin $ git checkout -b your-branch-name origin/main - Using your favorite editor, make your changes, `committing as you go`_. - Include tests that cover any code changes you make. Make sure the test fails without your patch. Run the tests as described below. - Push your commits to your fork on GitHub and `create a pull request`_. Link to the issue being addressed with ``fixes #123`` in the pull request. .. code-block:: text $ git push --set-upstream fork your-branch-name .. _committing as you go: https://dont-be-afraid-to-commit.readthedocs.io/en/latest/git/commandlinegit.html#commit-your-changes .. _create a pull request: https://docs.github.com/en/github/collaborating-with-issues-and-pull-requests/creating-a-pull-request Running the tests ~~~~~~~~~~~~~~~~~ Run the basic test suite with pytest. .. code-block:: text $ pytest This runs the tests for the current environment, which is usually sufficient. CI will run the full suite when you submit your pull request. You can run the full test suite with tox if you don't want to wait. .. code-block:: text $ tox Running test coverage ~~~~~~~~~~~~~~~~~~~~~ Generating a report of lines that do not have test coverage can indicate where to start contributing. Run ``pytest`` using ``coverage`` and generate a report. .. code-block:: text $ pip install coverage $ coverage run -m pytest $ coverage html Open ``htmlcov/index.html`` in your browser to explore the report. Read more about `coverage `__. Building the docs ~~~~~~~~~~~~~~~~~ Build the docs in the ``docs`` directory using Sphinx. .. code-block:: text $ cd docs $ make html Open ``_build/html/index.html`` in your browser to view the docs. Read more about `Sphinx `__. cachelib-0.13.0/LICENSE.rst000066400000000000000000000027031460651207200151460ustar00rootroot00000000000000Copyright 2018 Pallets Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. 
Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. cachelib-0.13.0/MANIFEST.in000066400000000000000000000002331460651207200150640ustar00rootroot00000000000000include CHANGES.rst include tox.ini include requirements/*.txt graft docs prune docs/_build graft tests include src/cachelib/py.typed global-exclude *.pyc cachelib-0.13.0/README.rst000066400000000000000000000016641460651207200150260ustar00rootroot00000000000000CacheLib ======== A collection of cache libraries in the same API interface. Extracted from Werkzeug. Installing ---------- Install and update using `pip`_: .. code-block:: text $ pip install -U cachelib .. _pip: https://pip.pypa.io/en/stable/getting-started/ Donate ------ The Pallets organization develops and supports Flask and the libraries it uses. In order to grow the community of contributors and users, and allow the maintainers to devote more time to the projects, `please donate today`_. .. _please donate today: https://palletsprojects.com/donate Links ----- - Documentation: https://cachelib.readthedocs.io/ - Changes: https://cachelib.readthedocs.io/changes/ - PyPI Releases: https://pypi.org/project/cachelib/ - Source Code: https://github.com/pallets/cachelib/ - Issue Tracker: https://github.com/pallets/cachelib/issues/ - Twitter: https://twitter.com/PalletsTeam - Chat: https://discord.gg/pallets cachelib-0.13.0/docs/000077500000000000000000000000001460651207200142605ustar00rootroot00000000000000cachelib-0.13.0/docs/Makefile000066400000000000000000000011721460651207200157210ustar00rootroot00000000000000# Minimal makefile for Sphinx documentation # # You can set these variables from the command line, and also # from the environment for the first two. SPHINXOPTS ?= SPHINXBUILD ?= sphinx-build SOURCEDIR = . BUILDDIR = _build # Put it first so that "make" without argument is like "make help". help: @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) .PHONY: help Makefile # Catch-all target: route all unknown targets to Sphinx using the new # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). %: Makefile @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) cachelib-0.13.0/docs/base.rst000066400000000000000000000001471460651207200157260ustar00rootroot00000000000000Base API ======== .. 
automodule:: cachelib.base :members: :undoc-members: :show-inheritance: cachelib-0.13.0/docs/changes.rst000066400000000000000000000000551460651207200164220ustar00rootroot00000000000000Changes ======= .. include:: ../CHANGES.rst cachelib-0.13.0/docs/conf.py000066400000000000000000000032061460651207200155600ustar00rootroot00000000000000from pallets_sphinx_themes import get_version from pallets_sphinx_themes import ProjectLink # Project -------------------------------------------------------------- project = "CacheLib" copyright = "2018 Pallets" author = "Pallets" release, version = get_version("cachelib") # General -------------------------------------------------------------- extensions = [ "sphinx.ext.autodoc", "sphinx.ext.intersphinx", "sphinxcontrib.log_cabinet", "pallets_sphinx_themes", "sphinx_issues", "sphinx_tabs.tabs", ] autodoc_typehints = "description" intersphinx_mapping = { "python": ("https://docs.python.org/3/", None), } issues_github_path = "pallets/cachelib" # HTML ----------------------------------------------------------------- html_theme = "werkzeug" html_theme_options = {"index_sidebar_logo": False} html_context = { "project_links": [ ProjectLink("Donate", "https://palletsprojects.com/donate"), ProjectLink("PyPI Releases", "https://pypi.org/project/cachelib/"), ProjectLink("Source Code", "https://github.com/pallets/cachelib/"), ProjectLink("Issue Tracker", "https://github.com/pallets/cachelib/issues/"), ProjectLink("Twitter", "https://twitter.com/PalletsTeam"), ProjectLink("Chat", "https://discord.gg/pallets"), ] } html_sidebars = { "index": ["project.html", "localtoc.html", "searchbox.html", "ethicalads.html"], "**": ["localtoc.html", "relations.html", "searchbox.html", "ethicalads.html"], } singlehtml_sidebars = {"index": ["project.html", "localtoc.html", "ethicalads.html"]} html_title = f"{project} Documentation ({version})" html_show_sourcelink = False cachelib-0.13.0/docs/dynamodb.rst000066400000000000000000000001731460651207200166100ustar00rootroot00000000000000DynamoDb Backend ================ .. automodule:: cachelib.dynamodb :members: :undoc-members: :show-inheritance: cachelib-0.13.0/docs/file.rst000066400000000000000000000001571460651207200157340ustar00rootroot00000000000000File Backend ============ .. automodule:: cachelib.file :members: :undoc-members: :show-inheritance: cachelib-0.13.0/docs/index.rst000066400000000000000000000004271460651207200161240ustar00rootroot00000000000000CacheLib ======== A collection of cache libraries in the same API interface. Extracted from Werkzeug. .. toctree:: :maxdepth: 2 base simple file redis memcached uwsgi dynamodb mongodb .. toctree:: :maxdepth: 2 license changes cachelib-0.13.0/docs/license.rst000066400000000000000000000001071460651207200164320ustar00rootroot00000000000000BSD-3-Clause License ==================== .. include:: ../LICENSE.rst cachelib-0.13.0/docs/make.bat000066400000000000000000000014331460651207200156660ustar00rootroot00000000000000@ECHO OFF pushd %~dp0 REM Command file for Sphinx documentation if "%SPHINXBUILD%" == "" ( set SPHINXBUILD=sphinx-build ) set SOURCEDIR=. set BUILDDIR=_build if "%1" == "" goto help %SPHINXBUILD% >NUL 2>NUL if errorlevel 9009 ( echo. echo.The 'sphinx-build' command was not found. Make sure you have Sphinx echo.installed, then set the SPHINXBUILD environment variable to point echo.to the full path of the 'sphinx-build' executable. Alternatively you echo.may add the Sphinx directory to PATH. echo. 
echo.If you don't have Sphinx installed, grab it from echo.http://sphinx-doc.org/ exit /b 1 ) %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% goto end :help %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% :end popd cachelib-0.13.0/docs/memcached.rst000066400000000000000000000001761460651207200167240ustar00rootroot00000000000000Memcached Backend ================= .. automodule:: cachelib.memcached :members: :undoc-members: :show-inheritance: cachelib-0.13.0/docs/mongodb.rst000066400000000000000000000001701460651207200164350ustar00rootroot00000000000000MongoDb Backend =============== .. automodule:: cachelib.mongodb :members: :undoc-members: :show-inheritance: cachelib-0.13.0/docs/redis.rst000066400000000000000000000001621460651207200161170ustar00rootroot00000000000000Redis Backend ============= .. automodule:: cachelib.redis :members: :undoc-members: :show-inheritance: cachelib-0.13.0/docs/simple.rst000066400000000000000000000002031460651207200162760ustar00rootroot00000000000000Simple Memory Backend ===================== .. automodule:: cachelib.simple :members: :undoc-members: :show-inheritance: cachelib-0.13.0/docs/uwsgi.rst000066400000000000000000000001621460651207200161470ustar00rootroot00000000000000uWSGI Backend ============= .. automodule:: cachelib.uwsgi :members: :undoc-members: :show-inheritance: cachelib-0.13.0/requirements.txt000066400000000000000000000002641460651207200166160ustar00rootroot00000000000000# To ensure app dependencies are ported from your virtual environment/host machine into your container, run 'pip freeze > requirements.txt' in the terminal to overwrite this file cachelib-0.13.0/requirements/000077500000000000000000000000001460651207200160535ustar00rootroot00000000000000cachelib-0.13.0/requirements/dev.in000066400000000000000000000000751460651207200171630ustar00rootroot00000000000000-r tests.in -r docs.in -r typing.in pip-tools pre-commit tox cachelib-0.13.0/requirements/dev.txt000066400000000000000000000063741460651207200174040ustar00rootroot00000000000000# # This file is autogenerated by pip-compile with Python 3.11 # by the following command: # # pip-compile requirements/dev.in # alabaster==0.7.16 # via sphinx babel==2.14.0 # via sphinx boto3==1.34.39 # via -r tests.in botocore==1.34.39 # via # boto3 # s3transfer build==1.0.3 # via pip-tools cachetools==5.3.2 # via tox certifi==2024.2.2 # via requests cffi==1.16.0 # via cryptography cfgv==3.4.0 # via pre-commit chardet==5.2.0 # via tox charset-normalizer==3.3.2 # via requests click==8.1.7 # via pip-tools colorama==0.4.6 # via tox cryptography==42.0.4 # via # types-pyopenssl # types-redis distlib==0.3.8 # via virtualenv dnspython==2.5.0 # via pymongo docutils==0.20.1 # via # sphinx # sphinx-tabs filelock==3.13.1 # via # tox # virtualenv identify==2.5.34 # via pre-commit idna==3.7 # via requests imagesize==1.4.1 # via sphinx iniconfig==2.0.0 # via pytest jinja2==3.1.3 # via sphinx jmespath==1.0.1 # via # boto3 # botocore markupsafe==2.1.5 # via jinja2 mypy==1.9.0 # via -r typing.in mypy-extensions==1.0.0 # via mypy nodeenv==1.8.0 # via pre-commit packaging==23.2 # via # build # pallets-sphinx-themes # pyproject-api # pytest # sphinx # tox pallets-sphinx-themes==2.1.1 # via -r docs.in pip-tools==7.4.1 # via -r dev.in platformdirs==4.2.0 # via # tox # virtualenv pluggy==1.4.0 # via # pytest # tox pre-commit==3.7.0 # via -r dev.in psutil==5.9.8 # via pytest-xprocess pycparser==2.21 # via cffi pygments==2.17.2 # via # sphinx # sphinx-tabs pylibmc==1.6.3 # via -r tests.in pymongo==4.6.1 # 
via -r tests.in pyproject-api==1.6.1 # via tox pyproject-hooks==1.0.0 # via # build # pip-tools pytest==8.0.0 # via # -r tests.in # pytest-xprocess pytest-xprocess==0.23.0 # via -r tests.in python-dateutil==2.8.2 # via botocore pyyaml==6.0.1 # via pre-commit redis==5.0.1 # via -r tests.in requests==2.31.0 # via sphinx s3transfer==0.10.0 # via boto3 six==1.16.0 # via python-dateutil snowballstemmer==2.2.0 # via sphinx sphinx==7.2.6 # via # -r docs.in # pallets-sphinx-themes # sphinx-issues # sphinx-tabs # sphinxcontrib-log-cabinet sphinx-issues==4.0.0 # via -r docs.in sphinx-tabs==3.4.5 # via -r docs.in sphinxcontrib-applehelp==1.0.8 # via sphinx sphinxcontrib-devhelp==1.0.6 # via sphinx sphinxcontrib-htmlhelp==2.0.5 # via sphinx sphinxcontrib-jsmath==1.0.1 # via sphinx sphinxcontrib-log-cabinet==1.0.1 # via -r docs.in sphinxcontrib-qthelp==1.0.7 # via sphinx sphinxcontrib-serializinghtml==1.1.10 # via sphinx tox==4.12.1 # via -r dev.in types-pyopenssl==24.0.0.20240130 # via types-redis types-redis==4.6.0.20240311 # via -r typing.in typing-extensions==4.9.0 # via mypy urllib3==2.0.7 # via # botocore # requests uwsgi==2.0.24 # via -r tests.in virtualenv==20.25.0 # via # pre-commit # tox wheel==0.42.0 # via pip-tools # The following packages are considered to be unsafe in a requirements file: # pip # setuptools cachelib-0.13.0/requirements/docs.in000066400000000000000000000001211460651207200173250ustar00rootroot00000000000000Pallets-Sphinx-Themes Sphinx sphinx-issues sphinxcontrib-log-cabinet sphinx-tabs cachelib-0.13.0/requirements/docs.txt000066400000000000000000000025721460651207200175520ustar00rootroot00000000000000# # This file is autogenerated by pip-compile with Python 3.11 # by the following command: # # pip-compile requirements/docs.in # alabaster==0.7.12 # via sphinx babel==2.9.1 # via sphinx certifi==2023.7.22 # via requests charset-normalizer==2.0.7 # via requests docutils==0.18.1 # via # sphinx # sphinx-tabs idna==3.7 # via requests imagesize==1.4.1 # via sphinx jinja2==3.1.3 # via sphinx markupsafe==2.0.1 # via jinja2 packaging==21.2 # via # pallets-sphinx-themes # sphinx pallets-sphinx-themes==2.1.1 # via -r docs.in pygments==2.15.0 # via # sphinx # sphinx-tabs pyparsing==2.4.7 # via packaging pytz==2021.3 # via babel requests==2.31.0 # via sphinx snowballstemmer==2.1.0 # via sphinx sphinx==7.2.6 # via # -r docs.in # pallets-sphinx-themes # sphinx-issues # sphinx-tabs # sphinxcontrib-log-cabinet sphinx-issues==4.0.0 # via -r docs.in sphinx-tabs==3.4.5 # via -r docs.in sphinxcontrib-applehelp==1.0.2 # via sphinx sphinxcontrib-devhelp==1.0.2 # via sphinx sphinxcontrib-htmlhelp==2.0.0 # via sphinx sphinxcontrib-jsmath==1.0.1 # via sphinx sphinxcontrib-log-cabinet==1.0.1 # via -r docs.in sphinxcontrib-qthelp==1.0.3 # via sphinx sphinxcontrib-serializinghtml==1.1.10 # via sphinx urllib3==1.26.18 # via requests cachelib-0.13.0/requirements/tests.in000066400000000000000000000000711460651207200175430ustar00rootroot00000000000000pytest pylibmc redis uwsgi pytest-xprocess boto3 pymongo cachelib-0.13.0/requirements/tests.txt000066400000000000000000000016721460651207200177640ustar00rootroot00000000000000# # This file is autogenerated by pip-compile with Python 3.11 # by the following command: # # pip-compile requirements/tests.in # boto3==1.34.39 # via -r requirements/tests.in botocore==1.34.39 # via # boto3 # s3transfer dnspython==2.5.0 # via pymongo iniconfig==2.0.0 # via pytest jmespath==1.0.1 # via # boto3 # botocore packaging==23.2 # via pytest pluggy==1.4.0 # via pytest psutil==5.9.8 
    # via pytest-xprocess
pylibmc==1.6.3
    # via -r requirements/tests.in
pymongo==4.6.1
    # via -r requirements/tests.in
pytest==8.0.0
    # via
    #   -r requirements/tests.in
    #   pytest-xprocess
pytest-xprocess==0.23.0
    # via -r requirements/tests.in
python-dateutil==2.8.2
    # via botocore
redis==5.0.1
    # via -r requirements/tests.in
s3transfer==0.10.0
    # via boto3
six==1.16.0
    # via python-dateutil
urllib3
    # via botocore
uwsgi==2.0.24
    # via -r requirements/tests.in

cachelib-0.13.0/requirements/typing.in
mypy
types-redis

cachelib-0.13.0/requirements/typing.txt
#
# This file is autogenerated by pip-compile with Python 3.11
# by the following command:
#
#   pip-compile requirements/typing.in
#
cffi==1.15.1
    # via cryptography
cryptography==42.0.4
    # via
    #   types-pyopenssl
    #   types-redis
mypy==1.9.0
    # via -r typing.in
mypy-extensions==1.0.0
    # via mypy
pycparser==2.21
    # via cffi
types-pyopenssl==23.0.0.2
    # via types-redis
types-redis==4.6.0.20240311
    # via -r typing.in
typing-extensions==4.9.0
    # via mypy

cachelib-0.13.0/setup.cfg
[metadata]
name = cachelib
version = attr: cachelib.__version__
url = https://github.com/pallets-eco/cachelib/
project_urls =
    Donate = https://palletsprojects.com/donate
    Documentation = https://cachelib.readthedocs.io/
    Changes = https://cachelib.readthedocs.io/changes/
    Source Code = https://github.com/pallets-eco/cachelib/
    Issue Tracker = https://github.com/pallets-eco/cachelib/issues/
    Twitter = https://twitter.com/PalletsTeam
    Chat = https://discord.gg/pallets
license = BSD-3-Clause
license_files = LICENSE.rst
maintainer = Pallets
maintainer_email = contact@palletsprojects.com
description = A collection of cache libraries in the same API interface.
long_description = file: README.rst long_description_content_type = text/x-rst classifiers = Development Status :: 5 - Production/Stable Intended Audience :: Developers License :: OSI Approved :: BSD License Operating System :: OS Independent Programming Language :: Python [options] packages = find: package_dir = = src include_package_data = true python_requires = >= 3.8 [options.packages.find] where = src [tool:pytest] testpaths = tests filterwarnings = error default::DeprecationWarning:cachelib.uwsgi default::DeprecationWarning:cachelib.redis [coverage:run] branch = True source = cachelib tests [coverage:paths] source = src */site-packages [flake8] # B = bugbear # E = pycodestyle errors # F = flake8 pyflakes # W = pycodestyle warnings # B9 = bugbear opinions # ISC = implicit-str-concat select = B, E, F, W, B9, ISC ignore = # slice notation whitespace, invalid E203 # line length, handled by bugbear B950 E501 # bare except, handled by bugbear B001 E722 # bin op line break, invalid W503 # zip without an explicit strict B905 # up to 88 allowed by bugbear B950 max-line-length = 80 [mypy] files = src/cachelib python_version = 3.7 disallow_subclassing_any = True disallow_untyped_calls = True disallow_untyped_defs = True disallow_incomplete_defs = True no_implicit_optional = True local_partial_types = True no_implicit_reexport = True strict_equality = True warn_redundant_casts = True warn_unused_configs = True warn_unused_ignores = True warn_return_any = True warn_unreachable = True cachelib-0.13.0/setup.py000066400000000000000000000000651460651207200150430ustar00rootroot00000000000000from setuptools import setup setup(name="cachelib") cachelib-0.13.0/src/000077500000000000000000000000001460651207200141175ustar00rootroot00000000000000cachelib-0.13.0/src/cachelib/000077500000000000000000000000001460651207200156515ustar00rootroot00000000000000cachelib-0.13.0/src/cachelib/__init__.py000066400000000000000000000010771460651207200177670ustar00rootroot00000000000000from cachelib.base import BaseCache from cachelib.base import NullCache from cachelib.dynamodb import DynamoDbCache from cachelib.file import FileSystemCache from cachelib.memcached import MemcachedCache from cachelib.mongodb import MongoDbCache from cachelib.redis import RedisCache from cachelib.simple import SimpleCache from cachelib.uwsgi import UWSGICache __all__ = [ "BaseCache", "NullCache", "SimpleCache", "FileSystemCache", "MemcachedCache", "RedisCache", "UWSGICache", "DynamoDbCache", "MongoDbCache", ] __version__ = "0.13.0" cachelib-0.13.0/src/cachelib/base.py000066400000000000000000000151131460651207200171360ustar00rootroot00000000000000import typing as _t class BaseCache: """Baseclass for the cache systems. All the cache systems implement this API or a superset of it. :param default_timeout: the default timeout (in seconds) that is used if no timeout is specified on :meth:`set`. A timeout of 0 indicates that the cache never expires. """ def __init__(self, default_timeout: int = 300): self.default_timeout = default_timeout def _normalize_timeout(self, timeout: _t.Optional[int]) -> int: if timeout is None: timeout = self.default_timeout return timeout def get(self, key: str) -> _t.Any: """Look up key in the cache and return the value for it. :param key: the key to be looked up. :returns: The value if it exists and is readable, else ``None``. """ return None def delete(self, key: str) -> bool: """Delete `key` from the cache. :param key: the key to delete. :returns: Whether the key existed and has been deleted. 
:rtype: boolean """ return True def get_many(self, *keys: str) -> _t.List[_t.Any]: """Returns a list of values for the given keys. For each key an item in the list is created:: foo, bar = cache.get_many("foo", "bar") Has the same error handling as :meth:`get`. :param keys: The function accepts multiple keys as positional arguments. """ return [self.get(k) for k in keys] def get_dict(self, *keys: str) -> _t.Dict[str, _t.Any]: """Like :meth:`get_many` but return a dict:: d = cache.get_dict("foo", "bar") foo = d["foo"] bar = d["bar"] :param keys: The function accepts multiple keys as positional arguments. """ return dict(zip(keys, self.get_many(*keys))) # noqa: B905 def set( self, key: str, value: _t.Any, timeout: _t.Optional[int] = None ) -> _t.Optional[bool]: """Add a new key/value to the cache (overwrites value, if key already exists in the cache). :param key: the key to set :param value: the value for the key :param timeout: the cache timeout for the key in seconds (if not specified, it uses the default timeout). A timeout of 0 indicates that the cache never expires. :returns: ``True`` if key has been updated, ``False`` for backend errors. Pickling errors, however, will raise a subclass of ``pickle.PickleError``. :rtype: boolean """ return True def add(self, key: str, value: _t.Any, timeout: _t.Optional[int] = None) -> bool: """Works like :meth:`set` but does not overwrite the values of already existing keys. :param key: the key to set :param value: the value for the key :param timeout: the cache timeout for the key in seconds (if not specified, it uses the default timeout). A timeout of 0 indicates that the cache never expires. :returns: Same as :meth:`set`, but also ``False`` for already existing keys. :rtype: boolean """ return True def set_many( self, mapping: _t.Dict[str, _t.Any], timeout: _t.Optional[int] = None ) -> _t.List[_t.Any]: """Sets multiple keys and values from a mapping. :param mapping: a mapping with the keys/values to set. :param timeout: the cache timeout for the key in seconds (if not specified, it uses the default timeout). A timeout of 0 indicates that the cache never expires. :returns: A list containing all keys successfully set :rtype: boolean """ set_keys = [] for key, value in mapping.items(): if self.set(key, value, timeout): set_keys.append(key) return set_keys def delete_many(self, *keys: str) -> _t.List[_t.Any]: """Deletes multiple keys at once. :param keys: The function accepts multiple keys as positional arguments. :returns: A list containing all successfully deleted keys :rtype: boolean """ deleted_keys = [] for key in keys: if self.delete(key): deleted_keys.append(key) return deleted_keys def has(self, key: str) -> bool: """Checks if a key exists in the cache without returning it. This is a cheap operation that bypasses loading the actual data on the backend. :param key: the key to check """ raise NotImplementedError( "%s doesn't have an efficient implementation of `has`. That " "means it is impossible to check whether a key exists without " "fully loading the key's data. Consider using `self.get` " "explicitly if you don't care about performance." ) def clear(self) -> bool: """Clears the cache. Keep in mind that not all caches support completely clearing the cache. :returns: Whether the cache has been cleared. :rtype: boolean """ return True def inc(self, key: str, delta: int = 1) -> _t.Optional[int]: """Increments the value of a key by `delta`. If the key does not yet exist it is initialized with `delta`. 
For supporting caches this is an atomic operation. :param key: the key to increment. :param delta: the delta to add. :returns: The new value or ``None`` for backend errors. """ value = (self.get(key) or 0) + delta return value if self.set(key, value) else None def dec(self, key: str, delta: int = 1) -> _t.Optional[int]: """Decrements the value of a key by `delta`. If the key does not yet exist it is initialized with `-delta`. For supporting caches this is an atomic operation. :param key: the key to increment. :param delta: the delta to subtract. :returns: The new value or `None` for backend errors. """ value = (self.get(key) or 0) - delta return value if self.set(key, value) else None class NullCache(BaseCache): """A cache that doesn't cache. This can be useful for unit testing. :param default_timeout: a dummy parameter that is ignored but exists for API compatibility with other caches. """ def has(self, key: str) -> bool: return False cachelib-0.13.0/src/cachelib/dynamodb.py000066400000000000000000000205001460651207200200150ustar00rootroot00000000000000import datetime import typing as _t from cachelib.base import BaseCache from cachelib.serializers import DynamoDbSerializer CREATED_AT_FIELD = "created_at" RESPONSE_FIELD = "response" class DynamoDbCache(BaseCache): """ Implementation of cachelib.BaseCache that uses an AWS DynamoDb table as the backend. Your server process will require dynamodb:GetItem and dynamodb:PutItem IAM permissions on the cache table. Limitations: DynamoDB table items are limited to 400 KB in size. Since this class stores cached items in a table, the max size of a cache entry will be slightly less than 400 KB, since the cache key and expiration time fields are also part of the item. :param table_name: The name of the DynamoDB table to use :param default_timeout: Set the timeout in seconds after which cache entries expire :param key_field: The name of the hash_key attribute in the DynamoDb table. This must be a string attribute. :param expiration_time_field: The name of the table attribute to store the expiration time in. This will be an int attribute. The timestamp will be stored as seconds past the epoch. If you configure this as the TTL field, then DynamoDB will automatically delete expired entries. :param key_prefix: A prefix that should be added to all keys. 
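    Example (a minimal usage sketch: it assumes AWS credentials are
    configured, and the ``endpoint_url`` keyword, which is simply
    forwarded to ``boto3.resource`` via ``**kwargs``, points at a
    hypothetical local DynamoDB instance)::

        from cachelib import DynamoDbCache

        cache = DynamoDbCache(
            table_name="my-cache",
            key_prefix="myapp/",
            endpoint_url="http://localhost:8000",  # assumption: local DynamoDB
        )
        cache.set("user:42", {"name": "Jane"}, timeout=300)
        assert cache.get("user:42") == {"name": "Jane"}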
""" serializer = DynamoDbSerializer() def __init__( self, table_name: _t.Optional[str] = "python-cache", default_timeout: int = 300, key_field: _t.Optional[str] = "cache_key", expiration_time_field: _t.Optional[str] = "expiration_time", key_prefix: _t.Optional[str] = None, **kwargs: _t.Any ): super().__init__(default_timeout) try: import boto3 # type: ignore except ImportError as err: raise RuntimeError("no boto3 module found") from err self._table_name = table_name self._key_field = key_field self._expiration_time_field = expiration_time_field self.key_prefix = key_prefix or "" self._dynamo = boto3.resource("dynamodb", **kwargs) self._attr = boto3.dynamodb.conditions.Attr try: self._table = self._dynamo.Table(table_name) self._table.load() # catch this exception (triggered if the table doesn't exist) except Exception: table = self._dynamo.create_table( AttributeDefinitions=[ {"AttributeName": key_field, "AttributeType": "S"} ], TableName=table_name, KeySchema=[ {"AttributeName": key_field, "KeyType": "HASH"}, ], BillingMode="PAY_PER_REQUEST", ) table.wait_until_exists() dynamo = boto3.client("dynamodb", **kwargs) dynamo.update_time_to_live( TableName=table_name, TimeToLiveSpecification={ "Enabled": True, "AttributeName": expiration_time_field, }, ) self._table = self._dynamo.Table(table_name) self._table.load() def _utcnow(self) -> _t.Any: """Return a tz-aware UTC datetime representing the current time""" return datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc) def _get_item(self, key: str, attributes: _t.Optional[list] = None) -> _t.Any: """ Get an item from the cache table, optionally limiting the returned attributes. :param key: The cache key of the item to fetch :param attributes: An optional list of attributes to fetch. If not given, all attributes are fetched. The expiration_time field will always be added to the list of fetched attributes. :return: The table item for key if it exists and is not expired, else None """ kwargs = {} if attributes: if self._expiration_time_field not in attributes: attributes = list(attributes) + [self._expiration_time_field] kwargs = dict(ProjectionExpression=",".join(attributes)) response = self._table.get_item(Key={self._key_field: key}, **kwargs) cache_item = response.get("Item") if cache_item: now = int(self._utcnow().timestamp()) if cache_item.get(self._expiration_time_field, now + 100) > now: return cache_item return None def get(self, key: str) -> _t.Any: """ Get a cache item :param key: The cache key of the item to fetch :return: cache value if not expired, else None """ cache_item = self._get_item(self.key_prefix + key) if cache_item: response = cache_item[RESPONSE_FIELD] value = self.serializer.loads(response) return value return None def delete(self, key: str) -> bool: """ Deletes an item from the cache. This is a no-op if the item doesn't exist :param key: Key of the item to delete. 
:return: True if the key existed and was deleted """ try: self._table.delete_item( Key={self._key_field: self.key_prefix + key}, ConditionExpression=self._attr(self._key_field).exists(), ) return True except self._dynamo.meta.client.exceptions.ConditionalCheckFailedException: return False def _set( self, key: str, value: _t.Any, timeout: _t.Optional[int] = None, overwrite: _t.Optional[bool] = True, ) -> _t.Any: """ Store a cache item, with the option to not overwrite existing items :param key: Cache key to use :param value: a serializable object :param timeout: The timeout in seconds for the cached item, to override the default :param overwrite: If true, overwrite any existing cache item with key. If false, the new value will only be stored if no non-expired cache item exists with key. :return: True if the new item was stored. """ timeout = self._normalize_timeout(timeout) now = self._utcnow() kwargs = {} if not overwrite: # Cause the put to fail if a non-expired item with this key # already exists cond = self._attr(self._key_field).not_exists() | self._attr( self._expiration_time_field ).lte(int(now.timestamp())) kwargs = dict(ConditionExpression=cond) try: dump = self.serializer.dumps(value) item = { self._key_field: key, CREATED_AT_FIELD: now.isoformat(), RESPONSE_FIELD: dump, } if timeout > 0: expiration_time = now + datetime.timedelta(seconds=timeout) item[self._expiration_time_field] = int(expiration_time.timestamp()) self._table.put_item(Item=item, **kwargs) return True except Exception: return False def set(self, key: str, value: _t.Any, timeout: _t.Optional[int] = None) -> _t.Any: return self._set(self.key_prefix + key, value, timeout=timeout, overwrite=True) def add(self, key: str, value: _t.Any, timeout: _t.Optional[int] = None) -> _t.Any: return self._set(self.key_prefix + key, value, timeout=timeout, overwrite=False) def has(self, key: str) -> bool: return ( self._get_item(self.key_prefix + key, [self._expiration_time_field]) is not None ) def clear(self) -> bool: paginator = self._dynamo.meta.client.get_paginator("scan") pages = paginator.paginate( TableName=self._table_name, ProjectionExpression=self._key_field ) with self._table.batch_writer() as batch: for page in pages: for item in page["Items"]: batch.delete_item(Key=item) return True cachelib-0.13.0/src/cachelib/file.py000066400000000000000000000277561460651207200171630ustar00rootroot00000000000000import errno import hashlib import logging import os import platform import stat import struct import tempfile import typing as _t from contextlib import contextmanager from pathlib import Path from time import sleep from time import time from cachelib.base import BaseCache from cachelib.serializers import FileSystemSerializer def _lazy_md5(string: bytes = b"") -> _t.Any: """Don't access ``hashlib.md5`` until runtime. FIPS builds may not include md5, in which case the import and use as a default would fail before the developer can configure something else. """ return hashlib.md5(string) class FileSystemCache(BaseCache): """A cache that stores the items on the file system. This cache depends on being the only user of the `cache_dir`. Make absolutely sure that nobody but this cache stores files there or otherwise the cache will randomly delete files therein. :param cache_dir: the directory where cache files are stored. :param threshold: the maximum number of items the cache stores before it starts deleting some. A threshold value of 0 indicates no threshold. 
:param default_timeout: the default timeout that is used if no timeout is specified on :meth:`~BaseCache.set`. A timeout of 0 indicates that the cache never expires. :param mode: the file mode wanted for the cache files, default 0600 :param hash_method: Default hashlib.md5. The hash method used to generate the filename for cached results. Default is lazy loaded and can be overriden by seeting `_default_hash_method` """ #: used for temporary files by the FileSystemCache _fs_transaction_suffix = ".__wz_cache" #: keep amount of files in a cache element _fs_count_file = "__wz_cache_count" #: default file name hashing method _default_hash_method = staticmethod(_lazy_md5) serializer = FileSystemSerializer() def __init__( self, cache_dir: str, threshold: int = 500, default_timeout: int = 300, mode: _t.Optional[int] = None, hash_method: _t.Any = None, ): BaseCache.__init__(self, default_timeout) self._path = cache_dir self._threshold = threshold self._hash_method = self._default_hash_method if hash_method is not None: self._hash_method = hash_method # Mode set by user takes precedence. If no mode has # been given, we need to set the correct default based # on user platform. self._mode = mode if self._mode is None: self._mode = self._get_compatible_platform_mode() try: os.makedirs(self._path) except OSError as ex: if ex.errno != errno.EEXIST: raise # If there are many files and a zero threshold, # the list_dir can slow initialisation massively if self._threshold != 0: self._update_count(value=len(list(self._list_dir()))) def _get_compatible_platform_mode(self) -> int: mode = 0o600 # nix systems if platform.system() == "Windows": mode = stat.S_IWRITE return mode @property def _file_count(self) -> int: return self.get(self._fs_count_file) or 0 def _update_count( self, delta: _t.Optional[int] = None, value: _t.Optional[int] = None ) -> None: # If we have no threshold, don't count files if self._threshold == 0: return if delta: new_count = self._file_count + delta else: new_count = value or 0 self.set(self._fs_count_file, new_count, mgmt_element=True) def _normalize_timeout(self, timeout: _t.Optional[int]) -> int: timeout = BaseCache._normalize_timeout(self, timeout) if timeout != 0: timeout = int(time()) + timeout return int(timeout) def _is_mgmt(self, name: str) -> bool: fshash = self._get_filename(self._fs_count_file).split(os.sep)[-1] return name == fshash or name.endswith(self._fs_transaction_suffix) def _list_dir(self) -> _t.Generator[str, None, None]: """return a list of (fully qualified) cache filenames""" return ( os.path.join(self._path, fn) for fn in os.listdir(self._path) if not self._is_mgmt(fn) ) def _over_threshold(self) -> bool: return self._threshold != 0 and self._file_count > self._threshold def _remove_expired(self, now: float) -> None: for fname in self._list_dir(): try: with self._safe_stream_open(fname, "rb") as f: expires = struct.unpack("I", f.read(4))[0] if expires != 0 and expires < now: os.remove(fname) self._update_count(delta=-1) except FileNotFoundError: pass except (OSError, EOFError, struct.error): logging.warning( "Exception raised while handling cache file '%s'", fname, exc_info=True, ) def _remove_older(self) -> bool: exp_fname_tuples = [] for fname in self._list_dir(): try: with self._safe_stream_open(fname, "rb") as f: timestamp = struct.unpack("I", f.read(4))[0] exp_fname_tuples.append((timestamp, fname)) except FileNotFoundError: pass except (OSError, EOFError, struct.error): logging.warning( "Exception raised while handling cache file '%s'", fname, 
exc_info=True, ) fname_sorted = ( fname for _, fname in sorted(exp_fname_tuples, key=lambda item: item[0]) ) for fname in fname_sorted: try: os.remove(fname) self._update_count(delta=-1) except FileNotFoundError: pass except OSError: logging.warning( "Exception raised while handling cache file '%s'", fname, exc_info=True, ) return False if not self._over_threshold(): break return True def _prune(self) -> None: if self._over_threshold(): now = time() self._remove_expired(now) # if still over threshold if self._over_threshold(): self._remove_older() def clear(self) -> bool: for i, fname in enumerate(self._list_dir()): try: os.remove(fname) except FileNotFoundError: pass except OSError: logging.warning( "Exception raised while handling cache file '%s'", fname, exc_info=True, ) self._update_count(delta=-i) return False self._update_count(value=0) return True def _get_filename(self, key: str) -> str: if isinstance(key, str): bkey = key.encode("utf-8") # XXX unicode review bkey_hash = self._hash_method(bkey).hexdigest() else: raise TypeError(f"Key must be a string, received type {type(key)}") return os.path.join(self._path, bkey_hash) def get(self, key: str) -> _t.Any: filename = self._get_filename(key) try: with self._safe_stream_open(filename, "rb") as f: pickle_time = struct.unpack("I", f.read(4))[0] if pickle_time == 0 or pickle_time >= time(): return self.serializer.load(f) except FileNotFoundError: pass except (OSError, EOFError, struct.error): logging.warning( "Exception raised while handling cache file '%s'", filename, exc_info=True, ) return None def add(self, key: str, value: _t.Any, timeout: _t.Optional[int] = None) -> bool: filename = self._get_filename(key) if not os.path.exists(filename): return self.set(key, value, timeout) return False def set( self, key: str, value: _t.Any, timeout: _t.Optional[int] = None, mgmt_element: bool = False, ) -> bool: # Management elements have no timeout if mgmt_element: timeout = 0 # Don't prune on management element update, to avoid loop else: self._prune() timeout = self._normalize_timeout(timeout) filename = self._get_filename(key) overwrite = os.path.isfile(filename) try: fd, tmp = tempfile.mkstemp( suffix=self._fs_transaction_suffix, dir=self._path ) with os.fdopen(fd, "wb") as f: f.write(struct.pack("I", timeout)) self.serializer.dump(value, f) self._run_safely(os.replace, tmp, filename) self._run_safely(os.chmod, filename, self._mode) fsize = Path(filename).stat().st_size except OSError: logging.warning( "Exception raised while handling cache file '%s'", filename, exc_info=True, ) return False else: # Management elements should not count towards threshold if not overwrite and not mgmt_element: self._update_count(delta=1) return fsize > 0 # function should fail if file is empty def delete(self, key: str, mgmt_element: bool = False) -> bool: try: os.remove(self._get_filename(key)) except FileNotFoundError: # if file doesn't exist we consider it deleted return True except OSError: logging.warning("Exception raised while handling cache file", exc_info=True) return False else: # Management elements should not count towards threshold if not mgmt_element: self._update_count(delta=-1) return True def has(self, key: str) -> bool: filename = self._get_filename(key) try: with self._safe_stream_open(filename, "rb") as f: pickle_time = struct.unpack("I", f.read(4))[0] if pickle_time == 0 or pickle_time >= time(): return True else: return False except FileNotFoundError: # if there is no file there is no key return False except (OSError, EOFError, 
struct.error): logging.warning( "Exception raised while handling cache file '%s'", filename, exc_info=True, ) return False def _run_safely(self, fn: _t.Callable, *args: _t.Any, **kwargs: _t.Any) -> _t.Any: """On Windows, os.replace, os.chmod and open can yield permission errors if executed by two different processes.""" if platform.system() == "Windows": output = None wait_step = 0.001 max_sleep_time = 10.0 total_sleep_time = 0.0 while total_sleep_time < max_sleep_time: try: output = fn(*args, **kwargs) except PermissionError: sleep(wait_step) total_sleep_time += wait_step wait_step *= 2 else: break else: output = fn(*args, **kwargs) return output @contextmanager def _safe_stream_open(self, path: str, mode: str) -> _t.Generator: fs = self._run_safely(open, path, mode) if fs is None: raise OSError try: yield fs finally: fs.close() cachelib-0.13.0/src/cachelib/memcached.py000066400000000000000000000157701460651207200201410ustar00rootroot00000000000000import re import typing as _t from time import time from cachelib.base import BaseCache _test_memcached_key = re.compile(r"[^\x00-\x21\xff]{1,250}$").match class MemcachedCache(BaseCache): """A cache that uses memcached as backend. The first argument can either be an object that resembles the API of a :class:`memcache.Client` or a tuple/list of server addresses. In the event that a tuple/list is passed, cachelib tries to import the best available memcache library. This cache looks into the following packages/modules to find bindings for memcached: - ``pylibmc`` - ``google.appengine.api.memcache`` - ``memcache`` - ``libmc`` Implementation notes: This cache backend works around some limitations in memcached to simplify the interface. For example, unicode keys are encoded to utf-8 on the fly. Methods such as :meth:`~BaseCache.get_dict` return the keys in the same format as passed. Furthermore, all get methods silently ignore key errors, so that untrusted user data passed to them (often the case in web applications) does not cause problems. :param servers: a list or tuple of server addresses or alternatively a :class:`memcache.Client` or a compatible client. :param default_timeout: the default timeout that is used if no timeout is specified on :meth:`~BaseCache.set`. A timeout of 0 indicates that the cache never expires. :param key_prefix: a prefix that is added before all keys. This makes it possible to use the same memcached server for different applications. Keep in mind that :meth:`~BaseCache.clear` will also clear keys with a different prefix. """ def __init__( self, servers: _t.Any = None, default_timeout: int = 300, key_prefix: _t.Optional[str] = None, ): BaseCache.__init__(self, default_timeout) if servers is None or isinstance(servers, (list, tuple)): if servers is None: servers = ["127.0.0.1:11211"] self._client = self.import_preferred_memcache_lib(servers) if self._client is None: raise RuntimeError("no memcache module found") else: # NOTE: servers is actually an already initialized memcache # client. self._client = servers self.key_prefix = key_prefix def _normalize_key(self, key: str) -> str: if self.key_prefix: key = self.key_prefix + key return key def _normalize_timeout(self, timeout: _t.Optional[int]) -> int: timeout = BaseCache._normalize_timeout(self, timeout) if timeout > 0: timeout = int(time()) + timeout return timeout def get(self, key: str) -> _t.Any: key = self._normalize_key(key) # memcached doesn't support keys longer than 250 characters.
Such long keys often come # from untrusted user-submitted data, so we fail silently on get # rather than raise. if _test_memcached_key(key): return self._client.get(key) def get_dict(self, *keys: str) -> _t.Dict[str, _t.Any]: key_mapping = {} for key in keys: encoded_key = self._normalize_key(key) if _test_memcached_key(key): key_mapping[encoded_key] = key _keys = list(key_mapping) d = rv = self._client.get_multi(_keys) # type: _t.Dict[str, _t.Any] if self.key_prefix: rv = {} for key, value in d.items(): rv[key_mapping[key]] = value if len(rv) < len(keys): for key in keys: if key not in rv: rv[key] = None return rv def add(self, key: str, value: _t.Any, timeout: _t.Optional[int] = None) -> bool: key = self._normalize_key(key) timeout = self._normalize_timeout(timeout) return bool(self._client.add(key, value, timeout)) def set( self, key: str, value: _t.Any, timeout: _t.Optional[int] = None ) -> _t.Optional[bool]: key = self._normalize_key(key) timeout = self._normalize_timeout(timeout) return bool(self._client.set(key, value, timeout)) def get_many(self, *keys: str) -> _t.List[_t.Any]: d = self.get_dict(*keys) return [d[key] for key in keys] def set_many( self, mapping: _t.Dict[str, _t.Any], timeout: _t.Optional[int] = None ) -> _t.List[_t.Any]: new_mapping = {} for key, value in mapping.items(): key = self._normalize_key(key) new_mapping[key] = value timeout = self._normalize_timeout(timeout) failed_keys = self._client.set_multi( new_mapping, timeout ) # type: _t.List[_t.Any] k_normkey = zip(mapping.keys(), new_mapping.keys()) # noqa: B905 return [k for k, nkey in k_normkey if nkey not in failed_keys] def delete(self, key: str) -> bool: key = self._normalize_key(key) if _test_memcached_key(key): return bool(self._client.delete(key)) return False def delete_many(self, *keys: str) -> _t.List[_t.Any]: new_keys = [] for key in keys: key = self._normalize_key(key) if _test_memcached_key(key): new_keys.append(key) self._client.delete_multi(new_keys) return [k for k in new_keys if not self.has(k)] def has(self, key: str) -> bool: key = self._normalize_key(key) if _test_memcached_key(key): return bool(self._client.append(key, "")) return False def clear(self) -> bool: return bool(self._client.flush_all()) def inc(self, key: str, delta: int = 1) -> _t.Optional[int]: key = self._normalize_key(key) value = (self._client.get(key) or 0) + delta return value if self.set(key, value) else None def dec(self, key: str, delta: int = 1) -> _t.Optional[int]: key = self._normalize_key(key) value = (self._client.get(key) or 0) - delta return value if self.set(key, value) else None def import_preferred_memcache_lib(self, servers: _t.Any) -> _t.Any: """Returns an initialized memcache client. Used by the constructor.""" try: import pylibmc # type: ignore except ImportError: pass else: return pylibmc.Client(servers) try: from google.appengine.api import memcache # type: ignore except ImportError: pass else: return memcache.Client() try: import memcache # type: ignore except ImportError: pass else: return memcache.Client(servers) try: import libmc # type: ignore except ImportError: pass else: return libmc.Client(servers) cachelib-0.13.0/src/cachelib/mongodb.py000066400000000000000000000153651460651207200176600ustar00rootroot00000000000000import datetime import logging import typing as _t from cachelib.base import BaseCache from cachelib.serializers import BaseSerializer class MongoDbCache(BaseCache): """ Implementation of cachelib.BaseCache that uses a MongoDB collection as the backend.
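A minimal usage sketch (assumes a MongoDB server reachable at the given connection string; the ``db`` and ``collection`` names shown are simply the constructor defaults)::

    from cachelib.mongodb import MongoDbCache

    cache = MongoDbCache(
        client="mongodb://127.0.0.1:27017",
        db="cache-db",
        collection="cache-collection",
        default_timeout=300,
    )
    cache.set("spam", {"eggs": 1}, timeout=60)
    assert cache.get("spam") == {"eggs": 1}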
Limitations: maximum MongoDB document size is 16 MB :param client: mongodb client or connection string :param db: mongodb database name :param collection: mongodb collection name :param default_timeout: Set the timeout in seconds after which cache entries expire :param key_prefix: A prefix that should be added to all keys. """ serializer = BaseSerializer() def __init__( self, client: _t.Any = None, db: _t.Optional[str] = "cache-db", collection: _t.Optional[str] = "cache-collection", default_timeout: int = 300, key_prefix: _t.Optional[str] = None, **kwargs: _t.Any ): super().__init__(default_timeout) try: import pymongo # type: ignore except ImportError: logging.warning("no pymongo module found") pymongo = None if client is None or isinstance(client, str): if pymongo is None: raise RuntimeError( "pymongo is required to build a client from a connection string" ) client = pymongo.MongoClient(host=client) self.client = client[db][collection] index_info = self.client.index_information() all_keys = { subkey[0] for value in index_info.values() for subkey in value["key"] } if "id" not in all_keys: self.client.create_index("id", unique=True) self.key_prefix = key_prefix or "" self.collection = collection def _utcnow(self) -> _t.Any: """Return a tz-aware UTC datetime representing the current time""" return datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc) def _expire_records(self) -> _t.Any: res = self.client.delete_many({"expiration": {"$lte": self._utcnow()}}) return res def get(self, key: str) -> _t.Any: """ Get a cache item :param key: The cache key of the item to fetch :return: cache value if not expired, else None """ self._expire_records() record = self.client.find_one({"id": self.key_prefix + key}) value = None if record: value = self.serializer.loads(record["val"]) return value def delete(self, key: str) -> bool: """ Deletes an item from the cache. This is a no-op if the item doesn't exist :param key: Key of the item to delete. :return: True if the key existed and was deleted """ res = self.client.delete_one({"id": self.key_prefix + key}) deleted = bool(res.deleted_count > 0) return deleted def _set( self, key: str, value: _t.Any, timeout: _t.Optional[int] = None, overwrite: _t.Optional[bool] = True, ) -> _t.Any: """ Store a cache item, with the option to not overwrite existing items :param key: Cache key to use :param value: a serializable object :param timeout: The timeout in seconds for the cached item, to override the default :param overwrite: If true, overwrite any existing cache item with key. If false, the new value will only be stored if no non-expired cache item exists with key. :return: True if the new item was stored.
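Internally this issues a single ``update_one`` upsert keyed on the prefixed ``id`` field, so concurrent writers to the same key resolve to last-write-wins.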
""" timeout = self._normalize_timeout(timeout) now = self._utcnow() if not overwrite: # fail if a non-expired item with this key # already exists if self.has(key): return False dump = self.serializer.dumps(value) record = {"id": self.key_prefix + key, "val": dump} if timeout > 0: record["expiration"] = now + datetime.timedelta(seconds=timeout) self.client.update_one({"id": self.key_prefix + key}, {"$set": record}, True) return True def set(self, key: str, value: _t.Any, timeout: _t.Optional[int] = None) -> _t.Any: self._expire_records() return self._set(key, value, timeout=timeout, overwrite=True) def set_many( self, mapping: _t.Dict[str, _t.Any], timeout: _t.Optional[int] = None ) -> _t.List[_t.Any]: self._expire_records() from pymongo import UpdateOne operations = [] now = self._utcnow() timeout = self._normalize_timeout(timeout) for key, val in mapping.items(): dump = self.serializer.dumps(val) record = {"id": self.key_prefix + key, "val": dump} if timeout > 0: record["expiration"] = now + datetime.timedelta(seconds=timeout) operations.append( UpdateOne({"id": self.key_prefix + key}, {"$set": record}, upsert=True), ) result = self.client.bulk_write(operations) keys = list(mapping.keys()) if result.bulk_api_result["nUpserted"] != len(keys): query = self.client.find( {"id": {"$in": [self.key_prefix + key for key in keys]}} ) keys = [] for item in query: keys.append(item["id"]) return keys def get_many(self, *keys: str) -> _t.List[_t.Any]: results = self.get_dict(*keys) values = [] for key in keys: values.append(results.get(key, None)) return values def get_dict(self, *keys: str) -> _t.Dict[str, _t.Any]: self._expire_records() query = self.client.find( {"id": {"$in": [self.key_prefix + key for key in keys]}} ) results = dict.fromkeys(keys, None) for item in query: value = self.serializer.loads(item["val"]) results[item["id"][len(self.key_prefix) :]] = value return results def add(self, key: str, value: _t.Any, timeout: _t.Optional[int] = None) -> _t.Any: self._expire_records() return self._set(key, value, timeout=timeout, overwrite=False) def has(self, key: str) -> bool: self._expire_records() record = self.get(key) return record is not None def delete_many(self, *keys: str) -> _t.List[_t.Any]: self._expire_records() res = list(keys) filter = {"id": {"$in": [self.key_prefix + key for key in keys]}} result = self.client.delete_many(filter) if result.deleted_count != len(keys): existing_keys = [ item["id"][len(self.key_prefix) :] for item in self.client.find(filter) ] res = [item for item in keys if item not in existing_keys] return res def clear(self) -> bool: self.client.drop() return True cachelib-0.13.0/src/cachelib/py.typed000066400000000000000000000000001460651207200173360ustar00rootroot00000000000000cachelib-0.13.0/src/cachelib/redis.py000066400000000000000000000142271460651207200173370ustar00rootroot00000000000000import typing as _t from cachelib.base import BaseCache from cachelib.serializers import RedisSerializer class RedisCache(BaseCache): """Uses the Redis key-value store as a cache backend. The first argument can be either a string denoting address of the Redis server or an object resembling an instance of a redis.Redis class. Note: Python Redis API already takes care of encoding unicode strings on the fly. :param host: address of the Redis server or an object which API is compatible with the official Python Redis client (redis-py). :param port: port number on which Redis server listens for connections. :param password: password authentication for the Redis server. 
:param db: db (zero-based numeric index) on Redis Server to connect. :param default_timeout: the default timeout that is used if no timeout is specified on :meth:`~BaseCache.set`. A timeout of 0 indicates that the cache never expires. :param key_prefix: A prefix that should be added to all keys. Any additional keyword arguments will be passed to ``redis.Redis``. """ _read_client: _t.Any = None _write_client: _t.Any = None serializer = RedisSerializer() def __init__( self, host: _t.Any = "localhost", port: int = 6379, password: _t.Optional[str] = None, db: int = 0, default_timeout: int = 300, key_prefix: _t.Optional[_t.Union[str, _t.Callable[[], str]]] = None, **kwargs: _t.Any, ): BaseCache.__init__(self, default_timeout) if host is None: raise ValueError("RedisCache host parameter may not be None") if isinstance(host, str): try: import redis except ImportError as err: raise RuntimeError("no redis module found") from err if kwargs.get("decode_responses", None): raise ValueError("decode_responses is not supported by RedisCache.") self._write_client = self._read_client = redis.Redis( host=host, port=port, password=password, db=db, **kwargs ) else: self._read_client = self._write_client = host self.key_prefix = key_prefix or "" def _get_prefix(self) -> str: return ( self.key_prefix if isinstance(self.key_prefix, str) else self.key_prefix() ) def _normalize_timeout(self, timeout: _t.Optional[int]) -> int: """Normalize timeout by setting it to default of 300 if not defined (None) or -1 if explicitly set to zero. :param timeout: timeout to normalize. """ timeout = BaseCache._normalize_timeout(self, timeout) if timeout == 0: timeout = -1 return timeout def get(self, key: str) -> _t.Any: return self.serializer.loads( self._read_client.get(f"{self._get_prefix()}{key}") ) def get_many(self, *keys: str) -> _t.List[_t.Any]: if self.key_prefix: prefixed_keys = [f"{self._get_prefix()}{key}" for key in keys] else: prefixed_keys = list(keys) return [self.serializer.loads(x) for x in self._read_client.mget(prefixed_keys)] def set(self, key: str, value: _t.Any, timeout: _t.Optional[int] = None) -> _t.Any: timeout = self._normalize_timeout(timeout) dump = self.serializer.dumps(value) if timeout == -1: result = self._write_client.set( name=f"{self._get_prefix()}{key}", value=dump ) else: result = self._write_client.setex( name=f"{self._get_prefix()}{key}", value=dump, time=timeout ) return result def add(self, key: str, value: _t.Any, timeout: _t.Optional[int] = None) -> _t.Any: timeout = self._normalize_timeout(timeout) dump = self.serializer.dumps(value) created = self._write_client.setnx( name=f"{self._get_prefix()}{key}", value=dump ) # handle case where timeout is explicitly set to zero if created and timeout != -1: self._write_client.expire(name=f"{self._get_prefix()}{key}", time=timeout) return created def set_many( self, mapping: _t.Dict[str, _t.Any], timeout: _t.Optional[int] = None ) -> _t.List[_t.Any]: timeout = self._normalize_timeout(timeout) # Use transaction=False to batch without calling redis MULTI # which is not supported by twemproxy pipe = self._write_client.pipeline(transaction=False) for key, value in mapping.items(): dump = self.serializer.dumps(value) if timeout == -1: pipe.set(name=f"{self._get_prefix()}{key}", value=dump) else: pipe.setex(name=f"{self._get_prefix()}{key}", value=dump, time=timeout) results = pipe.execute() return [k for k, was_set in zip(mapping.keys(), results) if was_set] def delete(self, key: str) -> bool: return 
bool(self._write_client.delete(f"{self._get_prefix()}{key}")) def delete_many(self, *keys: str) -> _t.List[_t.Any]: if not keys: return [] if self.key_prefix: prefixed_keys = [f"{self._get_prefix()}{key}" for key in keys] else: prefixed_keys = [k for k in keys] self._write_client.delete(*prefixed_keys) return [k for k in prefixed_keys if not self.has(k)] def has(self, key: str) -> bool: return bool(self._read_client.exists(f"{self._get_prefix()}{key}")) def clear(self) -> bool: status = 0 if self.key_prefix: keys = self._read_client.keys(self._get_prefix() + "*") if keys: status = self._write_client.delete(*keys) else: status = self._write_client.flushdb() return bool(status) def inc(self, key: str, delta: int = 1) -> _t.Any: return self._write_client.incr(name=f"{self._get_prefix()}{key}", amount=delta) def dec(self, key: str, delta: int = 1) -> _t.Any: return self._write_client.incr(name=f"{self._get_prefix()}{key}", amount=-delta) cachelib-0.13.0/src/cachelib/serializers.py000066400000000000000000000064171460651207200205660ustar00rootroot00000000000000import logging import pickle import typing as _t class BaseSerializer: """This is the base interface for all default serializers. BaseSerializer.load and BaseSerializer.dump will default to pickle.load and pickle.dump. This is currently used only by FileSystemCache which dumps/loads to/from a file stream. """ def _warn(self, e: pickle.PickleError) -> None: logging.warning( f"An exception has been raised during a pickling operation: {e}" ) def dump( self, value: _t.Any, f: _t.IO, protocol: int = pickle.HIGHEST_PROTOCOL ) -> None: try: pickle.dump(value, f, protocol) except pickle.PickleError as e: self._warn(e) def load(self, f: _t.BinaryIO) -> _t.Any: try: data = pickle.load(f) except pickle.PickleError as e: self._warn(e) return None else: return data """BaseSerializer.loads and BaseSerializer.dumps work on top of pickle.loads and pickle.dumps. Dumping/loading strings and byte strings is the default for most cache types. """ def dumps(self, value: _t.Any, protocol: int = pickle.HIGHEST_PROTOCOL) -> bytes: try: serialized = pickle.dumps(value, protocol) except pickle.PickleError as e: self._warn(e) raise return serialized def loads(self, bvalue: bytes) -> _t.Any: try: data = pickle.loads(bvalue) except pickle.PickleError as e: self._warn(e) return None else: return data """Default serializers for each cache type. The following classes can be used to further customize serialization behaviour. Alternatively, any serializer can be overridden in order to use a custom serializer with a different strategy altogether. """ class UWSGISerializer(BaseSerializer): """Default serializer for UWSGICache.""" class SimpleSerializer(BaseSerializer): """Default serializer for SimpleCache.""" class FileSystemSerializer(BaseSerializer): """Default serializer for FileSystemCache.""" class RedisSerializer(BaseSerializer): """Default serializer for RedisCache.""" def dumps(self, value: _t.Any, protocol: int = pickle.HIGHEST_PROTOCOL) -> bytes: """Dumps an object into a byte string for redis, using pickle by default.""" return b"!" + pickle.dumps(value, protocol) def loads(self, value: _t.Optional[bytes]) -> _t.Any: """The reversal of :meth:`dumps`. This might be called with None. """ if value is None: return None if value.startswith(b"!"): try: return pickle.loads(value[1:]) except pickle.PickleError: return None try: return int(value) except ValueError: # before 0.8 we did not have serialization. Still support that.
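# (Note: values written by RedisCache.inc/dec are stored by the redis # server as raw integer strings without the b"!" marker, which is why # the int() fallback above exists; anything else is returned as raw bytes.)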
return value class DynamoDbSerializer(RedisSerializer): """Default serializer for DynamoDbCache.""" def loads(self, value: _t.Any) -> _t.Any: """The reversal of :meth:`dumps`. This might be called with None. """ value = value.value return super().loads(value) cachelib-0.13.0/src/cachelib/simple.py000066400000000000000000000065361460651207200175220ustar00rootroot00000000000000import typing as _t from time import time from cachelib.base import BaseCache from cachelib.serializers import SimpleSerializer class SimpleCache(BaseCache): """Simple memory cache for single process environments. This class exists mainly for the development server and is not 100% thread safe. It tries to use as many atomic operations as possible and no locks for simplicity, but it could happen under heavy load that keys are added multiple times. :param threshold: the maximum number of items the cache stores before it starts deleting some. :param default_timeout: the default timeout that is used if no timeout is specified on :meth:`~BaseCache.set`. A timeout of 0 indicates that the cache never expires. """ serializer = SimpleSerializer() def __init__( self, threshold: int = 500, default_timeout: int = 300, ): BaseCache.__init__(self, default_timeout) self._cache: _t.Dict[str, _t.Any] = {} self._threshold = threshold or 500 # a threshold of 0 falls back to the default def _over_threshold(self) -> bool: return len(self._cache) > self._threshold def _remove_expired(self, now: float) -> None: toremove = [ k for k, (expires, _) in self._cache.items() if expires != 0 and expires < now ] for k in toremove: self._cache.pop(k, None) def _remove_older(self) -> None: k_ordered = ( k for k, v in sorted(self._cache.items(), key=lambda item: item[1][0]) ) for k in k_ordered: self._cache.pop(k, None) if not self._over_threshold(): break def _prune(self) -> None: if self._over_threshold(): now = time() self._remove_expired(now) # remove older items if still over threshold if self._over_threshold(): self._remove_older() def _normalize_timeout(self, timeout: _t.Optional[int]) -> int: timeout = BaseCache._normalize_timeout(self, timeout) if timeout > 0: timeout = int(time()) + timeout return timeout def get(self, key: str) -> _t.Any: try: expires, value = self._cache[key] if expires == 0 or expires > time(): return self.serializer.loads(value) except KeyError: return None def set( self, key: str, value: _t.Any, timeout: _t.Optional[int] = None ) -> _t.Optional[bool]: expires = self._normalize_timeout(timeout) self._prune() self._cache[key] = (expires, self.serializer.dumps(value)) return True def add(self, key: str, value: _t.Any, timeout: _t.Optional[int] = None) -> bool: expires = self._normalize_timeout(timeout) self._prune() item = (expires, self.serializer.dumps(value)) if key in self._cache: return False self._cache.setdefault(key, item) return True def delete(self, key: str) -> bool: return self._cache.pop(key, None) is not None def has(self, key: str) -> bool: try: expires, value = self._cache[key] return bool(expires == 0 or expires > time()) except KeyError: return False def clear(self) -> bool: self._cache.clear() return not bool(self._cache) cachelib-0.13.0/src/cachelib/uwsgi.py000066400000000000000000000050031460651207200173570ustar00rootroot00000000000000import platform import typing as _t from cachelib.base import BaseCache from cachelib.serializers import UWSGISerializer class UWSGICache(BaseCache): """Implements the cache using uWSGI's caching framework. ..
note:: This class cannot be used when running under PyPy, because the uWSGI API implementation for PyPy is lacking the needed functionality. :param default_timeout: The default timeout in seconds. :param cache: The name of the caching instance to connect to, for example: mycache@localhost:3031, defaults to an empty string, which means uWSGI will cache in the local instance. If the cache is in the same instance as the werkzeug app, you only have to provide the name of the cache. """ serializer = UWSGISerializer() def __init__( self, default_timeout: int = 300, cache: str = "", ): BaseCache.__init__(self, default_timeout) if platform.python_implementation() == "PyPy": raise RuntimeError( "uWSGI caching does not work under PyPy, see " "the docs for more details." ) try: import uwsgi # type: ignore self._uwsgi = uwsgi except ImportError as err: raise RuntimeError( "uWSGI could not be imported, are you running under uWSGI?" ) from err self.cache = cache def get(self, key: str) -> _t.Any: rv = self._uwsgi.cache_get(key, self.cache) if rv is None: return return self.serializer.loads(rv) def delete(self, key: str) -> bool: return bool(self._uwsgi.cache_del(key, self.cache)) def set( self, key: str, value: _t.Any, timeout: _t.Optional[int] = None ) -> _t.Optional[bool]: result = self._uwsgi.cache_update( key, self.serializer.dumps(value), self._normalize_timeout(timeout), self.cache, ) # type: bool return result def add(self, key: str, value: _t.Any, timeout: _t.Optional[int] = None) -> bool: return bool( self._uwsgi.cache_set( key, self.serializer.dumps(value), self._normalize_timeout(timeout), self.cache, ) ) def clear(self) -> bool: return bool(self._uwsgi.cache_clear(self.cache)) def has(self, key: str) -> bool: return self._uwsgi.cache_exists(key, self.cache) is not None cachelib-0.13.0/tests/000077500000000000000000000000001460651207200144725ustar00rootroot00000000000000cachelib-0.13.0/tests/clear.py000066400000000000000000000005141460651207200161320ustar00rootroot00000000000000from conftest import TestData class ClearTests(TestData): """Tests for the optional 'clear' method specified by BaseCache""" def test_clear(self): cache = self.cache_factory() assert cache.set_many(self.sample_pairs) assert cache.clear() assert not any(cache.get_many(*self.sample_pairs)) cachelib-0.13.0/tests/common.py000066400000000000000000000057421460651207200163440ustar00rootroot00000000000000from time import sleep import pytest from conftest import TestData from conftest import under_uwsgi class CommonTests(TestData): """A base set of tests to be run for all cache types""" def test_set_get(self): cache = self.cache_factory() for k, v in self.sample_pairs.items(): assert cache.set(k, v) assert cache.get(k) == v def test_set_get_many(self): cache = self.cache_factory() result = cache.set_many(self.sample_pairs) assert result == list(self.sample_pairs.keys()) values = cache.get_many(*self.sample_pairs) assert values == list(self.sample_pairs.values()) def test_get_dict(self): cache = self.cache_factory() cache.set_many(self.sample_pairs) d = cache.get_dict(*self.sample_pairs) assert d == self.sample_pairs def test_delete(self): cache = self.cache_factory() for k, v in self.sample_pairs.items(): cache.set(k, v) assert cache.delete(k) assert not cache.get(k) def test_delete_many(self): cache = self.cache_factory() cache.set_many(self.sample_pairs) result = cache.delete_many(*self.sample_pairs) assert result == list(self.sample_pairs.keys()) assert not any(cache.get_many(*self.sample_pairs)) def 
test_delete_many_ignore_errors(self): cache = self.cache_factory() cache.set("bacon", "spam") cache.delete_many("eggs", "bacon") assert cache.get("bacon") is None def test_add(self): cache = self.cache_factory() cache.set_many(self.sample_pairs) for k in self.sample_pairs: assert not cache.add(k, "updated") assert cache.get_many(*self.sample_pairs) == list(self.sample_pairs.values()) for k, v in self.sample_pairs.items(): assert cache.add(f"{k}-new", v) assert cache.get(f"{k}-new") == v def test_inc_dec(self): cache = self.cache_factory() for n in self.sample_numbers: assert not cache.get(f"{n}-key-inc") assert cache.inc(f"{n}-key-inc", n) == n assert cache.get(f"{n}-key-inc") == n assert cache.dec(f"{n}-key-dec", n) == -n assert cache.get(f"{n}-key-dec") == -n assert cache.dec(f"{n}-key-inc", 5) == n - 5 def test_expiration(self): if under_uwsgi(): pytest.skip( "uwsgi uses a separate sweeper thread to clean" " expired cache entries, thus the testing" " of such a feature must be handled differently" " from other cache types." ) cache = self.cache_factory() for k, v in self.sample_pairs.items(): cache.set(f"{k}-t0", v, timeout=0) cache.set(f"{k}-t1", v, timeout=1) sleep(4) for k, v in self.sample_pairs.items(): assert cache.get(f"{k}-t0") == v assert not cache.get(f"{k}-t1") cachelib-0.13.0/tests/conftest.py000066400000000000000000000050031460651207200166700ustar00rootroot00000000000000import os import subprocess import warnings from pathlib import Path import pytest from xprocess import ProcessStarter def under_uwsgi(): try: import uwsgi # noqa: F401 except ImportError: return False else: return True @pytest.hookimpl() def pytest_sessionfinish(session, exitstatus): if under_uwsgi(): try: script_path = Path(os.environ["TMPDIR"], "return_pytest_exit_code.py") except KeyError: warnings.warn( "Pytest could not find tox 'TMPDIR' in the environment;" " make sure the variable is set in the project tox.ini" " file if you are running under tox.", stacklevel=2, ) else: with open(script_path, mode="w") as f: f.write(f"import sys; sys.exit({exitstatus})") @pytest.fixture(scope="class") def redis_server(xprocess): package_name = "redis" pytest.importorskip( modname=package_name, reason=f"could not find python package {package_name}" ) class Starter(ProcessStarter): pattern = "[Rr]eady to accept connections" args = ["redis-server", "--port 6360"] def startup_check(self): out = subprocess.run( ["redis-cli", "-p", "6360", "ping"], stdout=subprocess.PIPE ) return out.stdout == b"PONG\n" xprocess.ensure(package_name, Starter) yield xprocess.getinfo(package_name).terminate() @pytest.fixture(scope="class") def memcached_server(xprocess): package_name = "pylibmc" pytest.importorskip( modname=package_name, reason=f"could not find python package {package_name}" ) class Starter(ProcessStarter): pattern = "server listening" args = ["memcached", "-vv"] def startup_check(self): out = subprocess.run(["memcached"], stderr=subprocess.PIPE) return b"Address already" in out.stderr xprocess.ensure(package_name, Starter) yield xprocess.getinfo(package_name).terminate() class TestData: """This class centralizes all data samples used in tests""" sample_numbers = [0, 10, 1024000, 9, 5000000000000, 99, 738, 2000000] sample_pairs = { "128": False, "beef": True, "crevettes": {}, "1024": "spam", "bacon": "eggs", "sausage": 2048, "3072": [], "brandy": [{}, "fried eggs"], "lobster": ["baked beans", [512]], "4096": {"sauce": [], 256: "truffle"}, }
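For orientation, a minimal sketch of how these pieces are meant to compose (SimpleCache stands in for an arbitrary backend here; the real per-backend wiring lives in the test modules below):

import pytest
from clear import ClearTests
from common import CommonTests
from has import HasTests

from cachelib import SimpleCache


@pytest.fixture(autouse=True)
def cache_factory(request):
    # Attach a per-class factory so test classes can call
    # self.cache_factory() to get a fresh cache in every test.
    def _factory(self, *args, **kwargs):
        return SimpleCache(*args, **kwargs)

    request.cls.cache_factory = _factory


class TestExampleBackend(CommonTests, ClearTests, HasTests):
    pass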
cachelib-0.13.0/tests/has.py000066400000000000000000000005321460651207200156170ustar00rootroot00000000000000from conftest import TestData class HasTests(TestData): """Tests for the optional 'has' method specified by BaseCache""" def test_has(self): cache = self.cache_factory() assert cache.set_many(self.sample_pairs) for k in self.sample_pairs: assert cache.has(k) assert not cache.has("unknown") cachelib-0.13.0/tests/test_base_cache.py000066400000000000000000000035361460651207200201470ustar00rootroot00000000000000import pytest from cachelib import BaseCache @pytest.fixture(autouse=True) def cache_factory(request): def _factory(self, *args, **kwargs): return BaseCache(*args, **kwargs) request.cls.cache_factory = _factory class TestBaseCache: def test_get(self): cache = self.cache_factory() assert cache.get("bacon") is None def test_delete(self): cache = self.cache_factory() assert cache.delete("eggs") def test_get_many(self): cache = self.cache_factory() keys = ["bacon", "spam", "eggs"] expected = [None] * 3 assert cache.get_many(*keys) == expected def test_get_dict(self): cache = self.cache_factory() keys = ["bacon", "spam", "eggs"] expected = dict.fromkeys(keys, None) assert cache.get_dict(*keys) == expected def test_set(self): cache = self.cache_factory() assert cache.set("sausage", "tomato") def test_add(self): cache = self.cache_factory() assert cache.add("baked", "beans") def test_set_many(self): cache = self.cache_factory() keys = ["bacon", "spam", "eggs"] mapping = dict.fromkeys(keys, None) assert cache.set_many(mapping) == keys def test_delete_many(self): cache = self.cache_factory() keys = ["bacon", "spam", "eggs"] assert cache.delete_many(*keys) == keys def test_has(self): cache = self.cache_factory() with pytest.raises(NotImplementedError): cache.has("lobster") def test_clear(self): cache = self.cache_factory() assert cache.clear() def test_inc(self): cache = self.cache_factory() assert cache.inc("crevettes", delta=10) == 10 def test_dec(self): cache = self.cache_factory() assert cache.dec("truffle", delta=10) == -10 cachelib-0.13.0/tests/test_dynamodb_cache.py000066400000000000000000000015231460651207200210240ustar00rootroot00000000000000import pytest from clear import ClearTests from common import CommonTests from has import HasTests from cachelib import DynamoDbCache @pytest.fixture(autouse=True, params=[DynamoDbCache]) def cache_factory(request): import warnings warnings.filterwarnings( action="ignore", message="unclosed", category=ResourceWarning ) def _factory(self, *args, **kwargs): import os os.environ.setdefault("AWS_ACCESS_KEY_ID", "RANDOM") os.environ.setdefault("AWS_SECRET_ACCESS_KEY", "RANDOM") kwargs["endpoint_url"] = "http://localhost:8000" kwargs["region_name"] = "us-west-2" rc = request.param(*args, **kwargs) rc.clear() return rc if request.cls: request.cls.cache_factory = _factory class TestDynamoDbCache(CommonTests, ClearTests, HasTests): pass cachelib-0.13.0/tests/test_file_system_cache.py000066400000000000000000000076161460651207200215630ustar00rootroot00000000000000import hashlib import os from time import sleep import pytest from clear import ClearTests from common import CommonTests from has import HasTests from cachelib import FileSystemCache class SillySerializer: """A pointless serializer only for testing""" def dump(self, value, fs): fs.write(f"{repr(value)}{os.linesep}".encode()) def load(self, fs): try: loaded = eval(fs.readline().decode()) # When all file content has been read eval will # turn the EOFError into SyntaxError wich is not # handled by 
cachelib except SyntaxError as e: raise EOFError from e return loaded class CustomSerializerCache(FileSystemCache): """Our custom cache client with non-default serializer""" # overwrite serializer serializer = SillySerializer() def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) class CustomHashingMethodCache(FileSystemCache): def __init__(self, *args, **kwargs): super().__init__(*args, hash_method=hashlib.sha256, **kwargs) class CustomDefaultHashingMethodCache(FileSystemCache): _default_hash_method = hashlib.sha256 def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) @pytest.fixture( autouse=True, params=[ FileSystemCache, CustomSerializerCache, CustomHashingMethodCache, CustomDefaultHashingMethodCache, ], ) def cache_factory(request, tmpdir): def _factory(self, *args, **kwargs): client = request.param(tmpdir, *args, **kwargs) return client request.cls.cache_factory = _factory class TestFileSystemCache(CommonTests, ClearTests, HasTests): # override parent sample since these must implement buffer interface sample_pairs = { "bacon": "eggs", "sausage": "spam", "brandy": "lobster", "truffle": "wine", "sauce": "truffle pate", "cravettes": "mournay sauce", } def test_EOFError(self, caplog): cache = self.cache_factory(threshold=1) assert cache.set_many(self.sample_pairs) file_names = [cache._get_filename(k) for k in self.sample_pairs.keys()] # truncate files to erase content for fpath in file_names: open(fpath, "w").close() assert cache.set("test", "EOFError") assert "Exception raised" in caplog.text def test_threshold(self): threshold = len(self.sample_pairs) // 2 cache = self.cache_factory(threshold=threshold) assert cache.set_many(self.sample_pairs) assert cache._file_count == 4 # due to autouse=True a single tmpdir is used # for each test so we need to clear it assert cache.clear() cache = self.cache_factory(threshold=0) assert cache.set_many(self.sample_pairs) assert not cache._file_count def test_file_counting(self): cache = self.cache_factory() assert cache.set_many(self.sample_pairs) assert cache._file_count == len(self.sample_pairs) assert cache.clear() assert cache._file_count == 0 def test_file_counting_on_override(self): cache = self.cache_factory() assert cache.set_many(self.sample_pairs) assert cache._file_count == len(self.sample_pairs) assert cache.set_many(self.sample_pairs) # count should remain the same assert cache._file_count == len(self.sample_pairs) def test_prune_old_entries(self): threshold = 2 * len(self.sample_pairs) - 1 cache = self.cache_factory(threshold=threshold) for k, v in self.sample_pairs.items(): assert cache.set(f"{k}-t1", v, timeout=1) assert cache.set(f"{k}-t10", v, timeout=10) sleep(3) for k, v in self.sample_pairs.items(): assert cache.set(k, v) assert cache.has(f"{k}-t10") assert not cache.has(f"{k}-t1") cachelib-0.13.0/tests/test_interface_uniformity.py000066400000000000000000000017101460651207200223270ustar00rootroot00000000000000# type: ignore import inspect import pytest from cachelib import BaseCache from cachelib import FileSystemCache from cachelib import MemcachedCache from cachelib import RedisCache from cachelib import SimpleCache @pytest.fixture(autouse=True) def create_cache_list(request, tmpdir): mc = MemcachedCache() mc._client.flush_all() rc = RedisCache(port=6360) rc._write_client.flushdb() request.cls.cache_list = [FileSystemCache(tmpdir), mc, rc, SimpleCache()] @pytest.mark.usefixtures("redis_server", "memcached_server") class TestInterfaceUniformity: def test_types_have_all_base_methods(self): 
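# Collect every public (non-underscore) function defined on BaseCache;
# each cache type in cache_list must expose a callable of the same name.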
public_api_methods = [ meth for meth in inspect.getmembers(BaseCache, predicate=inspect.isfunction) if not meth[0].startswith("_") ] for cache_type in self.cache_list: for meth in public_api_methods: assert hasattr(cache_type, meth[0]) and callable(meth[1]) cachelib-0.13.0/tests/test_memcached_cache.py000066400000000000000000000007521460651207200211400ustar00rootroot00000000000000import pytest from clear import ClearTests from common import CommonTests from has import HasTests from cachelib import MemcachedCache @pytest.fixture(autouse=True) def cache_factory(request): def _factory(self, *args, **kwargs): mc = MemcachedCache(*args, **kwargs) mc._client.flush_all() return mc request.cls.cache_factory = _factory @pytest.mark.usefixtures("memcached_server") class TestMemcachedCache(CommonTests, ClearTests, HasTests): pass cachelib-0.13.0/tests/test_mongodb_cache.py000066400000000000000000000015121460651207200206520ustar00rootroot00000000000000import pytest from clear import ClearTests from common import CommonTests from has import HasTests from cachelib.mongodb import MongoDbCache @pytest.fixture(autouse=True, params=[MongoDbCache]) def cache_factory(request): def _factory(self, *args, **kwargs): kwargs["db"] = "test-db" kwargs["collection"] = "test-collection" kwargs["key_prefix"] = "prefix" rc = request.param(*args, **kwargs) index_info = rc.client.index_information() all_keys = { subkey[0] for value in index_info.values() for subkey in value["key"] } assert "id" in all_keys, "Failed to create index on 'id' field" rc.clear() return rc if request.cls: request.cls.cache_factory = _factory class TestMongoDbCache(CommonTests, ClearTests, HasTests): pass cachelib-0.13.0/tests/test_redis_cache.py000066400000000000000000000025141460651207200203360ustar00rootroot00000000000000import pytest from clear import ClearTests from common import CommonTests from has import HasTests from cachelib import RedisCache class SillySerializer: """A pointless serializer only for testing""" def dumps(self, value): return repr(value).encode() def loads(self, bvalue): if bvalue is None: return None return eval(bvalue.decode()) class CustomCache(RedisCache): """Our custom cache client with non-default serializer""" # overwrite serializer serializer = SillySerializer() def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) @pytest.fixture(autouse=True, params=[RedisCache, CustomCache]) def cache_factory(request): def _factory(self, *args, **kwargs): rc = request.param(*args, port=6360, **kwargs) rc._write_client.flushdb() return rc request.cls.cache_factory = _factory def my_callable_key() -> str: return "bacon" @pytest.mark.usefixtures("redis_server") class TestRedisCache(CommonTests, ClearTests, HasTests): def test_callable_key(self): cache = self.cache_factory() assert cache.set(my_callable_key, "sausages") assert cache.get(my_callable_key) == "sausages" assert cache.set(lambda: "spam", "sausages") assert cache.get(lambda: "spam") == "sausages" cachelib-0.13.0/tests/test_simple_cache.py000066400000000000000000000031611460651207200205200ustar00rootroot00000000000000from time import sleep import pytest from clear import ClearTests from common import CommonTests from has import HasTests from cachelib import SimpleCache class SillySerializer: """A pointless serializer only for testing""" def dumps(self, value): return repr(value).encode() def loads(self, bvalue): return eval(bvalue.decode()) class CustomCache(SimpleCache): """Our custom cache client with non-default serializer""" # overwrite serializer 
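# (a plain class attribute is enough: instances resolve self.serializer
# to SillySerializer with no __init__ changes required)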
serializer = SillySerializer() def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) @pytest.fixture(autouse=True, params=[SimpleCache, CustomCache]) def cache_factory(request): def _factory(self, *args, **kwargs): return request.param(*args, **kwargs) request.cls.cache_factory = _factory class TestSimpleCache(CommonTests, HasTests, ClearTests): def test_threshold(self): threshold = len(self.sample_pairs) // 2 cache = self.cache_factory(threshold=threshold) assert cache.set_many(self.sample_pairs) assert abs(len(cache._cache) - threshold) <= 1 def test_prune_old_entries(self): threshold = 2 * len(self.sample_pairs) - 1 cache = self.cache_factory(threshold=threshold) for k, v in self.sample_pairs.items(): assert cache.set(f"{k}-t0.1", v, timeout=0.1) assert cache.set(f"{k}-t5.0", v, timeout=5.0) sleep(2) for k, v in self.sample_pairs.items(): assert cache.set(k, v) assert f"{k}-t5.0" in cache._cache.keys() assert f"{k}-t0.1" not in cache._cache.keys() cachelib-0.13.0/tests/test_uwsgi_cache.py000066400000000000000000000020471460651207200203670ustar00rootroot00000000000000import pytest from clear import ClearTests from common import CommonTests from has import HasTests from cachelib import UWSGICache class SillySerializer: """A pointless serializer only for testing""" def dumps(self, value): return repr(value).encode() def loads(self, bvalue): return eval(bvalue.decode()) class CustomCache(UWSGICache): """Our custom cache client with non-default serializer""" # overwrite serializer serializer = SillySerializer() def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) @pytest.fixture(autouse=True, params=[UWSGICache, CustomCache]) def cache_factory(request): def _factory(self, *args, **kwargs): uwc = request.param(*args, **kwargs) uwc.clear() return uwc request.cls.cache_factory = _factory class TestUwsgiCache(CommonTests, ClearTests, HasTests): pytest.importorskip( "uwsgi", reason="could not import 'uwsgi'. Make sure to " "run pytest under uwsgi for testing UWSGICache", ) cachelib-0.13.0/tox.ini000066400000000000000000000014501460651207200146430ustar00rootroot00000000000000[tox] envlist = py{38,39,310,311} style typing docs skip_missing_interpreters = true [testenv] setenv = TMPDIR={envtmpdir} deps = -r requirements/tests.txt commands = pytest -rs --capture=tee-sys --tb=short --basetemp={envtmpdir} {posargs} uwsgi --python {envbindir}/pytest --pyargv '-rs --capture=tee-sys \ --tb=short --basetemp={envtmpdir} {posargs} -kUwsgi' \ --cache2 name=default,items=100 --master python {envtmpdir}/return_pytest_exit_code.py [testenv:style] deps = pre-commit skip_install = true commands = pre-commit run --all-files --show-diff-on-failure [testenv:typing] deps = -r requirements/typing.txt commands = mypy [testenv:docs] deps = -r requirements/docs.txt commands = sphinx-build -W -b html -d {envtmpdir}/doctrees docs {envtmpdir}/html
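For completeness, a minimal end-to-end sketch of the library this test matrix exercises (SimpleCache is in-process, so no external services are assumed):

from cachelib import SimpleCache

cache = SimpleCache(threshold=500, default_timeout=300)
cache.set("spam", {"eggs": 2}, timeout=60)  # timeout=0 would mean "never expire"
assert cache.get("spam") == {"eggs": 2}
assert cache.add("spam", "ignored") is False  # add() refuses to overwrite a live key
cache.delete("spam")
assert cache.get("spam") is None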