==> flask-caching-1.10.1/.github/workflows/publish_to_pypi.yml <==
name: Publish Python packages to PyPI and TestPyPI

on: release

jobs:
  build-n-publish:
    name: Build and publish Python packages to PyPI and TestPyPI
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Set up Python 3.9
        uses: actions/setup-python@v2
        with:
          python-version: 3.9
      - name: Install pypa/build
        run: python -m pip install build --user
      - name: Build a binary wheel and a source tarball
        run: python -m build --sdist --wheel --outdir dist/ .
      - name: Publish package to Test PyPI
        uses: pypa/gh-action-pypi-publish@master
        with:
          password: ${{ secrets.TEST_PYPI_API_TOKEN }}
          repository_url: https://test.pypi.org/legacy/
      - name: Publish package to PyPI
        if: startsWith(github.ref, 'refs/tags')
        uses: pypa/gh-action-pypi-publish@master
        with:
          password: ${{ secrets.PYPI_API_TOKEN }}

==> flask-caching-1.10.1/.github/workflows/tests.yml <==
# This workflow will install Python dependencies, run tests and lint with a
# variety of Python versions. For more information see:
# https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions

name: tests

on:
  push:
    branches: [ master ]
    paths-ignore:
      - 'docs/**'
      - '**.md'
      - '**.rst'
      - '**/CHANGES'
      - '**/AUTHORS'
      - '**/LICENSE'
  pull_request:
    branches: [ master ]
    paths-ignore:
      - 'docs/**'
      - '**.md'
      - '**.rst'
      - '**/CHANGES'
      - '**/AUTHORS'
      - '**/LICENSE'

jobs:
  build:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: [3.7, 3.8, 3.9, pypy-3.7]

    steps:
      - uses: actions/checkout@v2
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v2
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install APT dependencies
        run: |
          sudo apt install libmemcached-dev memcached redis
          sudo systemctl stop memcached
          sudo systemctl stop redis-server
      - name: Install Python dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -r requirements.txt
          pip install -e .
      - name: Lint with flake8
        run: |
          # stop the build if there are Python syntax errors or undefined names
          flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
          # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
          flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
      - name: Test with pytest
        run: |
          pytest --cov=./ --cov-report=xml
      - name: Upload coverage to Codecov
        uses: codecov/codecov-action@v1
        with:
          files: ./coverage.xml
          flags: unittests
          fail_ci_if_error: true

==> flask-caching-1.10.1/.gitignore <==
/.venv/
*.py[co]
.*.swp
.tox
docs/_build/*
*.egg-info
build/*
dist/
.idea/
.idea/*
.coverage
.cache
.xprocess
.vscode
coverage.xml

==> flask-caching-1.10.1/.pre-commit-config.yaml <==
repos:
  - repo: https://github.com/psf/black
    rev: 20.8b1
    hooks:
      - id: black

==> flask-caching-1.10.1/AUTHORS <==
Original author: thadeusb (Thadeus Burgess)

Maintainers:
sh4nks (Peter Justin)
gergelypolonkai (Gergely Polonkai)

Contributors (in no particular order):
dag (Dag Odenhall)
sean-
singingwolfboy (David Baumgold)
nocko (Shawn Nock)
mimamu
dahlia (Hong Minhee)
jab (Joshua Bronson)
kennethreitz (Kenneth Reitz)
Thomas Waldmann
David Buckley

==> flask-caching-1.10.1/CHANGES <==
Changelog
=========

Version 1.10.1
--------------

Released 2021-03-17

- A ``GoogleCloudStorageCache`` backend has been added to the user
  contributed caching backends. PR #214.
- Fix a regression introduced in the last release which broke all
  applications subclassing the ``Cache`` class.
- Add test_generic_get_bytes test case. PR #236.
- Various improvements and fixes.

Version 1.10.0
--------------

Released 2021-03-04

- **Important**: The way caching backends are loaded has been refactored.
  Instead of passing the name of the initialization function, one can now use
  the full path to the caching backend class. For example:
  ``CACHE_TYPE="flask_caching.backends.SimpleCache"``.
  In the next major release (2.0), this will be the only supported way.
- UWSGICache is not officially supported anymore and has been moved to the
  user contributed backends.
- Switch from Travis-CI to GitHub Actions.
- Fix add() in RedisCache without a timeout. PR #218.
- Fix error in how the FileSystemCache counts the number of files. PR #210.
- Type Annotations have been added. PR #198.
- Add some basic logging to SimpleCache and FileSystemCache for better
  observability. PR #203.
- Add an option in memoize to ignore args. PR #201.
- Stop marking wheels as Python 2 compatible. PR #196.
- Fix ``default_timeout`` not being properly passed to its super constructor.
  PR #187.
- Fix ``kwargs`` not being passed on in function ``_memoize_make_cache_key``.
  PR #184.
- Add a Redis Cluster Mode caching backend. PR #173.
- Do not let PIP install this package on unsupported Python versions. PR #179.
- Fix uWSGI initialization by checking if uWSGI has the 'cache2' option
  enabled. PR #176.
- Documentation updates and fixes.

Version 1.9.0
-------------

Released 2020-06-02

- Add an option to include the function's source code when generating the
  cache key. PR #156.
- Add a feature that allows one to completely control how cache keys are
  generated. For example, one can now implement a function that generates a
  cache key based on POST requests. PR #159.
- Fix the cache backend naming collisions by renaming them from ``simple`` to
  ``simplecache``, ``null`` to ``nullcache`` and ``filesystem`` to
  ``filesystemcache``.
- Explicitly pass the ``default_timeout`` to ``RedisCache`` from
  ``RedisSentinelCache``.
- Use ``os.replace`` instead of werkzeug's ``rename`` due to Windows raising
  an ``OSError`` if the dst file already exists.
- Documentation updates and fixes.

Version 1.8.0
-------------

Released 2019-11-24

- **BREAKING:** Removed support for Python 2. Python 3.5 and upwards are
  supported as of now.
- Add option to specify if ``None`` is a cached value or not. See PR #140
  and #141.
- Allow using ``__caching_id__`` rather than ``__repr__`` as an object
  caching key. PR #123.
- The RedisCache backend now supports generating the key_prefix via a
  callable. PR #109.
- Emit a warning if the ``CACHE_TYPE`` is set to ``filesystem`` but no
  ``CACHE_DIR`` is set.
- Fixes Google App Engine Memcache backend. See issue #120 for more details.
- Various documentation updates and fixes.

Version 1.7.2
-------------

Released 2019-05-28

**This is the last version supporting Python 2!**

- Do not run a cached/memoized function if the cached return value is None.
  PR #108.

Version 1.7.1
-------------

Released 2019-04-16

- Fix introspecting Python 3 functions by using varkw. PR #101.
- Remove leftover files (``uwsgi.py``) in PyPI package. See issue #102 for
  more details.

Version 1.7.0
-------------

Released 2019-03-29

- Added a feature called 'response_filter' which enables one to only cache
  views depending on the response code. PR #99.
- A DeprecationWarning got turned into a TypeError.

Version 1.6.0
-------------

Released 2019-03-06

- The ``delete_many`` function is now able to ignore any errors and continue
  deleting the cache. However, in order to preserve backwards compatibility,
  the default mode is to abort the deletion process. In order to use the new
  deletion mode, one has to flip the config setting ``CACHE_IGNORE_ERRORS``
  to ``True``. This was and still is only relevant for the **filesystem**
  and **simple** cache backends. PR #94.
- Re-added the ``gaememcached`` CACHE_TYPE for improved backwards
  compatibility.
- Documentation improvements.

Version 1.5.0
-------------

Released 2019-02-23

- Add support for a Redis Sentinel Cluster. PR #90.
- Parameterize the hash function so alternatives can be used. PR #77.
- Include the deprecated ``werkzeug.contrib.cache`` module in Flask-Caching.
  PR #75.

Version 1.4.0
-------------

Released 2018-04-16

- Fix logic for creating key for var args in memoize. PR #70.
- Allow older Werkzeug versions by making the UWSGICache backend conditional.
  PR #55.
- Some documentation improvements. PR #48, #51, #56, #67.
- Some CI improvements. PR #49, #50.

Version 1.3.3
-------------

Released 2017-06-25

- Add support for multiple query params and use md5 for consistent hashing.
  PR #43.

Version 1.3.2
-------------

Released 2017-06-25

- Fix ``spreadsaslmemcached`` backend when using Python 3.
- Fix kwargs order when memoizing a function using Python 3.6 or greater.
  See #27.

Version 1.3.1
-------------

Released 2017-06-20

- Avoid breakage for environments with Werkzeug<0.12 installed because the
  uwsgi backend depends on Werkzeug >=0.12. See #38.

Version 1.3.0
-------------

Released 2017-06-17

- Add uWSGI Caching backend (requires Werkzeug >= 0.12)
- Provide a keyword `query_string` to the cached decorator in order to create
  the same cache key for different query string requests, so long as they
  have the same key/value (order does not matter). PR #35.
- Use pytest as test suite and test runner. Additionally, the tests have
  been split up into multiple files instead of having one big file.

Version 1.2.0
-------------

Released 2017-02-02

- Allows functions with kwargs to be memoized correctly. See #18.

Version 1.1.1
-------------

Released 2016-12-09

- Fix PyPI Package distribution. See #15.

Version 1.1.0
-------------

Released 2016-12-09

- Fix 'redis' backend import mechanism. See #14.
- Made backends a module to better control which cache backends to expose
  and moved our custom clients into their own module inside of the backends
  module. See also #14 (and partly some own changes).
- Some docs and test changes. See #8 and #12.

Version 1.0.1
-------------

Released 2016-08-30

- The caching wrappers like `add`, `set`, etc. now return the wrapped result,
  as one would expect. See #5.

Version 1.0.0
-------------

Released 2016-07-05

- Changed the way of importing Flask-Cache. Instead of using the deprecated
  method for importing Flask Extensions (via ``flask.ext.cache``), the name
  of the extension, ``flask_cache`` is used. Have a look at Flask's
  documentation for more information regarding this matter. This also fixes
  the deprecation warning from Flask.
- Lots of PEP8 and documentation fixes.
- Renamed this fork Flask-Caching (``flask_caching``) as it will now be
  available on PyPI for download.

In addition to the above mentioned fixes, the following pull requests have
been merged into this fork of Flask-Cache:

- #90 Update documentation: route decorator before cache
- #95 Pass the memoize parameters into unless().
- #109 wrapped function called twice
- #117 Moves setting the app attribute to the _set_cache method
- #121 fix doc for delete_memoized
- #122 Added proxy for werkzeug get_dict
- #123 "forced_update" option to 'cache' and 'memoize' decorators
- #124 Fix handling utf8 key args (cherry-picked)
- #125 Fix unittest failing for redis unittest
- #127 Improve doc for using @cached on view
- #128 Doc for delete_memoized
- #129 tries replacing inspect.getargspec with either signature or
  getfullargspec if possible
- make_cache_key() returning incorrect key (cherry-picked)

Version 0.13
------------

Released 2014-04-21

- Port to Python >= 3.3 (requiring Python 2.6/2.7 for 2.x).
- Fixed bug with using per-memoize timeouts greater than the default timeout.
- Added better support for per-instance memoization.
- Various bug fixes.

Version 0.12
------------

Released 2013-04-29

- Changes jinja2 cache templates to use stable predictable keys. Previously
  the key for a cache tag included the line number of the template, which
  made it difficult to predict what the key would be outside of the
  application.
- Adds config variable `CACHE_NO_NULL_WARNING` to silence warning messages
  when using 'null' cache as part of testing.
- Adds passthrough to clear entire cache backend.

Version 0.11.1
--------------

Released 2013-04-07

- Bugfix for using memoize on instance methods. The previous key was
  id(self), the new key is repr(self).

Version 0.11
------------

Released 2013-03-23

- Fail gracefully in production if cache backend raises an exception.
- Support for redis DB number.
- Jinja2 templatetag cache now concats all args together into a single key
  instead of treating each arg as a separate key name.
- Added delete memcache version hash function.
- Support for multiple cache objects on a single app again.
- Added SpreadSASLMemcached; if a value is greater than the memcached
  threshold (which defaults to 1MB), this splits the value across multiple
  keys.
- Added support to use a URL to connect to redis.

Version 0.10.1
--------------

Released 2013-01-13

- Added warning message when using cache type of 'null'.
- Changed imports to relative instead of absolute for AppEngine
  compatibility.

Version 0.10.0
--------------

Released 2013-01-05

- Added `saslmemcached` backend to support Memcached behind SASL
  authentication.
- Fixes a bug with memoize when the number of args != number of kwargs.

Version 0.9.2
-------------

Released 2012-11-18

- Bugfix with default kwargs.

Version 0.9.1
-------------

Released 2012-11-16

- Fixes broken memoization on functions that use default kwargs.

Version 0.9.0
-------------

Released 2012-10-14

- Fixes memoization to work on methods.

Version 0.8.0
-------------

Released 2012-09-30

- Migrated to the new flask extension naming convention of flask_cache
  instead of flaskext.cache.
- Removed unnecessary dependencies in setup.py file.
- Documentation updates.

Version 0.7.0
-------------

Released 2012-08-25

- Allows multiple cache objects to be instantiated with different
  configuration values.

Version 0.6.0
-------------

Released 2012-08-12

- Memoization is now safer for multiple applications using the same backing
  store.
- Removed the explicit set of NullCache if the Flask app is set testing=True.
- Swapped conditional order for key_prefix.

Version 0.5.0
-------------

Released 2012-02-03

- Deleting memoized functions now properly functions in production
  environments where multiple instances of the application are running.
- get_memoized_names and get_memoized_keys have been removed.
- Added ``make_name`` to memoize, make_name is an optional callable that can
  be passed to memoize to modify the cache_key that gets generated.
- Added ``unless`` to memoize, this is the same as the unless parameter in
  ``cached``.
- Memoization now converts all kwargs to positional arguments, this is so
  that when a function is called multiple ways, it would evaluate to the
  same cache_key.

Version 0.4.0
-------------

Released 2011-12-11

- Added attributes for uncached, make_cache_key, cache_timeout to the
  decorated functions.

Version 0.3.4
-------------

Released 2011-09-10

- UTF-8 encoding of cache key.
- key_prefix argument of the cached decorator now supports callables.

Version 0.3.3
-------------

Released 2011-06-03

Uses base64 for memoize caching. This fixes rare issues where the cache_key
was either a tuple or larger than the caching backend would be able to
support.

Adds support for deleting memoized caches optionally based on function
parameters.

Python 2.5 compatibility, plus bugfix with string.format.

Added the ability to retrieve memoized function names or cache keys.

Version 0.3.2
-------------

Bugfix release. Fixes a bug that would cause an exception if no
``CACHE_TYPE`` was supplied.

Version 0.3.1
-------------

Pypi egg fix.

Version 0.3
-----------

- CACHE_TYPE changed. Now one of ['null', 'simple', 'memcached',
  'gaememcached', 'filesystem'], or an import string to a function that will
  instantiate a cache object. This allows Flask-Cache to be much more
  extensible and configurable.

Version 0.2
-----------

- CACHE_TYPE now uses an import_string.
- Added CACHE_OPTIONS and CACHE_ARGS configuration values.
- Added delete_memoized.

Version 0.1
-----------

- Initial public release.

==> flask-caching-1.10.1/CONTRIBUTING.md <==
# How to contribute

The preferred (and easiest) way is to fork the repository, add your changes
based on the `master` branch and create a pull request.

We use GitHub Actions for continuous integration, so all pull requests are
automatically tested for all supported Python versions.

# Setting up the project

After cloning the repository, you should create a Python virtualenv:

    $ cd flask-caching
    $ python3 -m venv .venv/

This will create a directory `.venv` that you can activate with one of these
commands (depending on your shell):

    $ source .venv/bin/activate       # for Bash and compatible
    $ source .venv/bin/activate.csh   # for C shells like CSH or TCSH
    $ source .venv/bin/activate.fish  # for Fish

The `.venv` directory is on the ignore list, so you can’t accidentally add it
to version control.

Next, you should install the project’s dependencies:

    $ pip install -r requirements.txt

# Formatting code

To avoid getting a “please reformat/break this line” type of message during
reviews, you should run the modified files through
[black](https://github.com/python/black).

We usually run it as `black -l 80`, but a plain `black` will suffice, too.

If you have [pre-commit](https://pre-commit.com/) installed, this is done
automatically for you (just don’t forget to activate it with
`pre-commit install`). If you don’t, you can install `black` manually using

    $ pip install black

# Writing and running tests

Newly added code should be fully tested; otherwise we won’t merge your pull
requests. Running tests is as easy as

    $ pytest

We have the coverage module enabled so you will see your tests’ coverage
immediately.

If you want to test your changes for other Python versions, install and run
`tox`:

    $ pip install tox
    $ tox          # To run for every supported version
    $ tox -e py35  # To run only for Python 3.5

# Helping out without writing code

Besides code, we’re happy to accept pull requests to update documentation,
triage issues, and testing new features before they get merged.

# First-time contributors

If you are a new contributor, look for the
[good first issue](labels/good+first+issue) label. It marks issues that are
easy to solve without thoroughly understanding the code base.

==> flask-caching-1.10.1/LICENSE <==
Copyright (c) 2010 by Thadeus Burgess.
Copyright (c) 2016 by Peter Justin.

Some rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:

* Redistributions of source code must retain the above copyright notice,
  this list of conditions and the following disclaimer.

* Redistributions in binary form must reproduce the above copyright notice,
  this list of conditions and the following disclaimer in the documentation
  and/or other materials provided with the distribution.

* The names of the contributors may not be used to endorse or promote
  products derived from this software without specific prior written
  permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

The "cache" module from werkzeug is licensed under a BSD-3 Clause license as
is stated below:

Copyright (c) 2017, Pallets Team
All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:

* Redistributions of source code must retain the above copyright notice,
  this list of conditions and the following disclaimer.

* Redistributions in binary form must reproduce the above copyright notice,
  this list of conditions and the following disclaimer in the documentation
  and/or other materials provided with the distribution.

* Neither the name of the copyright holder nor the names of its contributors
  may be used to endorse or promote products derived from this software
  without specific prior written permission.

THIS SOFTWARE AND DOCUMENTATION IS PROVIDED BY THE COPYRIGHT HOLDERS AND
CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT
NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE AND
DOCUMENTATION, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

==> flask-caching-1.10.1/MANIFEST.in <==
include LICENSE
include CONTRIBUTORS
include CHANGES
include README.md
include requirements.txt
include tox.ini
include setup.cfg
graft docs
graft tests
prune docs/_build
global-exclude __pycache__
global-exclude *.py[co]
global-exclude *.sw[a-z]
==> flask-caching-1.10.1/Makefile <==
.PHONY: test tox clean dist upload docs

test:
	pytest

tox:
	@tox

clean:
	find . -name '*.pyc' -exec rm -f {} +
	find . -name '*.pyo' -exec rm -f {} +
	find . -name '*~' -exec rm -f {} +
	find . -name '__pycache__' -exec rm -rf {} +

dist:
	python setup.py sdist bdist_wheel

upload:
	twine upload dist/* --skip-existing

docs:
	$(MAKE) -C docs html

==> flask-caching-1.10.1/README.md <==
Flask-Caching
=============

[![Build Status](https://github.com/sh4nks/flask-caching/actions/workflows/tests.yml/badge.svg)](https://github.com/sh4nks/flask-caching/actions)
[![codecov](https://codecov.io/gh/sh4nks/flask-caching/branch/master/graph/badge.svg?token=6Cp6Y0BitB)](https://codecov.io/gh/sh4nks/flask-caching)
[![PyPI Version](https://img.shields.io/pypi/v/Flask-Caching.svg)](https://pypi.python.org/pypi/Flask-Caching)
[![Documentation Status](https://readthedocs.org/projects/flask-caching/badge/?version=latest)](https://flask-caching.readthedocs.io/en/latest/?badge=latest)
[![License](https://img.shields.io/badge/license-BSD-yellow.svg)](https://github.com/sh4nks/flask-caching)
[![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/python/black)

Adds easy cache support to Flask.

This is a fork of the [Flask-Cache](https://github.com/thadeusb/flask-cache)
extension.

Flask-Caching also includes the ``cache`` module from werkzeug licensed under
a BSD-3 Clause License.

Setup
-----

Flask-Caching is available on PyPI and can be installed with:

    pip install flask-caching

The Cache Extension can either be initialized directly:

```python
from flask import Flask
from flask_caching import Cache

app = Flask(__name__)
# For more configuration options, check out the documentation
cache = Cache(app, config={'CACHE_TYPE': 'simple'})
```

Or through the factory method:

```python
cache = Cache(config={'CACHE_TYPE': 'simple'})

app = Flask(__name__)
cache.init_app(app)
```

Compatibility with Flask-Cache
-----

There are no known incompatibilities or breaking changes between the latest
[Flask-Cache](https://github.com/thadeusb/flask-cache) release (version 0.13,
April 2014) and the current version of Flask-Caching. Due to the change to
the Flask-Caching name and the
[extension import transition](http://flask.pocoo.org/docs/0.11/extensiondev/#extension-import-transition),
Python import lines like:

```from flask.ext.cache import Cache```

will need to be changed to:

```from flask_caching import Cache```

Python versions
-----

Starting with version 1.8, Flask-Caching dropped Python 2 support. The
library is tested against Python 3.7, 3.8, 3.9, and PyPy 3.7.

Links
=====

* [Documentation](https://flask-caching.readthedocs.io)
* [Source Code](https://github.com/sh4nks/flask-caching)
* [Issues](https://github.com/sh4nks/flask-caching/issues)
* [Original Flask-Cache Extension](https://github.com/thadeusb/flask-cache)
==> flask-caching-1.10.1/docs/Makefile <==
# Makefile for Sphinx documentation
#

# You can set these variables from the command line.
SPHINXOPTS    =
SPHINXBUILD   = sphinx-build
PAPER         =
BUILDDIR      = _build

# Internal variables.
PAPEROPT_a4     = -D latex_paper_size=a4
PAPEROPT_letter = -D latex_paper_size=letter
ALLSPHINXOPTS   = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
# the i18n builder cannot share the environment and doctrees with the others
I18NSPHINXOPTS  = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .

.PHONY: help
help:
	@echo "Please use \`make <target>' where <target> is one of"
	@echo "  html       to make standalone HTML files"
	@echo "  dirhtml    to make HTML files named index.html in directories"
	@echo "  singlehtml to make a single large HTML file"
	@echo "  pickle     to make pickle files"
	@echo "  json       to make JSON files"
	@echo "  htmlhelp   to make HTML files and a HTML help project"
	@echo "  qthelp     to make HTML files and a qthelp project"
	@echo "  applehelp  to make an Apple Help Book"
	@echo "  devhelp    to make HTML files and a Devhelp project"
	@echo "  epub       to make an epub"
	@echo "  epub3      to make an epub3"
	@echo "  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
	@echo "  latexpdf   to make LaTeX files and run them through pdflatex"
	@echo "  latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
	@echo "  text       to make text files"
	@echo "  man        to make manual pages"
	@echo "  texinfo    to make Texinfo files"
	@echo "  info       to make Texinfo files and run them through makeinfo"
	@echo "  gettext    to make PO message catalogs"
	@echo "  changes    to make an overview of all changed/added/deprecated items"
	@echo "  xml        to make Docutils-native XML files"
	@echo "  pseudoxml  to make pseudoxml-XML files for display purposes"
	@echo "  linkcheck  to check all external links for integrity"
	@echo "  doctest    to run all doctests embedded in the documentation (if enabled)"
	@echo "  coverage   to run coverage check of the documentation (if enabled)"
	@echo "  dummy      to check syntax errors of document sources"

.PHONY: clean
clean:
	rm -rf $(BUILDDIR)/*

.PHONY: html
html:
	$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
	@echo
	@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."

.PHONY: dirhtml
dirhtml:
	$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
	@echo
	@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."

.PHONY: singlehtml
singlehtml:
	$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
	@echo
	@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."

.PHONY: pickle
pickle:
	$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
	@echo
	@echo "Build finished; now you can process the pickle files."

.PHONY: json
json:
	$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
	@echo
	@echo "Build finished; now you can process the JSON files."

.PHONY: htmlhelp
htmlhelp:
	$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
	@echo
	@echo "Build finished; now you can run HTML Help Workshop with the" \
	      ".hhp project file in $(BUILDDIR)/htmlhelp."

.PHONY: qthelp
qthelp:
	$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
	@echo
	@echo "Build finished; now you can run "qcollectiongenerator" with the" \
	      ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
	@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/Flask-Caching.qhcp"
	@echo "To view the help file:"
	@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/Flask-Caching.qhc"

.PHONY: applehelp
applehelp:
	$(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp
	@echo
	@echo "Build finished. The help book is in $(BUILDDIR)/applehelp."
	@echo "N.B. You won't be able to view it unless you put it in" \
	      "~/Library/Documentation/Help or install it in your application" \
	      "bundle."

.PHONY: devhelp
devhelp:
	$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
	@echo
	@echo "Build finished."
	@echo "To view the help file:"
	@echo "# mkdir -p $$HOME/.local/share/devhelp/Flask-Caching"
	@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/Flask-Caching"
	@echo "# devhelp"

.PHONY: epub
epub:
	$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
	@echo
	@echo "Build finished. The epub file is in $(BUILDDIR)/epub."

.PHONY: epub3
epub3:
	$(SPHINXBUILD) -b epub3 $(ALLSPHINXOPTS) $(BUILDDIR)/epub3
	@echo
	@echo "Build finished. The epub3 file is in $(BUILDDIR)/epub3."

.PHONY: latex
latex:
	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
	@echo
	@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
	@echo "Run \`make' in that directory to run these through (pdf)latex" \
	      "(use \`make latexpdf' here to do that automatically)."

.PHONY: latexpdf
latexpdf:
	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
	@echo "Running LaTeX files through pdflatex..."
	$(MAKE) -C $(BUILDDIR)/latex all-pdf
	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."

.PHONY: latexpdfja
latexpdfja:
	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
	@echo "Running LaTeX files through platex and dvipdfmx..."
	$(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."

.PHONY: text
text:
	$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
	@echo
	@echo "Build finished. The text files are in $(BUILDDIR)/text."

.PHONY: man
man:
	$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
	@echo
	@echo "Build finished. The manual pages are in $(BUILDDIR)/man."

.PHONY: texinfo
texinfo:
	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
	@echo
	@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
	@echo "Run \`make' in that directory to run these through makeinfo" \
	      "(use \`make info' here to do that automatically)."

.PHONY: info
info:
	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
	@echo "Running Texinfo files through makeinfo..."
	make -C $(BUILDDIR)/texinfo info
	@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."

.PHONY: gettext
gettext:
	$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
	@echo
	@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."

.PHONY: changes
changes:
	$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
	@echo
	@echo "The overview file is in $(BUILDDIR)/changes."

.PHONY: linkcheck
linkcheck:
	$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
	@echo
	@echo "Link check complete; look for any errors in the above output " \
	      "or in $(BUILDDIR)/linkcheck/output.txt."

.PHONY: doctest
doctest:
	$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
	@echo "Testing of doctests in the sources finished, look at the " \
	      "results in $(BUILDDIR)/doctest/output.txt."

.PHONY: coverage
coverage:
	$(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage
	@echo "Testing of coverage in the sources finished, look at the " \
	      "results in $(BUILDDIR)/coverage/python.txt."

.PHONY: xml
xml:
	$(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
	@echo
	@echo "Build finished. The XML files are in $(BUILDDIR)/xml."

.PHONY: pseudoxml
pseudoxml:
	$(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
	@echo
	@echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."

.PHONY: dummy
dummy:
	$(SPHINXBUILD) -b dummy $(ALLSPHINXOPTS) $(BUILDDIR)/dummy
	@echo
	@echo "Build finished. Dummy builder generates no files."
==> flask-caching-1.10.1/docs/_static/flask-cache.png <==
[binary file: PNG image data; the Flask-Caching logo referenced by the docs
theme ('logo': 'flask-cache.png' in conf.py), not reproducible as text]

==> flask-caching-1.10.1/docs/_templates/sidebarintro.html <==
<h3>Useful Links</h3>
[link list markup lost in extraction]
==> flask-caching-1.10.1/docs/api.rst <==
API
===

This section contains the API documentation of the Flask-Caching extension
and lists the backends which are supported out of the box. The Configuration
section explains how the backends can be used.

.. module:: flask_caching

Cache API
---------

.. autoclass:: Cache
   :members: init_app, get, set, add, delete, get_many, set_many,
             delete_many, clear, cached, memoize, delete_memoized,
             delete_memoized_verhash

Backends
--------

.. module:: flask_caching.backends

BaseCache
`````````

.. autoclass:: flask_caching.backends.base.BaseCache
   :members:

NullCache
`````````

.. autoclass:: NullCache
   :members:

SimpleCache
```````````

.. autoclass:: SimpleCache
   :members:

FileSystemCache
```````````````

.. autoclass:: FileSystemCache
   :members:

RedisCache
``````````

.. autoclass:: RedisCache
   :members:

RedisSentinelCache
``````````````````

.. autoclass:: RedisSentinelCache
   :members:

UWSGICache
``````````

.. autoclass:: UWSGICache
   :members:

MemcachedCache
``````````````

.. autoclass:: MemcachedCache
   :members:

SASLMemcachedCache
``````````````````

.. autoclass:: SASLMemcachedCache
   :members:

SpreadSASLMemcachedCache
````````````````````````

.. autoclass:: SpreadSASLMemcachedCache
   :members:

==> flask-caching-1.10.1/docs/changelog.rst <==
.. include:: ../CHANGES

==> flask-caching-1.10.1/docs/conf.py <==
# -*- coding: utf-8 -*-
#
# Flask-Caching documentation build configuration file, created by
# sphinx-quickstart on Mon Jul 4 22:58:53 2016.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
import alabaster
# sys.path.insert(0, os.path.abspath('.'))

# -- General configuration ------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.intersphinx',
]

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'

# The encoding of source files.
#
# source_encoding = 'utf-8-sig'

# The master toctree document.
master_doc = 'index'

autodoc_member_order = "bysource"

# General information about the project.
project = u'Flask-Caching'
copyright = u'2016, Thadeus Burgess, Peter Justin'
author = u'Thadeus Burgess, Peter Justin'

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = u'1.0.0'
# The full version, including alpha/beta/rc tags.
release = u'1.0.0'

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None

# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#
# today = ''
#
# Else, today_fmt is used as the format for a strftime call.
#
# today_fmt = '%B %d, %Y'

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']

# The reST default role (used for this markup: `text`) to use for all
# documents.
#
# default_role = None

# If true, '()' will be appended to :func: etc. cross-reference text.
#
# add_function_parentheses = True

# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#
# add_module_names = True

# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#
# show_authors = False

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []

# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False

# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False

# -- Options for HTML output ----------------------------------------------

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'

# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
html_theme_options = {
    'description': 'Adds caching support to your Flask application.',
    'logo': 'flask-cache.png',
    'github_button': False,
    'github_banner': True,
    'github_user': 'sh4nks',
    'github_repo': 'flask-caching',
    'extra_nav_links': {
        'Flask-Caching @ PyPI': 'https://pypi.python.org/pypi/Flask-Caching',
        'Flask-Caching @ GitHub': 'https://github.com/sh4nks/Flask-Caching',
    }
}

# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = [alabaster.get_path()]

# The name for this set of Sphinx documents.
# "<project> v<release> documentation" by default.
#
# html_title = u'Flask-Caching v1.0.0'

# A shorter title for the navigation bar. Default is the same as html_title.
#
# html_short_title = None

# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#
# html_logo = None

# The name of an image file (relative to this directory) to use as a favicon of
# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#
# html_favicon = None

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']

# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#
# html_extra_path = []

# If not None, a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
# The empty string is equivalent to '%b %d, %Y'.
#
# html_last_updated_fmt = None

# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#
# html_use_smartypants = True

# Custom sidebar templates, maps document names to template names.
html_sidebars = {
    '**': [
        'about.html',
        'localtoc.html',
        'sidebarintro.html',
        'relations.html',
        'searchbox.html',
    ]
}

# Additional templates that should be rendered to pages, maps page names to
# template names.
#
# html_additional_pages = {}

# If false, no module index is generated.
#
# html_domain_indices = True

# If false, no index is generated.
#
# html_use_index = True

# If true, the index is split into individual pages for each letter.
#
# html_split_index = False

# If true, links to the reST sources are added to the pages.
#
# html_show_sourcelink = True

# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#
# html_show_sphinx = True

# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#
# html_show_copyright = True

# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#
# html_use_opensearch = ''

# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None

# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
#   'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
#   'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh'
#
# html_search_language = 'en'

# A dictionary with options for the search language support, empty by default.
# 'ja' uses this config value.
# 'zh' user can custom change `jieba` dictionary path.
#
# html_search_options = {'type': 'default'}

# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#
# html_search_scorer = 'scorer.js'

# Output file base name for HTML help builder.
htmlhelp_basename = 'Flask-Cachingdoc'

# -- Options for LaTeX output ---------------------------------------------

latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #
    # 'papersize': 'letterpaper',

    # The font size ('10pt', '11pt' or '12pt').
    #
    # 'pointsize': '10pt',

    # Additional stuff for the LaTeX preamble.
    #
    # 'preamble': '',

    # Latex figure (float) alignment
    #
    # 'figure_align': 'htbp',
}

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
    (master_doc, 'Flask-Caching.tex', u'Flask-Caching Documentation',
     u'Thadeus Burgess, Peter Justin', 'manual'),
]

# The name of an image file (relative to this directory) to place at the top of
# the title page.
#
# latex_logo = None

# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#
# latex_use_parts = False

# If true, show page references after internal links.
#
# latex_show_pagerefs = False

# If true, show URL addresses after external links.
#
# latex_show_urls = False

# Documents to append as an appendix to all manuals.
#
# latex_appendices = []

# If false, no module index is generated.
#
# latex_domain_indices = True

# -- Options for manual page output ---------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    (master_doc, 'flask-caching', u'Flask-Caching Documentation',
     [author], 1)
]

# If true, show URL addresses after external links.
#
# man_show_urls = False

# -- Options for Texinfo output -------------------------------------------

# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
    (master_doc, 'Flask-Caching', u'Flask-Caching Documentation',
     author, 'Flask-Caching', 'One line description of project.',
     'Miscellaneous'),
]

# Documents to append as an appendix to all manuals.
#
# texinfo_appendices = []

# If false, no module index is generated.
#
# texinfo_domain_indices = True

# How to display URL addresses: 'footnote', 'no', or 'inline'.
#
# texinfo_show_urls = 'footnote'

# If true, do not generate a @detailmenu in the "Top" node's menu.
#
# texinfo_no_detailmenu = False

# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'https://docs.python.org/3/': None}

==> flask-caching-1.10.1/docs/index.rst <==
Flask-Caching
=============

.. module:: flask_caching
    :noindex:

Flask-Caching is an extension to `Flask`_ that adds caching support for
various backends to any Flask application. Besides providing support for
all `werkzeug`_'s original caching backends through a uniform API, it is
also possible to develop your own caching backend by subclassing the
:class:`flask_caching.backends.base.BaseCache` class.

Version support
---------------

Since 1.8, Flask-Caching supports only Python 3.5+.

Installation
------------

Install the extension with the following command::

    $ pip install Flask-Caching

Set Up
------

Cache is managed through a ``Cache`` instance::

    from flask import Flask
    from flask_caching import Cache

    config = {
        "DEBUG": True,                # some Flask specific configs
        "CACHE_TYPE": "SimpleCache",  # Flask-Caching related configs
        "CACHE_DEFAULT_TIMEOUT": 300
    }
    app = Flask(__name__)
    # tell Flask to use the above defined config
    app.config.from_mapping(config)
    cache = Cache(app)

You may also set up your ``Cache`` instance later at configuration time using
the **init_app** method::

    cache = Cache(config={'CACHE_TYPE': 'SimpleCache'})

    app = Flask(__name__)
    cache.init_app(app)

You may also provide an alternate configuration dictionary, useful if there
will be multiple ``Cache`` instances each with a different backend::

    #: Method A: During instantiation of class
    cache = Cache(config={'CACHE_TYPE': 'SimpleCache'})

    #: Method B: During init_app call
    cache.init_app(app, config={'CACHE_TYPE': 'SimpleCache'})

.. versionadded:: 0.7
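As a sketch of that multi-instance setup, here are two ``Cache`` objects with
different backends registered on the same app. The Redis URL and key prefix
are illustrative placeholders, and the example assumes a reachable Redis
server:

.. code-block:: python

    from flask import Flask
    from flask_caching import Cache

    app = Flask(__name__)

    # Fast, process-local cache for small, short-lived values
    simple_cache = Cache(config={'CACHE_TYPE': 'SimpleCache'})

    # Shared Redis cache for values that should survive restarts
    redis_cache = Cache(config={
        'CACHE_TYPE': 'RedisCache',
        'CACHE_REDIS_URL': 'redis://localhost:6379/0',  # placeholder URL
        'CACHE_KEY_PREFIX': 'myapp_',
    })

    simple_cache.init_app(app)
    redis_cache.init_app(app)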
Caching View Functions
----------------------

To cache view functions you will use the :meth:`~Cache.cached` decorator.
This decorator will use request.path by default for the cache_key::

    @app.route("/")
    @cache.cached(timeout=50)
    def index():
        return render_template('index.html')

The cached decorator has another optional argument called ``unless``. This
argument accepts a callable that returns True or False. If ``unless`` returns
``True`` then it will bypass the caching mechanism entirely.
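For example, here is a sketch of an ``unless`` callable that skips caching
for authenticated users. The ``current_user`` object is an assumption
borrowed from an extension such as Flask-Login; it is not part of
Flask-Caching:

.. code-block:: python

    from flask_login import current_user  # assumed dependency for this sketch

    def bypass_cache_for_logged_in_users():
        # cached() bypasses the cache entirely when this returns True
        return current_user.is_authenticated

    @app.route("/dashboard")
    @cache.cached(timeout=300, unless=bypass_cache_for_logged_in_users)
    def dashboard():
        return render_template('dashboard.html')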
code-block:: python class Foobar(object): @classmethod @cache.memoize(5) def big_foo(cls, a, b): return a + b + random.randrange(0, 100000) cache.delete_memoized(Foobar.big_foo, Foobar, 5, 2) Caching Jinja2 Snippets ----------------------- Usage:: {% cache [timeout [,[key1, [key2, ...]]]] %} ... {% endcache %} By default, the value of "path to template file" + "block start line" is used as the cache key. Also, the key name can be set manually. Keys are concatenated together into a single string, that can be used to avoid the same block evaluating in different templates. Set the timeout to ``None`` for no timeout, but with custom keys:: {% cache None, "key" %} ... {% endcache %} Set timeout to ``del`` to delete cached value:: {% cache 'del', key1 %} ... {% endcache %} If keys are provided, you may easily generate the template fragment key and delete it from outside of the template context:: from flask_caching import make_template_fragment_key key = make_template_fragment_key("key1", vary_on=["key2", "key3"]) cache.delete(key) Considering we have ``render_form_field`` and ``render_submit`` macros:: {% cache 60*5 %}
Considering we have ``render_form_field`` and ``render_submit`` macros::

    {% cache 60*5 %}
    {% render_form_field(form.username) %}
    {% render_submit() %}
    {% endcache %}

Clearing Cache
--------------

See :meth:`~Cache.clear`.

Here's an example script to empty your application's cache:

.. code-block:: python

    from flask_caching import Cache

    from yourapp import app, your_cache_config

    cache = Cache()


    def main():
        cache.init_app(app, config=your_cache_config)

        with app.app_context():
            cache.clear()


    if __name__ == '__main__':
        main()

.. warning::

    Some backend implementations do not support completely clearing the cache.
    Also, if you're not using a key prefix, some implementations (e.g. Redis)
    will flush the whole database. Make sure you're not storing any other data
    in your caching database.

Explicitly Caching Data
-----------------------

Data can be cached explicitly by using the proxy methods like
:meth:`Cache.set` and :meth:`Cache.get` directly. There are many other proxy
methods available via the :class:`Cache` class.

For example:

.. code-block:: python

    @app.route("/html")
    @app.route("/html/<foo>")
    def html(foo=None):
        if foo is not None:
            cache.set("foo", foo)
        bar = cache.get("foo")
        return render_template_string(
            "<html><body>foo cache: {{bar}}</body></html>", bar=bar
        )
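The same proxy methods spell out the classic get-or-compute pattern by hand
when the decorators don't fit. A minimal sketch, assuming the ``cache`` object
from above; ``build_report`` stands in for any expensive operation:

.. code-block:: python

    def get_report(user_id):
        key = "report:%s" % user_id
        report = cache.get(key)
        if report is None:  # a miss, or a stored value of None
            report = build_report(user_id)
            cache.set(key, report, timeout=300)
        return report

Note that a stored value of ``None`` is indistinguishable from a miss here;
this is the same caveat the decorators address with their ``cache_none``
option.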
Configuring Flask-Caching
-------------------------

The following configuration values exist for Flask-Caching:

.. tabularcolumns:: |p{6.5cm}|p{8.5cm}|

=============================== ==================================================================
``CACHE_TYPE``                  Specifies which type of caching object to use.
                                This is an import string that will be imported
                                and instantiated. It is assumed that the
                                import object is a function that will return a
                                cache object that adheres to the cache API.

                                For flask_caching.backends.cache objects, you
                                do not need to specify the entire import
                                string, just one of the following names.

                                Built-in cache types:

                                * **NullCache** (default; old name is **null**)
                                * **SimpleCache** (old name is **simple**)
                                * **FileSystemCache** (old name is **filesystem**)
                                * **RedisCache** (redis required; old name is **redis**)
                                * **RedisSentinelCache** (redis required; old name is **redissentinel**)
                                * **RedisClusterCache** (redis and rediscluster required; old name is **rediscluster**)
                                * **UWSGICache** (uwsgi required; old name is **uwsgi**)
                                * **MemcachedCache** (pylibmc or memcache required; old name is **memcached** or **gaememcached**)
                                * **SASLMemcachedCache** (pylibmc required; old name is **saslmemcached**)
                                * **SpreadSASLMemcachedCache** (pylibmc required; old name is **spreadsaslmemcached**)

``CACHE_NO_NULL_WARNING``       Silence the warning message when using cache
                                type of 'null'.

``CACHE_ARGS``                  Optional list to unpack and pass during the
                                cache class instantiation.

``CACHE_OPTIONS``               Optional dictionary to pass during the cache
                                class instantiation.

``CACHE_DEFAULT_TIMEOUT``       The default timeout that is used if no
                                timeout is specified. Unit of time is seconds.

``CACHE_IGNORE_ERRORS``         If set to ``True``, any errors that occurred
                                during the deletion process will be ignored.
                                However, if it is set to ``False`` it will
                                stop on the first error. This option is only
                                relevant for the backends **filesystem** and
                                **simple**. Defaults to ``False``.

``CACHE_THRESHOLD``             The maximum number of items the cache will
                                store before it starts deleting some. Used
                                only for SimpleCache and FileSystemCache.

``CACHE_KEY_PREFIX``            A prefix that is added before all keys. This
                                makes it possible to use the same memcached
                                server for different apps. Used only for
                                RedisCache and MemcachedCache.

``CACHE_SOURCE_CHECK``          The default condition applied to function
                                decorators which controls if the source code
                                of the function should be included when
                                forming the hash which is used as the cache
                                key. This ensures that if the source code
                                changes, the cached value will not be
                                returned when the new function is called,
                                even if the arguments are the same. Defaults
                                to ``False``.

``CACHE_UWSGI_NAME``            The name of the uwsgi caching instance to
                                connect to, for example:
                                mycache@localhost:3031. Defaults to an empty
                                string, which means uWSGI will cache in the
                                local instance. If the cache is in the same
                                instance as the werkzeug app, you only have
                                to provide the name of the cache.

``CACHE_MEMCACHED_SERVERS``     A list or a tuple of server addresses. Used
                                only for MemcachedCache.

``CACHE_MEMCACHED_USERNAME``    Username for SASL authentication with
                                memcached. Used only for SASLMemcachedCache.

``CACHE_MEMCACHED_PASSWORD``    Password for SASL authentication with
                                memcached. Used only for SASLMemcachedCache.

``CACHE_REDIS_HOST``            A Redis server host. Used only for
                                RedisCache.

``CACHE_REDIS_PORT``            A Redis server port. Default is 6379. Used
                                only for RedisCache.

``CACHE_REDIS_PASSWORD``        A Redis password for the server. Used only
                                for RedisCache and RedisSentinelCache.

``CACHE_REDIS_DB``              A Redis db (zero-based number index). Default
                                is 0. Used only for RedisCache and
                                RedisSentinelCache.

``CACHE_REDIS_SENTINELS``       A list or a tuple of Redis sentinel
                                addresses. Used only for RedisSentinelCache.

``CACHE_REDIS_SENTINEL_MASTER`` The name of the master server in a sentinel
                                configuration. Used only for
                                RedisSentinelCache.

``CACHE_REDIS_CLUSTER``         A string of comma-separated Redis cluster
                                node addresses, e.g.
                                ``host1:port1,host2:port2,host3:port3``.
                                Used only for RedisClusterCache.

``CACHE_DIR``                   Directory to store cache. Used only for
                                FileSystemCache.

``CACHE_REDIS_URL``             URL to connect to Redis server. Example:
                                ``redis://user:password@localhost:6379/2``.
                                Supports protocols ``redis://``,
                                ``rediss://`` (redis over TLS) and
                                ``unix://``. See more info about URL support
                                `here <http://redis-py.readthedocs.io/en/latest/index.html#redis.ConnectionPool.from_url>`_.
                                Used only for RedisCache.
=============================== ==================================================================

Built-in Cache Backends
-----------------------

NullCache
`````````

Set ``CACHE_TYPE`` to ``NullCache`` to use this type. The old name, ``null``
is deprecated and will be removed in Flask-Caching 2.0.

Cache that doesn't cache.

Relevant configuration values

- CACHE_DEFAULT_TIMEOUT

.. versionchanged:: 1.9.1
    Deprecated the old name in favour of just using the class name.

SimpleCache
```````````

Set ``CACHE_TYPE`` to ``SimpleCache`` to use this type. The old name,
``simple`` is deprecated and will be removed in Flask-Caching 2.0.

Uses a local python dictionary for caching. This is not really thread safe.

Relevant configuration values

- CACHE_DEFAULT_TIMEOUT
- CACHE_IGNORE_ERRORS
- CACHE_THRESHOLD

.. versionchanged:: 1.9.1
    Deprecated the old name in favour of just using the class name.

FileSystemCache
```````````````

Set ``CACHE_TYPE`` to ``FileSystemCache`` to use this type. The old name,
``filesystem`` is deprecated and will be removed in Flask-Caching 2.0.

Uses the filesystem to store cached values.

Relevant configuration values

- CACHE_DEFAULT_TIMEOUT
- CACHE_IGNORE_ERRORS
- CACHE_DIR
- CACHE_THRESHOLD
- CACHE_OPTIONS

There is a single valid entry in CACHE_OPTIONS: *mode*, which should be a
3-digit Linux-style octal permissions mode.

.. versionchanged:: 1.9.1
    Deprecated the old name in favour of just using the class name.
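Putting the options above together, a minimal ``FileSystemCache`` setup could
look like the following sketch; the directory path and numbers are arbitrary
examples, not required values:

.. code-block:: python

    from flask import Flask
    from flask_caching import Cache

    app = Flask(__name__)
    app.config.update(
        CACHE_TYPE="FileSystemCache",
        CACHE_DIR="/tmp/flask_cache",  # any directory writable by the app
        CACHE_DEFAULT_TIMEOUT=300,     # seconds
        CACHE_THRESHOLD=500,           # max number of items kept on disk
    )
    cache = Cache(app)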
RedisCache
``````````

Set ``CACHE_TYPE`` to ``RedisCache`` to use this type. The old name,
``redis`` is deprecated and will be removed in Flask-Caching 2.0.

- CACHE_DEFAULT_TIMEOUT
- CACHE_KEY_PREFIX
- CACHE_OPTIONS
- CACHE_REDIS_HOST
- CACHE_REDIS_PORT
- CACHE_REDIS_PASSWORD
- CACHE_REDIS_DB
- CACHE_REDIS_URL

Entries in CACHE_OPTIONS are passed to the redis client as ``**kwargs``

.. versionchanged:: 1.9.1
    Deprecated the old name in favour of just using the class name.

RedisSentinelCache
``````````````````

Set ``CACHE_TYPE`` to ``RedisSentinelCache`` to use this type. The old name,
``redissentinel`` is deprecated and will be removed in Flask-Caching 2.0.

- CACHE_KEY_PREFIX
- CACHE_REDIS_SENTINELS
- CACHE_REDIS_SENTINEL_MASTER
- CACHE_REDIS_PASSWORD
- CACHE_REDIS_DB

Entries in CACHE_OPTIONS are passed to the redis client as ``**kwargs``

.. versionchanged:: 1.9.1
    Deprecated the old name in favour of just using the class name.

RedisClusterCache
`````````````````

Set ``CACHE_TYPE`` to ``RedisClusterCache`` to use this type. The old name,
``rediscluster`` is deprecated and will be removed in Flask-Caching 2.0.

- CACHE_KEY_PREFIX
- CACHE_REDIS_CLUSTER
- CACHE_REDIS_PASSWORD

Entries in CACHE_OPTIONS are passed to the redis client as ``**kwargs``

.. versionchanged:: 1.9.1
    Deprecated the old name in favour of just using the class name.

MemcachedCache
``````````````

Set ``CACHE_TYPE`` to ``MemcachedCache`` to use this type. The old names,
``memcached`` and ``gaememcached`` are deprecated and will be removed in
Flask-Caching 2.0.

Uses a memcached server as a backend. Supports either pylibmc, memcache or
the Google App Engine memcache library.

Relevant configuration values

- CACHE_DEFAULT_TIMEOUT
- CACHE_KEY_PREFIX
- CACHE_MEMCACHED_SERVERS

.. note::

    Flask-Caching does not pass additional configuration options to
    memcached backends. To add additional configuration to these caches,
    directly set the configuration options on the object after
    instantiation::

        from flask_caching import Cache

        cache = Cache()

        # Can't configure the client yet...
        cache.init_app(flask_app, {"CACHE_TYPE": "MemcachedCache"})

        # Break convention and set options on the _client object
        # directly. For pylibmc behaviors:
        cache.cache._client.behaviors({"tcp_nodelay": True})

    Alternatively, see `Custom Cache Backends`_.

.. versionchanged:: 1.9.1
    Deprecated the old names in favour of just using the class name.

SASLMemcachedCache
``````````````````

Set ``CACHE_TYPE`` to ``SASLMemcachedCache`` to use this type. The old name,
``saslmemcached`` is deprecated and will be removed in Flask-Caching 2.0.

Uses a memcached server as a backend. Intended to be used with a SASL enabled
connection to the memcached server. pylibmc is required and SASL must be
supported by libmemcached.

Relevant configuration values

- CACHE_DEFAULT_TIMEOUT
- CACHE_KEY_PREFIX
- CACHE_OPTIONS
- CACHE_MEMCACHED_SERVERS
- CACHE_MEMCACHED_USERNAME
- CACHE_MEMCACHED_PASSWORD

.. note::

    Unlike MemcachedCache, SASLMemcachedCache can be configured with
    CACHE_OPTIONS.

.. versionadded:: 0.10

.. versionchanged:: 1.9.1
    Deprecated the old name in favour of just using the class name.

SpreadSASLMemcachedCache
````````````````````````

Set ``CACHE_TYPE`` to ``SpreadSASLMemcachedCache`` to use this type. The old
name, ``spreadsaslmemcached`` is deprecated and will be removed in
Flask-Caching 2.0.
Same as SASLMemcachedCache, however it has the ability to spread values
across multiple keys if they are bigger than the memcached threshold, which
by default is 1M. Uses pickle.

.. versionadded:: 0.11

.. versionchanged:: 1.1.0
    Renamed ``spreadsaslmemcachedcache`` to ``spreadsaslmemcached`` for
    the sake of consistency.

.. versionchanged:: 1.9.1
    Deprecated the old name in favour of just using the class name.

UWSGICache
``````````

.. warning::

    ``UWSGICache`` is not maintained nor tested. Use at your own risk.

Set ``CACHE_TYPE`` to ``flask_caching.contrib.uwsgicache.UWSGICache`` to use
this type.

You also have to set ``CACHE_UWSGI_NAME`` to the cache name you set in your
uWSGI configuration.

Custom Cache Backends
---------------------

You are able to easily add your own custom cache backends by exposing a
function that can instantiate and return a cache object. ``CACHE_TYPE`` will
be the import string to your custom cache type. If it is not a subclass of
:class:`flask_caching.backends.cache.BaseCache`, Flask-Caching will call it
with three arguments:

* ``app``, the Flask application object the cache is being initialized for
* ``args``, the value of the CACHE_ARGS configuration option
* ``kwargs``, the value of the CACHE_OPTIONS configuration option

.. note::

    ``args`` and ``kwargs`` are not expanded when instantiating the cache
    object, i.e. they are not passed in as ``*args`` and ``**kwargs``, but
    they are the exact value of the CACHE_ARGS and CACHE_OPTIONS
    configuration options (CACHE_ARGS, however, is converted to a list).

Your custom cache should, however, subclass the
:class:`flask_caching.backends.cache.BaseCache` class so it provides all the
necessary methods to be usable.

.. versionchanged:: 1.9.1
    If your custom cache type *is* a subclass of
    :class:`flask_caching.backends.cache.BaseCache`, Flask-Caching will,
    instead of directly instantiating the class, call its ``factory`` class
    method with the same args as listed above. Unless overridden,
    ``BaseCache.factory`` simply instantiates the object without passing any
    arguments to it. Built-in cache classes have overridden this to mimic the
    old, function based cache instantiation, so if you subclassed something
    that is not :class:`flask_caching.backends.cache.BaseCache`, you may want
    to consult the source code to see if your class is still compatible.

An example implementation::

    #: the_app/custom.py
    class RedisCache(BaseCache):
        def __init__(self, servers, default_timeout=500):
            pass

        @classmethod
        def factory(cls, app, args, kwargs):
            args.append(app.config['REDIS_SERVERS'])
            return cls(*args, **kwargs)

With this example, your ``CACHE_TYPE`` might be ``the_app.custom.RedisCache``.

``CACHE_TYPE`` doesn't have to directly point to a cache class, though. An
example PylibMC cache implementation to change the binary setting and provide
username/password if SASL is enabled on the library::

    #: the_app/custom.py
    def pylibmccache(app, config, args, kwargs):
        return pylibmc.Client(servers=config['CACHE_MEMCACHED_SERVERS'],
                              username=config['CACHE_MEMCACHED_USERNAME'],
                              password=config['CACHE_MEMCACHED_PASSWORD'],
                              binary=True)

With this example, your ``CACHE_TYPE`` might be
``the_app.custom.pylibmccache``.

API
---

.. toctree::
   :maxdepth: 2

   api

Additional Information
----------------------

.. toctree::
   :maxdepth: 2

   changelog
   license

* :ref:`search`

.. _Flask: http://flask.pocoo.org/
.. _werkzeug: http://werkzeug.pocoo.org/
flask-caching-1.10.1/docs/license.rst000066400000000000000000000001031402442634700173700ustar00rootroot00000000000000License
=======

..
literalinclude:: ../LICENSE :language: text flask-caching-1.10.1/docs/make.bat000066400000000000000000000164511402442634700166360ustar00rootroot00000000000000@ECHO OFF REM Command file for Sphinx documentation if "%SPHINXBUILD%" == "" ( set SPHINXBUILD=sphinx-build ) set BUILDDIR=_build set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . set I18NSPHINXOPTS=%SPHINXOPTS% . if NOT "%PAPER%" == "" ( set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% ) if "%1" == "" goto help if "%1" == "help" ( :help echo.Please use `make ^` where ^ is one of echo. html to make standalone HTML files echo. dirhtml to make HTML files named index.html in directories echo. singlehtml to make a single large HTML file echo. pickle to make pickle files echo. json to make JSON files echo. htmlhelp to make HTML files and a HTML help project echo. qthelp to make HTML files and a qthelp project echo. devhelp to make HTML files and a Devhelp project echo. epub to make an epub echo. epub3 to make an epub3 echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter echo. text to make text files echo. man to make manual pages echo. texinfo to make Texinfo files echo. gettext to make PO message catalogs echo. changes to make an overview over all changed/added/deprecated items echo. xml to make Docutils-native XML files echo. pseudoxml to make pseudoxml-XML files for display purposes echo. linkcheck to check all external links for integrity echo. doctest to run all doctests embedded in the documentation if enabled echo. coverage to run coverage check of the documentation if enabled echo. dummy to check syntax errors of document sources goto end ) if "%1" == "clean" ( for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i del /q /s %BUILDDIR%\* goto end ) REM Check if sphinx-build is available and fallback to Python version if any %SPHINXBUILD% 1>NUL 2>NUL if errorlevel 9009 goto sphinx_python goto sphinx_ok :sphinx_python set SPHINXBUILD=python -m sphinx.__init__ %SPHINXBUILD% 2> nul if errorlevel 9009 ( echo. echo.The 'sphinx-build' command was not found. Make sure you have Sphinx echo.installed, then set the SPHINXBUILD environment variable to point echo.to the full path of the 'sphinx-build' executable. Alternatively you echo.may add the Sphinx directory to PATH. echo. echo.If you don't have Sphinx installed, grab it from echo.http://sphinx-doc.org/ exit /b 1 ) :sphinx_ok if "%1" == "html" ( %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/html. goto end ) if "%1" == "dirhtml" ( %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. goto end ) if "%1" == "singlehtml" ( %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. goto end ) if "%1" == "pickle" ( %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can process the pickle files. goto end ) if "%1" == "json" ( %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can process the JSON files. goto end ) if "%1" == "htmlhelp" ( %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp if errorlevel 1 exit /b 1 echo. 
echo.Build finished; now you can run HTML Help Workshop with the ^ .hhp project file in %BUILDDIR%/htmlhelp. goto end ) if "%1" == "qthelp" ( %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can run "qcollectiongenerator" with the ^ .qhcp project file in %BUILDDIR%/qthelp, like this: echo.^> qcollectiongenerator %BUILDDIR%\qthelp\Flask-Caching.qhcp echo.To view the help file: echo.^> assistant -collectionFile %BUILDDIR%\qthelp\Flask-Caching.ghc goto end ) if "%1" == "devhelp" ( %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp if errorlevel 1 exit /b 1 echo. echo.Build finished. goto end ) if "%1" == "epub" ( %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub if errorlevel 1 exit /b 1 echo. echo.Build finished. The epub file is in %BUILDDIR%/epub. goto end ) if "%1" == "epub3" ( %SPHINXBUILD% -b epub3 %ALLSPHINXOPTS% %BUILDDIR%/epub3 if errorlevel 1 exit /b 1 echo. echo.Build finished. The epub3 file is in %BUILDDIR%/epub3. goto end ) if "%1" == "latex" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex if errorlevel 1 exit /b 1 echo. echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. goto end ) if "%1" == "latexpdf" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex cd %BUILDDIR%/latex make all-pdf cd %~dp0 echo. echo.Build finished; the PDF files are in %BUILDDIR%/latex. goto end ) if "%1" == "latexpdfja" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex cd %BUILDDIR%/latex make all-pdf-ja cd %~dp0 echo. echo.Build finished; the PDF files are in %BUILDDIR%/latex. goto end ) if "%1" == "text" ( %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text if errorlevel 1 exit /b 1 echo. echo.Build finished. The text files are in %BUILDDIR%/text. goto end ) if "%1" == "man" ( %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man if errorlevel 1 exit /b 1 echo. echo.Build finished. The manual pages are in %BUILDDIR%/man. goto end ) if "%1" == "texinfo" ( %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo if errorlevel 1 exit /b 1 echo. echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. goto end ) if "%1" == "gettext" ( %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale if errorlevel 1 exit /b 1 echo. echo.Build finished. The message catalogs are in %BUILDDIR%/locale. goto end ) if "%1" == "changes" ( %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes if errorlevel 1 exit /b 1 echo. echo.The overview file is in %BUILDDIR%/changes. goto end ) if "%1" == "linkcheck" ( %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck if errorlevel 1 exit /b 1 echo. echo.Link check complete; look for any errors in the above output ^ or in %BUILDDIR%/linkcheck/output.txt. goto end ) if "%1" == "doctest" ( %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest if errorlevel 1 exit /b 1 echo. echo.Testing of doctests in the sources finished, look at the ^ results in %BUILDDIR%/doctest/output.txt. goto end ) if "%1" == "coverage" ( %SPHINXBUILD% -b coverage %ALLSPHINXOPTS% %BUILDDIR%/coverage if errorlevel 1 exit /b 1 echo. echo.Testing of coverage in the sources finished, look at the ^ results in %BUILDDIR%/coverage/python.txt. goto end ) if "%1" == "xml" ( %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml if errorlevel 1 exit /b 1 echo. echo.Build finished. The XML files are in %BUILDDIR%/xml. goto end ) if "%1" == "pseudoxml" ( %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml if errorlevel 1 exit /b 1 echo. echo.Build finished. 
The pseudo-XML files are in %BUILDDIR%/pseudoxml. goto end ) if "%1" == "dummy" ( %SPHINXBUILD% -b dummy %ALLSPHINXOPTS% %BUILDDIR%/dummy if errorlevel 1 exit /b 1 echo. echo.Build finished. Dummy builder generates no files. goto end ) :end flask-caching-1.10.1/examples/000077500000000000000000000000001402442634700161105ustar00rootroot00000000000000flask-caching-1.10.1/examples/hello.cfg000066400000000000000000000001631402442634700176740ustar00rootroot00000000000000SECRET_KEY = '\xfb\x12\xdf\xa1@i\xd6>V\xc0\xbb\x8fp\x16#Z\x0b\x81\xeb\x16'
DEBUG = True
CACHE_TYPE = 'SimpleCache'
flask-caching-1.10.1/examples/hello.py000066400000000000000000000031611402442634700175660ustar00rootroot00000000000000import random
from datetime import datetime

from flask import Flask, jsonify, render_template_string
from flask_caching import Cache

app = Flask(__name__)
app.config.from_pyfile("hello.cfg")
cache = Cache(app)


#: This is an example of a cached view
@app.route("/api/now")
@cache.cached(50)
def current_time():
    return str(datetime.now())


#: This is an example of a cached function
@cache.cached(key_prefix="binary")
def random_binary():
    return [random.randrange(0, 2) for i in range(500)]


@app.route("/api/get/binary")
def get_binary():
    return jsonify({"data": random_binary()})


#: This is an example of a memoized function
@cache.memoize(60)
def _add(a, b):
    return a + b + random.randrange(0, 1000)


@cache.memoize(60)
def _sub(a, b):
    return a - b - random.randrange(0, 1000)


@app.route("/api/add/<int:a>/<int:b>")
def add(a, b):
    return str(_add(a, b))


@app.route("/api/sub/<int:a>/<int:b>")
def sub(a, b):
    return str(_sub(a, b))


@app.route("/api/cache/delete")
def delete_cache():
    # delete_memoized expects function references, not name strings
    cache.delete_memoized(_add)
    cache.delete_memoized(_sub)
    return "OK"


@app.route("/html")
@app.route("/html/<foo>")
def html(foo=None):
    if foo is not None:
        cache.set("foo", foo)
    return render_template_string(
        "<html><body>foo cache: {{foo}}</body></html>", foo=cache.get("foo")
    )


@app.route("/template")
def template():
    return render_template_string(
        """<html><body>foo cache:
        {% cache 60, "random" %}{{ range(1, 42) | random }}{% endcache %}
        </body></html>"""
    )


if __name__ == "__main__":
    app.run()
flask-caching-1.10.1/flask_caching/000077500000000000000000000000001402442634700170465ustar00rootroot00000000000000flask-caching-1.10.1/flask_caching/__init__.py000066400000000000000000001233211402442634700211610ustar00rootroot00000000000000# -*- coding: utf-8 -*- """ flask_caching ~~~~~~~~~~~~~ Adds cache support to your application. :copyright: (c) 2010 by Thadeus Burgess. :license: BSD, see LICENSE for more details. """ import base64 import functools import hashlib import inspect import logging import string import uuid import warnings from collections import OrderedDict from flask import current_app, request, url_for, Flask from werkzeug.utils import import_string from flask_caching.backends.base import BaseCache from flask_caching.backends.simplecache import SimpleCache from markupsafe import Markup from typing import Any, Callable, List, Optional, Tuple, Union __version__ = "1.10.1" logger = logging.getLogger(__name__) TEMPLATE_FRAGMENT_KEY_TEMPLATE = "_template_fragment_cache_%s%s" SUPPORTED_HASH_FUNCTIONS = [ hashlib.sha1, hashlib.sha224, hashlib.sha256, hashlib.sha384, hashlib.sha512, hashlib.md5, ] # Used to remove control characters and whitespace from cache keys.
valid_chars = set(string.ascii_letters + string.digits + "_.") delchars = "".join(c for c in map(chr, range(256)) if c not in valid_chars) null_control = (dict((k, None) for k in delchars),) def wants_args(f: Callable) -> bool: """Check if the function wants any arguments""" argspec = inspect.getfullargspec(f) return bool(argspec.args or argspec.varargs or argspec.varkw) def get_arg_names(f: Callable) -> List[str]: """Return arguments of function :param f: :return: String list of arguments """ sig = inspect.signature(f) return [ parameter.name for parameter in sig.parameters.values() if parameter.kind == parameter.POSITIONAL_OR_KEYWORD ] def get_arg_default(f: Callable, position: int): sig = inspect.signature(f) arg = list(sig.parameters.values())[position] arg_def = arg.default return arg_def if arg_def != inspect.Parameter.empty else None def get_id(obj): return getattr(obj, "__caching_id__", repr)(obj) def function_namespace(f, args=None): """Attempts to returns unique namespace for function""" m_args = get_arg_names(f) instance_token = None instance_self = getattr(f, "__self__", None) if instance_self and not inspect.isclass(instance_self): instance_token = get_id(f.__self__) elif m_args and m_args[0] == "self" and args: instance_token = get_id(args[0]) module = f.__module__ if m_args and m_args[0] == "cls" and not inspect.isclass(args[0]): raise ValueError( "When using `delete_memoized` on a " "`@classmethod` you must provide the " "class as the first argument" ) if hasattr(f, "__qualname__"): name = f.__qualname__ else: klass = getattr(f, "__self__", None) if klass and not inspect.isclass(klass): klass = klass.__class__ if not klass: klass = getattr(f, "im_class", None) if not klass: if m_args and args: if m_args[0] == "self": klass = args[0].__class__ elif m_args[0] == "cls": klass = args[0] if klass: name = klass.__name__ + "." + f.__name__ else: name = f.__name__ ns = ".".join((module, name)) ns = ns.translate(*null_control) if instance_token: ins = ".".join((module, name, instance_token)) ins = ins.translate(*null_control) else: ins = None return ns, ins def make_template_fragment_key( fragment_name: str, vary_on: List[str] = [] ) -> str: """Make a cache key for a specific fragment name.""" if vary_on: fragment_name = "%s_" % fragment_name return TEMPLATE_FRAGMENT_KEY_TEMPLATE % (fragment_name, "_".join(vary_on)) class Cache(object): """This class is used to control the cache objects.""" def __init__( self, app: Optional[Flask] = None, with_jinja2_ext: bool = True, config=None, ) -> None: if not (config is None or isinstance(config, dict)): raise ValueError("`config` must be an instance of dict or None") self.with_jinja2_ext = with_jinja2_ext self.config = config self.source_check = None if app is not None: self.init_app(app, config) def init_app(self, app: Flask, config=None) -> None: """This is used to initialize cache with your app object""" if not (config is None or isinstance(config, dict)): raise ValueError("`config` must be an instance of dict or None") #: Ref PR #44. #: Do not set self.app in the case a single instance of the Cache #: object is being used for multiple app instances. #: Example use case would be Cache shipped as part of a blueprint #: or utility library. 
base_config = app.config.copy() if self.config: base_config.update(self.config) if config: base_config.update(config) config = base_config config.setdefault("CACHE_DEFAULT_TIMEOUT", 300) config.setdefault("CACHE_IGNORE_ERRORS", False) config.setdefault("CACHE_THRESHOLD", 500) config.setdefault("CACHE_KEY_PREFIX", "flask_cache_") config.setdefault("CACHE_MEMCACHED_SERVERS", None) config.setdefault("CACHE_DIR", None) config.setdefault("CACHE_OPTIONS", None) config.setdefault("CACHE_ARGS", []) config.setdefault("CACHE_TYPE", "null") config.setdefault("CACHE_NO_NULL_WARNING", False) config.setdefault("CACHE_SOURCE_CHECK", False) if ( config["CACHE_TYPE"] == "null" and not config["CACHE_NO_NULL_WARNING"] ): warnings.warn( "Flask-Caching: CACHE_TYPE is set to null, " "caching is effectively disabled." ) if config["CACHE_TYPE"] == "filesystem" and config["CACHE_DIR"] is None: warnings.warn( "Flask-Caching: CACHE_TYPE is set to filesystem but no " "CACHE_DIR is set." ) self.source_check = config["CACHE_SOURCE_CHECK"] if self.with_jinja2_ext: from .jinja2ext import CacheExtension, JINJA_CACHE_ATTR_NAME setattr(app.jinja_env, JINJA_CACHE_ATTR_NAME, self) app.jinja_env.add_extension(CacheExtension) self._set_cache(app, config) def _set_cache(self, app: Flask, config) -> None: import_me = config["CACHE_TYPE"] if "." not in import_me: plain_name_used = True import_me = "flask_caching.backends." + import_me else: plain_name_used = False cache_factory = import_string(import_me) cache_args = config["CACHE_ARGS"][:] cache_options = {"default_timeout": config["CACHE_DEFAULT_TIMEOUT"]} if isinstance(cache_factory, type) and issubclass(cache_factory, BaseCache): cache_factory = cache_factory.factory elif plain_name_used: warnings.warn( "Using the initialization functions in flask_caching.backend " "is deprecated. 
Use the a full path to backend classes " "directly.", category=DeprecationWarning, ) if config["CACHE_OPTIONS"]: cache_options.update(config["CACHE_OPTIONS"]) if not hasattr(app, "extensions"): app.extensions = {} app.extensions.setdefault("cache", {}) app.extensions["cache"][self] = cache_factory( app, config, cache_args, cache_options ) self.app = app @property def cache(self) -> SimpleCache: app = current_app or self.app return app.extensions["cache"][self] def get(self, *args, **kwargs) -> Optional[Union[str, Markup]]: """Proxy function for internal cache object.""" return self.cache.get(*args, **kwargs) def set(self, *args, **kwargs) -> bool: """Proxy function for internal cache object.""" return self.cache.set(*args, **kwargs) def add(self, *args, **kwargs) -> bool: """Proxy function for internal cache object.""" return self.cache.add(*args, **kwargs) def delete(self, *args, **kwargs) -> bool: """Proxy function for internal cache object.""" return self.cache.delete(*args, **kwargs) def delete_many(self, *args, **kwargs) -> bool: """Proxy function for internal cache object.""" return self.cache.delete_many(*args, **kwargs) # type: ignore def clear(self) -> None: """Proxy function for internal cache object.""" return self.cache.clear() def get_many(self, *args, **kwargs): """Proxy function for internal cache object.""" return self.cache.get_many(*args, **kwargs) def set_many(self, *args, **kwargs): """Proxy function for internal cache object.""" return self.cache.set_many(*args, **kwargs) def get_dict(self, *args, **kwargs): """Proxy function for internal cache object.""" return self.cache.get_dict(*args, **kwargs) def unlink(self, *args, **kwargs) -> bool: """Proxy function for internal cache object only support Redis """ unlink = getattr(self.cache, "unlink", None) if unlink is not None and callable(unlink): return unlink(*args, **kwargs) return self.delete_many(*args, **kwargs) def cached( self, timeout: Optional[int] = None, key_prefix: str = "view/%s", unless: Optional[Callable] = None, forced_update: Optional[Callable] = None, response_filter: Optional[Callable] = None, query_string: bool = False, hash_method: Callable = hashlib.md5, cache_none: bool = False, make_cache_key: Optional[Callable] = None, source_check: Optional[bool] = None, ) -> Callable: """Decorator. Use this to cache a function. By default the cache key is `view/request.path`. You are able to use this decorator with any function by changing the `key_prefix`. If the token `%s` is located within the `key_prefix` then it will replace that with `request.path` Example:: # An example view function @cache.cached(timeout=50) def big_foo(): return big_bar_calc() # An example misc function to cache. @cache.cached(key_prefix='MyCachedList') def get_list(): return [random.randrange(0, 1) for i in range(50000)] my_list = get_list() .. note:: You MUST have a request context to actually called any functions that are cached. .. versionadded:: 0.4 The returned decorated function now has three function attributes assigned to it. These attributes are readable/writable. **uncached** The original undecorated function **cache_timeout** The cache timeout value for this function. For a custom value to take affect, this must be set before the function is called. **make_cache_key** A function used in generating the cache_key used. readable and writable :param timeout: Default None. If set to an integer, will cache for that amount of time. Unit of time is in seconds. :param key_prefix: Default 'view/%(request.path)s'. Beginning key to . 
use for the cache key. `request.path` will be the actual request path, or in cases where the `make_cache_key`-function is called from other views it will be the expected URL for the view as generated by Flask's `url_for()`. .. versionadded:: 0.3.4 Can optionally be a callable which takes no arguments but returns a string that will be used as the cache_key. :param unless: Default None. Cache will *always* execute the caching facilities unless this callable is true. This will bypass the caching entirely. :param forced_update: Default None. If this callable is true, cache value will be updated regardless cache is expired or not. Useful for background renewal of cached functions. :param response_filter: Default None. If not None, the callable is invoked after the cached funtion evaluation, and is given one arguement, the response content. If the callable returns False, the content will not be cached. Useful to prevent caching of code 500 responses. :param query_string: Default False. When True, the cache key used will be the result of hashing the ordered query string parameters. This avoids creating different caches for the same query just because the parameters were passed in a different order. See _make_cache_key_query_string() for more details. :param hash_method: Default hashlib.md5. The hash method used to generate the keys for cached results. :param cache_none: Default False. If set to True, add a key exists check when cache.get returns None. This will likely lead to wrongly returned None values in concurrent situations and is not recommended to use. :param make_cache_key: Default None. If set to a callable object, it will be called to generate the cache key :param source_check: Default None. If None will use the value set by CACHE_SOURCE_CHECK. If True, include the function's source code in the hash to avoid using cached values when the source code has changed and the input values remain the same. This ensures that the cache_key will be formed with the function's source code hash in addition to other parameters that may be included in the formation of the key. """ def decorator(f): @functools.wraps(f) def decorated_function(*args, **kwargs): #: Bypass the cache entirely. if self._bypass_cache(unless, f, *args, **kwargs): return f(*args, **kwargs) nonlocal source_check if source_check is None: source_check = self.source_check try: if make_cache_key is not None and callable(make_cache_key): cache_key = make_cache_key(*args, **kwargs) else: cache_key = _make_cache_key( args, kwargs, use_request=True ) if ( callable(forced_update) and ( forced_update(*args, **kwargs) if wants_args(forced_update) else forced_update() ) is True ): rv = None found = False else: rv = self.cache.get(cache_key) found = True # If the value returned by cache.get() is None, it # might be because the key is not found in the cache # or because the cached value is actually None if rv is None: # If we're sure we don't need to cache None values # (cache_none=False), don't bother checking for # key existence, as it can lead to false positives # if a concurrent call already cached the # key between steps. 
This would cause us to # return None when we shouldn't if not cache_none: found = False else: found = self.cache.has(cache_key) except Exception: if self.app.debug: raise logger.exception("Exception possibly due to cache backend.") return f(*args, **kwargs) if not found: rv = f(*args, **kwargs) if response_filter is None or response_filter(rv): try: self.cache.set( cache_key, rv, timeout=decorated_function.cache_timeout, ) except Exception: if self.app.debug: raise logger.exception( "Exception possibly due to cache backend." ) return rv def default_make_cache_key(*args, **kwargs): # Convert non-keyword arguments (which is the way # `make_cache_key` expects them) to keyword arguments # (the way `url_for` expects them) argspec_args = inspect.getfullargspec(f).args for arg_name, arg in zip(argspec_args, args): kwargs[arg_name] = arg return _make_cache_key(args, kwargs, use_request=False) def _make_cache_key_query_string(): """Create consistent keys for query string arguments. Produces the same cache key regardless of argument order, e.g., both `?limit=10&offset=20` and `?offset=20&limit=10` will always produce the same exact cache key. If func is provided and is callable it will be used to hash the function's source code and include it in the cache key. This will only be done is source_check is True. """ # Create a tuple of (key, value) pairs, where the key is the # argument name and the value is its respective value. Order # this tuple by key. Doing this ensures the cache key created # is always the same for query string args whose keys/values # are the same, regardless of the order in which they are # provided. args_as_sorted_tuple = tuple( sorted((pair for pair in request.args.items(multi=True))) ) # ... now hash the sorted (key, value) tuple so it can be # used as a key for cache. 
Turn them into bytes so that the # hash function will accept them args_as_bytes = str(args_as_sorted_tuple).encode() cache_hash = hash_method(args_as_bytes) # Use the source code if source_check is True and update the # cache_hash before generating the hashing and using it in # cache_key if source_check and callable(f): func_source_code = inspect.getsource(f) cache_hash.update(func_source_code.encode("utf-8")) cache_hash = str(cache_hash.hexdigest()) cache_key = request.path + cache_hash return cache_key def _make_cache_key(args, kwargs, use_request): if query_string: return _make_cache_key_query_string() else: if callable(key_prefix): cache_key = key_prefix() elif "%s" in key_prefix: if use_request: cache_key = key_prefix % request.path else: cache_key = key_prefix % url_for( f.__name__, **kwargs ) else: cache_key = key_prefix if source_check and callable(f): func_source_code = inspect.getsource(f) func_source_hash = hash_method( func_source_code.encode("utf-8") ) func_source_hash = str(func_source_hash.hexdigest()) cache_key += func_source_hash return cache_key decorated_function.uncached = f decorated_function.cache_timeout = timeout decorated_function.make_cache_key = default_make_cache_key return decorated_function return decorator def _memvname(self, funcname: str) -> str: return funcname + "_memver" def _memoize_make_version_hash(self) -> str: return base64.b64encode(uuid.uuid4().bytes)[:6].decode("utf-8") def _memoize_version( self, f: Callable, args: Optional[Any] = None, kwargs=None, reset: bool = False, delete: bool = False, timeout: Optional[int] = None, forced_update: Optional[Union[bool, Callable]] = False, args_to_ignore: Optional[Any] = None, ) -> Union[Tuple[str, str], Tuple[str, None]]: """Updates the hash version associated with a memoized function or method. """ fname, instance_fname = function_namespace(f, args=args) version_key = self._memvname(fname) fetch_keys = [version_key] args_to_ignore = args_to_ignore or [] if "self" in args_to_ignore: instance_fname = None if instance_fname: instance_version_key = self._memvname(instance_fname) fetch_keys.append(instance_version_key) # Only delete the per-instance version key or per-function version # key but not both. if delete: self.cache.delete_many(fetch_keys[-1]) return fname, None version_data_list = list(self.cache.get_many(*fetch_keys)) dirty = False if ( callable(forced_update) and ( forced_update(*(args or ()), **(kwargs or {})) if wants_args(forced_update) else forced_update() ) is True ): # Mark key as dirty to update its TTL dirty = True if version_data_list[0] is None: version_data_list[0] = self._memoize_make_version_hash() dirty = True if instance_fname and version_data_list[1] is None: version_data_list[1] = self._memoize_make_version_hash() dirty = True # Only reset the per-instance version or the per-function version # but not both. 
if reset: fetch_keys = fetch_keys[-1:] version_data_list = [self._memoize_make_version_hash()] dirty = True if dirty: self.cache.set_many( dict(zip(fetch_keys, version_data_list)), timeout=timeout ) return fname, "".join(version_data_list) def _memoize_make_cache_key( self, make_name: None = None, timeout: Optional[Callable] = None, forced_update: bool = False, hash_method: Callable = hashlib.md5, source_check: Optional[bool] = False, args_to_ignore: Optional[Any] = None, ) -> Callable: """Function used to create the cache_key for memoized functions.""" def make_cache_key(f, *args, **kwargs): _timeout = getattr(timeout, "cache_timeout", timeout) fname, version_data = self._memoize_version( f, args=args, kwargs=kwargs, timeout=_timeout, forced_update=forced_update, args_to_ignore=args_to_ignore, ) #: this should have to be after version_data, so that it #: does not break the delete_memoized functionality. altfname = make_name(fname) if callable(make_name) else fname if callable(f): keyargs, keykwargs = self._memoize_kwargs_to_args( f, *args, **kwargs, args_to_ignore=args_to_ignore ) else: keyargs, keykwargs = args, kwargs updated = u"{0}{1}{2}".format(altfname, keyargs, keykwargs) cache_key = hash_method() cache_key.update(updated.encode("utf-8")) # Use the source code if source_check is True and update the # cache_key with the function's source. if source_check and callable(f): func_source_code = inspect.getsource(f) cache_key.update(func_source_code.encode("utf-8")) cache_key = base64.b64encode(cache_key.digest())[:16] cache_key = cache_key.decode("utf-8") cache_key += version_data return cache_key return make_cache_key def _memoize_kwargs_to_args(self, f: Callable, *args, **kwargs) -> Any: #: Inspect the arguments to the function #: This allows the memoization to be the same #: whether the function was called with #: 1, b=2 is equivilant to a=1, b=2, etc. new_args = [] arg_num = 0 args_to_ignore = kwargs.pop("args_to_ignore", None) or [] # If the function uses VAR_KEYWORD type of parameters, # we need to pass these further kw_keys_remaining = list(kwargs.keys()) arg_names = get_arg_names(f) args_len = len(arg_names) for i in range(args_len): arg_default = get_arg_default(f, i) if arg_names[i] in args_to_ignore: arg = None arg_num += 1 elif i == 0 and arg_names[i] in ("self", "cls"): #: use the id func of the class instance #: this supports instance methods for #: the memoized functions, giving more #: flexibility to developers arg = get_id(args[0]) arg_num += 1 elif arg_names[i] in kwargs: arg = kwargs[arg_names[i]] kw_keys_remaining.pop(kw_keys_remaining.index(arg_names[i])) elif arg_num < len(args): arg = args[arg_num] arg_num += 1 elif arg_default: arg = arg_default arg_num += 1 else: arg = None arg_num += 1 #: Attempt to convert all arguments to a #: hash/id or a representation? #: Not sure if this is necessary, since #: using objects as keys gets tricky quickly. # if hasattr(arg, '__class__'): # try: # arg = hash(arg) # except: # arg = get_id(arg) #: Or what about a special __cacherepr__ function #: on an object, this allows objects to act normal #: upon inspection, yet they can define a representation #: that can be used to make the object unique in the #: cache key. 
Given that a case comes across that #: an object "must" be used as a cache key # if hasattr(arg, '__cacherepr__'): # arg = arg.__cacherepr__ new_args.append(arg) new_args.extend(args[len(arg_names) :]) return ( tuple(new_args), OrderedDict( sorted( (k, v) for k, v in kwargs.items() if k in kw_keys_remaining ) ), ) def _bypass_cache( self, unless: Optional[Callable], f: Callable, *args, **kwargs ) -> bool: """Determines whether or not to bypass the cache by calling unless(). Supports both unless() that takes in arguments and unless() that doesn't. """ bypass_cache = False if callable(unless): argspec = inspect.getfullargspec(unless) has_args = len(argspec.args) > 0 or argspec.varargs or argspec.varkw # If unless() takes args, pass them in. if has_args: if unless(f, *args, **kwargs) is True: bypass_cache = True elif unless() is True: bypass_cache = True return bypass_cache def memoize( self, timeout: Optional[int] = None, make_name: None = None, unless: None = None, forced_update: Optional[Callable] = None, response_filter: None = None, hash_method: Callable = hashlib.md5, cache_none: bool = False, source_check: Optional[bool] = None, args_to_ignore: Optional[Any] = None, ) -> Callable: """Use this to cache the result of a function, taking its arguments into account in the cache key. Information on `Memoization `_. Example:: @cache.memoize(timeout=50) def big_foo(a, b): return a + b + random.randrange(0, 1000) .. code-block:: pycon >>> big_foo(5, 2) 753 >>> big_foo(5, 3) 234 >>> big_foo(5, 2) 753 .. versionadded:: 0.4 The returned decorated function now has three function attributes assigned to it. **uncached** The original undecorated function. readable only **cache_timeout** The cache timeout value for this function. For a custom value to take affect, this must be set before the function is called. readable and writable **make_cache_key** A function used in generating the cache_key used. readable and writable :param timeout: Default None. If set to an integer, will cache for that amount of time. Unit of time is in seconds. :param make_name: Default None. If set this is a function that accepts a single argument, the function name, and returns a new string to be used as the function name. If not set then the function name is used. :param unless: Default None. Cache will *always* execute the caching facilities unless this callable is true. This will bypass the caching entirely. :param forced_update: Default None. If this callable is true, cache value will be updated regardless cache is expired or not. Useful for background renewal of cached functions. :param response_filter: Default None. If not None, the callable is invoked after the cached funtion evaluation, and is given one arguement, the response content. If the callable returns False, the content will not be cached. Useful to prevent caching of code 500 responses. :param hash_method: Default hashlib.md5. The hash method used to generate the keys for cached results. :param cache_none: Default False. If set to True, add a key exists check when cache.get returns None. This will likely lead to wrongly returned None values in concurrent situations and is not recommended to use. :param source_check: Default None. If None will use the value set by CACHE_SOURCE_CHECK. If True, include the function's source code in the hash to avoid using cached values when the source code has changed and the input values remain the same. 
This ensures that the cache_key will be formed with the function's source code hash in addition to other parameters that may be included in the formation of the key. :param args_to_ignore: List of arguments that will be ignored while generating the cache key. Default to None. This means that those arguments may change without affecting the cache value that will be returned. .. versionadded:: 0.5 params ``make_name``, ``unless`` .. versionadded:: 1.10 params ``args_to_ignore`` """ def memoize(f): @functools.wraps(f) def decorated_function(*args, **kwargs): #: bypass cache if self._bypass_cache(unless, f, *args, **kwargs): return f(*args, **kwargs) nonlocal source_check if source_check is None: source_check = self.source_check try: cache_key = decorated_function.make_cache_key( f, *args, **kwargs ) if ( callable(forced_update) and ( forced_update(*args, **kwargs) if wants_args(forced_update) else forced_update() ) is True ): rv = None found = False else: rv = self.cache.get(cache_key) found = True # If the value returned by cache.get() is None, it # might be because the key is not found in the cache # or because the cached value is actually None if rv is None: # If we're sure we don't need to cache None values # (cache_none=False), don't bother checking for # key existence, as it can lead to false positives # if a concurrent call already cached the # key between steps. This would cause us to # return None when we shouldn't if not cache_none: found = False else: found = self.cache.has(cache_key) except Exception: if self.app.debug: raise logger.exception("Exception possibly due to cache backend.") return f(*args, **kwargs) if not found: rv = f(*args, **kwargs) if response_filter is None or response_filter(rv): try: self.cache.set( cache_key, rv, timeout=decorated_function.cache_timeout, ) except Exception: if self.app.debug: raise logger.exception( "Exception possibly due to cache backend." ) return rv decorated_function.uncached = f decorated_function.cache_timeout = timeout decorated_function.make_cache_key = self._memoize_make_cache_key( make_name=make_name, timeout=decorated_function, forced_update=forced_update, hash_method=hash_method, source_check=source_check, args_to_ignore=args_to_ignore, ) decorated_function.delete_memoized = lambda: self.delete_memoized(f) return decorated_function return memoize def delete_memoized(self, f, *args, **kwargs) -> None: """Deletes the specified functions caches, based by given parameters. If parameters are given, only the functions that were memoized with them will be erased. Otherwise all versions of the caches will be forgotten. Example:: @cache.memoize(50) def random_func(): return random.randrange(1, 50) @cache.memoize() def param_func(a, b): return a+b+random.randrange(1, 50) .. code-block:: pycon >>> random_func() 43 >>> random_func() 43 >>> cache.delete_memoized(random_func) >>> random_func() 16 >>> param_func(1, 2) 32 >>> param_func(1, 2) 32 >>> param_func(2, 2) 47 >>> cache.delete_memoized(param_func, 1, 2) >>> param_func(1, 2) 13 >>> param_func(2, 2) 47 Delete memoized is also smart about instance methods vs class methods. When passing a instancemethod, it will only clear the cache related to that instance of that object. (object uniqueness can be overridden by defining the __repr__ method, such as user id). When passing a classmethod, it will clear all caches related across all instances of that class. Example:: class Adder(object): @cache.memoize() def add(self, b): return b + random.random() .. 
code-block:: pycon >>> adder1 = Adder() >>> adder2 = Adder() >>> adder1.add(3) 3.23214234 >>> adder2.add(3) 3.60898509 >>> cache.delete_memoized(adder1.add) >>> adder1.add(3) 3.01348673 >>> adder2.add(3) 3.60898509 >>> cache.delete_memoized(Adder.add) >>> adder1.add(3) 3.53235667 >>> adder2.add(3) 3.72341788 :param fname: The memoized function. :param \\*args: A list of positional parameters used with memoized function. :param \\**kwargs: A dict of named parameters used with memoized function. .. note:: Flask-Caching uses inspect to order kwargs into positional args when the function is memoized. If you pass a function reference into ``fname``, Flask-Caching will be able to place the args/kwargs in the proper order, and delete the positional cache. However, if ``delete_memoized`` is just called with the name of the function, be sure to pass in potential arguments in the same order as defined in your function as args only, otherwise Flask-Caching will not be able to compute the same cache key and delete all memoized versions of it. .. note:: Flask-Caching maintains an internal random version hash for the function. Using delete_memoized will only swap out the version hash, causing the memoize function to recompute results and put them into another key. This leaves any computed caches for this memoized function within the caching backend. It is recommended to use a very high timeout with memoize if using this function, so that when the version hash is swapped, the old cached results would eventually be reclaimed by the caching backend. """ if not callable(f): raise TypeError( "Deleting messages by relative name is not supported, please " "use a function reference." ) if not (args or kwargs): self._memoize_version(f, reset=True) else: cache_key = f.make_cache_key(f.uncached, *args, **kwargs) self.cache.delete(cache_key) def delete_memoized_verhash(self, f: Callable, *args) -> None: """Delete the version hash associated with the function. .. warning:: Performing this operation could leave keys behind that have been created with this version hash. It is up to the application to make sure that all keys that may have been created with this version hash at least have timeouts so they will not sit orphaned in the cache backend. """ if not callable(f): raise TypeError( "Deleting messages by relative name is not supported, please" "use a function reference." ) self._memoize_version(f, delete=True) flask-caching-1.10.1/flask_caching/backends/000077500000000000000000000000001402442634700206205ustar00rootroot00000000000000flask-caching-1.10.1/flask_caching/backends/__init__.py000066400000000000000000000041531402442634700227340ustar00rootroot00000000000000# -*- coding: utf-8 -*- """ flask_caching.backends ~~~~~~~~~~~~~~~~~~~~~~ Various caching backends. :copyright: (c) 2018 by Peter Justin. :copyright: (c) 2010 by Thadeus Burgess. :license: BSD, see LICENSE for more details. 
""" from flask_caching.backends.filesystemcache import FileSystemCache from flask_caching.backends.memcache import ( MemcachedCache, SASLMemcachedCache, SpreadSASLMemcachedCache, ) from flask_caching.backends.nullcache import NullCache # TODO: Rename to "redis" when python2 support is removed from flask_caching.backends.rediscache import ( RedisCache, RedisSentinelCache, RedisClusterCache, ) from flask_caching.backends.simplecache import SimpleCache from flask_caching.backends.uwsgicache import UWSGICache __all__ = ( "null", "simple", "filesystem", "redis", "redissentinel", "rediscluster", "uwsgi", "memcached", "gaememcached", "saslmemcached", "spreadsaslmemcached", ) def null(app, config, args, kwargs): return NullCache.factory(app, config, args, kwargs) def simple(app, config, args, kwargs): return SimpleCache.factory(app, config, args, kwargs) def filesystem(app, config, args, kwargs): return FileSystemCache.factory(app, config, args, kwargs) def redis(app, config, args, kwargs): return RedisCache.factory(app, config, args, kwargs) def redissentinel(app, config, args, kwargs): return RedisSentinelCache.factory(app, config, args, kwargs) def rediscluster(app, config, args, kwargs): return RedisClusterCache.factory(app, config, args, kwargs) def uwsgi(app, config, args, kwargs): return UWSGICache.factory(app, config, args, kwargs) def memcached(app, config, args, kwargs): return MemcachedCache.factory(app, config, args, kwargs) def gaememcached(app, config, args, kwargs): return memcached(app, config, args, kwargs) def saslmemcached(app, config, args, kwargs): return SASLMemcachedCache.factory(app, config, args, kwargs) def spreadsaslmemcached(app, config, args, kwargs): return SpreadSASLMemcachedCache.factory(app, config, args, kwargs) flask-caching-1.10.1/flask_caching/backends/base.py000066400000000000000000000154251402442634700221130ustar00rootroot00000000000000# -*- coding: utf-8 -*- """ flask_caching.backends.base ~~~~~~~~~~~~~~~~~~~~~~~~~~~ This module contains the BaseCache that other caching backends have to implement. :copyright: (c) 2018 by Peter Justin. :copyright: (c) 2010 by Thadeus Burgess. :license: BSD, see LICENSE for more details. """ def iteritems_wrapper(mappingorseq): """Wrapper for efficient iteration over mappings represented by dicts or sequences:: >>> for k, v in iteritems_wrapper((i, i*i) for i in xrange(5)): ... assert k*k == v >>> for k, v in iteritems_wrapper(dict((i, i*i) for i in xrange(5))): ... assert k*k == v """ if hasattr(mappingorseq, "items"): return mappingorseq.items() return mappingorseq class BaseCache(object): """Baseclass for the cache systems. All the cache systems implement this API or a superset of it. :param default_timeout: The default timeout (in seconds) that is used if no timeout is specified on :meth:`set`. A timeout of 0 indicates that the cache never expires. """ def __init__(self, default_timeout=300): self.default_timeout = default_timeout self.ignore_errors = False @classmethod def factory(cls, app, config, args, kwargs): return cls() def _normalize_timeout(self, timeout): if timeout is None: timeout = self.default_timeout return timeout def get(self, key): """Look up key in the cache and return the value for it. :param key: the key to be looked up. :returns: The value if it exists and is readable, else ``None``. """ return None def delete(self, key): """Delete `key` from the cache. :param key: the key to delete. :returns: Whether the key existed and has been deleted. 
:rtype: boolean """ return True def get_many(self, *keys): """Returns a list of values for the given keys. For each key an item in the list is created:: foo, bar = cache.get_many("foo", "bar") Has the same error handling as :meth:`get`. :param keys: The function accepts multiple keys as positional arguments. """ return [self.get(k) for k in keys] def get_dict(self, *keys): """Like :meth:`get_many` but return a dict:: d = cache.get_dict("foo", "bar") foo = d["foo"] bar = d["bar"] :param keys: The function accepts multiple keys as positional arguments. """ return dict(zip(keys, self.get_many(*keys))) def set(self, key, value, timeout=None): """Add a new key/value to the cache (overwrites value, if key already exists in the cache). :param key: the key to set :param value: the value for the key :param timeout: the cache timeout for the key in seconds (if not specified, it uses the default timeout). A timeout of 0 indicates that the cache never expires. :returns: ``True`` if key has been updated, ``False`` for backend errors. Pickling errors, however, will raise a subclass of ``pickle.PickleError``. :rtype: boolean """ return True def add(self, key, value, timeout=None): """Works like :meth:`set` but does not overwrite the values of already existing keys. :param key: the key to set :param value: the value for the key :param timeout: the cache timeout for the key in seconds (if not specified, it uses the default timeout). A timeout of 0 idicates that the cache never expires. :returns: Same as :meth:`set`, but also ``False`` for already existing keys. :rtype: boolean """ return True def set_many(self, mapping, timeout=None): """Sets multiple keys and values from a mapping. :param mapping: a mapping with the keys/values to set. :param timeout: the cache timeout for the key in seconds (if not specified, it uses the default timeout). A timeout of 0 idicates that the cache never expires. :returns: Whether all given keys have been set. :rtype: boolean """ rv = True for key, value in iteritems_wrapper(mapping): if not self.set(key, value, timeout): rv = False return rv def delete_many(self, *keys): """Deletes multiple keys at once. :param keys: The function accepts multiple keys as positional arguments. :returns: Whether all given keys have been deleted. :rtype: boolean """ if self.ignore_errors: return all([self.delete(key) for key in keys]) return all(self.delete(key) for key in keys) def has(self, key): """Checks if a key exists in the cache without returning it. This is a cheap operation that bypasses loading the actual data on the backend. This method is optional and may not be implemented on all caches. :param key: the key to check """ raise NotImplementedError( "%s doesn't have an efficient implementation of `has`. That " "means it is impossible to check whether a key exists without " "fully loading the key's data. Consider using `self.get` " "explicitly if you don't care about performance." ) def clear(self): """Clears the cache. Keep in mind that not all caches support completely clearing the cache. :returns: Whether the cache has been cleared. :rtype: boolean """ return True def inc(self, key, delta=1): """Increments the value of a key by `delta`. If the key does not yet exist it is initialized with `delta`. For supporting caches this is an atomic operation. :param key: the key to increment. :param delta: the delta to add. :returns: The new value or ``None`` for backend errors. 
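        As the method body shows, this base implementation is a non-atomic
        get-then-set; backends such as Redis override it with a native
        atomic increment. Illustrative use (a sketch)::

            cache.set("hits", 41)
            cache.inc("hits")       # -> 42
            cache.dec("hits", 2)    # -> 40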
""" value = (self.get(key) or 0) + delta return value if self.set(key, value) else None def dec(self, key, delta=1): """Decrements the value of a key by `delta`. If the key does not yet exist it is initialized with `-delta`. For supporting caches this is an atomic operation. :param key: the key to increment. :param delta: the delta to subtract. :returns: The new value or `None` for backend errors. """ value = (self.get(key) or 0) - delta return value if self.set(key, value) else None flask-caching-1.10.1/flask_caching/backends/filesystemcache.py000066400000000000000000000217051402442634700243470ustar00rootroot00000000000000# -*- coding: utf-8 -*- """ flask_caching.backends.filesystem ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ The filesystem caching backend. :copyright: (c) 2018 by Peter Justin. :copyright: (c) 2010 by Thadeus Burgess. :license: BSD, see LICENSE for more details. """ import errno import hashlib import logging import os import tempfile from time import time from flask_caching.backends.base import BaseCache try: import cPickle as pickle except ImportError: # pragma: no cover import pickle # type: ignore logger = logging.getLogger(__name__) class FileSystemCache(BaseCache): """A cache that stores the items on the file system. This cache depends on being the only user of the `cache_dir`. Make absolutely sure that nobody but this cache stores files there or otherwise the cache will randomly delete files therein. :param cache_dir: the directory where cache files are stored. :param threshold: the maximum number of items the cache stores before it starts deleting some. A threshold value of 0 indicates no threshold. :param default_timeout: the default timeout that is used if no timeout is specified on :meth:`~BaseCache.set`. A timeout of 0 indicates that the cache never expires. :param mode: the file mode wanted for the cache files, default 0600 :param hash_method: Default hashlib.md5. The hash method used to generate the filename for cached results. :param ignore_errors: If set to ``True`` the :meth:`~BaseCache.delete_many` method will ignore any errors that occurred during the deletion process. However, if it is set to ``False`` it will stop on the first error. Defaults to ``False``. 
""" #: used for temporary files by the FileSystemCache _fs_transaction_suffix = ".__wz_cache" #: keep amount of files in a cache element _fs_count_file = "__wz_cache_count" def __init__( self, cache_dir, threshold=500, default_timeout=300, mode=0o600, hash_method=hashlib.md5, ignore_errors=False, ): super(FileSystemCache, self).__init__(default_timeout) self._path = cache_dir self._threshold = threshold self._mode = mode self._hash_method = hash_method self.ignore_errors = ignore_errors try: os.makedirs(self._path) except OSError as ex: if ex.errno != errno.EEXIST: raise # If there are many files and a zero threshold, # the list_dir can slow initialisation massively if self._threshold != 0: self._update_count(value=len(self._list_dir())) @classmethod def factory(cls, app, config, args, kwargs): args.insert(0, config["CACHE_DIR"]) kwargs.update( dict( threshold=config["CACHE_THRESHOLD"], ignore_errors=config["CACHE_IGNORE_ERRORS"], ) ) return cls(*args, **kwargs) @property def _file_count(self): return self.get(self._fs_count_file) or 0 def _update_count(self, delta=None, value=None): # If we have no threshold, don't count files if self._threshold == 0: return if delta: new_count = self._file_count + delta else: new_count = value or 0 self.set(self._fs_count_file, new_count, mgmt_element=True) def _normalize_timeout(self, timeout): timeout = BaseCache._normalize_timeout(self, timeout) if timeout != 0: timeout = time() + timeout return int(timeout) def _list_dir(self): """return a list of (fully qualified) cache filenames""" mgmt_files = [ self._get_filename(name).split("/")[-1] for name in (self._fs_count_file,) ] return [ os.path.join(self._path, fn) for fn in os.listdir(self._path) if not fn.endswith(self._fs_transaction_suffix) and fn not in mgmt_files ] def _prune(self): if self._threshold == 0 or not self._file_count > self._threshold: return entries = self._list_dir() nremoved = 0 now = time() for idx, fname in enumerate(entries): try: remove = False with open(fname, "rb") as f: expires = pickle.load(f) remove = (expires != 0 and expires <= now) or idx % 3 == 0 if remove: os.remove(fname) nremoved += 1 except (IOError, OSError): pass self._update_count(value=len(self._list_dir())) logger.debug("evicted %d key(s)", nremoved) def clear(self): for fname in self._list_dir(): try: os.remove(fname) except (IOError, OSError): self._update_count(value=len(self._list_dir())) return False self._update_count(value=0) return True def _get_filename(self, key): if isinstance(key, str): key = key.encode("utf-8") # XXX unicode review hash = self._hash_method(key).hexdigest() return os.path.join(self._path, hash) def get(self, key): result = None expired = False hit_or_miss = "miss" filename = self._get_filename(key) try: with open(filename, "rb") as f: pickle_time = pickle.load(f) expired = pickle_time != 0 and pickle_time < time() if expired: self.delete(key) else: hit_or_miss = "hit" result = pickle.load(f) except FileNotFoundError: pass except (IOError, OSError, pickle.PickleError) as exc: logger.error("get key %r -> %s", key, exc) expiredstr = "(expired)" if expired else "" logger.debug("get key %r -> %s %s", key, hit_or_miss, expiredstr) return result def add(self, key, value, timeout=None): filename = self._get_filename(key) added = False should_add = not os.path.exists(filename) if should_add: added = self.set(key, value, timeout) addedstr = "added" if added else "not added" logger.debug("add key %r -> %s", key, addedstr) return should_add def set(self, key, value, timeout=None, 
mgmt_element=False): result = False # Management elements have no timeout if mgmt_element: timeout = 0 # Don't prune on management element update, to avoid loop else: self._prune() timeout = self._normalize_timeout(timeout) filename = self._get_filename(key) try: fd, tmp = tempfile.mkstemp( suffix=self._fs_transaction_suffix, dir=self._path ) with os.fdopen(fd, "wb") as f: pickle.dump(timeout, f, 1) pickle.dump(value, f, pickle.HIGHEST_PROTOCOL) is_new_file = not os.path.exists(filename) os.replace(tmp, filename) os.chmod(filename, self._mode) except (IOError, OSError) as exc: logger.error("set key %r -> %s", key, exc) else: result = True logger.debug("set key %r", key) # Management elements should not count towards threshold if not mgmt_element and is_new_file: self._update_count(delta=1) return result def delete(self, key, mgmt_element=False): deleted = False try: os.remove(self._get_filename(key)) except FileNotFoundError: logger.debug("delete key %r -> no such key") except (IOError, OSError) as exc: logger.error("delete key %r -> %s", key, exc) else: deleted = True logger.debug("deleted key %r", key) # Management elements should not count towards threshold if not mgmt_element: self._update_count(delta=-1) return deleted def has(self, key): result = False expired = False filename = self._get_filename(key) try: with open(filename, "rb") as f: pickle_time = pickle.load(f) expired = pickle_time != 0 and pickle_time < time() if expired: self.delete(key) else: result = True except FileNotFoundError: pass except (IOError, OSError, pickle.PickleError) as exc: logger.error("get key %r -> %s", key, exc) expiredstr = "(expired)" if expired else "" logger.debug("has key %r -> %s %s", key, result, expiredstr) return result flask-caching-1.10.1/flask_caching/backends/memcache.py000066400000000000000000000304241402442634700227370ustar00rootroot00000000000000import re # -*- coding: utf-8 -*- """ flask_caching.backends.memcache ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ The memcache caching backend. :copyright: (c) 2018 by Peter Justin. :copyright: (c) 2010 by Thadeus Burgess. :license: BSD, see LICENSE for more details. """ from time import time from flask_caching.backends.base import BaseCache, iteritems_wrapper try: import cPickle as pickle except ImportError: # pragma: no cover import pickle # type: ignore _test_memcached_key = re.compile(r"[^\x00-\x21\xff]{1,250}$").match class MemcachedCache(BaseCache): """A cache that uses memcached as backend. The first argument can either be an object that resembles the API of a :class:`memcache.Client` or a tuple/list of server addresses. In the event that a tuple/list is passed, Werkzeug tries to import the best available memcache library. This cache looks into the following packages/modules to find bindings for memcached: - ``pylibmc`` - ``google.appengine.api.memcached`` - ``memcached`` - ``libmc`` Implementation notes: This cache backend works around some limitations in memcached to simplify the interface. For example unicode keys are encoded to utf-8 on the fly. Methods such as :meth:`~BaseCache.get_dict` return the keys in the same format as passed. Furthermore all get methods silently ignore key errors to not cause problems when untrusted user data is passed to the get methods which is often the case in web applications. :param servers: a list or tuple of server addresses or alternatively a :class:`memcache.Client` or a compatible client. :param default_timeout: the default timeout that is used if no timeout is specified on :meth:`~BaseCache.set`. 
A timeout of 0 indicates that the cache never expires. :param key_prefix: a prefix that is added before all keys. This makes it possible to use the same memcached server for different applications. Keep in mind that :meth:`~BaseCache.clear` will also clear keys with a different prefix. """ def __init__(self, servers=None, default_timeout=300, key_prefix=None): super(MemcachedCache, self).__init__(default_timeout) if servers is None or isinstance(servers, (list, tuple)): if servers is None: servers = ["127.0.0.1:11211"] self._client = self.import_preferred_memcache_lib(servers) if self._client is None: raise RuntimeError("no memcache module found") else: # NOTE: servers is actually an already initialized memcache # client. self._client = servers self.key_prefix = key_prefix or None @classmethod def factory(cls, app, config, args, kwargs): args.append(config["CACHE_MEMCACHED_SERVERS"]) kwargs.update(dict(key_prefix=config["CACHE_KEY_PREFIX"])) return cls(*args, **kwargs) def _normalize_key(self, key): key = str(key) if self.key_prefix: key = self.key_prefix + key return key def _normalize_timeout(self, timeout): timeout = BaseCache._normalize_timeout(self, timeout) if timeout > 0: # NOTE: pylibmc expect the timeout as delta time up to # 2592000 seconds (30 days) if not hasattr(self, "mc_library"): try: import pylibmc # noqa except ImportError: self.mc_library = None else: self.mc_library = "pylibmc" if self.mc_library != "pylibmc": timeout = int(time()) + timeout elif timeout > 2592000: timeout = 0 return timeout def get(self, key): key = self._normalize_key(key) # memcached doesn't support keys longer than that. Because often # checks for so long keys can occur because it's tested from user # submitted data etc we fail silently for getting. if _test_memcached_key(key): return self._client.get(key) def get_dict(self, *keys): key_mapping = {} have_encoded_keys = False for key in keys: encoded_key = self._normalize_key(key) if not isinstance(key, str): have_encoded_keys = True if _test_memcached_key(key): key_mapping[encoded_key] = key _keys = list(key_mapping) d = rv = self._client.get_multi(_keys) if have_encoded_keys or self.key_prefix: rv = {} for key, value in d.items(): rv[key_mapping[key]] = value if len(rv) < len(keys): for key in keys: if key not in rv: rv[key] = None return rv def add(self, key, value, timeout=None): key = self._normalize_key(key) timeout = self._normalize_timeout(timeout) return self._client.add(key, value, timeout) def set(self, key, value, timeout=None): key = self._normalize_key(key) timeout = self._normalize_timeout(timeout) return self._client.set(key, value, timeout) def get_many(self, *keys): d = self.get_dict(*keys) return [d[key] for key in keys] def set_many(self, mapping, timeout=None): new_mapping = {} for key, value in iteritems_wrapper(mapping): key = self._normalize_key(key) new_mapping[key] = value timeout = self._normalize_timeout(timeout) failed_keys = self._client.set_multi(new_mapping, timeout) return not failed_keys def delete(self, key): key = self._normalize_key(key) if _test_memcached_key(key): return self._client.delete(key) def delete_many(self, *keys): new_keys = [] for key in keys: key = self._normalize_key(key) if _test_memcached_key(key): new_keys.append(key) return self._client.delete_multi(new_keys) def has(self, key): key = self._normalize_key(key) if _test_memcached_key(key): try: return self._client.append(key, "") except AttributeError: # GAEMemecache has no 'append' function return True if self._client.get(key) is not None else 
False return False def clear(self): return self._client.flush_all() def inc(self, key, delta=1): key = self._normalize_key(key) return self._client.incr(key, delta) def dec(self, key, delta=1): key = self._normalize_key(key) return self._client.decr(key, delta) def import_preferred_memcache_lib(self, servers): """Returns an initialized memcache client. Used by the constructor.""" try: import pylibmc except ImportError: pass else: self.mc_library = "pylibmc" return pylibmc.Client(servers) try: from google.appengine.api import memcache except ImportError: pass else: self.mc_library = "google.appengine.api" return memcache.Client() try: import memcache except ImportError: pass else: self.mc_library = "memcache" return memcache.Client(servers) try: import libmc except ImportError: pass else: self.mc_library = "libmc" return libmc.Client(servers) class SASLMemcachedCache(MemcachedCache): def __init__( self, servers=None, default_timeout=300, key_prefix=None, username=None, password=None, **kwargs ): super(SASLMemcachedCache, self).__init__( default_timeout=default_timeout ) if servers is None: servers = ["127.0.0.1:11211"] import pylibmc self._client = pylibmc.Client( servers, username=username, password=password, binary=True, **kwargs ) self.key_prefix = key_prefix @classmethod def factory(cls, app, config, args, kwargs): args.append(config["CACHE_MEMCACHED_SERVERS"]) kwargs.update( dict( username=config["CACHE_MEMCACHED_USERNAME"], password=config["CACHE_MEMCACHED_PASSWORD"], key_prefix=config["CACHE_KEY_PREFIX"], ) ) return cls(*args, **kwargs) class SpreadSASLMemcachedCache(SASLMemcachedCache): """Simple Subclass of SASLMemcached client that will spread the value across multiple keys if they are bigger than a given treshhold. Spreading requires using pickle to store the value, which can significantly impact the performance. """ def __init__(self, *args, **kwargs): """ Kwargs: chunksize (int): max length of a pickled object that can fit in memcached (memcache has an upper limit of 1MB for values, default: 1048448) """ self.chunksize = kwargs.get("chunksize", 1048448) self.maxchunk = kwargs.get("maxchunk", 32) super(SpreadSASLMemcachedCache, self).__init__(*args, **kwargs) @classmethod def factory(cls, app, config, args, kwargs): args.append(config["CACHE_MEMCACHED_SERVERS"]) kwargs.update( dict( username=config.get("CACHE_MEMCACHED_USERNAME"), password=config.get("CACHE_MEMCACHED_PASSWORD"), key_prefix=config.get("CACHE_KEY_PREFIX"), ) ) return cls(*args, **kwargs) def delete(self, key): for skey in self._genkeys(key): super(SpreadSASLMemcachedCache, self).delete(skey) def set(self, key, value, timeout=None, chunk=True): """Set a value in cache, potentially spreading it across multiple key. :param key: The cache key. :param value: The value to cache. :param timeout: The timeout after which the cache will be invalidated. :param chunk: If set to `False`, then spreading across multiple keys is disabled. This can be faster, but it will fail if the value is bigger than the chunks. It requires you to get back the object by specifying that it is not spread. """ if chunk: return self._set(key, value, timeout=timeout) else: return super(SpreadSASLMemcachedCache, self).set( key, value, timeout=timeout ) def _set(self, key, value, timeout=None): # pickling/unpickling add an overhead, # I didn't found a good way to avoid pickling/unpickling if # key is smaller than chunksize, because in case or # getting the length consume the data iterator. 
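        # The pickled payload is sliced into chunksize-byte pieces stored
        # under derived keys "<key>.0", "<key>.1", ... (cf. _genkeys);
        # _get later fetches all candidate chunk keys and reassembles
        # the bytes before unpickling.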
serialized = pickle.dumps(value, 2) values = {} len_ser = len(serialized) chks = range(0, len_ser, self.chunksize) if len(chks) > self.maxchunk: raise ValueError( "Cannot store value in less than %s keys" % self.maxchunk ) for i in chks: values["%s.%s" % (key, i // self.chunksize)] = serialized[ i : i + self.chunksize ] super(SpreadSASLMemcachedCache, self).set_many(values, timeout) def get(self, key, chunk=True): """Get a cached value. :param chunk: If set to ``False``, it will return a cached value that is spread across multiple keys. """ if chunk: return self._get(key) else: return super(SpreadSASLMemcachedCache, self).get(key) def _genkeys(self, key): return ["%s.%s" % (key, i) for i in range(self.maxchunk)] def _get(self, key): to_get = ["%s.%s" % (key, i) for i in range(self.maxchunk)] result = super(SpreadSASLMemcachedCache, self).get_many(*to_get) serialized = b"".join(v for v in result if v is not None) if not serialized: return None return pickle.loads(serialized) flask-caching-1.10.1/flask_caching/backends/nullcache.py000066400000000000000000000012121402442634700231240ustar00rootroot00000000000000# -*- coding: utf-8 -*- """ flask_caching.backends.null ~~~~~~~~~~~~~~~~~~~~~~~~~~~ The null cache backend. A caching backend that doesn't cache. :copyright: (c) 2018 by Peter Justin. :copyright: (c) 2010 by Thadeus Burgess. :license: BSD, see LICENSE for more details. """ from flask_caching.backends.base import BaseCache class NullCache(BaseCache): """A cache that doesn't cache. This can be useful for unit testing. :param default_timeout: a dummy parameter that is ignored but exists for API compatibility with other caches. """ def has(self, key): return False flask-caching-1.10.1/flask_caching/backends/rediscache.py000066400000000000000000000326431402442634700232740ustar00rootroot00000000000000# -*- coding: utf-8 -*- """ flask_caching.backends.rediscache ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ The redis caching backend. :copyright: (c) 2018 by Peter Justin. :copyright: (c) 2010 by Thadeus Burgess. :license: BSD, see LICENSE for more details. """ from flask_caching.backends.base import BaseCache, iteritems_wrapper try: import cPickle as pickle except ImportError: # pragma: no cover import pickle # type: ignore class RedisCache(BaseCache): """Uses the Redis key-value store as a cache backend. The first argument can be either a string denoting address of the Redis server or an object resembling an instance of a redis.Redis class. Note: Python Redis API already takes care of encoding unicode strings on the fly. :param host: address of the Redis server or an object which API is compatible with the official Python Redis client (redis-py). :param port: port number on which Redis server listens for connections. :param password: password authentication for the Redis server. :param db: db (zero-based numeric index) on Redis Server to connect. :param default_timeout: the default timeout that is used if no timeout is specified on :meth:`~BaseCache.set`. A timeout of 0 indicates that the cache never expires. :param key_prefix: A prefix that should be added to all keys. Any additional keyword arguments will be passed to ``redis.Redis``. 
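    Illustrative Flask-Caching configuration (the values are assumptions for
    a local Redis; see ``factory()`` below for the recognized keys)::

        CACHE_TYPE = "redis"
        CACHE_REDIS_HOST = "localhost"
        CACHE_REDIS_PORT = 6379
        CACHE_REDIS_DB = 0
        CACHE_KEY_PREFIX = "myapp:"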
""" def __init__( self, host="localhost", port=6379, password=None, db=0, default_timeout=300, key_prefix=None, **kwargs ): super().__init__(default_timeout) if host is None: raise ValueError("RedisCache host parameter may not be None") if isinstance(host, str): try: import redis except ImportError: raise RuntimeError("no redis module found") if kwargs.get("decode_responses", None): raise ValueError( "decode_responses is not supported by " "RedisCache." ) client = redis.Redis( host=host, port=port, password=password, db=db, **kwargs ) else: client = host self._write_client = self._read_clients = client self.key_prefix = key_prefix or "" @classmethod def factory(cls, app, config, args, kwargs): try: from redis import from_url as redis_from_url except ImportError: raise RuntimeError("no redis module found") kwargs.update( dict( host=config.get("CACHE_REDIS_HOST", "localhost"), port=config.get("CACHE_REDIS_PORT", 6379), ) ) password = config.get("CACHE_REDIS_PASSWORD") if password: kwargs["password"] = password key_prefix = config.get("CACHE_KEY_PREFIX") if key_prefix: kwargs["key_prefix"] = key_prefix db_number = config.get("CACHE_REDIS_DB") if db_number: kwargs["db"] = db_number redis_url = config.get("CACHE_REDIS_URL") if redis_url: kwargs["host"] = redis_from_url( redis_url, db=kwargs.pop("db", None) ) return cls(*args, **kwargs) def _get_prefix(self): return ( self.key_prefix if isinstance(self.key_prefix, str) else self.key_prefix() ) def _normalize_timeout(self, timeout): timeout = BaseCache._normalize_timeout(self, timeout) if timeout == 0: timeout = -1 return timeout def dump_object(self, value): """Dumps an object into a string for redis. By default it serializes integers as regular string and pickle dumps everything else. """ t = type(value) if t == int: return str(value).encode("ascii") return b"!" + pickle.dumps(value) def load_object(self, value): """The reversal of :meth:`dump_object`. This might be called with None. """ if value is None: return None if value.startswith(b"!"): try: return pickle.loads(value[1:]) except pickle.PickleError: return None try: return int(value) except ValueError: # before 0.8 we did not have serialization. Still support that. 
return value def get(self, key): return self.load_object( self._read_clients.get(self._get_prefix() + key) ) def get_many(self, *keys): if self.key_prefix: keys = [self._get_prefix() + key for key in keys] return [self.load_object(x) for x in self._read_clients.mget(keys)] def set(self, key, value, timeout=None): timeout = self._normalize_timeout(timeout) dump = self.dump_object(value) if timeout == -1: result = self._write_client.set( name=self._get_prefix() + key, value=dump ) else: result = self._write_client.setex( name=self._get_prefix() + key, value=dump, time=timeout ) return result def add(self, key, value, timeout=None): timeout = self._normalize_timeout(timeout) dump = self.dump_object(value) created = self._write_client.setnx( name=self._get_prefix() + key, value=dump ) if created and timeout != -1: self._write_client.expire( name=self._get_prefix() + key, time=timeout ) return created def set_many(self, mapping, timeout=None): timeout = self._normalize_timeout(timeout) # Use transaction=False to batch without calling redis MULTI # which is not supported by twemproxy pipe = self._write_client.pipeline(transaction=False) for key, value in iteritems_wrapper(mapping): dump = self.dump_object(value) if timeout == -1: pipe.set(name=self._get_prefix() + key, value=dump) else: pipe.setex( name=self._get_prefix() + key, value=dump, time=timeout ) return pipe.execute() def delete(self, key): return self._write_client.delete(self._get_prefix() + key) def delete_many(self, *keys): if not keys: return if self.key_prefix: keys = [self._get_prefix() + key for key in keys] return self._write_client.delete(*keys) def has(self, key): return self._read_clients.exists(self._get_prefix() + key) def clear(self): status = False if self.key_prefix: keys = self._read_clients.keys(self._get_prefix() + "*") if keys: status = self._write_client.delete(*keys) else: status = self._write_client.flushdb(asynchronous=True) return status def inc(self, key, delta=1): return self._write_client.incr( name=self._get_prefix() + key, amount=delta ) def dec(self, key, delta=1): return self._write_client.decr( name=self._get_prefix() + key, amount=delta ) def unlink(self, *keys): """when redis-py >= 3.0.0 and redis > 4, support this operation""" if not keys: return if self.key_prefix: keys = [self.key_prefix + key for key in keys] unlink = getattr(self._write_client, "unlink", None) if unlink is not None and callable(unlink): return self._write_client.unlink(*keys) return self._write_client.delete(*keys) class RedisSentinelCache(RedisCache): """Uses the Redis key-value store as a cache backend. The first argument can be either a string denoting address of the Redis server or an object resembling an instance of a redis.Redis class. Note: Python Redis API already takes care of encoding unicode strings on the fly. :param sentinels: A list or a tuple of Redis sentinel addresses. :param master: The name of the master server in a sentinel configuration. :param password: password authentication for the Redis server. :param db: db (zero-based numeric index) on Redis Server to connect. :param default_timeout: the default timeout that is used if no timeout is specified on :meth:`~BaseCache.set`. A timeout of 0 indicates that the cache never expires. :param key_prefix: A prefix that should be added to all keys. Any additional keyword arguments will be passed to ``redis.sentinel.Sentinel``. 
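    Illustrative use (the addresses are assumptions; keyword arguments
    prefixed with ``sentinel_`` are routed to the Sentinel client itself,
    the rest are passed to ``redis.sentinel.Sentinel`` for the
    master/replica connections)::

        cache = RedisSentinelCache(
            sentinels=[("10.0.0.1", 26379), ("10.0.0.2", 26379)],
            master="mymaster",
        )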
""" def __init__( self, sentinels=None, master=None, password=None, db=0, default_timeout=300, key_prefix=None, **kwargs ): super().__init__(default_timeout=default_timeout) try: import redis.sentinel except ImportError: raise RuntimeError("no redis module found") if kwargs.get("decode_responses", None): raise ValueError( "decode_responses is not supported by " "RedisCache." ) sentinels = sentinels or [("127.0.0.1", 26379)] sentinel_kwargs = { key[9:]: value for key, value in kwargs.items() if key.startswith("sentinel_") } kwargs = { key[9:]: value for key, value in kwargs.items() if not key.startswith("sentinel_") } sentinel = redis.sentinel.Sentinel( sentinels=sentinels, password=password, db=db, sentinel_kwargs=sentinel_kwargs, **kwargs ) self._write_client = sentinel.master_for(master) self._read_clients = sentinel.slave_for(master) self.key_prefix = key_prefix or "" @classmethod def factory(cls, app, config, args, kwargs): kwargs.update( dict( sentinels=config.get( "CACHE_REDIS_SENTINELS", [("127.0.0.1", 26379)] ), master=config.get("CACHE_REDIS_SENTINEL_MASTER", "mymaster"), password=config.get("CACHE_REDIS_PASSWORD", None), sentinel_password=config.get( "CACHE_REDIS_SENTINEL_PASSWORD", None ), key_prefix=config.get("CACHE_KEY_PREFIX", None), db=config.get("CACHE_REDIS_DB", 0), ) ) return cls(*args, **kwargs) class RedisClusterCache(RedisCache): """Uses the Redis key-value store as a cache backend. The first argument can be either a string denoting address of the Redis server or an object resembling an instance of a rediscluster.RedisCluster class. Note: Python Redis API already takes care of encoding unicode strings on the fly. :param cluster: The redis cluster nodes address separated by comma. e.g. host1:port1,host2:port2,host3:port3 . :param password: password authentication for the Redis server. :param default_timeout: the default timeout that is used if no timeout is specified on :meth:`~BaseCache.set`. A timeout of 0 indicates that the cache never expires. :param key_prefix: A prefix that should be added to all keys. Any additional keyword arguments will be passed to ``rediscluster.RedisCluster``. """ def __init__( self, cluster="", password="", default_timeout=300, key_prefix="", **kwargs ): super().__init__(default_timeout=default_timeout) if kwargs.get("decode_responses", None): raise ValueError( "decode_responses is not supported by " "RedisCache." ) try: from rediscluster import RedisCluster except ImportError: raise RuntimeError("no rediscluster module found") try: nodes = [(node.split(":")) for node in cluster.split(",")] startup_nodes = [ {"host": node[0].strip(), "port": node[1].strip()} for node in nodes ] except IndexError: raise ValueError( "Please give the correct cluster argument " "e.g. 
host1:port1,host2:port2,host3:port3" ) # Skips the check of cluster-require-full-coverage config, # useful for clusters without the CONFIG command (like aws) skip_full_coverage_check = kwargs.pop("skip_full_coverage_check", True) cluster = RedisCluster( startup_nodes=startup_nodes, password=password, skip_full_coverage_check=skip_full_coverage_check, **kwargs ) self._write_client = self._read_clients = cluster self.key_prefix = key_prefix @classmethod def factory(cls, app, config, args, kwargs): kwargs.update( dict( cluster=config.get("CACHE_REDIS_CLUSTER", ""), password=config.get("CACHE_REDIS_PASSWORD", ""), default_timeout=config.get("CACHE_DEFAULT_TIMEOUT", 300), key_prefix=config.get("CACHE_KEY_PREFIX", ""), ) ) return cls(*args, **kwargs) flask-caching-1.10.1/flask_caching/backends/simplecache.py000066400000000000000000000113541402442634700234530ustar00rootroot00000000000000# -*- coding: utf-8 -*- """ flask_caching.backends.simple ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ The simple cache backend. :copyright: (c) 2018 by Peter Justin. :copyright: (c) 2010 by Thadeus Burgess. :license: BSD, see LICENSE for more details. """ import logging from time import time from flask_caching.backends.base import BaseCache try: import cPickle as pickle except ImportError: # pragma: no cover import pickle # type: ignore logger = logging.getLogger(__name__) class SimpleCache(BaseCache): """Simple memory cache for single process environments. This class exists mainly for the development server and is not 100% thread safe. It tries to use as many atomic operations as possible and no locks for simplicity but it could happen under heavy load that keys are added multiple times. :param threshold: the maximum number of items the cache stores before it starts deleting some. :param default_timeout: the default timeout that is used if no timeout is specified on :meth:`~BaseCache.set`. A timeout of 0 indicates that the cache never expires. :param ignore_errors: If set to ``True`` the :meth:`~BaseCache.delete_many` method will ignore any errors that occurred during the deletion process. However, if it is set to ``False`` it will stop on the first error. Defaults to ``False``. 
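    Illustrative use (a sketch; values live pickled in a per-process dict,
    so nothing is shared between workers)::

        from flask_caching.backends import SimpleCache

        cache = SimpleCache(threshold=100, default_timeout=300)
        cache.set("greeting", "hello")
        assert cache.get("greeting") == "hello"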
""" def __init__(self, threshold=500, default_timeout=300, ignore_errors=False): super(SimpleCache, self).__init__(default_timeout) self._cache = {} self.clear = self._cache.clear self._threshold = threshold self.ignore_errors = ignore_errors @classmethod def factory(cls, app, config, args, kwargs): kwargs.update( dict( threshold=config["CACHE_THRESHOLD"], ignore_errors=config["CACHE_IGNORE_ERRORS"], ) ) return cls(*args, **kwargs) def _prune(self): if len(self._cache) > self._threshold: now = time() toremove = [] for idx, (key, (expires, _)) in enumerate(self._cache.items()): if (expires != 0 and expires <= now) or idx % 3 == 0: toremove.append(key) for key in toremove: self._cache.pop(key, None) logger.debug("evicted %d key(s): %r", len(toremove), toremove) def _normalize_timeout(self, timeout): timeout = BaseCache._normalize_timeout(self, timeout) if timeout > 0: timeout = time() + timeout return timeout def get(self, key): result = None expired = False hit_or_miss = "miss" try: expires, value = self._cache[key] except KeyError: pass else: expired = expires != 0 and expires <= time() if not expired: hit_or_miss = "hit" try: result = pickle.loads(value) except Exception as exc: logger.error("get key %r -> %s", key, exc) expiredstr = "(expired)" if expired else "" logger.debug("get key %r -> %s %s", key, hit_or_miss, expiredstr) return result def set(self, key, value, timeout=None): expires = self._normalize_timeout(timeout) self._prune() item = (expires, pickle.dumps(value, pickle.HIGHEST_PROTOCOL)) self._cache[key] = item logger.debug("set key %r", key) return True def add(self, key, value, timeout=None): expires = self._normalize_timeout(timeout) self._prune() item = (expires, pickle.dumps(value, pickle.HIGHEST_PROTOCOL)) updated = False should_add = key not in self._cache if should_add: updated = self._cache.setdefault(key, item) != item updatedstr = "updated" if updated else "not updated" logger.debug("add key %r -> %s", key, updatedstr) return should_add def delete(self, key): deleted = self._cache.pop(key, None) is not None deletedstr = "deleted" if deleted else "not deleted" logger.debug("delete key %r -> %s", key, deletedstr) return deleted def has(self, key): result = False expired = False try: expires, value = self._cache[key] except KeyError: pass else: result = expires == 0 or expires > time() expired = not result expiredstr = "(expired)" if expired else "" logger.debug("has key %r -> %s %s", key, result, expiredstr) return result flask-caching-1.10.1/flask_caching/backends/uwsgicache.py000066400000000000000000000006361402442634700233210ustar00rootroot00000000000000import warnings from flask_caching.contrib.uwsgicache import UWSGICache as _UWSGICache class UWSGICache(_UWSGICache): def __init__(self, *args, **kwargs): warnings.warn( "Importing UWSGICache from flask_caching.backends is deprecated, use flask_caching.contrib.uwsgicache.UWSGICache instead", category=DeprecationWarning, ) super().__init__(*args, **kwargs) flask-caching-1.10.1/flask_caching/contrib/000077500000000000000000000000001402442634700205065ustar00rootroot00000000000000flask-caching-1.10.1/flask_caching/contrib/__init__.py000066400000000000000000000000001402442634700226050ustar00rootroot00000000000000flask-caching-1.10.1/flask_caching/contrib/googlecloudstoragecache.py000066400000000000000000000166201402442634700257410ustar00rootroot00000000000000# -*- coding: utf-8 -*- import datetime import json import logging from flask_caching.backends.base import BaseCache logger = logging.getLogger(__name__) try: from 
google.auth.credentials import AnonymousCredentials from google.cloud import storage, exceptions except ImportError: raise RuntimeError("no google-cloud-storage module found") class GoogleCloudStorageCache(BaseCache): """Uses an Google Cloud Storage bucket as a cache backend. Note: User-contributed functionality. This project does not guarantee that this functionality will be maintained or functional at any given time. Note: Cache keys must meet GCS criteria for a valid object name (a sequence of Unicode characters whose UTF-8 encoding is at most 1024 bytes long). Note: Expired cache objects are not automatically purged. If delete_expired_objects_on_read=True, they will be deleted following an attempted read (which reduces performance). Otherwise, you have to delete stale objects yourself. Consider an GCS bucket lifecycle rule or other out-of-band process. For example you can use the following rule. {"rule": [{"action": {"type": "Delete"}, "condition": {"daysSinceCustomTime": 0}}]} https://cloud.google.com/storage/docs/lifecycle#dayssincecustomtime :param bucket: Required. Name of the bucket to use. It must already exist. :param key_prefix: A prefix that should be added to all keys. :param default_timeout: the default timeout that is used if no timeout is specified on :meth:`~BaseCache.set`. A timeout of 0 indicates that the cache never expires. :param delete_expired_objects_on_read: If True, if a read finds a stale object, it will be deleted before a response is returned. Will slow down responses. :param anonymous: If true, use anonymous credentials. Useful for testing. Any additional keyword arguments will be passed to ``google.cloud.storage.Client``. """ def __init__( self, bucket, key_prefix=None, default_timeout=300, delete_expired_objects_on_read=False, anonymous=False, **kwargs ): super(GoogleCloudStorageCache, self).__init__(default_timeout) if not isinstance(bucket, str): raise ValueError("GCSCache bucket parameter must be a string") if anonymous: self._client = storage.Client( credentials=AnonymousCredentials(), project="test", **kwargs ) else: self._client = storage.Client(**kwargs) self.bucket = self._client.get_bucket(bucket) self.key_prefix = key_prefix or "" self.default_timeout = default_timeout self.delete_expired_objects_on_read = delete_expired_objects_on_read @classmethod def factory(cls, app, config, args, kwargs): args.insert(0, config["CACHE_GCS_BUCKET"]) key_prefix = config.get("CACHE_KEY_PREFIX") if key_prefix: kwargs["key_prefix"] = key_prefix return cls(*args, **kwargs) def get(self, key): result = None expired = False hit_or_miss = "miss" full_key = self.key_prefix + key blob = self.bucket.get_blob(full_key) if blob is not None: expired = blob.custom_time and self._now() > blob.custom_time if expired: # Object is stale if self.delete_expired_objects_on_read: self._delete(full_key) else: try: result = blob.download_as_bytes() hit_or_miss = "hit" if blob.content_type == "application/json": result = json.loads(result) except exceptions.NotFound: pass expiredstr = "(expired)" if expired else "" logger.debug("get key %r -> %s %s", full_key, hit_or_miss, expiredstr) return result def set(self, key, value, timeout=None): result = False full_key = self.key_prefix + key content_type = "application/json" try: value = json.dumps(value) except (UnicodeDecodeError, TypeError): content_type = "application/octet-stream" blob = self.bucket.blob(full_key) if timeout is None: timeout = self.default_timeout if timeout != 0: # Use 'Custom-Time' for expiry # 
https://cloud.google.com/storage/docs/metadata#custom-time blob.custom_time = self._now(delta=timeout) try: blob.upload_from_string(value, content_type=content_type) result = True except exceptions.TooManyRequests: pass logger.debug("set key %r -> %s", full_key, result) return result def add(self, key, value, timeout=None): full_key = self.key_prefix + key if self._has(full_key): logger.debug("add key %r -> not added", full_key) return False else: return self.set(key, value, timeout) def delete(self, key): full_key = self.key_prefix + key return self._delete(full_key) def delete_many(self, *keys): return self._delete_many(self.key_prefix + key for key in keys) def has(self, key): full_key = self.key_prefix + key return self._has(full_key) def clear(self): return self._prune(clear_all=True) def _prune(self, clear_all=False): # Delete in batches of 100 which is much faster than individual deletes nremoved = 0 now = self._now() response_iterator = self._client.list_blobs( self.bucket, prefix=self.key_prefix, fields="items(name,customTime),nextPageToken", ) to_delete = [] for blob in response_iterator: if clear_all or blob.custom_time and blob.custom_time < now: to_delete.append(blob.name) nremoved += 1 if len(to_delete) == 100: self._delete_many(to_delete) to_delete = [] # Delete the remainder if to_delete: self._delete_many(to_delete) logger.debug("evicted %d key(s)", nremoved) return True def _delete(self, key): return self._delete_many([key]) def _delete_many(self, keys): try: with self._client.batch(): for key in keys: self.bucket.delete_blob(key) except (exceptions.NotFound, exceptions.TooManyRequests): pass return True def _has(self, key): result = False expired = False blob = self.bucket.get_blob(key) if blob is not None: expired = blob.custom_time and self._now() > blob.custom_time if expired: # Exists but is stale if self.delete_expired_objects_on_read: self._delete(key) else: result = True expiredstr = "(expired)" if expired else "" logger.debug("has key %r -> %s %s", key, result, expiredstr) return result def _now(self, delta=0): return datetime.datetime.now(datetime.timezone.utc) + datetime.timedelta( seconds=delta ) flask-caching-1.10.1/flask_caching/contrib/uwsgicache.py000066400000000000000000000063241402442634700232070ustar00rootroot00000000000000# -*- coding: utf-8 -*- """ flask_caching.backends.uwsgicache ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ The uWSGI caching backend. :copyright: (c) 2018 by Peter Justin. :copyright: (c) 2010 by Thadeus Burgess. :license: BSD, see LICENSE for more details. """ import platform from flask_caching.backends.base import BaseCache try: import cPickle as pickle except ImportError: # pragma: no cover import pickle # type: ignore class UWSGICache(BaseCache): """Implements the cache using uWSGI's caching framework. .. note:: This class cannot be used when running under PyPy, because the uWSGI API implementation for PyPy is lacking the needed functionality. :param default_timeout: The default timeout in seconds. :param cache: The name of the caching instance to connect to, for example: mycache@localhost:3031, defaults to an empty string, which means uWSGI will use the first cache instance initialized. If the cache is in the same instance as the werkzeug app, you only have to provide the name of the cache. """ def __init__(self, default_timeout=300, cache=""): super(UWSGICache, self).__init__(default_timeout) if platform.python_implementation() == "PyPy": raise RuntimeError( "uWSGI caching does not work under PyPy, see " "the docs for more details." 
) try: import uwsgi self._uwsgi = uwsgi except ImportError: raise RuntimeError( "uWSGI could not be imported, are you running under uWSGI?" ) if "cache2" not in uwsgi.opt: raise RuntimeError( "You must enable cache2 in uWSGI configuration: " "https://uwsgi-docs.readthedocs.io/en/latest/Caching.html" ) self.cache = cache @classmethod def factory(cls, app, config, args, kwargs): # The name of the caching instance to connect to, for # example: mycache@localhost:3031, defaults to an empty string, which # means uWSGI will cache in the local instance. If the cache is in the # same instance as the werkzeug app, you only have to provide the name # of the cache. uwsgi_cache_name = config.get("CACHE_UWSGI_NAME", "") kwargs.update(dict(cache=uwsgi_cache_name)) return cls(*args, **kwargs) def get(self, key): rv = self._uwsgi.cache_get(key, self.cache) if rv is None: return return pickle.loads(rv) def delete(self, key): return self._uwsgi.cache_del(key, self.cache) def set(self, key, value, timeout=None): return self._uwsgi.cache_update( key, pickle.dumps(value), self._normalize_timeout(timeout), self.cache, ) def add(self, key, value, timeout=None): return self._uwsgi.cache_set( key, pickle.dumps(value), self._normalize_timeout(timeout), self.cache, ) def clear(self): return self._uwsgi.cache_clear(self.cache) def has(self, key): return self._uwsgi.cache_exists(key, self.cache) is not None flask-caching-1.10.1/flask_caching/jinja2ext.py000066400000000000000000000056121402442634700213220ustar00rootroot00000000000000# -*- coding: utf-8 -*- """ flask_caching.jinja2ext ~~~~~~~~~~~~~~~~~~~~~~~ Jinja2 extension that adds support for caching template fragments. Usage:: {% cache timeout key1[, [key2, ...]] %} ... {% endcache %} By default, the value of "path to template file" + "block start line" is used as the cache key. Also, the key name can be set manually. Keys are concatenated together into a single string, that can be used to avoid the same block evaluating in different templates. Set the timeout to ``None`` for no timeout, but with custom keys:: {% cache None "key" %} ... {% endcache %} Set timeout to ``del`` to delete cached value:: {% cache 'del' key1 %} ... {% endcache %} Considering we have ``render_form_field`` and ``render_submit`` macros:: {% cache 60*5 'myform' %}
{{ render_form_field(form.username) }} {{ render_submit() }}</form>
{% endcache %} :copyright: (c) 2010 by Thadeus Burgess. :license: BSD, see LICENSE for more details. """ from jinja2 import nodes from jinja2.ext import Extension from flask_caching import make_template_fragment_key JINJA_CACHE_ATTR_NAME = "_template_fragment_cache" class CacheExtension(Extension): tags = set(["cache"]) def parse(self, parser): lineno = next(parser.stream).lineno #: Parse timeout args = [parser.parse_expression()] #: Parse fragment name #: Grab the fragment name if it exists #: otherwise, default to the old method of using the templates #: lineno to maintain backwards compatibility. if parser.stream.skip_if("comma"): args.append(parser.parse_expression()) else: args.append(nodes.Const("%s%s" % (parser.filename, lineno))) #: Parse vary_on parameters vary_on = [] while parser.stream.skip_if("comma"): vary_on.append(parser.parse_expression()) if vary_on: args.append(nodes.List(vary_on)) else: args.append(nodes.Const([])) body = parser.parse_statements(["name:endcache"], drop_needle=True) return nodes.CallBlock( self.call_method("_cache", args), [], [], body ).set_lineno(lineno) def _cache(self, timeout, fragment_name, vary_on, caller): try: cache = getattr(self.environment, JINJA_CACHE_ATTR_NAME) except AttributeError as e: raise e key = make_template_fragment_key(fragment_name, vary_on=vary_on) #: Delete key if timeout is 'del' if timeout == "del": cache.delete(key) return caller() rv = cache.get(key) if rv is None: rv = caller() cache.set(key, rv, timeout) return rv flask-caching-1.10.1/requirements.txt000066400000000000000000000001421402442634700175530ustar00rootroot00000000000000Flask Werkzeug Sphinx pytest pytest-cov pytest-xprocess redis pylibmc coverage flake8 twine wheel flask-caching-1.10.1/setup.cfg000066400000000000000000000017751402442634700161250ustar00rootroot00000000000000[build_sphinx] source-dir = docs/ build-dir = docs/_build all_files = 1 [upload_sphinx] upload-dir = docs/_build/html [tool:pytest] addopts = -vvl --strict --cov=flask_caching --cov-report=term-missing norecursedirs = .* _* scripts {args} [aliases] test = pytest [mypy] ignore_missing_imports = True [flake8] max-line-length = 88 ignore = # See the black style guide for more info: # https://github.com/psf/black/blob/master/docs/the_black_code_style.md # whitespace before ':' E203, # line break before binary operator W503 exclude = # No need to traverse our git directory .git, # There's no value in checking cache directories __pycache__, # The conf file is mostly autogenerated, ignore it docs/source/conf.py, # The old directory contains Flake8 2.0 old, # This contains our built documentation build, # This contains builds of flake8 that we don't want to check dist max-complexity = 10 [coverage:run] omit = flask_caching/contrib/* flask-caching-1.10.1/setup.py000077500000000000000000000037431402442634700160160ustar00rootroot00000000000000#!/usr/bin/env python import os import ast import re from setuptools import find_packages, setup _version_re = re.compile(r"__version__\s+=\s+(.*)") def read(*parts): here = os.path.abspath(os.path.dirname(__file__)) with open(os.path.join(here, *parts), "r") as fp: return fp.read() version_line = re.search( r"__version__\s+=\s+(.*)", read("flask_caching", "__init__.py") ).group(1) version = str(ast.literal_eval(version_line)) long_description = read("README.md") setup( name="Flask-Caching", version=version, project_urls={ "Documentation": "https://flask-caching.readthedocs.io", "Source Code": "https://github.com/sh4nks/flask-caching", "Issue Tracker": 
"https://github.com/sh4nks/flask-caching", }, url="https://github.com/sh4nks/flask-caching", license="BSD", author="Peter Justin", author_email="peter.justin@outlook.com", description="Adds caching support to your Flask application", long_description=long_description, long_description_content_type="text/markdown", packages=find_packages(exclude=("tests",)), zip_safe=False, platforms="any", python_requires=">=3.5", install_requires=["Flask"], tests_require=[ "pytest", "pytest-cov", "pytest-xprocess", "pylibmc", "redis", ], classifiers=[ "Development Status :: 5 - Production/Stable", "Environment :: Web Environment", "Intended Audience :: Developers", "License :: OSI Approved :: BSD License", "Operating System :: OS Independent", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Topic :: Internet :: WWW/HTTP :: Dynamic Content", "Topic :: Software Development :: Libraries :: Python Modules", "Typing :: Typed", ], ) flask-caching-1.10.1/tests/000077500000000000000000000000001402442634700154345ustar00rootroot00000000000000flask-caching-1.10.1/tests/conftest.py000066400000000000000000000044221402442634700176350ustar00rootroot00000000000000# -*- coding: utf-8 -*- import errno import os import flask import pytest import flask_caching as fsc try: __import__("pytest_xprocess") from xprocess import ProcessStarter except ImportError: @pytest.fixture(scope="session") def xprocess(): pytest.skip("pytest-xprocess not installed.") @pytest.fixture def app(request): app = flask.Flask( request.module.__name__, template_folder=os.path.dirname(__file__) ) app.testing = True app.config["CACHE_TYPE"] = "simple" return app @pytest.fixture def cache(app): return fsc.Cache(app) @pytest.fixture( params=[method for method in fsc.SUPPORTED_HASH_FUNCTIONS], ids=[method.__name__ for method in fsc.SUPPORTED_HASH_FUNCTIONS], ) def hash_method(request): return request.param @pytest.fixture(scope="class") def redis_server(xprocess): try: import redis # noqa except ImportError: pytest.skip("Python package 'redis' is not installed.") class Starter(ProcessStarter): pattern = "[Rr]eady to accept connections" args = ["redis-server"] try: xprocess.ensure("redis_server", Starter) except IOError as e: # xprocess raises FileNotFoundError if e.errno == errno.ENOENT: pytest.skip("Redis is not installed.") else: raise yield xprocess.getinfo("redis_server").terminate() @pytest.fixture(scope="class") def memcache_server(xprocess): try: import pylibmc as memcache except ImportError: try: from google.appengine.api import memcache except ImportError: try: import memcache # noqa except ImportError: pytest.skip( "Python package for memcache is not installed. Need one of " "pylibmc', 'google.appengine', or 'memcache'." ) class Starter(ProcessStarter): pattern = "" args = ["memcached", "-vv"] try: xprocess.ensure("memcached", Starter) except IOError as e: # xprocess raises FileNotFoundError if e.errno == errno.ENOENT: pytest.skip("Memcached is not installed.") else: raise yield xprocess.getinfo("memcached").terminate() flask-caching-1.10.1/tests/test_backend_cache.py000066400000000000000000000221051402442634700215570ustar00rootroot00000000000000# -*- coding: utf-8 -*- """ tests.cache ~~~~~~~~~~~ Tests the cache system :copyright: (c) 2014 by Armin Ronacher. :license: BSD, see LICENSE for more details. 
""" import time import pytest from flask_caching import backends try: import redis except ImportError: redis = None try: import pylibmc as memcache except ImportError: try: from google.appengine.api import memcache except ImportError: try: import memcache except ImportError: memcache = None class CacheTestsBase(object): _can_use_fast_sleep = True _guaranteed_deletes = True @pytest.fixture def make_cache(self): """Return a cache class or factory.""" raise NotImplementedError() @pytest.fixture def c(self, make_cache): """Return a cache instance.""" return make_cache() class GenericCacheTests(CacheTestsBase): def test_generic_get_dict(self, c): assert c.set("a", "a") assert c.set("b", "b") d = c.get_dict("a", "b") assert "a" in d assert "a" == d["a"] assert "b" in d assert "b" == d["b"] def test_generic_set_get(self, c): for i in range(3): assert c.set(str(i), i * i) for i in range(3): result = c.get(str(i)) assert result == i * i, result def test_generic_get_set(self, c): assert c.set("foo", ["bar"]) assert c.get("foo") == ["bar"] def test_generic_get_many(self, c): assert c.set("foo", ["bar"]) assert c.set("spam", "eggs") assert c.get_many("foo", "spam") == [["bar"], "eggs"] def test_generic_set_many(self, c): assert c.set_many({"foo": "bar", "spam": ["eggs"]}) assert c.get("foo") == "bar" assert c.get("spam") == ["eggs"] def test_generic_add(self, c): # sanity check that add() works like set() assert c.add("foo", "bar") assert c.get("foo") == "bar" assert not c.add("foo", "qux") assert c.get("foo") == "bar" def test_generic_delete(self, c): assert c.add("foo", "bar") assert c.get("foo") == "bar" assert c.delete("foo") assert c.get("foo") is None def test_generic_delete_many(self, c): assert c.add("foo", "bar") assert c.add("spam", "eggs") assert c.delete_many("foo", "spam") assert c.get("foo") is None assert c.get("spam") is None def test_generic_inc_dec(self, c): assert c.set("foo", 1) assert c.inc("foo") == c.get("foo") == 2 assert c.dec("foo") == c.get("foo") == 1 assert c.delete("foo") def test_generic_true_false(self, c): assert c.set("foo", True) assert c.get("foo") in (True, 1) assert c.set("bar", False) assert c.get("bar") in (False, 0) def test_generic_timeout(self, c): c.set("foo", "bar", 0) assert c.get("foo") == "bar" c.set("baz", "qux", 1) assert c.get("baz") == "qux" time.sleep(3) # timeout of zero means no timeout assert c.get("foo") == "bar" if self._guaranteed_deletes: assert c.get("baz") is None def test_generic_has(self, c): assert c.has("foo") in (False, 0) assert c.has("spam") in (False, 0) assert c.set("foo", "bar") assert c.has("foo") in (True, 1) assert c.has("spam") in (False, 0) c.delete("foo") assert c.has("foo") in (False, 0) assert c.has("spam") in (False, 0) def test_generic_get_bytes(self, c): assert c.set("foo", b"bar") assert c.get("foo") == b"bar" class TestSimpleCache(GenericCacheTests): @pytest.fixture def make_cache(self): return backends.SimpleCache def test_purge(self): c = backends.SimpleCache(threshold=2) c.set("a", "a") c.set("b", "b") c.set("c", "c") c.set("d", "d") # Cache purges old items *before* it sets new ones. 
assert len(c._cache) == 3 class TestFileSystemCache(GenericCacheTests): @pytest.fixture def make_cache(self, tmpdir): return lambda **kw: backends.FileSystemCache( cache_dir=str(tmpdir), **kw ) def test_filesystemcache_hashes(self, make_cache, hash_method): cache = make_cache(hash_method=hash_method) self.test_count_file_accuracy(cache) def test_filesystemcache_prune(self, make_cache): THRESHOLD = 13 c = make_cache(threshold=THRESHOLD) for i in range(2 * THRESHOLD): assert c.set(str(i), i) nof_cache_files = c.get(c._fs_count_file) assert nof_cache_files <= THRESHOLD def test_filesystemcache_clear(self, c): assert c.set("foo", "bar") nof_cache_files = c.get(c._fs_count_file) assert nof_cache_files == 1 assert c.clear() nof_cache_files = c.get(c._fs_count_file) assert nof_cache_files == 0 cache_files = c._list_dir() assert len(cache_files) == 0 def test_no_threshold(self, make_cache): THRESHOLD = 0 c = make_cache(threshold=THRESHOLD) for i in range(10): assert c.set(str(i), i) cache_files = c._list_dir() assert len(cache_files) == 10 # File count is not maintained with threshold = 0 nof_cache_files = c.get(c._fs_count_file) assert nof_cache_files is None def test_filecount_caching_none(self, make_cache): c = make_cache() for i in range(3): assert c.set("a", None) assert c.get(c._fs_count_file) == 1 def test_filecount_after_deletion_in_has(self, make_cache): c = make_cache() assert c.set("foo", "bar", timeout=0.01) assert c.get(c._fs_count_file) == 1 time.sleep(0.1) assert c.has("foo") in (False, 0) assert c.get(c._fs_count_file) == 0 def test_filecount_after_deletion_in_get(self, make_cache): c = make_cache() assert c.set("foo", "bar", timeout=0.01) assert c.get(c._fs_count_file) == 1 time.sleep(0.1) assert c.get("foo") is None assert c.get(c._fs_count_file) == 0 def test_count_file_accuracy(self, c): assert c.set("foo", "bar") assert c.set("moo", "car") c.add("moo", "tar") assert c.get(c._fs_count_file) == 2 assert c.add("too", "far") assert c.get(c._fs_count_file) == 3 assert c.delete("moo") assert c.get(c._fs_count_file) == 2 assert c.clear() assert c.get(c._fs_count_file) == 0 # don't use pytest.mark.skipif on subclasses # https://bitbucket.org/hpk42/pytest/issue/568 # skip happens in requirements fixture instead class TestRedisCache(GenericCacheTests): _can_use_fast_sleep = False def gen_key_prefix(): return "werkzeug-test-case:" @pytest.fixture(scope="class", autouse=True) def requirements(self, redis_server): pass @pytest.fixture(params=(None, False, True, gen_key_prefix)) def make_cache(self, request): key_prefix = "werkzeug-test-case:" if request.param is None: host = "localhost" elif request.param: host = redis.StrictRedis() elif callable(request.param): key_prefix = gen_key_prefix # noqa (flake8 error: undefined) host = redis.Redis() else: host = redis.Redis() c = backends.RedisCache(host=host, key_prefix=key_prefix) yield lambda: c c.clear() def test_compat(self, c): assert c._write_client.set(c._get_prefix() + "foo", "Awesome") assert c.get("foo") == b"Awesome" assert c._write_client.set(c._get_prefix() + "foo", "42") assert c.get("foo") == 42 def test_empty_host(self): with pytest.raises(ValueError) as exc_info: backends.RedisCache(host=None) assert ( str(exc_info.value) == "RedisCache host parameter may not be None" ) class TestMemcachedCache(GenericCacheTests): _can_use_fast_sleep = False _guaranteed_deletes = False @pytest.fixture(scope="class", autouse=True) def requirements(self, memcache_server): pass @pytest.fixture def make_cache(self): c = 
backends.MemcachedCache(key_prefix="werkzeug-test-case:") yield lambda: c c.clear() def test_compat(self, c): assert c._client.set(c.key_prefix + "foo", "bar") assert c.get("foo") == "bar" def test_huge_timeouts(self, c): # Timeouts greater than epoch are interpreted as POSIX timestamps # (i.e. not relative to now, but relative to epoch) epoch = 2592000 c.set("foo", "bar", epoch + 100) assert c.get("foo") == "bar" def test_timeouts(self, c): c.set("foo", "bar", 1) assert c.get("foo") == "bar" time.sleep(1) assert c.has("foo") is False class TestNullCache(CacheTestsBase): @pytest.fixture(scope="class", autouse=True) def make_cache(self): return backends.NullCache def test_has(self, c): assert not c.has("foo") flask-caching-1.10.1/tests/test_basic_app.py000066400000000000000000000074271402442634700210000ustar00rootroot00000000000000# -*- coding: utf-8 -*- import pytest from flask import Flask from flask_caching import Cache from flask_caching.backends.simplecache import SimpleCache try: import redis # noqa HAS_NOT_REDIS = False except ImportError: HAS_NOT_REDIS = True class CustomCache(Cache): pass class CustomSimpleCache(SimpleCache): pass def newsimple(app, config, args, kwargs): return CustomSimpleCache(*args, **kwargs) def test_dict_config(app): cache = Cache(config={"CACHE_TYPE": "simple"}) cache.init_app(app) assert cache.config["CACHE_TYPE"] == "simple" def test_dict_config_initapp(app): cache = Cache() cache.init_app(app, config={"CACHE_TYPE": "simple"}) from flask_caching.backends.simplecache import SimpleCache assert isinstance(app.extensions["cache"][cache], SimpleCache) def test_dict_config_both(app): cache = Cache(config={"CACHE_TYPE": "null"}) cache.init_app(app, config={"CACHE_TYPE": "simple"}) from flask_caching.backends.simplecache import SimpleCache assert isinstance(app.extensions["cache"][cache], SimpleCache) def test_init_app_sets_app_attribute(app): cache = Cache() cache.init_app(app) assert cache.app == app @pytest.mark.skipif(HAS_NOT_REDIS, reason="requires Redis") def test_init_app_multi_apps(app, redis_server): cache = Cache() app1 = Flask(__name__) app1.config.from_mapping({"CACHE_TYPE": "redis", "CACHE_KEY_PREFIX": "foo"}) app2 = Flask(__name__) app2.config.from_mapping({"CACHE_TYPE": "redis", "CACHE_KEY_PREFIX": "bar"}) cache.init_app(app1) cache.init_app(app2) # When we have the app context, the prefix should be # different for each app. 
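# cache.cache resolves the backend through flask.current_app, which is what lets a single Cache object serve several applications: inside app1.app_context() it is app1's backend, inside app2's it is app2's.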
with app1.app_context(): assert cache.cache.key_prefix == "foo" with app2.app_context(): assert cache.cache.key_prefix == "bar" @pytest.mark.skipif(HAS_NOT_REDIS, reason="requires Redis") def test_app_redis_cache_backend_url_default_db(app, redis_server): config = { "CACHE_TYPE": "redis", "CACHE_REDIS_URL": "redis://localhost:6379", } cache = Cache() cache.init_app(app, config=config) from flask_caching.backends.rediscache import RedisCache assert isinstance(app.extensions["cache"][cache], RedisCache) rconn = app.extensions["cache"][ cache ]._write_client.connection_pool.get_connection("foo") assert rconn.db == 0 @pytest.mark.skipif(HAS_NOT_REDIS, reason="requires Redis") def test_app_redis_cache_backend_url_custom_db(app, redis_server): config = { "CACHE_TYPE": "redis", "CACHE_REDIS_URL": "redis://localhost:6379/2", } cache = Cache() cache.init_app(app, config=config) rconn = app.extensions["cache"][ cache ]._write_client.connection_pool.get_connection("foo") assert rconn.db == 2 @pytest.mark.skipif(HAS_NOT_REDIS, reason="requires Redis") def test_app_redis_cache_backend_url_explicit_db_arg(app, redis_server): config = { "CACHE_TYPE": "redis", "CACHE_REDIS_URL": "redis://localhost:6379", "CACHE_REDIS_DB": 1, } cache = Cache() cache.init_app(app, config=config) rconn = app.extensions["cache"][ cache ]._write_client.connection_pool.get_connection("foo") assert rconn.db == 1 def test_app_custom_cache_backend(app): cache = Cache() app.config["CACHE_TYPE"] = "test_basic_app.newsimple" cache.init_app(app) with app.app_context(): assert isinstance(cache.cache, CustomSimpleCache) def test_subclassed_cache_class(app): # just invoking the subclass here proves that subclassing worked: # otherwise a werkzeug.utils.ImportStringError would be raised because # flask-caching could not find the backend. Asserting "does not raise" # explicitly would be hackier than simply calling it.
CustomCache(app) flask-caching-1.10.1/tests/test_cache.py000066400000000000000000000214031402442634700201100ustar00rootroot00000000000000# -*- coding: utf-8 -*- import random import time import pytest from flask_caching import Cache try: import redis # noqa HAS_NOT_REDIS = False except ImportError: HAS_NOT_REDIS = True def test_cache_set(app, cache): cache.set("hi", "hello") assert cache.get("hi") == "hello" def test_cache_add(app, cache): cache.add("hi", "hello") assert cache.get("hi") == "hello" cache.add("hi", "foobar") assert cache.get("hi") == "hello" def test_cache_delete(app, cache): cache.set("hi", "hello") cache.delete("hi") assert cache.get("hi") is None def test_cache_delete_many(app, cache): cache.set("hi", "hello") cache.delete_many("ho", "hi") assert cache.get("hi") is not None @pytest.mark.skipif(HAS_NOT_REDIS, reason="requires Redis") def test_cache_unlink(app, redis_server): cache = Cache(config={"CACHE_TYPE": "redis"}) cache.init_app(app) cache.set("biggerkey", "test" * 100) cache.unlink("biggerkey") assert cache.get("biggerkey") is None cache.set("biggerkey1", "test" * 100) cache.set("biggerkey2", "test" * 100) cache.unlink("biggerkey1", "biggerkey2") assert cache.get("biggerkey1") is None assert cache.get("biggerkey2") is None def test_cache_unlink_if_not(app): cache = Cache(config={"CACHE_TYPE": "simple"}) cache.init_app(app) cache.set("biggerkey", "test" * 100) cache.unlink("biggerkey") assert cache.get("biggerkey") is None cache.set("biggerkey1", "test" * 100) cache.set("biggerkey2", "test" * 100) cache.unlink("biggerkey1", "biggerkey2") assert cache.get("biggerkey1") is None assert cache.get("biggerkey2") is None def test_cache_delete_many_ignored(app): cache = Cache(config={"CACHE_TYPE": "simple", "CACHE_IGNORE_ERRORS": True}) cache.init_app(app) cache.set("hi", "hello") assert cache.get("hi") == "hello" cache.delete_many("ho", "hi") assert cache.get("hi") is None def test_cache_cached_function(app, cache): with app.test_request_context(): @cache.cached(1, key_prefix="MyBits") def get_random_bits(): return [random.randrange(0, 2) for i in range(50)] my_list = get_random_bits() his_list = get_random_bits() assert my_list == his_list time.sleep(2) his_list = get_random_bits() assert my_list != his_list def test_cache_cached_function_with_source_check_enabled(app, cache): with app.test_request_context(): @cache.cached(key_prefix="MyBits", source_check=True) def get_random_bits(): return [random.randrange(0, 2) for i in range(50)] first_attempt = get_random_bits() second_attempt = get_random_bits() assert second_attempt == first_attempt # ... change the source to see if the return value changes when called @cache.cached(key_prefix="MyBits", source_check=True) def get_random_bits(): return {"val": [random.randrange(0, 2) for i in range(50)]} third_attempt = get_random_bits() assert third_attempt != first_attempt # We changed the return data type so we do a check to be sure assert isinstance(third_attempt, dict) # ... 
change the source back to what it was originally and the data should # be the same @cache.cached(key_prefix="MyBits", source_check=True) def get_random_bits(): return [random.randrange(0, 2) for i in range(50)] fourth_attempt = get_random_bits() assert fourth_attempt == first_attempt def test_cache_cached_function_with_source_check_disabled(app, cache): with app.test_request_context(): @cache.cached(key_prefix="MyBits", source_check=False) def get_random_bits(): return [random.randrange(0, 2) for i in range(50)] first_attempt = get_random_bits() second_attempt = get_random_bits() assert second_attempt == first_attempt # ... change the source to see if the return value changes when called @cache.cached(key_prefix="MyBits", source_check=False) def get_random_bits(): return {"val": [random.randrange(0, 2) for i in range(50)]} third_attempt = get_random_bits() assert third_attempt == first_attempt def test_cache_accepts_multiple_ciphers(app, cache, hash_method): with app.test_request_context(): @cache.cached(1, key_prefix="MyBits", hash_method=hash_method) def get_random_bits(): return [random.randrange(0, 2) for i in range(50)] my_list = get_random_bits() his_list = get_random_bits() assert my_list == his_list time.sleep(2) his_list = get_random_bits() assert my_list != his_list def test_cached_none(app, cache): with app.test_request_context(): from collections import Counter call_counter = Counter() @cache.cached(cache_none=True) def cache_none(param): call_counter[param] += 1 return None cache_none(1) assert call_counter[1] == 1 assert cache_none(1) is None assert call_counter[1] == 1 cache.clear() cache_none(1) assert call_counter[1] == 2 def test_cached_doesnt_cache_none(app, cache): """Asserting that when cache_none is False, we always assume a None value returned from .get() means the key is not found """ with app.test_request_context(): from collections import Counter call_counter = Counter() @cache.cached() def cache_none(param): call_counter[param] += 1 return None cache_none(1) # The cached function should have been called assert call_counter[1] == 1 # Next time we call the function, the value should be coming from the cache… # But the value is None and so we treat it as uncached.
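# (A None result can be made to count as a cache hit by decorating with # cache_none=True instead; that behaviour is covered by test_cached_none # above.)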
assert cache_none(1) is None # …thus, the call counter should increment to 2 assert call_counter[1] == 2 cache.clear() cache_none(1) assert call_counter[1] == 3 def test_cache_forced_update(app, cache): from collections import Counter with app.test_request_context(): need_update = False call_counter = Counter() @cache.cached(1, forced_update=lambda: need_update) def cached_function(param): call_counter[param] += 1 return 1 cached_function(1) assert call_counter[1] == 1 assert cached_function(1) == 1 assert call_counter[1] == 1 need_update = True assert cached_function(1) == 1 assert call_counter[1] == 2 def test_cache_forced_update_params(app, cache): from collections import Counter with app.test_request_context(): cached_call_counter = Counter() call_counter = Counter() call_params = {} def need_update(param): """This helper function returns True if it has been called with the same params for more than 2 times """ call_counter[param] += 1 call_params[call_counter[param] - 1] = (param,) return call_counter[param] > 2 @cache.cached(1, forced_update=need_update) def cached_function(param): cached_call_counter[param] += 1 return 1 assert cached_function(1) == 1 # need_update should have been called once assert call_counter[1] == 1 # the parameters used to call need_update should be the same as the # parameters used to call cached_function assert call_params[0] == (1,) # the cached function should have been called once assert cached_call_counter[1] == 1 assert cached_function(1) == 1 # need_update should have been called twice by now as forced_update # should be called regardless of the arguments assert call_counter[1] == 2 # the parameters used to call need_update should be the same as the # parameters used to call cached_function assert call_params[1] == (1,) # this time the forced_update should have returned False, so # cached_function should not have been called again assert cached_call_counter[1] == 1 assert cached_function(1) == 1 # need_update should have been called thrice by now as forced_update # should be called regardless of the arguments assert call_counter[1] == 3 # the parameters used to call need_update should be the same as the # parameters used to call cached_function assert call_params[1] == (1,) # this time the forced_update should have returned True, so # cached_function should have been called again assert cached_call_counter[1] == 2 flask-caching-1.10.1/tests/test_init.py000066400000000000000000000021501402442634700200060ustar00rootroot00000000000000from flask import Flask import pytest from flask_caching import Cache from flask_caching.backends import ( FileSystemCache, MemcachedCache, NullCache, RedisCache, RedisSentinelCache, SASLMemcachedCache, SimpleCache, SpreadSASLMemcachedCache, ) @pytest.fixture def app(): app_ = Flask(__name__) return app_ @pytest.mark.parametrize( "cache_type", ( FileSystemCache, MemcachedCache, NullCache, RedisCache, RedisSentinelCache, SASLMemcachedCache, SimpleCache, SpreadSASLMemcachedCache, ), ) def test_init_nullcache(cache_type, app, tmp_path): extra_config = { FileSystemCache: { "CACHE_DIR": tmp_path, }, SASLMemcachedCache: { "CACHE_MEMCACHED_USERNAME": "test", "CACHE_MEMCACHED_PASSWORD": "test", }, } app.config["CACHE_TYPE"] = "flask_caching.backends." 
+ cache_type.__name__ app.config.update(extra_config.get(cache_type, {})) cache = Cache(app=app) assert isinstance(app.extensions["cache"][cache], cache_type) flask-caching-1.10.1/tests/test_memoize.py000066400000000000000000000546211402442634700205220ustar00rootroot00000000000000# -*- coding: utf-8 -*- import sys import random import time import pytest from flask_caching import Cache, function_namespace def test_memoize(app, cache): with app.test_request_context(): @cache.memoize(3) def big_foo(a, b): return a + b + random.randrange(0, 100000) result = big_foo(5, 2) time.sleep(1) assert big_foo(5, 2) == result result2 = big_foo(5, 3) assert result2 != result time.sleep(3) assert big_foo(5, 2) != result time.sleep(1) assert big_foo(5, 3) != result2 def test_memoize_hashes(app, cache, hash_method): with app.test_request_context(): @cache.memoize(3, hash_method=hash_method) def big_foo(a, b): return a + b + random.randrange(0, 100000) result = big_foo(5, 2) time.sleep(1) assert big_foo(5, 2) == result result2 = big_foo(5, 3) assert result2 != result time.sleep(3) assert big_foo(5, 2) != result time.sleep(1) assert big_foo(5, 3) != result2 def test_memoize_timeout(app): app.config["CACHE_DEFAULT_TIMEOUT"] = 1 cache = Cache(app) with app.test_request_context(): @cache.memoize() def big_foo(a, b): return a + b + random.randrange(0, 100000) result = big_foo(5, 2) assert big_foo(5, 2) == result time.sleep(2) assert big_foo(5, 2) != result def test_memoize_annotated(app, cache): if sys.version_info >= (3, 0): with app.test_request_context(): @cache.memoize(50) def big_foo_annotated(a, b): return a + b + random.randrange(0, 100000) big_foo_annotated.__annotations__ = { "a": int, "b": int, "return": int, } result = big_foo_annotated(5, 2) time.sleep(1) assert big_foo_annotated(5, 2) == result def test_memoize_utf8_arguments(app, cache): with app.test_request_context(): @cache.memoize() def big_foo(a, b): return "{}-{}".format(a, b) big_foo("æøå", "chars") def test_memoize_unicode_arguments(app, cache): with app.test_request_context(): @cache.memoize() def big_foo(a, b): return u"{}-{}".format(a, b) big_foo(u"æøå", "chars") def test_memoize_delete(app, cache): with app.test_request_context(): @cache.memoize(5) def big_foo(a, b): return a + b + random.randrange(0, 100000) result = big_foo(5, 2) result2 = big_foo(5, 3) time.sleep(1) assert big_foo(5, 2) == result assert big_foo(5, 2) == result assert big_foo(5, 3) != result assert big_foo(5, 3) == result2 cache.delete_memoized(big_foo) assert big_foo(5, 2) != result assert big_foo(5, 3) != result2 def test_memoize_no_timeout_delete(app, cache): with app.test_request_context(): @cache.memoize() def big_foo(a, b): return a + b + random.randrange(0, 100000) result_a = big_foo(5, 1) result_b = big_foo(5, 2) assert big_foo(5, 1) == result_a assert big_foo(5, 2) == result_b cache.delete_memoized(big_foo, 5, 2) assert big_foo(5, 1) == result_a assert big_foo(5, 2) != result_b # Cleanup bigfoo 5,1 5,2 or it might conflict with # following run if it also uses memecache cache.delete_memoized(big_foo, 5, 2) cache.delete_memoized(big_foo, 5, 1) def test_memoize_verhash_delete(app, cache): with app.test_request_context(): @cache.memoize(5) def big_foo(a, b): return a + b + random.randrange(0, 100000) result = big_foo(5, 2) result2 = big_foo(5, 3) time.sleep(1) assert big_foo(5, 2) == result assert big_foo(5, 2) == result assert big_foo(5, 3) != result assert big_foo(5, 3) == result2 cache.delete_memoized_verhash(big_foo) _fname, _fname_instance = 
function_namespace(big_foo) version_key = cache._memvname(_fname) assert cache.get(version_key) is None assert big_foo(5, 2) != result assert big_foo(5, 3) != result2 assert cache.get(version_key) is not None def test_memoize_annotated_delete(app, cache): with app.test_request_context(): @cache.memoize(5) def big_foo_annotated(a, b): return a + b + random.randrange(0, 100000) big_foo_annotated.__annotations__ = {"a": int, "b": int, "return": int} result = big_foo_annotated(5, 2) result2 = big_foo_annotated(5, 3) time.sleep(1) assert big_foo_annotated(5, 2) == result assert big_foo_annotated(5, 2) == result assert big_foo_annotated(5, 3) != result assert big_foo_annotated(5, 3) == result2 cache.delete_memoized_verhash(big_foo_annotated) _fname, _fname_instance = function_namespace(big_foo_annotated) version_key = cache._memvname(_fname) assert cache.get(version_key) is None assert big_foo_annotated(5, 2) != result assert big_foo_annotated(5, 3) != result2 assert cache.get(version_key) is not None def test_memoize_args(app, cache): with app.test_request_context(): @cache.memoize() def big_foo(a, b): return sum(a) + sum(b) + random.randrange(0, 100000) result_a = big_foo([5, 3, 2], [1]) result_b = big_foo([3, 3], [3, 1]) assert big_foo([5, 3, 2], [1]) == result_a assert big_foo([3, 3], [3, 1]) == result_b cache.delete_memoized(big_foo, [5, 3, 2], [1]) assert big_foo([5, 3, 2], [1]) != result_a assert big_foo([3, 3], [3, 1]) == result_b # Cleanup bigfoo 5,1 5,2 or it might conflict with # following run if it also uses memecache cache.delete_memoized(big_foo, [5, 3, 2], [1]) cache.delete_memoized(big_foo, [3, 3], [1]) def test_memoize_kwargs(app, cache): with app.test_request_context(): @cache.memoize() def big_foo(a, b=None): return a + sum(b.values()) + random.randrange(0, 100000) result_a = big_foo(1, dict(one=1, two=2)) result_b = big_foo(5, dict(three=3, four=4)) assert big_foo(1, dict(one=1, two=2)) == result_a assert big_foo(5, dict(three=3, four=4)) == result_b cache.delete_memoized(big_foo, 1, dict(one=1, two=2)) assert big_foo(1, dict(one=1, two=2)) != result_a assert big_foo(5, dict(three=3, four=4)) == result_b def test_memoize_kwargonly(app, cache): with app.test_request_context(): @cache.memoize() def big_foo(a=None): if a is None: a = 0 return a + random.random() result_a = big_foo() result_b = big_foo(5) assert big_foo() == result_a assert big_foo() < 1 assert big_foo(5) == result_b assert big_foo(5) >= 5 and big_foo(5) < 6 def test_memoize_arg_kwarg(app, cache): with app.test_request_context(): @cache.memoize() def f(a, b, c=1): return a + b + c + random.randrange(0, 100000) assert f(1, 2) == f(1, 2, c=1) assert f(1, 2) == f(1, 2, 1) assert f(1, 2) == f(1, 2) assert f(1, 2, 3) != f(1, 2) with pytest.raises(TypeError): f(1) def test_memoize_arg_kwarg_var_keyword(app, cache): with app.test_request_context(): @cache.memoize() def f(a, b, c=1, **kwargs): return ( a + b + c + random.randrange(0, 100000) + sum(list(kwargs.values())) ) assert f(1, 2) == f(1, 2, c=1) assert f(1, 2) == f(1, 2, 1) assert f(1, 2) == f(1, 2) assert f(1, 2, d=5, e=8) == f(1, 2, e=8, d=5) assert f(1, b=2, c=3, d=5, e=8) == f(1, 2, e=8, d=5, b=2, c=3) assert f(1, 2, 3) != f(1, 2) assert f(1, 2, 3) != f(1, 2) with pytest.raises(TypeError): f(1) def test_memoize_classarg(app, cache): @cache.memoize() def bar(a): return a.value + random.random() class Adder(object): def __init__(self, value): self.value = value adder = Adder(15) adder2 = Adder(20) y = bar(adder) z = bar(adder2) assert y != z assert bar(adder) == 
y assert bar(adder) != z adder.value = 14 assert bar(adder) == y assert bar(adder) != z assert bar(adder) != bar(adder2) assert bar(adder2) == z def test_memoize_classfunc(app, cache): class Adder(object): def __init__(self, initial): self.initial = initial @cache.memoize() def add(self, b): return self.initial + b adder1 = Adder(1) adder2 = Adder(2) x = adder1.add(3) assert adder1.add(3) == x assert adder1.add(4) != x assert adder1.add(3) != adder2.add(3) def test_memoize_classfunc_repr(app, cache): class Adder(object): def __init__(self, initial): self.initial = initial @cache.memoize() def add(self, b): return self.initial + b def __repr__(self): return "42" def __caching_id__(self): return self.initial adder1 = Adder(1) adder2 = Adder(2) x = adder1.add(3) assert adder1.add(3) == x assert adder1.add(4) != x assert adder1.add(3) != adder2.add(3) def test_memoize_classfunc_delete(app, cache): with app.test_request_context(): class Adder(object): def __init__(self, initial): self.initial = initial @cache.memoize() def add(self, b): return self.initial + b + random.random() adder1 = Adder(1) adder2 = Adder(2) a1 = adder1.add(3) a2 = adder2.add(3) assert a1 != a2 assert adder1.add(3) == a1 assert adder2.add(3) == a2 cache.delete_memoized(adder1.add) a3 = adder1.add(3) a4 = adder2.add(3) assert not a1 == a3 # self.assertNotEqual(a1, a3) assert a1 != a3 assert a2 == a4 # self.assertEqual(a2, a4) cache.delete_memoized(Adder.add) a5 = adder1.add(3) a6 = adder2.add(3) assert not a5 == a6 # self.assertNotEqual(a5, a6) assert not a3 == a5 # self.assertNotEqual(a3, a5) assert not a4 == a6 # self.assertNotEqual(a4, a6) def test_memoize_classmethod_delete(app, cache): with app.test_request_context(): class Mock(object): @classmethod @cache.memoize(5) def big_foo(cls, a, b): return a + b + random.randrange(0, 100000) result = Mock.big_foo(5, 2) result2 = Mock.big_foo(5, 3) time.sleep(1) assert Mock.big_foo(5, 2) == result assert Mock.big_foo(5, 2) == result assert Mock.big_foo(5, 3) != result assert Mock.big_foo(5, 3) == result2 cache.delete_memoized(Mock.big_foo) assert Mock.big_foo(5, 2) != result assert Mock.big_foo(5, 3) != result2 def test_memoize_classmethod_delete_with_args(app, cache): with app.test_request_context(): class Mock(object): @classmethod @cache.memoize(5) def big_foo(cls, a, b): return a + b + random.randrange(0, 100000) result = Mock.big_foo(5, 2) result2 = Mock.big_foo(5, 3) time.sleep(1) assert Mock.big_foo(5, 2) == result assert Mock.big_foo(5, 2) == result assert Mock.big_foo(5, 3) != result assert Mock.big_foo(5, 3) == result2 with pytest.raises(ValueError): cache.delete_memoized(Mock.big_foo, 5, 2) assert Mock.big_foo(5, 2) == result assert Mock.big_foo(5, 3) == result2 cache.delete_memoized(Mock.big_foo, Mock, 5, 2) assert Mock.big_foo(5, 2) != result assert Mock.big_foo(5, 3) == result2 def test_memoize_forced_update(app, cache): with app.test_request_context(): forced_update = False @cache.memoize(5, forced_update=lambda: forced_update) def big_foo(a, b): return a + b + random.randrange(0, 100000) result = big_foo(5, 2) time.sleep(1) assert big_foo(5, 2) == result forced_update = True new_result = big_foo(5, 2) assert new_result != result forced_update = False time.sleep(1) assert big_foo(5, 2) == new_result def test_memoize_forced_update_parameters(app, cache): from collections import Counter with app.test_request_context(): call_counter = Counter() call_params = {} forced_update = False def forced_update_func(a, b): call_counter[1] += 1 call_params[call_counter[1] - 1] 
= (a, b) return forced_update @cache.memoize(5, forced_update=forced_update_func) def memoized_func(a, b): return a + b + random.randrange(0, 100000) # Save the value for later inspection result = memoized_func(5, 2) # forced_update_func should have been called twice; once by memoize # itself, once by _memoize_version… assert call_counter[1] == 2 # …with the values we called the function with assert call_params[0] == (5, 2) assert call_params[1] == (5, 2) time.sleep(1) # Calling the function again should return the cached value assert memoized_func(5, 2) == result # forced_update_func should have been called two more times… assert call_counter[1] == 4 # …with the values we called the function with assert call_params[2] == (5, 2) assert call_params[3] == (5, 2) # Tell forced_update_func to return True next time forced_update = True # Save the new result… new_result = memoized_func(5, 2) # …which, due to the random number in the function, should be different # from the old one assert new_result != result # forced_update_func should have been called two more times again… assert call_counter[1] == 6 # …with the values we called the function with assert call_params[4] == (5, 2) assert call_params[5] == (5, 2) # Now stop forced updating again forced_update = False time.sleep(1) # The function should return the same value as it did last time assert memoized_func(5, 2) == new_result # forced_update_func should have been called two more times again… assert call_counter[1] == 8 # …with the values we called the function with assert call_params[6] == (5, 2) assert call_params[7] == (5, 2) def test_memoize_multiple_arg_kwarg_calls(app, cache): with app.test_request_context(): @cache.memoize() def big_foo(a, b, c=[1, 1], d=[1, 1]): return ( sum(a) + sum(b) + sum(c) + sum(d) + random.randrange(0, 100000) ) # noqa result_a = big_foo([5, 3, 2], [1], c=[3, 3], d=[3, 3]) assert big_foo([5, 3, 2], [1], d=[3, 3], c=[3, 3]) == result_a assert big_foo(b=[1], a=[5, 3, 2], c=[3, 3], d=[3, 3]) == result_a assert big_foo([5, 3, 2], [1], [3, 3], [3, 3]) == result_a def test_memoize_multiple_arg_kwarg_delete(app, cache): with app.test_request_context(): @cache.memoize() def big_foo(a, b, c=[1, 1], d=[1, 1]): return ( sum(a) + sum(b) + sum(c) + sum(d) + random.randrange(0, 100000) ) # noqa result_a = big_foo([5, 3, 2], [1], c=[3, 3], d=[3, 3]) cache.delete_memoized(big_foo, [5, 3, 2], [1], [3, 3], [3, 3]) result_b = big_foo([5, 3, 2], [1], c=[3, 3], d=[3, 3]) assert result_a != result_b cache.delete_memoized(big_foo, [5, 3, 2], b=[1], c=[3, 3], d=[3, 3]) result_b = big_foo([5, 3, 2], [1], c=[3, 3], d=[3, 3]) assert result_a != result_b cache.delete_memoized(big_foo, [5, 3, 2], [1], c=[3, 3], d=[3, 3]) result_a = big_foo([5, 3, 2], [1], c=[3, 3], d=[3, 3]) assert result_a != result_b cache.delete_memoized(big_foo, [5, 3, 2], b=[1], c=[3, 3], d=[3, 3]) result_a = big_foo([5, 3, 2], [1], c=[3, 3], d=[3, 3]) assert result_a != result_b cache.delete_memoized(big_foo, [5, 3, 2], [1], c=[3, 3], d=[3, 3]) result_b = big_foo([5, 3, 2], [1], c=[3, 3], d=[3, 3]) assert result_a != result_b cache.delete_memoized(big_foo, [5, 3, 2], [1], [3, 3], [3, 3]) result_a = big_foo([5, 3, 2], [1], c=[3, 3], d=[3, 3]) assert result_a != result_b def test_memoize_kwargs_to_args(app, cache): with app.test_request_context(): def big_foo(a, b, c=None, d=None): return sum(a) + sum(b) + random.randrange(0, 100000) expected = (1, 2, "foo", "bar") args, kwargs = cache._memoize_kwargs_to_args( big_foo, 1, 2, "foo", "bar" ) assert args == expected 
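# The differently-spelled calls below are all folded back into positional # order by _memoize_kwargs_to_args, so each one normalizes to the same args # tuple and therefore to the same memoize cache key.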
args, kwargs = cache._memoize_kwargs_to_args( big_foo, 2, "foo", "bar", a=1 ) assert args == expected args, kwargs = cache._memoize_kwargs_to_args( big_foo, a=1, b=2, c="foo", d="bar" ) assert args == expected args, kwargs = cache._memoize_kwargs_to_args( big_foo, d="bar", b=2, a=1, c="foo" ) assert args == expected args, kwargs = cache._memoize_kwargs_to_args( big_foo, 1, 2, d="bar", c="foo" ) assert args == expected def test_memoize_when_using_args_unpacking(app, cache): with app.test_request_context(): @cache.memoize() def big_foo(*args): return sum(args) + random.randrange(0, 100000) result_a = big_foo(1, 2) result_b = big_foo(1, 3) assert big_foo(1, 2) == result_a assert big_foo(1, 3) == result_b assert big_foo(1, 2) != result_b assert big_foo(1, 3) != result_a cache.delete_memoized(big_foo) assert big_foo(1, 2) != result_a assert big_foo(1, 3) != result_b def test_memoize_when_using_variable_mix_args_unpacking(app, cache): with app.test_request_context(): @cache.memoize() def big_foo(a, b, *args, **kwargs): return ( sum([a, b]) + sum(args) + sum(kwargs.values()) + random.randrange(0, 100000) ) result_a = big_foo(1, 2, 3, 4, x=2, y=5) result_b = big_foo(4, 7, 7, 2, x=1, y=4) assert big_foo(1, 2, 3, 4, x=2, y=5) == result_a assert big_foo(4, 7, 7, 2, x=1, y=4) == result_b assert big_foo(1, 2, 3, 4, x=2, y=5) != result_b assert big_foo(4, 7, 7, 2, x=1, y=4) != result_a cache.delete_memoized(big_foo) assert big_foo(1, 2, 3, 4, x=2, y=5) != result_a assert big_foo(4, 7, 7, 2, x=1, y=4) != result_b def test_memoize_none(app, cache): with app.test_request_context(): from collections import Counter call_counter = Counter() @cache.memoize(cache_none=True) def memoize_none(param): call_counter[param] += 1 return None memoize_none(1) # The memoized function should have been called assert call_counter[1] == 1 # Next time we call the function, the value should be coming from the # cache... assert memoize_none(1) is None # …thus, the call counter should remain 1 assert call_counter[1] == 1 cache.clear() memoize_none(1) assert call_counter[1] == 2 def test_memoize_never_accept_none(app, cache): """Asserting that when cache_none is False, we always assume a None value returned from .get() means the key is not found """ with app.test_request_context(): from collections import Counter call_counter = Counter() @cache.memoize() def memoize_none(param): call_counter[param] += 1 return None memoize_none(1) # The memoized function should have been called assert call_counter[1] == 1 # Next time we call the function, the value should be coming from the # cache… # But the value is None and so we treat it as uncached. 
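# Same contract as test_cached_doesnt_cache_none in test_cache.py: without # cache_none=True a stored None is indistinguishable from a cache miss.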
assert memoize_none(1) is None # …thus, the call counter should increment to 2 assert call_counter[1] == 2 cache.clear() memoize_none(1) assert call_counter[1] == 3 def test_memoize_with_source_check_enabled(app, cache): with app.test_request_context(): @cache.memoize(source_check=True) def big_foo(a, b): return str(time.time()) first_try = big_foo(5, 2) second_try = big_foo(5, 2) assert second_try == first_try # Redefine big_foo with genuinely different source so that the source # hash, and therefore the cache key, changes @cache.memoize(source_check=True) def big_foo(a, b): return str(time.time() + 1) third_try = big_foo(5, 2) assert third_try != first_try # Restore the original source; the original cache entry should be hit again @cache.memoize(source_check=True) def big_foo(a, b): return str(time.time()) fourth_try = big_foo(5, 2) assert fourth_try == first_try def test_memoize_with_source_check_disabled(app, cache): with app.test_request_context(): @cache.memoize(source_check=False) def big_foo(a, b): return str(time.time()) first_try = big_foo(5, 2) second_try = big_foo(5, 2) assert second_try == first_try @cache.memoize(source_check=False) def big_foo(a, b): return time.time() third_try = big_foo(5, 2) assert third_try == first_try def test_memoize_ignore_args(app, cache): with app.test_request_context(): @cache.memoize(50, args_to_ignore=["b"]) def big_foo(a, b): return a + b + random.randrange(0, 100000) result = big_foo(5, 2) assert big_foo(5, 3) == result def test_memoize_method_ignore_self_arg(app, cache): with app.test_request_context(): class Foo(object): @cache.memoize(50, args_to_ignore=["self"]) def big_foo(self, a, b): return a + b + random.randrange(0, 100000) assert Foo().big_foo(5, 2) == Foo().big_foo(5, 2) flask-caching-1.10.1/tests/test_template.html {% cache 60, "fragment1" %}{{somevar}}{% endcache %} {% cache 60, "fragment1", "key1" %}{{somevar}}{% endcache %} {% cache 60, "fragment1", "key1", somevar %}{{somevar}}{% endcache %} {% cache timeout, "fragment2" %}{{somevar}}{% endcache %} flask-caching-1.10.1/tests/test_templates.py # -*- coding: utf-8 -*- import string import random from flask import render_template, render_template_string from flask_caching import make_template_fragment_key def test_jinjaext_cache(app, cache): somevar = "".join([random.choice(string.ascii_letters) for x in range(6)]) testkeys = [ make_template_fragment_key("fragment1"), make_template_fragment_key("fragment1", vary_on=["key1"]), make_template_fragment_key("fragment1", vary_on=["key1", somevar]), ] delkey = make_template_fragment_key("fragment2") with app.test_request_context(): #: Test if elements are cached render_template("test_template.html", somevar=somevar, timeout=60) for k in testkeys: assert cache.get(k) == somevar assert cache.get(delkey) == somevar #: Test timeout=del to delete key render_template("test_template.html", somevar=somevar, timeout="del") for k in testkeys: assert cache.get(k) == somevar assert cache.get(delkey) is None #: Test rendering templates from strings output = render_template_string( """{% cache 60, "fragment3" %}{{somevar}}{% endcache %}""", somevar=somevar, ) assert cache.get(make_template_fragment_key("fragment3")) == somevar assert output == somevar #: Test backwards compatibility output = render_template_string( """{% cache 30 %}{{somevar}}{% endcache %}""", somevar=somevar ) assert cache.get(make_template_fragment_key("None1")) == somevar assert output == somevar output = render_template_string( """{% cache 30, "fragment4", "fragment5"%}{{somevar}}{% endcache %}""", somevar=somevar,
) k = make_template_fragment_key("fragment4", vary_on=["fragment5"]) assert cache.get(k) == somevar assert output == somevar flask-caching-1.10.1/tests/test_view.py # -*- coding: utf-8 -*- import hashlib import time from flask import request def test_cached_view(app, cache): @app.route("/") @cache.cached(2) def cached_view(): return str(time.time()) tc = app.test_client() rv = tc.get("/") the_time = rv.data.decode("utf-8") time.sleep(1) rv = tc.get("/") assert the_time == rv.data.decode("utf-8") time.sleep(1) rv = tc.get("/") assert the_time != rv.data.decode("utf-8") def test_cached_view_unless(app, cache): @app.route("/a") @cache.cached(5, unless=lambda: True) def non_cached_view(): return str(time.time()) @app.route("/b") @cache.cached(5, unless=lambda: False) def cached_view(): return str(time.time()) tc = app.test_client() rv = tc.get("/a") the_time = rv.data.decode("utf-8") time.sleep(1) rv = tc.get("/a") assert the_time != rv.data.decode("utf-8") rv = tc.get("/b") the_time = rv.data.decode("utf-8") time.sleep(1) rv = tc.get("/b") assert the_time == rv.data.decode("utf-8") def test_cached_view_response_filter(app, cache): @app.route("/a") @cache.cached(5, response_filter=lambda x: x[1] < 400) def cached_view(): return (str(time.time()), app.return_code) tc = app.test_client() # 500 response does not cache app.return_code = 500 rv = tc.get("/a") the_time = rv.data.decode("utf-8") time.sleep(1) rv = tc.get("/a") assert the_time != rv.data.decode("utf-8") # 200 response caches app.return_code = 200 rv = tc.get("/a") the_time = rv.data.decode("utf-8") time.sleep(1) rv = tc.get("/a") assert the_time == rv.data.decode("utf-8") def test_cached_view_forced_update(app, cache): forced_update = False @app.route("/a") @cache.cached(5, forced_update=lambda: forced_update) def view(): return str(time.time()) tc = app.test_client() rv = tc.get("/a") the_time = rv.data.decode("utf-8") time.sleep(1) rv = tc.get("/a") assert the_time == rv.data.decode("utf-8") forced_update = True rv = tc.get("/a") new_time = rv.data.decode("utf-8") assert new_time != the_time forced_update = False time.sleep(1) rv = tc.get("/a") assert new_time == rv.data.decode("utf-8") def test_generate_cache_key_from_different_view(app, cache): @app.route("/cake/<flavor>") @cache.cached() def view_cake(flavor): # What's the cache key for apple cake? thanks for making me hungry view_cake.cake_cache_key = view_cake.make_cache_key("apple") # print view_cake.cake_cache_key return str(time.time()) view_cake.cake_cache_key = "" @app.route("/pie/<flavor>") @cache.cached() def view_pie(flavor): # What's the cache key for apple cake? view_pie.cake_cache_key = view_cake.make_cache_key("apple") # print view_pie.cake_cache_key return str(time.time()) view_pie.cake_cache_key = "" tc = app.test_client() rv1 = tc.get("/cake/chocolate") rv2 = tc.get("/pie/chocolate") # print view_cake.cake_cache_key # print view_pie.cake_cache_key assert view_cake.cake_cache_key == view_pie.cake_cache_key # rename/move to separate module?
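# Both decorators attach a make_cache_key callable to the wrapped function; # a sketch of the two spellings exercised below (cached() derives the key # from the request, memoize() from the uncached function and its arguments): # key = cached_view.make_cache_key() # key = memoized_view.make_cache_key(memoized_view.uncached, foo=u"a")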
def test_cache_key_property(app, cache): @app.route("/") @cache.cached(5) def cached_view(): return str(time.time()) assert hasattr(cached_view, "make_cache_key") assert callable(cached_view.make_cache_key) tc = app.test_client() rv = tc.get("/") the_time = rv.data.decode("utf-8") with app.test_request_context(): cache_data = cache.get(cached_view.make_cache_key()) assert the_time == cache_data def test_make_cache_key_function_property(app, cache): @app.route("/<foo>/<bar>") @cache.memoize(5) def cached_view(foo, bar): return str(time.time()) assert hasattr(cached_view, "make_cache_key") assert callable(cached_view.make_cache_key) tc = app.test_client() rv = tc.get("/a/b") the_time = rv.data.decode("utf-8") cache_key = cached_view.make_cache_key( cached_view.uncached, foo=u"a", bar=u"b" ) cache_data = cache.get(cache_key) assert the_time == cache_data different_key = cached_view.make_cache_key( cached_view.uncached, foo=u"b", bar=u"a" ) different_data = cache.get(different_key) assert the_time != different_data def test_cache_timeout_property(app, cache): @app.route("/") @cache.memoize(2) def cached_view1(): return str(time.time()) @app.route("/<foo>/<bar>") @cache.memoize(4) def cached_view2(foo, bar): return str(time.time()) assert hasattr(cached_view1, "cache_timeout") assert hasattr(cached_view2, "cache_timeout") assert cached_view1.cache_timeout == 2 assert cached_view2.cache_timeout == 4 # test that this is a read-write property cached_view1.cache_timeout = 5 cached_view2.cache_timeout = 7 assert cached_view1.cache_timeout == 5 assert cached_view2.cache_timeout == 7 tc = app.test_client() rv1 = tc.get("/") time1 = rv1.data.decode("utf-8") time.sleep(1) rv2 = tc.get("/a/b") time2 = rv2.data.decode("utf-8") # VIEW1 # it's been 1 second, cache is still active assert time1 == tc.get("/").data.decode("utf-8") time.sleep(5) # it's been >5 seconds, cache is not still active assert time1 != tc.get("/").data.decode("utf-8") # VIEW2 # it's been about 5 seconds, which is less than the 7 second timeout, # so the cache is still active # self.assertEqual(time2, tc.get('/a/b').data.decode('utf-8')) assert time2 == tc.get("/a/b").data.decode("utf-8") time.sleep(3) # it's been >7 seconds, cache is not still active assert time2 != tc.get("/a/b").data.decode("utf-8") def test_generate_cache_key_from_query_string(app, cache): """Test the _make_cache_key_query_string() cache key maker. Create three requests to verify that the same query string parameters (key/value) always reference the same cache, regardless of the order of parameters. Also test to make sure that the same cache isn't being used for any/all query string parameters. For example, these two requests should yield the same cache/cache key: * GET /v1/works?mock=true&offset=20&limit=15 * GET /v1/works?limit=15&mock=true&offset=20 Caching functionality is verified by a `@cached` route `/works` which produces a time in its response. The time in the response can verify that two requests with the same query string parameters/values, though differently ordered, produce responses with the same time. """ @app.route("/works") @cache.cached(query_string=True) def view_works(): return str(time.time()) tc = app.test_client() # Make our first query... first_response = tc.get("/works?mock=true&offset=20&limit=15") first_time = first_response.get_data(as_text=True) # Make the second query... second_response = tc.get("/works?limit=15&mock=true&offset=20") second_time = second_response.get_data(as_text=True) # Now make sure the time for the first and second # query are the same!
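# (query_string=True sorts the query parameter pairs before hashing them # into the cache key, which is exactly why the ordering must not matter.)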
assert second_time == first_time # Last/third query with different parameters/values should # produce a different time. third_response = tc.get("/works?limit=20&mock=true&offset=60") third_time = third_response.get_data(as_text=True) # ... making sure that different query parameter values # don't yield the same cache! assert third_time != second_time def test_generate_cache_key_from_query_string_repeated_parameters(app, cache): """Test the _make_cache_key_query_string() cache key maker's support for repeated query parameters. URL params can be repeated with different values. Flask's MultiDict supports them. """ @app.route("/works") @cache.cached(query_string=True) def view_works(): flatted_values = sum(request.args.listvalues(), []) return str(sorted(flatted_values)) + str(time.time()) tc = app.test_client() # Make our first query... first_response = tc.get( "/works?mock=true&offset=20&limit=15&user[]=123&user[]=124" ) first_time = first_response.get_data(as_text=True) # Make the second query... second_response = tc.get( "/works?mock=true&offset=20&limit=15&user[]=124&user[]=123" ) second_time = second_response.get_data(as_text=True) # Now make sure the time for the first and second # query are the same! assert second_time == first_time # Last/third query with different parameters/values should # produce a different time. third_response = tc.get( "/works?mock=true&offset=20&limit=15&user[]=125&user[]=124" ) third_time = third_response.get_data(as_text=True) # ... making sure that different query parameter values # don't yield the same cache! assert third_time != second_time def test_generate_cache_key_from_request_body(app, cache): """Test a user supplied cache key maker. Create three requests to verify that the same request body always references the same cache. Also test to make sure that the same cache isn't being used for any/all query string parameters. Caching functionality is verified by a `@cached` route `/works` which produces a time in its response. The time in the response can verify that two requests with the same request body produce responses with the same time. """ def _make_cache_key_request_body(argument): """Create keys based on request body.""" # now hash the request body so it can be # used as a key for cache. request_body = request.get_data(as_text=False) hashed_body = str(hashlib.md5(request_body).hexdigest()) cache_key = request.path + hashed_body return cache_key @app.route("/works/<argument>", methods=["POST"]) @cache.cached(make_cache_key=_make_cache_key_request_body) def view_works(argument): return str(time.time()) + request.get_data().decode() tc = app.test_client() # Make our first request... first_response = tc.post( "/works/arg", data=dict(mock=True, value=1, test=2) ) first_time = first_response.get_data(as_text=True) # Make the second request... second_response = tc.post( "/works/arg", data=dict(mock=True, value=1, test=2) ) second_time = second_response.get_data(as_text=True) # Now make sure the time for the first and second # requests are the same! assert second_time == first_time # Last/third request with different body should # produce a different time. third_response = tc.post( "/works/arg", data=dict(mock=True, value=2, test=3) ) third_time = third_response.get_data(as_text=True) # ... making sure that different request bodies # don't yield the same cache!
assert third_time != second_time def test_cache_with_query_string_and_source_check_enabled(app, cache): """Test the _make_cache_key_query_string() cache key maker with source_check set to True to include the view's function's source code as part of the cache hash key. """ @cache.cached(query_string=True, source_check=True) def view_works(): return str(time.time()) app.add_url_rule("/works", "works", view_works) tc = app.test_client() # Make our first query... first_response = tc.get("/works?mock=true&offset=20&limit=15") first_time = first_response.get_data(as_text=True) # Make our second query... second_response = tc.get("/works?mock=true&offset=20&limit=15") second_time = second_response.get_data(as_text=True) # The cache should yield the same data first and second time assert first_time == second_time # Change the source of the function attached to the view (the body must # genuinely differ for the source hash, and thus the key, to change) @cache.cached(query_string=True, source_check=True) def view_works(): return str(time.time()) + " changed" # ... and we override the function attached to the view app.view_functions["works"] = view_works tc = app.test_client() # Make the third query... third_response = tc.get("/works?mock=true&offset=20&limit=15") third_time = third_response.get_data(as_text=True) # Now make sure the time for the first and third # responses are not the same i.e. cached is not used! assert third_time != first_time # Change the source of the function to what it was originally @cache.cached(query_string=True, source_check=True) def view_works(): return str(time.time()) app.view_functions["works"] = view_works tc = app.test_client() # Make a fourth query with the original source restored. fourth_response = tc.get("/works?mock=true&offset=20&limit=15") fourth_time = fourth_response.get_data(as_text=True) # ... making sure that the first value and the fourth value are the same # since the source is the same assert fourth_time == first_time def test_cache_with_query_string_and_source_check_disabled(app, cache): """Test the _make_cache_key_query_string() cache key maker with source_check set to False to exclude the view's function's source code as part of the cache hash key and to see if changing the source changes the data. """ @cache.cached(query_string=True, source_check=False) def view_works(): return str(time.time()) app.add_url_rule("/works", "works", view_works) tc = app.test_client() # Make our first query... first_response = tc.get("/works?mock=true&offset=20&limit=15") first_time = first_response.get_data(as_text=True) # Make our second query... second_response = tc.get("/works?mock=true&offset=20&limit=15") second_time = second_response.get_data(as_text=True) # The cache should yield the same data first and second time assert first_time == second_time # Change the source of the function attached to the view @cache.cached(query_string=True, source_check=False) def view_works(): return str(time.time()) # ... and we override the function attached to the view app.view_functions["works"] = view_works tc = app.test_client() # Make the third query... third_response = tc.get("/works?mock=true&offset=20&limit=15") third_time = third_response.get_data(as_text=True) # Now make sure the time for the first and third responses are the same # i.e. cached is used since cache will not check for source changes! assert third_time == first_time flask-caching-1.10.1/tox.ini # Tox (http://tox.testrun.org/) is a tool for running tests # in multiple virtualenvs.
This configuration file will run the # test suite on all supported python versions. To use it, "pip install tox" # and then run "tox" from this directory. [tox] envlist = py36, py37, py38, py39 [testenv] deps = -r{toxinidir}/requirements.txt pytest-xprocess commands = pytest {posargs}
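# Note: pytest-xprocess in the deps above is presumably what the suite's # redis_server and memcache_server fixtures use to spawn real server # processes during the run (an inference from tests/test_backends.py, not # something this file states).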