pax_global_header00006660000000000000000000000064141634617000014514gustar00rootroot0000000000000052 comment=1cb1425b1ba24f26fb1e37349c4c2658c2a46d8f python-diskcache-5.4.0/000077500000000000000000000000001416346170000147575ustar00rootroot00000000000000python-diskcache-5.4.0/.github/000077500000000000000000000000001416346170000163175ustar00rootroot00000000000000python-diskcache-5.4.0/.github/workflows/000077500000000000000000000000001416346170000203545ustar00rootroot00000000000000python-diskcache-5.4.0/.github/workflows/integration.yml000066400000000000000000000021751416346170000234270ustar00rootroot00000000000000name: integration on: [push, pull_request] jobs: checks: runs-on: ubuntu-latest strategy: max-parallel: 8 matrix: check: [bluecheck, doc8, docs, flake8, isortcheck, mypy, pylint, rstcheck] steps: - uses: actions/checkout@v2 - name: Set up Python uses: actions/setup-python@v2 with: python-version: '3.10' - name: Install dependencies run: | pip install --upgrade pip pip install tox - name: Run checks with tox run: | tox -e ${{ matrix.check }} tests: needs: checks runs-on: ${{ matrix.os }} strategy: max-parallel: 8 matrix: os: [ubuntu-latest, macos-latest, windows-latest] python-version: [3.6, 3.7, 3.8, 3.9, '3.10'] steps: - name: Set up Python ${{ matrix.python-version }} x64 uses: actions/setup-python@v2 with: python-version: ${{ matrix.python-version }} architecture: x64 - uses: actions/checkout@v2 - name: Install tox run: | pip install --upgrade pip pip install tox - name: Test with tox run: tox -e py python-diskcache-5.4.0/.github/workflows/release.yml000066400000000000000000000015041416346170000225170ustar00rootroot00000000000000name: release on: push: tags: - v* jobs: upload: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - name: Install libmemcached-dev run: | sudo apt-get update sudo apt-get install libmemcached-dev - name: Set up Python uses: actions/setup-python@v2 with: python-version: '3.10' - name: Install dependencies run: | pip 
install --upgrade pip pip install -r requirements.txt - name: Create source dist run: python setup.py sdist - name: Create wheel dist run: python setup.py bdist_wheel - name: Upload with twine env: TWINE_USERNAME: ${{ secrets.TWINE_USERNAME }} TWINE_PASSWORD: ${{ secrets.TWINE_PASSWORD }} run: | ls -l dist/* twine upload dist/* python-diskcache-5.4.0/.gitignore000066400000000000000000000003701416346170000167470ustar00rootroot00000000000000# Python byte-code *.py[co] # virutalenv directories /env*/ /.venv*/ # test files/directories /.cache/ .coverage* .pytest_cache/ /.tox/ # setup and upload directories /build/ /dist/ /diskcache.egg-info/ /docs/_build/ # macOS metadata .DS_Store python-diskcache-5.4.0/.pylintrc000066400000000000000000000434411416346170000166320ustar00rootroot00000000000000[MASTER] # A comma-separated list of package or module names from where C extensions may # be loaded. Extensions are loading into the active Python interpreter and may # run arbitrary code. extension-pkg-whitelist= # Specify a score threshold to be exceeded before program exits with error. fail-under=10.0 # Add files or directories to the blacklist. They should be base names, not # paths. ignore=CVS # Add files or directories matching the regex patterns to the blacklist. The # regex matches against base names, not paths. ignore-patterns= # Python code to execute, usually for sys.path manipulation such as # pygtk.require(). #init-hook= # Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the # number of processors available to use. jobs=1 # Control the amount of potential inferred values when inferring a single # object. This can help the performance when dealing with large functions or # complex, nested conditions. limit-inference-results=100 # List of plugins (as comma separated values of python module names) to load, # usually to register additional checkers. load-plugins= # Pickle collected data for later comparisons. 
persistent=yes # When enabled, pylint would attempt to guess common misconfiguration and emit # user-friendly hints instead of false-positive error messages. suggestion-mode=yes # Allow loading of arbitrary C extensions. Extensions are imported into the # active Python interpreter and may run arbitrary code. unsafe-load-any-extension=no [MESSAGES CONTROL] # Only show warnings with the listed confidence levels. Leave empty to show # all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED. confidence= # Disable the message, report, category or checker with the given id(s). You # can either give multiple identifiers separated by comma (,) or put this # option multiple times (only on the command line, not in the configuration # file where it should appear only once). You can also use "--disable=all" to # disable everything first and then reenable specific checks. For example, if # you want to run only the similarities checker, you can use "--disable=all # --enable=similarities". If you want to run only the classes checker, but have # no Warning level messages displayed, use "--disable=all --enable=classes # --disable=W". 
disable=print-statement, parameter-unpacking, unpacking-in-except, old-raise-syntax, backtick, long-suffix, old-ne-operator, old-octal-literal, import-star-module-level, non-ascii-bytes-literal, raw-checker-failed, bad-inline-option, locally-disabled, file-ignored, suppressed-message, useless-suppression, deprecated-pragma, use-symbolic-message-instead, apply-builtin, basestring-builtin, buffer-builtin, cmp-builtin, coerce-builtin, execfile-builtin, file-builtin, long-builtin, raw_input-builtin, reduce-builtin, standarderror-builtin, unicode-builtin, xrange-builtin, coerce-method, delslice-method, getslice-method, setslice-method, no-absolute-import, old-division, dict-iter-method, dict-view-method, next-method-called, metaclass-assignment, indexing-exception, raising-string, reload-builtin, oct-method, hex-method, nonzero-method, cmp-method, input-builtin, round-builtin, intern-builtin, unichr-builtin, map-builtin-not-iterating, zip-builtin-not-iterating, range-builtin-not-iterating, filter-builtin-not-iterating, using-cmp-argument, eq-without-hash, div-method, idiv-method, rdiv-method, exception-message-attribute, invalid-str-codec, sys-max-int, bad-python3-import, deprecated-string-function, deprecated-str-translate-call, deprecated-itertools-function, deprecated-types-field, next-method-defined, dict-items-not-iterating, dict-keys-not-iterating, dict-values-not-iterating, deprecated-operator-function, deprecated-urllib-function, xreadlines-attribute, deprecated-sys-function, exception-escape, comprehension-escape, no-member, no-else-return, duplicate-code, inconsistent-return-statements, consider-using-f-string, # Enable the message, report, category or checker with the given id(s). You can # either give multiple identifier separated by comma (,) or put this option # multiple time (only on the command line, not in the configuration file where # it should appear only once). See also the "--disable" option for examples. 
enable=c-extension-no-member [REPORTS] # Python expression which should return a score less than or equal to 10. You # have access to the variables 'error', 'warning', 'refactor', and 'convention' # which contain the number of messages in each category, as well as 'statement' # which is the total number of statements analyzed. This score is used by the # global evaluation report (RP0004). evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) # Template used to display messages. This is a python new-style format string # used to format the message information. See doc for all details. #msg-template= # Set the output format. Available formats are text, parseable, colorized, json # and msvs (visual studio). You can also give a reporter class, e.g. # mypackage.mymodule.MyReporterClass. output-format=text # Tells whether to display a full report or only the messages. reports=no # Activate the evaluation score. score=yes [REFACTORING] # Maximum number of nested blocks for function / method body max-nested-blocks=5 # Complete name of functions that never returns. When checking for # inconsistent-return-statements if a never returning function is called then # it will be considered as an explicit return statement and no message will be # printed. never-returning-functions=sys.exit [LOGGING] # The type of string formatting that logging methods do. `old` means using % # formatting, `new` is for `{}` formatting. logging-format-style=old # Logging modules to check that the string format arguments are in logging # function parameter format. logging-modules=logging [SPELLING] # Limits count of emitted suggestions for spelling mistakes. max-spelling-suggestions=4 # Spelling dictionary name. Available dictionaries: none. To make it work, # install the python-enchant package. spelling-dict= # List of comma separated words that should not be checked. spelling-ignore-words= # A path to a file that contains the private dictionary; one word per line. 
spelling-private-dict-file= # Tells whether to store unknown words to the private dictionary (see the # --spelling-private-dict-file option) instead of raising a message. spelling-store-unknown-words=no [MISCELLANEOUS] # List of note tags to take in consideration, separated by a comma. notes=FIXME, XXX, TODO # Regular expression of note tags to take in consideration. #notes-rgx= [TYPECHECK] # List of decorators that produce context managers, such as # contextlib.contextmanager. Add to this list to register other decorators that # produce valid context managers. contextmanager-decorators=contextlib.contextmanager # List of members which are set dynamically and missed by pylint inference # system, and so shouldn't trigger E1101 when accessed. Python regular # expressions are accepted. generated-members= # Tells whether missing members accessed in mixin class should be ignored. A # mixin class is detected if its name ends with "mixin" (case insensitive). ignore-mixin-members=yes # Tells whether to warn about missing members when the owner of the attribute # is inferred to be None. ignore-none=yes # This flag controls whether pylint should warn about no-member and similar # checks whenever an opaque object is returned when inferring. The inference # can return multiple potential results while evaluating a Python object, but # some branches might not be evaluated, which results in partial inference. In # that case, it might be useful to still emit no-member and other checks for # the rest of the inferred objects. ignore-on-opaque-inference=yes # List of class names for which member attributes should not be checked (useful # for classes with dynamically set attributes). This supports the use of # qualified names. 
ignored-classes=optparse.Values,thread._local,_thread._local # List of module names for which member attributes should not be checked # (useful for modules/projects where namespaces are manipulated during runtime # and thus existing member attributes cannot be deduced by static analysis). It # supports qualified module names, as well as Unix pattern matching. ignored-modules= # Show a hint with possible names when a member name was not found. The aspect # of finding the hint is based on edit distance. missing-member-hint=yes # The minimum edit distance a name should have in order to be considered a # similar match for a missing member name. missing-member-hint-distance=1 # The total number of similar names that should be taken in consideration when # showing a hint for a missing member. missing-member-max-choices=1 # List of decorators that change the signature of a decorated function. signature-mutators= [VARIABLES] # List of additional names supposed to be defined in builtins. Remember that # you should avoid defining new builtins when possible. additional-builtins= # Tells whether unused global variables should be treated as a violation. allow-global-unused-variables=yes # List of strings which can identify a callback function by name. A callback # name must start or end with one of those strings. callbacks=cb_, _cb # A regular expression matching the name of dummy variables (i.e. expected to # not be used). dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_ # Argument names that match this expression will be ignored. Default to name # with leading underscore. ignored-argument-names=_.*|^ignored_|^unused_ # Tells whether we should check for unused import in __init__ files. init-import=no # List of qualified module names which can have objects that can redefine # builtins. redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io [FORMAT] # Expected format of line ending, e.g. empty (any line ending), LF or CRLF. 
expected-line-ending-format= # Regexp for a line that is allowed to be longer than the limit. ignore-long-lines=^\s*(# )??$ # Number of spaces of indent required inside a hanging or continued line. indent-after-paren=4 # String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 # tab). indent-string=' ' # Maximum number of characters on a single line. max-line-length=100 # Maximum number of lines in a module. max-module-lines=3000 # Allow the body of a class to be on the same line as the declaration if body # contains single statement. single-line-class-stmt=no # Allow the body of an if to be on the same line as the test if there is no # else. single-line-if-stmt=no [SIMILARITIES] # Ignore comments when computing similarities. ignore-comments=yes # Ignore docstrings when computing similarities. ignore-docstrings=yes # Ignore imports when computing similarities. ignore-imports=yes # Minimum lines number of a similarity. min-similarity-lines=4 [BASIC] # Naming style matching correct argument names. argument-naming-style=snake_case # Regular expression matching correct argument names. Overrides argument- # naming-style. #argument-rgx= # Naming style matching correct attribute names. attr-naming-style=snake_case # Regular expression matching correct attribute names. Overrides attr-naming- # style. #attr-rgx= # Bad variable names which should always be refused, separated by a comma. bad-names=foo, bar, baz, toto, tutu, tata # Bad variable names regexes, separated by a comma. If names match any regex, # they will always be refused bad-names-rgxs= # Naming style matching correct class attribute names. class-attribute-naming-style=any # Regular expression matching correct class attribute names. Overrides class- # attribute-naming-style. #class-attribute-rgx= # Naming style matching correct class names. class-naming-style=PascalCase # Regular expression matching correct class names. Overrides class-naming- # style. 
#class-rgx= # Naming style matching correct constant names. const-naming-style=UPPER_CASE # Regular expression matching correct constant names. Overrides const-naming- # style. #const-rgx= # Minimum line length for functions/classes that require docstrings, shorter # ones are exempt. docstring-min-length=-1 # Naming style matching correct function names. function-naming-style=snake_case # Regular expression matching correct function names. Overrides function- # naming-style. #function-rgx= # Good variable names which should always be accepted, separated by a comma. good-names=i, j, k, ex, Run, _ # Good variable names regexes, separated by a comma. If names match any regex, # they will always be accepted good-names-rgxs= # Include a hint for the correct naming format with invalid-name. include-naming-hint=no # Naming style matching correct inline iteration names. inlinevar-naming-style=any # Regular expression matching correct inline iteration names. Overrides # inlinevar-naming-style. #inlinevar-rgx= # Naming style matching correct method names. method-naming-style=snake_case # Regular expression matching correct method names. Overrides method-naming- # style. #method-rgx= # Naming style matching correct module names. module-naming-style=snake_case # Regular expression matching correct module names. Overrides module-naming- # style. #module-rgx= # Colon-delimited sets of names that determine each other's naming style when # the name regexes allow several styles. name-group= # Regular expression which should only match function or class names that do # not require a docstring. no-docstring-rgx=^_ # List of decorators that produce properties, such as abc.abstractproperty. Add # to this list to register other decorators that produce valid properties. # These decorators are taken in consideration only for invalid-name. property-classes=abc.abstractproperty # Naming style matching correct variable names. 
variable-naming-style=snake_case # Regular expression matching correct variable names. Overrides variable- # naming-style. #variable-rgx= [STRING] # This flag controls whether inconsistent-quotes generates a warning when the # character used as a quote delimiter is used inconsistently within a module. check-quote-consistency=no # This flag controls whether the implicit-str-concat should generate a warning # on implicit string concatenation in sequences defined over several lines. check-str-concat-over-line-jumps=no [IMPORTS] # List of modules that can be imported at any level, not just the top level # one. allow-any-import-level= # Allow wildcard imports from modules that define __all__. allow-wildcard-with-all=no # Analyse import fallback blocks. This can be used to support both Python 2 and # 3 compatible code, which means that the block might have code that exists # only in one or another interpreter, leading to false positives when analysed. analyse-fallback-blocks=no # Deprecated modules which should not be used, separated by a comma. deprecated-modules=optparse,tkinter.tix # Create a graph of external dependencies in the given file (report RP0402 must # not be disabled). ext-import-graph= # Create a graph of every (i.e. internal and external) dependencies in the # given file (report RP0402 must not be disabled). import-graph= # Create a graph of internal dependencies in the given file (report RP0402 must # not be disabled). int-import-graph= # Force import order to recognize a module as part of the standard # compatibility libraries. known-standard-library= # Force import order to recognize a module as part of a third party library. known-third-party=enchant # Couples of modules and preferred modules, separated by a comma. preferred-modules= [CLASSES] # List of method names used to declare (i.e. assign) instance attributes. 
defining-attr-methods=__init__, __new__, setUp, __post_init__ # List of member names, which should be excluded from the protected access # warning. exclude-protected=_asdict, _fields, _replace, _source, _make # List of valid names for the first argument in a class method. valid-classmethod-first-arg=cls # List of valid names for the first argument in a metaclass class method. valid-metaclass-classmethod-first-arg=cls [DESIGN] # Maximum number of arguments for function / method. max-args=8 # Maximum number of attributes for a class (see R0902). max-attributes=8 # Maximum number of boolean expressions in an if statement (see R0916). max-bool-expr=5 # Maximum number of branch for function / method body. max-branches=20 # Maximum number of locals for function / method body. max-locals=30 # Maximum number of parents for a class (see R0901). max-parents=7 # Maximum number of public methods for a class (see R0904). max-public-methods=30 # Maximum number of return / yield for function / method body. max-returns=8 # Maximum number of statements in function / method body. max-statements=60 # Minimum number of public methods for a class (see R0903). min-public-methods=2 [EXCEPTIONS] # Exceptions that will emit a warning when being caught. Defaults to # "BaseException, Exception". overgeneral-exceptions=BaseException, Exception python-diskcache-5.4.0/LICENSE000066400000000000000000000010571416346170000157670ustar00rootroot00000000000000Copyright 2016-2022 Grant Jenks Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
python-diskcache-5.4.0/MANIFEST.in000066400000000000000000000000331416346170000165110ustar00rootroot00000000000000include README.rst LICENSE python-diskcache-5.4.0/README.rst000066400000000000000000000455341416346170000164610ustar00rootroot00000000000000DiskCache: Disk Backed Cache ============================ `DiskCache`_ is an Apache2 licensed disk and file backed cache library, written in pure-Python, and compatible with Django. The cloud-based computing of 2021 puts a premium on memory. Gigabytes of empty space is left on disks as processes vie for memory. Among these processes is Memcached (and sometimes Redis) which is used as a cache. Wouldn't it be nice to leverage empty disk space for caching? Django is Python's most popular web framework and ships with several caching backends. Unfortunately the file-based cache in Django is essentially broken. The culling method is random and large caches repeatedly scan a cache directory which slows linearly with growth. Can you really allow it to take sixty milliseconds to store a key in a cache with a thousand items? In Python, we can do better. And we can do it in pure-Python! :: In [1]: import pylibmc In [2]: client = pylibmc.Client(['127.0.0.1'], binary=True) In [3]: client[b'key'] = b'value' In [4]: %timeit client[b'key'] 10000 loops, best of 3: 25.4 µs per loop In [5]: import diskcache as dc In [6]: cache = dc.Cache('tmp') In [7]: cache[b'key'] = b'value' In [8]: %timeit cache[b'key'] 100000 loops, best of 3: 11.8 µs per loop **Note:** Micro-benchmarks have their place but are not a substitute for real measurements. DiskCache offers cache benchmarks to defend its performance claims. Micro-optimizations are avoided but your mileage may vary. DiskCache efficiently makes gigabytes of storage space available for caching. By leveraging rock-solid database libraries and memory-mapped files, cache performance can match and exceed industry-standard solutions. There's no need for a C compiler or running another process. 
Performance is a feature and testing has 100% coverage with unit tests and hours of stress. Testimonials ------------ `Daren Hasenkamp`_, Founder -- "It's a useful, simple API, just like I love about Redis. It has reduced the amount of queries hitting my Elasticsearch cluster by over 25% for a website that gets over a million users/day (100+ hits/second)." `Mathias Petermann`_, Senior Linux System Engineer -- "I implemented it into a wrapper for our Ansible lookup modules and we were able to speed up some Ansible runs by almost 3 times. DiskCache is saving us a ton of time." Does your company or website use `DiskCache`_? Send us a `message `_ and let us know. .. _`Daren Hasenkamp`: https://www.linkedin.com/in/daren-hasenkamp-93006438/ .. _`Mathias Petermann`: https://www.linkedin.com/in/mathias-petermann-a8aa273b/ Features -------- - Pure-Python - Fully Documented - Benchmark comparisons (alternatives, Django cache backends) - 100% test coverage - Hours of stress testing - Performance matters - Django compatible API - Thread-safe and process-safe - Supports multiple eviction policies (LRU and LFU included) - Keys support "tag" metadata and eviction - Developed on Python 3.10 - Tested on CPython 3.6, 3.7, 3.8, 3.9, 3.10 - Tested on Linux, Mac OS X, and Windows - Tested using GitHub Actions .. image:: https://github.com/grantjenks/python-diskcache/workflows/integration/badge.svg :target: https://github.com/grantjenks/python-diskcache/actions?query=workflow%3Aintegration .. image:: https://github.com/grantjenks/python-diskcache/workflows/release/badge.svg :target: https://github.com/grantjenks/python-diskcache/actions?query=workflow%3Arelease Quickstart ---------- Installing `DiskCache`_ is simple with `pip `_:: $ pip install diskcache You can access documentation in the interpreter with Python's built-in help function:: >>> import diskcache >>> help(diskcache) # doctest: +SKIP The core of `DiskCache`_ is three data types intended for caching. 
`Cache`_ objects manage a SQLite database and filesystem directory to store key and value pairs. `FanoutCache`_ provides a sharding layer to utilize multiple caches and `DjangoCache`_ integrates that with `Django`_:: >>> from diskcache import Cache, FanoutCache, DjangoCache >>> help(Cache) # doctest: +SKIP >>> help(FanoutCache) # doctest: +SKIP >>> help(DjangoCache) # doctest: +SKIP Built atop the caching data types, are `Deque`_ and `Index`_ which work as a cross-process, persistent replacements for Python's ``collections.deque`` and ``dict``. These implement the sequence and mapping container base classes:: >>> from diskcache import Deque, Index >>> help(Deque) # doctest: +SKIP >>> help(Index) # doctest: +SKIP Finally, a number of `recipes`_ for cross-process synchronization are provided using an underlying cache. Features like memoization with cache stampede prevention, cross-process locking, and cross-process throttling are available:: >>> from diskcache import memoize_stampede, Lock, throttle >>> help(memoize_stampede) # doctest: +SKIP >>> help(Lock) # doctest: +SKIP >>> help(throttle) # doctest: +SKIP Python's docstrings are a quick way to get started but not intended as a replacement for the `DiskCache Tutorial`_ and `DiskCache API Reference`_. .. _`Cache`: http://www.grantjenks.com/docs/diskcache/tutorial.html#cache .. _`FanoutCache`: http://www.grantjenks.com/docs/diskcache/tutorial.html#fanoutcache .. _`DjangoCache`: http://www.grantjenks.com/docs/diskcache/tutorial.html#djangocache .. _`Django`: https://www.djangoproject.com/ .. _`Deque`: http://www.grantjenks.com/docs/diskcache/tutorial.html#deque .. _`Index`: http://www.grantjenks.com/docs/diskcache/tutorial.html#index .. _`recipes`: http://www.grantjenks.com/docs/diskcache/tutorial.html#recipes User Guide ---------- For those wanting more details, this part of the documentation describes tutorial, benchmarks, API, and development. 
* `DiskCache Tutorial`_ * `DiskCache Cache Benchmarks`_ * `DiskCache DjangoCache Benchmarks`_ * `Case Study: Web Crawler`_ * `Case Study: Landing Page Caching`_ * `Talk: All Things Cached - SF Python 2017 Meetup`_ * `DiskCache API Reference`_ * `DiskCache Development`_ .. _`DiskCache Tutorial`: http://www.grantjenks.com/docs/diskcache/tutorial.html .. _`DiskCache Cache Benchmarks`: http://www.grantjenks.com/docs/diskcache/cache-benchmarks.html .. _`DiskCache DjangoCache Benchmarks`: http://www.grantjenks.com/docs/diskcache/djangocache-benchmarks.html .. _`Talk: All Things Cached - SF Python 2017 Meetup`: http://www.grantjenks.com/docs/diskcache/sf-python-2017-meetup-talk.html .. _`Case Study: Web Crawler`: http://www.grantjenks.com/docs/diskcache/case-study-web-crawler.html .. _`Case Study: Landing Page Caching`: http://www.grantjenks.com/docs/diskcache/case-study-landing-page-caching.html .. _`DiskCache API Reference`: http://www.grantjenks.com/docs/diskcache/api.html .. _`DiskCache Development`: http://www.grantjenks.com/docs/diskcache/development.html Comparisons ----------- Comparisons to popular projects related to `DiskCache`_. Key-Value Stores ................ `DiskCache`_ is mostly a simple key-value store. Feature comparisons with four other projects are shown in the tables below. * `dbm`_ is part of Python's standard library and implements a generic interface to variants of the DBM database — dbm.gnu or dbm.ndbm. If none of these modules is installed, the slow-but-simple dbm.dumb is used. * `shelve`_ is part of Python's standard library and implements a “shelf” as a persistent, dictionary-like object. The difference with “dbm” databases is that the values can be anything that the pickle module can handle. * `sqlitedict`_ is a lightweight wrapper around Python's sqlite3 database with a simple, Pythonic dict-like interface and support for multi-thread access. Keys are arbitrary strings, values arbitrary pickle-able objects. 
* `pickleDB`_ is a lightweight and simple key-value store. It is built upon Python's simplejson module and was inspired by Redis. It is licensed with the BSD three-clause license. .. _`dbm`: https://docs.python.org/3/library/dbm.html .. _`shelve`: https://docs.python.org/3/library/shelve.html .. _`sqlitedict`: https://github.com/RaRe-Technologies/sqlitedict .. _`pickleDB`: https://pythonhosted.org/pickleDB/ **Features** ================ ============= ========= ========= ============ ============ Feature diskcache dbm shelve sqlitedict pickleDB ================ ============= ========= ========= ============ ============ Atomic? Always Maybe Maybe Maybe No Persistent? Yes Yes Yes Yes Yes Thread-safe? Yes No No Yes No Process-safe? Yes No No Maybe No Backend? SQLite DBM DBM SQLite File Serialization? Customizable None Pickle Customizable JSON Data Types? Mapping/Deque Mapping Mapping Mapping Mapping Ordering? Insert/Sorted None None None None Eviction? LRU/LFU/more None None None None Vacuum? Automatic Maybe Maybe Manual Automatic Transactions? Yes No No Maybe No Multiprocessing? Yes No No No No Forkable? Yes No No No No Metadata? Yes No No No No ================ ============= ========= ========= ============ ============ **Quality** ================ ============= ========= ========= ============ ============ Project diskcache dbm shelve sqlitedict pickleDB ================ ============= ========= ========= ============ ============ Tests? Yes Yes Yes Yes Yes Coverage? Yes Yes Yes Yes No Stress? Yes No No No No CI Tests? Linux/Windows Yes Yes Linux No Python? 2/3/PyPy All All 2/3 2/3 License? Apache2 Python Python Apache2 3-Clause BSD Docs? Extensive Summary Summary Readme Summary Benchmarks? Yes No No No No Sources? GitHub GitHub GitHub GitHub GitHub Pure-Python? Yes Yes Yes Yes Yes Server? No No No No No Integrations? Django None None None None ================ ============= ========= ========= ============ ============ **Timings** These are rough measurements. 
See `DiskCache Cache Benchmarks`_ for more rigorous data. ================ ============= ========= ========= ============ ============ Project diskcache dbm shelve sqlitedict pickleDB ================ ============= ========= ========= ============ ============ get 25 µs 36 µs 41 µs 513 µs 92 µs set 198 µs 900 µs 928 µs 697 µs 1,020 µs delete 248 µs 740 µs 702 µs 1,717 µs 1,020 µs ================ ============= ========= ========= ============ ============ Caching Libraries ................. * `joblib.Memory`_ provides caching functions and works by explicitly saving the inputs and outputs to files. It is designed to work with non-hashable and potentially large input and output data types such as numpy arrays. * `klepto`_ extends Python’s `lru_cache` to utilize different keymaps and alternate caching algorithms, such as `lfu_cache` and `mru_cache`. Klepto uses a simple dictionary-sytle interface for all caches and archives. .. _`klepto`: https://pypi.org/project/klepto/ .. _`joblib.Memory`: https://joblib.readthedocs.io/en/latest/memory.html Data Structures ............... * `dict`_ is a mapping object that maps hashable keys to arbitrary values. Mappings are mutable objects. There is currently only one standard Python mapping type, the dictionary. * `pandas`_ is a Python package providing fast, flexible, and expressive data structures designed to make working with “relational” or “labeled” data both easy and intuitive. * `Sorted Containers`_ is an Apache2 licensed sorted collections library, written in pure-Python, and fast as C-extensions. Sorted Containers implements sorted list, sorted dictionary, and sorted set data types. .. _`dict`: https://docs.python.org/3/library/stdtypes.html#typesmapping .. _`pandas`: https://pandas.pydata.org/ .. _`Sorted Containers`: http://www.grantjenks.com/docs/sortedcontainers/ Pure-Python Databases ..................... 
* `ZODB`_ supports an isomorphic interface for database operations which means there's little impact on your code to make objects persistent and there's no database mapper that partially hides the datbase. * `CodernityDB`_ is an open source, pure-Python, multi-platform, schema-less, NoSQL database and includes an HTTP server version, and a Python client library that aims to be 100% compatible with the embedded version. * `TinyDB`_ is a tiny, document oriented database optimized for your happiness. If you need a simple database with a clean API that just works without lots of configuration, TinyDB might be the right choice for you. .. _`ZODB`: http://www.zodb.org/ .. _`CodernityDB`: https://pypi.org/project/CodernityDB/ .. _`TinyDB`: https://tinydb.readthedocs.io/ Object Relational Mappings (ORM) ................................ * `Django ORM`_ provides models that are the single, definitive source of information about data and contains the essential fields and behaviors of the stored data. Generally, each model maps to a single SQL database table. * `SQLAlchemy`_ is the Python SQL toolkit and Object Relational Mapper that gives application developers the full power and flexibility of SQL. It provides a full suite of well known enterprise-level persistence patterns. * `Peewee`_ is a simple and small ORM. It has few (but expressive) concepts, making it easy to learn and intuitive to use. Peewee supports Sqlite, MySQL, and PostgreSQL with tons of extensions. * `SQLObject`_ is a popular Object Relational Manager for providing an object interface to your database, with tables as classes, rows as instances, and columns as attributes. * `Pony ORM`_ is a Python ORM with beautiful query syntax. Use Python syntax for interacting with the database. Pony translates such queries into SQL and executes them in the database in the most efficient way. .. _`Django ORM`: https://docs.djangoproject.com/en/dev/topics/db/ .. _`SQLAlchemy`: https://www.sqlalchemy.org/ .. 
_`Peewee`: http://docs.peewee-orm.com/ .. _`SQLObject`: http://sqlobject.org/ .. _`Pony ORM`: https://ponyorm.com/ SQL Databases ............. * `SQLite`_ is part of Python's standard library and provides a lightweight disk-based database that doesn’t require a separate server process and allows accessing the database using a nonstandard variant of the SQL query language. * `MySQL`_ is one of the world’s most popular open source databases and has become a leading database choice for web-based applications. MySQL includes a standardized database driver for Python platforms and development. * `PostgreSQL`_ is a powerful, open source object-relational database system with over 30 years of active development. Psycopg is the most popular PostgreSQL adapter for the Python programming language. * `Oracle DB`_ is a relational database management system (RDBMS) from the Oracle Corporation. Originally developed in 1977, Oracle DB is one of the most trusted and widely used enterprise relational database engines. * `Microsoft SQL Server`_ is a relational database management system developed by Microsoft. As a database server, it stores and retrieves data as requested by other software applications. .. _`SQLite`: https://docs.python.org/3/library/sqlite3.html .. _`MySQL`: https://dev.mysql.com/downloads/connector/python/ .. _`PostgreSQL`: http://initd.org/psycopg/ .. _`Oracle DB`: https://pypi.org/project/cx_Oracle/ .. _`Microsoft SQL Server`: https://pypi.org/project/pyodbc/ Other Databases ............... * `Memcached`_ is free and open source, high-performance, distributed memory object caching system, generic in nature, but intended for use in speeding up dynamic web applications by alleviating database load. * `Redis`_ is an open source, in-memory data structure store, used as a database, cache and message broker. It supports data structures such as strings, hashes, lists, sets, sorted sets with range queries, and more. 
* `MongoDB`_ is a cross-platform document-oriented database program. Classified as a NoSQL database program, MongoDB uses JSON-like documents with schema. PyMongo is the recommended way to work with MongoDB from Python. * `LMDB`_ is a lightning-fast, memory-mapped database. With memory-mapped files, it has the read performance of a pure in-memory database while retaining the persistence of standard disk-based databases. * `BerkeleyDB`_ is a software library intended to provide a high-performance embedded database for key/value data. Berkeley DB is a programmatic toolkit that provides built-in database support for desktop and server applications. * `LevelDB`_ is a fast key-value storage library written at Google that provides an ordered mapping from string keys to string values. Data is stored sorted by key and users can provide a custom comparison function. .. _`Memcached`: https://pypi.org/project/python-memcached/ .. _`MongoDB`: https://api.mongodb.com/python/current/ .. _`Redis`: https://redis.io/clients#python .. _`LMDB`: https://lmdb.readthedocs.io/ .. _`BerkeleyDB`: https://pypi.org/project/bsddb3/ .. _`LevelDB`: https://plyvel.readthedocs.io/ Reference --------- * `DiskCache Documentation`_ * `DiskCache at PyPI`_ * `DiskCache at GitHub`_ * `DiskCache Issue Tracker`_ .. _`DiskCache Documentation`: http://www.grantjenks.com/docs/diskcache/ .. _`DiskCache at PyPI`: https://pypi.python.org/pypi/diskcache/ .. _`DiskCache at GitHub`: https://github.com/grantjenks/python-diskcache/ .. _`DiskCache Issue Tracker`: https://github.com/grantjenks/python-diskcache/issues/ License ------- Copyright 2016-2022 Grant Jenks Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. .. _`DiskCache`: http://www.grantjenks.com/docs/diskcache/ python-diskcache-5.4.0/diskcache/000077500000000000000000000000001416346170000166755ustar00rootroot00000000000000python-diskcache-5.4.0/diskcache/__init__.py000066400000000000000000000023561416346170000210140ustar00rootroot00000000000000""" DiskCache API Reference ======================= The :doc:`tutorial` provides a helpful walkthrough of most methods. """ from .core import ( DEFAULT_SETTINGS, ENOVAL, EVICTION_POLICY, UNKNOWN, Cache, Disk, EmptyDirWarning, JSONDisk, Timeout, UnknownFileWarning, ) from .fanout import FanoutCache from .persistent import Deque, Index from .recipes import ( Averager, BoundedSemaphore, Lock, RLock, barrier, memoize_stampede, throttle, ) __all__ = [ 'Averager', 'BoundedSemaphore', 'Cache', 'DEFAULT_SETTINGS', 'Deque', 'Disk', 'ENOVAL', 'EVICTION_POLICY', 'EmptyDirWarning', 'FanoutCache', 'Index', 'JSONDisk', 'Lock', 'RLock', 'Timeout', 'UNKNOWN', 'UnknownFileWarning', 'barrier', 'memoize_stampede', 'throttle', ] try: from .djangocache import DjangoCache # noqa __all__.append('DjangoCache') except Exception: # pylint: disable=broad-except # pragma: no cover # Django not installed or not setup so ignore. 
pass __title__ = 'diskcache' __version__ = '5.4.0' __build__ = 0x050400 __author__ = 'Grant Jenks' __license__ = 'Apache 2.0' __copyright__ = 'Copyright 2016-2022 Grant Jenks' python-diskcache-5.4.0/diskcache/cli.py000066400000000000000000000000541416346170000200150ustar00rootroot00000000000000"""Command line interface to disk cache.""" python-diskcache-5.4.0/diskcache/core.py000066400000000000000000002400441416346170000202030ustar00rootroot00000000000000"""Core disk and file backed cache API. """ import codecs import contextlib as cl import errno import functools as ft import io import json import os import os.path as op import pickle import pickletools import sqlite3 import struct import tempfile import threading import time import warnings import zlib def full_name(func): """Return full name of `func` by adding the module and function name.""" return func.__module__ + '.' + func.__qualname__ class Constant(tuple): """Pretty display of immutable constant.""" def __new__(cls, name): return tuple.__new__(cls, (name,)) def __repr__(self): return '%s' % self[0] DBNAME = 'cache.db' ENOVAL = Constant('ENOVAL') UNKNOWN = Constant('UNKNOWN') MODE_NONE = 0 MODE_RAW = 1 MODE_BINARY = 2 MODE_TEXT = 3 MODE_PICKLE = 4 DEFAULT_SETTINGS = { 'statistics': 0, # False 'tag_index': 0, # False 'eviction_policy': 'least-recently-stored', 'size_limit': 2 ** 30, # 1gb 'cull_limit': 10, 'sqlite_auto_vacuum': 1, # FULL 'sqlite_cache_size': 2 ** 13, # 8,192 pages 'sqlite_journal_mode': 'wal', 'sqlite_mmap_size': 2 ** 26, # 64mb 'sqlite_synchronous': 1, # NORMAL 'disk_min_file_size': 2 ** 15, # 32kb 'disk_pickle_protocol': pickle.HIGHEST_PROTOCOL, } METADATA = { 'count': 0, 'size': 0, 'hits': 0, 'misses': 0, } EVICTION_POLICY = { 'none': { 'init': None, 'get': None, 'cull': None, }, 'least-recently-stored': { 'init': ( 'CREATE INDEX IF NOT EXISTS Cache_store_time ON' ' Cache (store_time)' ), 'get': None, 'cull': 'SELECT {fields} FROM Cache ORDER BY store_time LIMIT ?', }, 
'least-recently-used': { 'init': ( 'CREATE INDEX IF NOT EXISTS Cache_access_time ON' ' Cache (access_time)' ), 'get': 'access_time = {now}', 'cull': 'SELECT {fields} FROM Cache ORDER BY access_time LIMIT ?', }, 'least-frequently-used': { 'init': ( 'CREATE INDEX IF NOT EXISTS Cache_access_count ON' ' Cache (access_count)' ), 'get': 'access_count = access_count + 1', 'cull': 'SELECT {fields} FROM Cache ORDER BY access_count LIMIT ?', }, } class Disk: """Cache key and value serialization for SQLite database and files.""" def __init__(self, directory, min_file_size=0, pickle_protocol=0): """Initialize disk instance. :param str directory: directory path :param int min_file_size: minimum size for file use :param int pickle_protocol: pickle protocol for serialization """ self._directory = directory self.min_file_size = min_file_size self.pickle_protocol = pickle_protocol def hash(self, key): """Compute portable hash for `key`. :param key: key to hash :return: hash value """ mask = 0xFFFFFFFF disk_key, _ = self.put(key) type_disk_key = type(disk_key) if type_disk_key is sqlite3.Binary: return zlib.adler32(disk_key) & mask elif type_disk_key is str: return zlib.adler32(disk_key.encode('utf-8')) & mask # noqa elif type_disk_key is int: return disk_key % mask else: assert type_disk_key is float return zlib.adler32(struct.pack('!d', disk_key)) & mask def put(self, key): """Convert `key` to fields key and raw for Cache table. :param key: key to convert :return: (database key, raw boolean) pair """ # pylint: disable=unidiomatic-typecheck type_key = type(key) if type_key is bytes: return sqlite3.Binary(key), True elif ( (type_key is str) or ( type_key is int and -9223372036854775808 <= key <= 9223372036854775807 ) or (type_key is float) ): return key, True else: data = pickle.dumps(key, protocol=self.pickle_protocol) result = pickletools.optimize(data) return sqlite3.Binary(result), False def get(self, key, raw): """Convert fields `key` and `raw` from Cache table to key. 
:param key: database key to convert :param bool raw: flag indicating raw database storage :return: corresponding Python key """ # pylint: disable=no-self-use,unidiomatic-typecheck if raw: return bytes(key) if type(key) is sqlite3.Binary else key else: return pickle.load(io.BytesIO(key)) def store(self, value, read, key=UNKNOWN): """Convert `value` to fields size, mode, filename, and value for Cache table. :param value: value to convert :param bool read: True when value is file-like object :param key: key for item (default UNKNOWN) :return: (size, mode, filename, value) tuple for Cache table """ # pylint: disable=unidiomatic-typecheck type_value = type(value) min_file_size = self.min_file_size if ( (type_value is str and len(value) < min_file_size) or ( type_value is int and -9223372036854775808 <= value <= 9223372036854775807 ) or (type_value is float) ): return 0, MODE_RAW, None, value elif type_value is bytes: if len(value) < min_file_size: return 0, MODE_RAW, None, sqlite3.Binary(value) else: filename, full_path = self.filename(key, value) self._write(full_path, io.BytesIO(value), 'xb') return len(value), MODE_BINARY, filename, None elif type_value is str: filename, full_path = self.filename(key, value) self._write(full_path, io.StringIO(value), 'x', 'UTF-8') size = op.getsize(full_path) return size, MODE_TEXT, filename, None elif read: reader = ft.partial(value.read, 2 ** 22) filename, full_path = self.filename(key, value) iterator = iter(reader, b'') size = self._write(full_path, iterator, 'xb') return size, MODE_BINARY, filename, None else: result = pickle.dumps(value, protocol=self.pickle_protocol) if len(result) < min_file_size: return 0, MODE_PICKLE, None, sqlite3.Binary(result) else: filename, full_path = self.filename(key, value) self._write(full_path, io.BytesIO(result), 'xb') return len(result), MODE_PICKLE, filename, None def _write(self, full_path, iterator, mode, encoding=None): # pylint: disable=no-self-use full_dir, _ = op.split(full_path) for 
count in range(1, 11): with cl.suppress(OSError): os.makedirs(full_dir) try: # Another cache may have deleted the directory before # the file could be opened. writer = open(full_path, mode, encoding=encoding) except OSError: if count == 10: # Give up after 10 tries to open the file. raise continue with writer: size = 0 for chunk in iterator: size += len(chunk) writer.write(chunk) return size def fetch(self, mode, filename, value, read): """Convert fields `mode`, `filename`, and `value` from Cache table to value. :param int mode: value mode raw, binary, text, or pickle :param str filename: filename of corresponding value :param value: database value :param bool read: when True, return an open file handle :return: corresponding Python value :raises: IOError if the value cannot be read """ # pylint: disable=no-self-use,unidiomatic-typecheck,consider-using-with if mode == MODE_RAW: return bytes(value) if type(value) is sqlite3.Binary else value elif mode == MODE_BINARY: if read: return open(op.join(self._directory, filename), 'rb') else: with open(op.join(self._directory, filename), 'rb') as reader: return reader.read() elif mode == MODE_TEXT: full_path = op.join(self._directory, filename) with open(full_path, 'r', encoding='UTF-8') as reader: return reader.read() elif mode == MODE_PICKLE: if value is None: with open(op.join(self._directory, filename), 'rb') as reader: return pickle.load(reader) else: return pickle.load(io.BytesIO(value)) def filename(self, key=UNKNOWN, value=UNKNOWN): """Return filename and full-path tuple for file storage. Filename will be a randomly generated 28 character hexadecimal string with ".val" suffixed. Two levels of sub-directories will be used to reduce the size of directories. On older filesystems, lookups in directories with many files may be slow. The default implementation ignores the `key` and `value` parameters. 
In some scenarios, for example :meth:`Cache.push `, the `key` or `value` may not be known when the item is stored in the cache. :param key: key for item (default UNKNOWN) :param value: value for item (default UNKNOWN) """ # pylint: disable=unused-argument hex_name = codecs.encode(os.urandom(16), 'hex').decode('utf-8') sub_dir = op.join(hex_name[:2], hex_name[2:4]) name = hex_name[4:] + '.val' filename = op.join(sub_dir, name) full_path = op.join(self._directory, filename) return filename, full_path def remove(self, file_path): """Remove a file given by `file_path`. This method is cross-thread and cross-process safe. If an OSError occurs, it is suppressed. :param str file_path: relative path to file """ full_path = op.join(self._directory, file_path) full_dir, _ = op.split(full_path) # Suppress OSError that may occur if two caches attempt to delete the # same file or directory at the same time. with cl.suppress(OSError): os.remove(full_path) with cl.suppress(OSError): os.removedirs(full_dir) class JSONDisk(Disk): """Cache key and value using JSON serialization with zlib compression.""" def __init__(self, directory, compress_level=1, **kwargs): """Initialize JSON disk instance. Keys and values are compressed using the zlib library. The `compress_level` is an integer from 0 to 9 controlling the level of compression; 1 is fastest and produces the least compression, 9 is slowest and produces the most compression, and 0 is no compression. 
:param str directory: directory path :param int compress_level: zlib compression level (default 1) :param kwargs: super class arguments """ self.compress_level = compress_level super().__init__(directory, **kwargs) def put(self, key): json_bytes = json.dumps(key).encode('utf-8') data = zlib.compress(json_bytes, self.compress_level) return super().put(data) def get(self, key, raw): data = super().get(key, raw) return json.loads(zlib.decompress(data).decode('utf-8')) def store(self, value, read, key=UNKNOWN): if not read: json_bytes = json.dumps(value).encode('utf-8') value = zlib.compress(json_bytes, self.compress_level) return super().store(value, read, key=key) def fetch(self, mode, filename, value, read): data = super().fetch(mode, filename, value, read) if not read: data = json.loads(zlib.decompress(data).decode('utf-8')) return data class Timeout(Exception): """Database timeout expired.""" class UnknownFileWarning(UserWarning): """Warning used by Cache.check for unknown files.""" class EmptyDirWarning(UserWarning): """Warning used by Cache.check for empty directories.""" def args_to_key(base, args, kwargs, typed, ignore): """Create cache key out of function arguments. :param tuple base: base of key :param tuple args: function arguments :param dict kwargs: function keyword arguments :param bool typed: include types in cache key :param set ignore: positional or keyword args to ignore :return: cache key tuple """ args = tuple(arg for index, arg in enumerate(args) if index not in ignore) key = base + args + (None,) if kwargs: kwargs = {key: val for key, val in kwargs.items() if key not in ignore} sorted_items = sorted(kwargs.items()) for item in sorted_items: key += item if typed: key += tuple(type(arg) for arg in args) if kwargs: key += tuple(type(value) for _, value in sorted_items) return key class Cache: """Disk and file backed cache.""" def __init__(self, directory=None, timeout=60, disk=Disk, **settings): """Initialize cache instance. 
:param str directory: cache directory :param float timeout: SQLite connection timeout :param disk: Disk type or subclass for serialization :param settings: any of DEFAULT_SETTINGS """ try: assert issubclass(disk, Disk) except (TypeError, AssertionError): raise ValueError('disk must subclass diskcache.Disk') from None if directory is None: directory = tempfile.mkdtemp(prefix='diskcache-') directory = op.expanduser(directory) directory = op.expandvars(directory) self._directory = directory self._timeout = 0 # Manually handle retries during initialization. self._local = threading.local() self._txn_id = None if not op.isdir(directory): try: os.makedirs(directory, 0o755) except OSError as error: if error.errno != errno.EEXIST: raise EnvironmentError( error.errno, 'Cache directory "%s" does not exist' ' and could not be created' % self._directory, ) from None sql = self._sql_retry # Setup Settings table. try: current_settings = dict( sql('SELECT key, value FROM Settings').fetchall() ) except sqlite3.OperationalError: current_settings = {} sets = DEFAULT_SETTINGS.copy() sets.update(current_settings) sets.update(settings) for key in METADATA: sets.pop(key, None) # Chance to set pragmas before any tables are created. for key, value in sorted(sets.items()): if key.startswith('sqlite_'): self.reset(key, value, update=False) sql( 'CREATE TABLE IF NOT EXISTS Settings (' ' key TEXT NOT NULL UNIQUE,' ' value)' ) # Setup Disk object (must happen after settings initialized). kwargs = { key[5:]: value for key, value in sets.items() if key.startswith('disk_') } self._disk = disk(directory, **kwargs) # Set cached attributes: updates settings and sets pragmas. 
for key, value in sets.items(): query = 'INSERT OR REPLACE INTO Settings VALUES (?, ?)' sql(query, (key, value)) self.reset(key, value) for key, value in METADATA.items(): query = 'INSERT OR IGNORE INTO Settings VALUES (?, ?)' sql(query, (key, value)) self.reset(key) ((self._page_size,),) = sql('PRAGMA page_size').fetchall() # Setup Cache table. sql( 'CREATE TABLE IF NOT EXISTS Cache (' ' rowid INTEGER PRIMARY KEY,' ' key BLOB,' ' raw INTEGER,' ' store_time REAL,' ' expire_time REAL,' ' access_time REAL,' ' access_count INTEGER DEFAULT 0,' ' tag BLOB,' ' size INTEGER DEFAULT 0,' ' mode INTEGER DEFAULT 0,' ' filename TEXT,' ' value BLOB)' ) sql( 'CREATE UNIQUE INDEX IF NOT EXISTS Cache_key_raw ON' ' Cache(key, raw)' ) sql( 'CREATE INDEX IF NOT EXISTS Cache_expire_time ON' ' Cache (expire_time)' ) query = EVICTION_POLICY[self.eviction_policy]['init'] if query is not None: sql(query) # Use triggers to keep Metadata updated. sql( 'CREATE TRIGGER IF NOT EXISTS Settings_count_insert' ' AFTER INSERT ON Cache FOR EACH ROW BEGIN' ' UPDATE Settings SET value = value + 1' ' WHERE key = "count"; END' ) sql( 'CREATE TRIGGER IF NOT EXISTS Settings_count_delete' ' AFTER DELETE ON Cache FOR EACH ROW BEGIN' ' UPDATE Settings SET value = value - 1' ' WHERE key = "count"; END' ) sql( 'CREATE TRIGGER IF NOT EXISTS Settings_size_insert' ' AFTER INSERT ON Cache FOR EACH ROW BEGIN' ' UPDATE Settings SET value = value + NEW.size' ' WHERE key = "size"; END' ) sql( 'CREATE TRIGGER IF NOT EXISTS Settings_size_update' ' AFTER UPDATE ON Cache FOR EACH ROW BEGIN' ' UPDATE Settings' ' SET value = value + NEW.size - OLD.size' ' WHERE key = "size"; END' ) sql( 'CREATE TRIGGER IF NOT EXISTS Settings_size_delete' ' AFTER DELETE ON Cache FOR EACH ROW BEGIN' ' UPDATE Settings SET value = value - OLD.size' ' WHERE key = "size"; END' ) # Create tag index if requested. 
if self.tag_index: # pylint: disable=no-member self.create_tag_index() else: self.drop_tag_index() # Close and re-open database connection with given timeout. self.close() self._timeout = timeout self._sql # pylint: disable=pointless-statement @property def directory(self): """Cache directory.""" return self._directory @property def timeout(self): """SQLite connection timeout value in seconds.""" return self._timeout @property def disk(self): """Disk used for serialization.""" return self._disk @property def _con(self): # Check process ID to support process forking. If the process # ID changes, close the connection and update the process ID. local_pid = getattr(self._local, 'pid', None) pid = os.getpid() if local_pid != pid: self.close() self._local.pid = pid con = getattr(self._local, 'con', None) if con is None: con = self._local.con = sqlite3.connect( op.join(self._directory, DBNAME), timeout=self._timeout, isolation_level=None, ) # Some SQLite pragmas work on a per-connection basis so # query the Settings table and reset the pragmas. The # Settings table may not exist so catch and ignore the # OperationalError that may occur. try: select = 'SELECT key, value FROM Settings' settings = con.execute(select).fetchall() except sqlite3.OperationalError: pass else: for key, value in settings: if key.startswith('sqlite_'): self.reset(key, value, update=False) return con @property def _sql(self): return self._con.execute @property def _sql_retry(self): sql = self._sql # 2018-11-01 GrantJ - Some SQLite builds/versions handle # the SQLITE_BUSY return value and connection parameter # "timeout" differently. For a more reliable duration, # manually retry the statement for 60 seconds. Only used # by statements which modify the database and do not use # a transaction (like those in ``__init__`` or ``reset``). # See Issue #85 for and tests/issue_85.py for more details. 
def _execute_with_retry(statement, *args, **kwargs): start = time.time() while True: try: return sql(statement, *args, **kwargs) except sqlite3.OperationalError as exc: if str(exc) != 'database is locked': raise diff = time.time() - start if diff > 60: raise time.sleep(0.001) return _execute_with_retry @cl.contextmanager def transact(self, retry=False): """Context manager to perform a transaction by locking the cache. While the cache is locked, no other write operation is permitted. Transactions should therefore be as short as possible. Read and write operations performed in a transaction are atomic. Read operations may occur concurrent to a transaction. Transactions may be nested and may not be shared between threads. Raises :exc:`Timeout` error when database timeout occurs and `retry` is `False` (default). >>> cache = Cache() >>> with cache.transact(): # Atomically increment two keys. ... _ = cache.incr('total', 123.4) ... _ = cache.incr('count', 1) >>> with cache.transact(): # Atomically calculate average. ... 
average = cache['total'] / cache['count'] >>> average 123.4 :param bool retry: retry if database timeout occurs (default False) :return: context manager for use in `with` statement :raises Timeout: if database timeout occurs """ with self._transact(retry=retry): yield @cl.contextmanager def _transact(self, retry=False, filename=None): sql = self._sql filenames = [] _disk_remove = self._disk.remove tid = threading.get_ident() txn_id = self._txn_id if tid == txn_id: begin = False else: while True: try: sql('BEGIN IMMEDIATE') begin = True self._txn_id = tid break except sqlite3.OperationalError: if retry: continue if filename is not None: _disk_remove(filename) raise Timeout from None try: yield sql, filenames.append except BaseException: if begin: assert self._txn_id == tid self._txn_id = None sql('ROLLBACK') raise else: if begin: assert self._txn_id == tid self._txn_id = None sql('COMMIT') for name in filenames: if name is not None: _disk_remove(name) def set(self, key, value, expire=None, read=False, tag=None, retry=False): """Set `key` and `value` item in cache. When `read` is `True`, `value` should be a file-like object opened for reading in binary mode. Raises :exc:`Timeout` error when database timeout occurs and `retry` is `False` (default). :param key: key for item :param value: value for item :param float expire: seconds until item expires (default None, no expiry) :param bool read: read value as bytes from file (default False) :param str tag: text to associate with key (default None) :param bool retry: retry if database timeout occurs (default False) :return: True if item was set :raises Timeout: if database timeout occurs """ now = time.time() db_key, raw = self._disk.put(key) expire_time = None if expire is None else now + expire size, mode, filename, db_value = self._disk.store(value, read, key=key) columns = (expire_time, tag, size, mode, filename, db_value) # The order of SELECT, UPDATE, and INSERT is important below. 
# # Typical cache usage pattern is: # # value = cache.get(key) # if value is None: # value = expensive_calculation() # cache.set(key, value) # # Cache.get does not evict expired keys to avoid writes during lookups. # Commonly used/expired keys will therefore remain in the cache making # an UPDATE the preferred path. # # The alternative is to assume the key is not present by first trying # to INSERT and then handling the IntegrityError that occurs from # violating the UNIQUE constraint. This optimistic approach was # rejected based on the common cache usage pattern. # # INSERT OR REPLACE aka UPSERT is not used because the old filename may # need cleanup. with self._transact(retry, filename) as (sql, cleanup): rows = sql( 'SELECT rowid, filename FROM Cache' ' WHERE key = ? AND raw = ?', (db_key, raw), ).fetchall() if rows: ((rowid, old_filename),) = rows cleanup(old_filename) self._row_update(rowid, now, columns) else: self._row_insert(db_key, raw, now, columns) self._cull(now, sql, cleanup) return True def __setitem__(self, key, value): """Set corresponding `value` for `key` in cache. :param key: key for item :param value: value for item :return: corresponding value :raises KeyError: if key is not found """ self.set(key, value, retry=True) def _row_update(self, rowid, now, columns): sql = self._sql expire_time, tag, size, mode, filename, value = columns sql( 'UPDATE Cache SET' ' store_time = ?,' ' expire_time = ?,' ' access_time = ?,' ' access_count = ?,' ' tag = ?,' ' size = ?,' ' mode = ?,' ' filename = ?,' ' value = ?' 
            ' WHERE rowid = ?',
            (
                now,  # store_time
                expire_time,
                now,  # access_time
                0,  # access_count
                tag,
                size,
                mode,
                filename,
                value,
                rowid,
            ),
        )

    def _row_insert(self, key, raw, now, columns):
        """Insert new Cache row.

        `columns` is the tuple (expire_time, tag, size, mode, filename,
        value) as produced by callers from Disk.store results.
        """
        sql = self._sql
        expire_time, tag, size, mode, filename, value = columns
        sql(
            'INSERT INTO Cache('
            ' key, raw, store_time, expire_time, access_time,'
            ' access_count, tag, size, mode, filename, value'
            ') VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)',
            (
                key,
                raw,
                now,  # store_time
                expire_time,
                now,  # access_time
                0,  # access_count
                tag,
                size,
                mode,
                filename,
                value,
            ),
        )

    def _cull(self, now, sql, cleanup, limit=None):
        """Evict at most `cull_limit` items: expired rows first, then rows
        chosen by the eviction policy while volume exceeds the size limit.

        Must be called within a transaction; `sql` and `cleanup` come from
        the enclosing `_transact` context.
        """
        cull_limit = self.cull_limit if limit is None else limit

        if cull_limit == 0:
            return

        # Evict expired keys.

        select_expired_template = (
            'SELECT %s FROM Cache'
            ' WHERE expire_time IS NOT NULL AND expire_time < ?'
            ' ORDER BY expire_time LIMIT ?'
        )

        select_expired = select_expired_template % 'filename'
        rows = sql(select_expired, (now, cull_limit)).fetchall()

        if rows:
            # Delete rows by re-running the same SELECT as a subquery so the
            # DELETE matches exactly the rows whose filenames were fetched.
            delete_expired = 'DELETE FROM Cache WHERE rowid IN (%s)' % (
                select_expired_template % 'rowid'
            )
            sql(delete_expired, (now, cull_limit))

            for (filename,) in rows:
                cleanup(filename)

            cull_limit -= len(rows)

            if cull_limit == 0:
                return

        # Evict keys by policy.

        select_policy = EVICTION_POLICY[self.eviction_policy]['cull']

        if select_policy is None or self.volume() < self.size_limit:
            return

        select_filename = select_policy.format(fields='filename', now=now)
        rows = sql(select_filename, (cull_limit,)).fetchall()

        if rows:
            delete = 'DELETE FROM Cache WHERE rowid IN (%s)' % (
                select_policy.format(fields='rowid', now=now)
            )
            sql(delete, (cull_limit,))

            for (filename,) in rows:
                cleanup(filename)

    def touch(self, key, expire=None, retry=False):
        """Touch `key` in cache and update `expire` time.

        Raises :exc:`Timeout` error when database timeout occurs and `retry`
        is `False` (default).

        :param key: key for item
        :param float expire: seconds until item expires
            (default None, no expiry)
        :param bool retry: retry if database timeout occurs (default False)
        :return: True if key was touched
        :raises Timeout: if database timeout occurs

        """
        now = time.time()
        db_key, raw = self._disk.put(key)
        expire_time = None if expire is None else now + expire

        with self._transact(retry) as (sql, _):
            rows = sql(
                'SELECT rowid, expire_time FROM Cache'
                ' WHERE key = ? AND raw = ?',
                (db_key, raw),
            ).fetchall()

            if rows:
                ((rowid, old_expire_time),) = rows

                # Only touch items that have not already expired.
                if old_expire_time is None or old_expire_time > now:
                    sql(
                        'UPDATE Cache SET expire_time = ? WHERE rowid = ?',
                        (expire_time, rowid),
                    )
                    return True

        return False

    def add(self, key, value, expire=None, read=False, tag=None, retry=False):
        """Add `key` and `value` item to cache.

        Similar to `set`, but only add to cache if key not present.

        Operation is atomic. Only one concurrent add operation for a given key
        will succeed.

        When `read` is `True`, `value` should be a file-like object opened
        for reading in binary mode.

        Raises :exc:`Timeout` error when database timeout occurs and `retry`
        is `False` (default).

        :param key: key for item
        :param value: value for item
        :param float expire: seconds until the key expires
            (default None, no expiry)
        :param bool read: read value as bytes from file (default False)
        :param str tag: text to associate with key (default None)
        :param bool retry: retry if database timeout occurs (default False)
        :return: True if item was added
        :raises Timeout: if database timeout occurs

        """
        now = time.time()
        db_key, raw = self._disk.put(key)
        expire_time = None if expire is None else now + expire
        # Value is stored to disk before the transaction; if add loses the
        # race below, the stored file is discarded via cleanup.
        size, mode, filename, db_value = self._disk.store(value, read, key=key)
        columns = (expire_time, tag, size, mode, filename, db_value)

        with self._transact(retry, filename) as (sql, cleanup):
            rows = sql(
                'SELECT rowid, filename, expire_time FROM Cache'
                ' WHERE key = ?
        )

        with self._transact(retry) as (sql, cleanup):
            rows = sql(select, (db_key, raw)).fetchall()

            if not rows:
                # Missing key: insert `default + delta` unless default is
                # None, in which case a missing key is an error.
                if default is None:
                    raise KeyError(key)

                value = default + delta
                columns = (None, None) + self._disk.store(
                    value, False, key=key
                )
                self._row_insert(db_key, raw, now, columns)
                self._cull(now, sql, cleanup)
                return value

            ((rowid, expire_time, filename, value),) = rows

            if expire_time is not None and expire_time < now:
                # Expired item: treat as missing and replace the row in place.
                if default is None:
                    raise KeyError(key)

                value = default + delta
                columns = (None, None) + self._disk.store(
                    value, False, key=key
                )
                self._row_update(rowid, now, columns)
                self._cull(now, sql, cleanup)
                cleanup(filename)
                return value

            value += delta
            columns = 'store_time = ?, value = ?'
            update_column = EVICTION_POLICY[self.eviction_policy]['get']

            if update_column is not None:
                # Also refresh access bookkeeping per the eviction policy.
                columns += ', ' + update_column.format(now=now)

            update = 'UPDATE Cache SET %s WHERE rowid = ?' % columns
            sql(update, (now, value, rowid))

            return value

    def decr(self, key, delta=1, default=0, retry=False):
        """Decrement value by delta for item with key.

        If key is missing and default is None then raise KeyError. Else if key
        is missing and default is not None then use default for value.

        Operation is atomic. All concurrent decrement operations will be
        counted individually.

        Unlike Memcached, negative values are supported. Value may be
        decremented below zero.

        Assumes value may be stored in a SQLite column. Most builds that
        target machines with 64-bit pointer widths will support 64-bit signed
        integers.

        Raises :exc:`Timeout` error when database timeout occurs and `retry`
        is `False` (default).

        :param key: key for item
        :param int delta: amount to decrement (default 1)
        :param int default: value if key is missing (default 0)
        :param bool retry: retry if database timeout occurs (default False)
        :return: new value for item
        :raises KeyError: if key is not found and default is None
        :raises Timeout: if database timeout occurs

        """
        # Delegate to incr with a negated delta.
        return self.incr(key, -delta, default, retry)

    def get(
        self,
        key,
        default=None,
        read=False,
        expire_time=False,
        tag=False,
        retry=False,
    ):
        """Retrieve value from cache. If `key` is missing, return `default`.

        Raises :exc:`Timeout` error when database timeout occurs and `retry`
        is `False` (default).

        :param key: key for item
        :param default: value to return if key is missing (default None)
        :param bool read: if True, return file handle to value
            (default False)
        :param bool expire_time: if True, return expire_time in tuple
            (default False)
        :param bool tag: if True, return tag in tuple (default False)
        :param bool retry: retry if database timeout occurs (default False)
        :return: value for item or default if key not found
        :raises Timeout: if database timeout occurs

        """
        db_key, raw = self._disk.put(key)
        update_column = EVICTION_POLICY[self.eviction_policy]['get']
        select = (
            'SELECT rowid, expire_time, tag, mode, filename, value'
            ' FROM Cache WHERE key = ? AND raw = ?'
            ' AND (expire_time IS NULL OR expire_time > ?)'
        )

        if expire_time and tag:
            default = (default, None, None)
        elif expire_time or tag:
            default = (default, None)

        if not self.statistics and update_column is None:
            # Fast path, no transaction necessary.
            # No statistics to record and no access metadata to update, so a
            # plain read suffices.

            rows = self._sql(select, (db_key, raw, time.time())).fetchall()

            if not rows:
                return default

            ((rowid, db_expire_time, db_tag, mode, filename, db_value),) = rows

            try:
                value = self._disk.fetch(mode, filename, db_value, read)
            except IOError:
                # Key was deleted before we could retrieve result.
                return default

        else:  # Slow path, transaction required.
            cache_hit = (
                'UPDATE Settings SET value = value + 1 WHERE key = "hits"'
            )
            cache_miss = (
                'UPDATE Settings SET value = value + 1 WHERE key = "misses"'
            )

            with self._transact(retry) as (sql, _):
                rows = sql(select, (db_key, raw, time.time())).fetchall()

                if not rows:
                    if self.statistics:
                        sql(cache_miss)
                    return default

                (
                    (rowid, db_expire_time, db_tag, mode, filename, db_value),
                ) = rows  # noqa: E127

                try:
                    value = self._disk.fetch(mode, filename, db_value, read)
                except IOError:
                    # Key was deleted before we could retrieve result.
                    if self.statistics:
                        sql(cache_miss)
                    return default

                if self.statistics:
                    sql(cache_hit)

                now = time.time()
                update = 'UPDATE Cache SET %s WHERE rowid = ?'

                if update_column is not None:
                    # Record the access per the eviction policy (e.g. update
                    # access_time/access_count) inside the same transaction.
                    sql(update % update_column.format(now=now), (rowid,))

        if expire_time and tag:
            return (value, db_expire_time, db_tag)
        elif expire_time:
            return (value, db_expire_time)
        elif tag:
            return (value, db_tag)
        else:
            return value

    def __getitem__(self, key):
        """Return corresponding value for `key` from cache.

        :param key: key matching item
        :return: corresponding value
        :raises KeyError: if key is not found

        """
        # ENOVAL sentinel distinguishes "missing" from a stored None value.
        value = self.get(key, default=ENOVAL, retry=True)

        if value is ENOVAL:
            raise KeyError(key)

        return value

    def read(self, key, retry=False):
        """Return file handle value corresponding to `key` from cache.

        Raises :exc:`Timeout` error when database timeout occurs and `retry`
        is `False` (default).

        :param key: key matching item
        :param bool retry: retry if database timeout occurs (default False)
        :return: file open for reading in binary mode
        :raises KeyError: if key is not found
        :raises Timeout: if database timeout occurs

        """
        handle = self.get(key, default=ENOVAL, read=True, retry=retry)
        if handle is ENOVAL:
            raise KeyError(key)
        return handle

    def __contains__(self, key):
        """Return `True` if `key` matching item is found in cache.

        :param key: key matching item
        :return: True if key matching item

        """
        sql = self._sql
        db_key, raw = self._disk.put(key)
        select = (
            'SELECT rowid FROM Cache'
            ' WHERE key = ? AND raw = ?'
            ' AND (expire_time IS NULL OR expire_time > ?)'
        )

        rows = sql(select, (db_key, raw, time.time())).fetchall()

        return bool(rows)

    def pop(
        self, key, default=None, expire_time=False, tag=False, retry=False
    ):  # noqa: E501
        """Remove corresponding item for `key` from cache and return value.

        If `key` is missing, return `default`.

        Operation is atomic. Concurrent operations will be serialized.

        Raises :exc:`Timeout` error when database timeout occurs and `retry`
        is `False` (default).

        :param key: key for item
        :param default: value to return if key is missing (default None)
        :param bool expire_time: if True, return expire_time in tuple
            (default False)
        :param bool tag: if True, return tag in tuple (default False)
        :param bool retry: retry if database timeout occurs (default False)
        :return: value for item or default if key not found
        :raises Timeout: if database timeout occurs

        """
        db_key, raw = self._disk.put(key)
        select = (
            'SELECT rowid, expire_time, tag, mode, filename, value'
            ' FROM Cache WHERE key = ? AND raw = ?'
            ' AND (expire_time IS NULL OR expire_time > ?)'
        )

        if expire_time and tag:
            default = default, None, None
        elif expire_time or tag:
            default = default, None

        with self._transact(retry) as (sql, _):
            rows = sql(select, (db_key, raw, time.time())).fetchall()

            if not rows:
                return default

            ((rowid, db_expire_time, db_tag, mode, filename, db_value),) = rows

            sql('DELETE FROM Cache WHERE rowid = ?', (rowid,))

        # Fetch the value outside the transaction; the row is already gone.
        try:
            value = self._disk.fetch(mode, filename, db_value, False)
        except IOError:
            # Key was deleted before we could retrieve result.
            return default
        finally:
            # Remove the backing file regardless of fetch success.
            if filename is not None:
                self._disk.remove(filename)

        if expire_time and tag:
            return value, db_expire_time, db_tag
        elif expire_time:
            return value, db_expire_time
        elif tag:
            return value, db_tag
        else:
            return value

    def __delitem__(self, key, retry=True):
        """Delete corresponding item for `key` from cache.

        Raises :exc:`Timeout` error when database timeout occurs and `retry`
        is `False` (default `True`).

        :param key: key matching item
        :param bool retry: retry if database timeout occurs (default True)
        :raises KeyError: if key is not found
        :raises Timeout: if database timeout occurs

        """
        db_key, raw = self._disk.put(key)

        with self._transact(retry) as (sql, cleanup):
            rows = sql(
                'SELECT rowid, filename FROM Cache'
                ' WHERE key = ? AND raw = ?'
                ' AND (expire_time IS NULL OR expire_time > ?)',
                (db_key, raw, time.time()),
            ).fetchall()

            if not rows:
                raise KeyError(key)

            ((rowid, filename),) = rows
            sql('DELETE FROM Cache WHERE rowid = ?', (rowid,))
            cleanup(filename)

            return True

    def delete(self, key, retry=False):
        """Delete corresponding item for `key` from cache.

        Missing keys are ignored.

        Raises :exc:`Timeout` error when database timeout occurs and `retry`
        is `False` (default).

        :param key: key matching item
        :param bool retry: retry if database timeout occurs (default False)
        :return: True if item was deleted
        :raises Timeout: if database timeout occurs

        """
        try:
            return self.__delitem__(key, retry=retry)
        except KeyError:
            return False

    def push(
        self,
        value,
        prefix=None,
        side='back',
        expire=None,
        read=False,
        tag=None,
        retry=False,
    ):
        """Push `value` onto `side` of queue identified by `prefix` in cache.

        When prefix is None, integer keys are used. Otherwise, string keys are
        used in the format "prefix-integer". Integer starts at 500 trillion.

        Defaults to pushing value on back of queue. Set side to 'front' to
        push value on front of queue. Side must be one of 'back' or 'front'.

        Operation is atomic. Concurrent operations will be serialized.

        When `read` is `True`, `value` should be a file-like object opened
        for reading in binary mode.

        Raises :exc:`Timeout` error when database timeout occurs and `retry`
        is `False` (default).

        See also `Cache.pull`.

        >>> cache = Cache()
        >>> print(cache.push('first value'))
        500000000000000
        >>> cache.get(500000000000000)
        'first value'
        >>> print(cache.push('second value'))
        500000000000001
        >>> print(cache.push('third value', side='front'))
        499999999999999
        >>> cache.push(1234, prefix='userids')
        'userids-500000000000000'

        :param value: value for item
        :param str prefix: key prefix (default None, key is integer)
        :param str side: either 'back' or 'front' (default 'back')
        :param float expire: seconds until the key expires
            (default None, no expiry)
        :param bool read: read value as bytes from file (default False)
        :param str tag: text to associate with key (default None)
        :param bool retry: retry if database timeout occurs (default False)
        :return: key for item in cache
        :raises Timeout: if database timeout occurs

        """
        if prefix is None:
            min_key = 0
            max_key = 999999999999999
        else:
            min_key = prefix + '-000000000000000'
            max_key = prefix + '-999999999999999'

        now = time.time()
        raw = True
        expire_time = None if expire is None else now + expire
        size, mode, filename, db_value = self._disk.store(value, read)
        columns = (expire_time, tag, size, mode, filename, db_value)
        # 'back' pushes after the largest key, 'front' before the smallest.
        order = {'back': 'DESC', 'front': 'ASC'}
        select = (
            'SELECT key FROM Cache'
            ' WHERE ? < key AND key < ? AND raw = ?'
            ' ORDER BY key %s LIMIT 1'
        ) % order[side]

        with self._transact(retry, filename) as (sql, cleanup):
            rows = sql(select, (min_key, max_key, raw)).fetchall()

            if rows:
                ((key,),) = rows

                if prefix is not None:
                    # Strip "prefix-" and parse the zero-padded integer.
                    num = int(key[(key.rfind('-') + 1) :])
                else:
                    num = key

                if side == 'back':
                    num += 1
                else:
                    assert side == 'front'
                    num -= 1
            else:
                num = 500000000000000

            if prefix is not None:
                # Zero-pad to 15 digits so string keys sort numerically.
                db_key = '{0}-{1:015d}'.format(prefix, num)
            else:
                db_key = num

            self._row_insert(db_key, raw, now, columns)
            self._cull(now, sql, cleanup)

            return db_key

    def pull(
        self,
        prefix=None,
        default=(None, None),
        side='front',
        expire_time=False,
        tag=False,
        retry=False,
    ):
        """Pull key and value item pair from `side` of queue in cache.

        When prefix is None, integer keys are used. Otherwise, string keys are
        used in the format "prefix-integer". Integer starts at 500 trillion.

        If queue is empty, return default.

        Defaults to pulling key and value item pairs from front of queue. Set
        side to 'back' to pull from back of queue. Side must be one of 'front'
        or 'back'.

        Operation is atomic. Concurrent operations will be serialized.

        Raises :exc:`Timeout` error when database timeout occurs and `retry`
        is `False` (default).

        See also `Cache.push` and `Cache.get`.

        >>> cache = Cache()
        >>> cache.pull()
        (None, None)
        >>> for letter in 'abc':
        ...     print(cache.push(letter))
        500000000000000
        500000000000001
        500000000000002
        >>> key, value = cache.pull()
        >>> print(key)
        500000000000000
        >>> value
        'a'
        >>> _, value = cache.pull(side='back')
        >>> value
        'c'
        >>> cache.push(1234, 'userids')
        'userids-500000000000000'
        >>> _, value = cache.pull('userids')
        >>> value
        1234

        :param str prefix: key prefix (default None, key is integer)
        :param default: value to return if key is missing
            (default (None, None))
        :param str side: either 'front' or 'back' (default 'front')
        :param bool expire_time: if True, return expire_time in tuple
            (default False)
        :param bool tag: if True, return tag in tuple (default False)
        :param bool retry: retry if database timeout occurs (default False)
        :return: key and value item pair or default if queue is empty
        :raises Timeout: if database timeout occurs

        """
        # Caution: Nearly identical code exists in Cache.peek
        if prefix is None:
            min_key = 0
            max_key = 999999999999999
        else:
            min_key = prefix + '-000000000000000'
            max_key = prefix + '-999999999999999'

        order = {'front': 'ASC', 'back': 'DESC'}
        select = (
            'SELECT rowid, key, expire_time, tag, mode, filename, value'
            ' FROM Cache WHERE ? < key AND key < ? AND raw = 1'
            ' ORDER BY key %s LIMIT 1'
        ) % order[side]

        if expire_time and tag:
            default = default, None, None
        elif expire_time or tag:
            default = default, None

        # Outer loop retries after a concurrent delete; inner loop skips
        # expired items.
        while True:
            while True:
                with self._transact(retry) as (sql, cleanup):
                    rows = sql(select, (min_key, max_key)).fetchall()

                    if not rows:
                        return default

                    (
                        (rowid, key, db_expire, db_tag, mode, name, db_value),
                    ) = rows

                    sql('DELETE FROM Cache WHERE rowid = ?', (rowid,))

                    if db_expire is not None and db_expire < time.time():
                        cleanup(name)
                    else:
                        break

            try:
                value = self._disk.fetch(mode, name, db_value, False)
            except IOError:
                # Key was deleted before we could retrieve result.
                continue
            finally:
                # Row is already deleted; always remove the backing file.
                if name is not None:
                    self._disk.remove(name)

            break

        if expire_time and tag:
            return (key, value), db_expire, db_tag
        elif expire_time:
            return (key, value), db_expire
        elif tag:
            return (key, value), db_tag
        else:
            return key, value

    def peek(
        self,
        prefix=None,
        default=(None, None),
        side='front',
        expire_time=False,
        tag=False,
        retry=False,
    ):
        """Peek at key and value item pair from `side` of queue in cache.

        When prefix is None, integer keys are used. Otherwise, string keys are
        used in the format "prefix-integer". Integer starts at 500 trillion.

        If queue is empty, return default.

        Defaults to peeking at key and value item pairs from front of queue.
        Set side to 'back' to pull from back of queue. Side must be one of
        'front' or 'back'.

        Expired items are deleted from cache. Operation is atomic. Concurrent
        operations will be serialized.

        Raises :exc:`Timeout` error when database timeout occurs and `retry`
        is `False` (default).

        See also `Cache.pull` and `Cache.push`.

        >>> cache = Cache()
        >>> for letter in 'abc':
        ...     print(cache.push(letter))
        500000000000000
        500000000000001
        500000000000002
        >>> key, value = cache.peek()
        >>> print(key)
        500000000000000
        >>> value
        'a'
        >>> key, value = cache.peek(side='back')
        >>> print(key)
        500000000000002
        >>> value
        'c'

        :param str prefix: key prefix (default None, key is integer)
        :param default: value to return if key is missing
            (default (None, None))
        :param str side: either 'front' or 'back' (default 'front')
        :param bool expire_time: if True, return expire_time in tuple
            (default False)
        :param bool tag: if True, return tag in tuple (default False)
        :param bool retry: retry if database timeout occurs (default False)
        :return: key and value item pair or default if queue is empty
        :raises Timeout: if database timeout occurs

        """
        # Caution: Nearly identical code exists in Cache.pull
        if prefix is None:
            min_key = 0
            max_key = 999999999999999
        else:
            min_key = prefix + '-000000000000000'
            max_key = prefix + '-999999999999999'

        order = {'front': 'ASC', 'back': 'DESC'}
        select = (
            'SELECT rowid, key, expire_time, tag, mode, filename, value'
            ' FROM Cache WHERE ? < key AND key < ? AND raw = 1'
            ' ORDER BY key %s LIMIT 1'
        ) % order[side]

        if expire_time and tag:
            default = default, None, None
        elif expire_time or tag:
            default = default, None

        # Unlike pull, the row is only deleted when it has expired.
        while True:
            while True:
                with self._transact(retry) as (sql, cleanup):
                    rows = sql(select, (min_key, max_key)).fetchall()

                    if not rows:
                        return default

                    (
                        (rowid, key, db_expire, db_tag, mode, name, db_value),
                    ) = rows

                    if db_expire is not None and db_expire < time.time():
                        sql('DELETE FROM Cache WHERE rowid = ?', (rowid,))
                        cleanup(name)
                    else:
                        break

            try:
                value = self._disk.fetch(mode, name, db_value, False)
            except IOError:
                # Key was deleted before we could retrieve result.
                continue
            finally:
                if name is not None:
                    self._disk.remove(name)

            break

        if expire_time and tag:
            return (key, value), db_expire, db_tag
        elif expire_time:
            return (key, value), db_expire
        elif tag:
            return (key, value), db_tag
        else:
            return key, value

    def peekitem(self, last=True, expire_time=False, tag=False, retry=False):
        """Peek at key and value item pair in cache based on iteration order.

        Expired items are deleted from cache. Operation is atomic. Concurrent
        operations will be serialized.

        Raises :exc:`Timeout` error when database timeout occurs and `retry`
        is `False` (default).

        >>> cache = Cache()
        >>> for num, letter in enumerate('abc'):
        ...     cache[letter] = num
        >>> cache.peekitem()
        ('c', 2)
        >>> cache.peekitem(last=False)
        ('a', 0)

        :param bool last: last item in iteration order (default True)
        :param bool expire_time: if True, return expire_time in tuple
            (default False)
        :param bool tag: if True, return tag in tuple (default False)
        :param bool retry: retry if database timeout occurs (default False)
        :return: key and value item pair
        :raises KeyError: if cache is empty
        :raises Timeout: if database timeout occurs

        """
        # Iteration order is rowid order; `last` selects ASC vs DESC.
        order = ('ASC', 'DESC')
        select = (
            'SELECT rowid, key, raw, expire_time, tag, mode, filename, value'
            ' FROM Cache ORDER BY rowid %s LIMIT 1'
        ) % order[last]

        while True:
            while True:
                with self._transact(retry) as (sql, cleanup):
                    rows = sql(select).fetchall()

                    if not rows:
                        raise KeyError('dictionary is empty')

                    (
                        (
                            rowid,
                            db_key,
                            raw,
                            db_expire,
                            db_tag,
                            mode,
                            name,
                            db_value,
                        ),
                    ) = rows

                    if db_expire is not None and db_expire < time.time():
                        sql('DELETE FROM Cache WHERE rowid = ?', (rowid,))
                        cleanup(name)
                    else:
                        break

            key = self._disk.get(db_key, raw)

            try:
                value = self._disk.fetch(mode, name, db_value, False)
            except IOError:
                # Key was deleted before we could retrieve result.
                continue

            break

        if expire_time and tag:
            return (key, value), db_expire, db_tag
        elif expire_time:
            return (key, value), db_expire
        elif tag:
            return (key, value), db_tag
        else:
            return key, value

    def memoize(
        self, name=None, typed=False, expire=None, tag=None, ignore=()
    ):
        """Memoizing cache decorator.

        Decorator to wrap callable with memoizing function using cache.
        Repeated calls with the same arguments will lookup result in cache and
        avoid function evaluation.

        If name is set to None (default), the callable name will be determined
        automatically.

        When expire is set to zero, function results will not be set in the
        cache. Cache lookups still occur, however. Read
        :doc:`case-study-landing-page-caching` for example usage.

        If typed is set to True, function arguments of different types will be
        cached separately. For example, f(3) and f(3.0) will be treated as
        distinct calls with distinct results.

        The original underlying function is accessible through the __wrapped__
        attribute. This is useful for introspection, for bypassing the cache,
        or for rewrapping the function with a different cache.

        >>> from diskcache import Cache
        >>> cache = Cache()
        >>> @cache.memoize(expire=1, tag='fib')
        ... def fibonacci(number):
        ...     if number == 0:
        ...         return 0
        ...     elif number == 1:
        ...         return 1
        ...     else:
        ...         return fibonacci(number - 1) + fibonacci(number - 2)
        >>> print(fibonacci(100))
        354224848179261915075

        An additional `__cache_key__` attribute can be used to generate the
        cache key used for the given arguments.

        >>> key = fibonacci.__cache_key__(100)
        >>> print(cache[key])
        354224848179261915075

        Remember to call memoize when decorating a callable. If you forget,
        then a TypeError will occur. Note the lack of parenthenses after
        memoize below:

        >>> @cache.memoize
        ... def test():
        ...     pass
        Traceback (most recent call last):
            ...
        TypeError: name cannot be callable

        :param cache: cache to store callable arguments and return values
        :param str name: name given for callable (default None, automatic)
        :param bool typed: cache different types separately (default False)
        :param float expire: seconds until arguments expire
            (default None, no expiry)
        :param str tag: text to associate with arguments (default None)
        :param set ignore: positional or keyword args to ignore (default ())
        :return: callable decorator

        """
        # Caution: Nearly identical code exists in DjangoCache.memoize
        if callable(name):
            # Guard against `@cache.memoize` used without parentheses.
            raise TypeError('name cannot be callable')

        def decorator(func):
            """Decorator created by memoize() for callable `func`."""
            base = (full_name(func),) if name is None else (name,)

            @ft.wraps(func)
            def wrapper(*args, **kwargs):
                """Wrapper for callable to cache arguments and return values."""
                key = wrapper.__cache_key__(*args, **kwargs)
                result = self.get(key, default=ENOVAL, retry=True)

                if result is ENOVAL:
                    result = func(*args, **kwargs)
                    # expire == 0 means "lookup but never store".
                    if expire is None or expire > 0:
                        self.set(key, result, expire, tag=tag, retry=True)

                return result

            def __cache_key__(*args, **kwargs):
                """Make key for cache given function arguments."""
                return args_to_key(base, args, kwargs, typed, ignore)

            wrapper.__cache_key__ = __cache_key__
            return wrapper

        return decorator

    def check(self, fix=False, retry=False):
        """Check database and file system consistency.

        Intended for use in testing and post-mortem error analysis.

        While checking the Cache table for consistency, a writer lock is held
        on the database. The lock blocks other cache clients from writing to
        the database. For caches with many file references, the lock may be
        held for a long time. For example, local benchmarking shows that a
        cache with 1,000 file references takes ~60ms to check.

        Raises :exc:`Timeout` error when database timeout occurs and `retry`
        is `False` (default).

        :param bool fix: correct inconsistencies
        :param bool retry: retry if database timeout occurs (default False)
        :return: list of warnings
        :raises Timeout: if database timeout occurs

        """
        # pylint: disable=access-member-before-definition,W0201
        with warnings.catch_warnings(record=True) as warns:
            sql = self._sql

            # Check integrity of database.

            rows = sql('PRAGMA integrity_check').fetchall()

            if len(rows) != 1 or rows[0][0] != 'ok':
                for (message,) in rows:
                    warnings.warn(message)

            if fix:
                sql('VACUUM')

            with self._transact(retry) as (sql, _):
                # Check Cache.filename against file system.

                filenames = set()
                select = (
                    'SELECT rowid, size, filename FROM Cache'
                    ' WHERE filename IS NOT NULL'
                )

                rows = sql(select).fetchall()

                for rowid, size, filename in rows:
                    full_path = op.join(self._directory, filename)
                    filenames.add(full_path)

                    if op.exists(full_path):
                        real_size = op.getsize(full_path)

                        if size != real_size:
                            message = 'wrong file size: %s, %d != %d'
                            args = full_path, real_size, size
                            warnings.warn(message % args)

                            if fix:
                                sql(
                                    'UPDATE Cache SET size = ?'
                                    ' WHERE rowid = ?',
                                    (real_size, rowid),
                                )

                        continue

                    warnings.warn('file not found: %s' % full_path)

                    if fix:
                        sql('DELETE FROM Cache WHERE rowid = ?', (rowid,))

                # Check file system against Cache.filename.

                for dirpath, _, files in os.walk(self._directory):
                    paths = [op.join(dirpath, filename) for filename in files]
                    error = set(paths) - filenames

                    for full_path in error:
                        # Never flag the SQLite database files themselves.
                        if DBNAME in full_path:
                            continue

                        message = 'unknown file: %s' % full_path
                        warnings.warn(message, UnknownFileWarning)

                        if fix:
                            os.remove(full_path)

                # Check for empty directories.

                for dirpath, dirs, files in os.walk(self._directory):
                    if not (dirs or files):
                        message = 'empty directory: %s' % dirpath
                        warnings.warn(message, EmptyDirWarning)

                        if fix:
                            os.rmdir(dirpath)

                # Check Settings.count against count of Cache rows.

                self.reset('count')
                ((count,),) = sql('SELECT COUNT(key) FROM Cache').fetchall()

                if self.count != count:
                    message = 'Settings.count != COUNT(Cache.key); %d != %d'
                    warnings.warn(message % (self.count, count))

                    if fix:
                        sql(
                            'UPDATE Settings SET value = ? WHERE key = ?',
                            (count, 'count'),
                        )

                # Check Settings.size against sum of Cache.size column.

                self.reset('size')
                select_size = 'SELECT COALESCE(SUM(size), 0) FROM Cache'
                ((size,),) = sql(select_size).fetchall()

                if self.size != size:
                    message = 'Settings.size != SUM(Cache.size); %d != %d'
                    warnings.warn(message % (self.size, size))

                    if fix:
                        sql(
                            'UPDATE Settings SET value = ? WHERE key =?',
                            (size, 'size'),
                        )

            return warns

    def create_tag_index(self):
        """Create tag index on cache database.

        It is better to initialize cache with `tag_index=True` than use this.

        :raises Timeout: if database timeout occurs

        """
        sql = self._sql
        sql('CREATE INDEX IF NOT EXISTS Cache_tag_rowid ON Cache(tag, rowid)')
        self.reset('tag_index', 1)

    def drop_tag_index(self):
        """Drop tag index on cache database.

        :raises Timeout: if database timeout occurs

        """
        sql = self._sql
        sql('DROP INDEX IF EXISTS Cache_tag_rowid')
        self.reset('tag_index', 0)

    def evict(self, tag, retry=False):
        """Remove items with matching `tag` from cache.

        Removing items is an iterative process. In each iteration, a subset of
        items is removed. Concurrent writes may occur between iterations.

        If a :exc:`Timeout` occurs, the first element of the exception's
        `args` attribute will be the number of items removed before the
        exception occurred.

        Raises :exc:`Timeout` error when database timeout occurs and `retry`
        is `False` (default).

        :param str tag: tag identifying items
        :param bool retry: retry if database timeout occurs (default False)
        :return: count of rows removed
        :raises Timeout: if database timeout occurs

        """
        select = (
            'SELECT rowid, filename FROM Cache'
            ' WHERE tag = ? AND rowid > ?'
            ' ORDER BY rowid LIMIT ?'
        )
        # args[1] (rowid cursor) is advanced by _select_delete via arg_index.
        args = [tag, 0, 100]
        return self._select_delete(select, args, arg_index=1, retry=retry)

    def expire(self, now=None, retry=False):
        """Remove expired items from cache.

        Removing items is an iterative process. In each iteration, a subset of
        items is removed. Concurrent writes may occur between iterations.

        If a :exc:`Timeout` occurs, the first element of the exception's
        `args` attribute will be the number of items removed before the
        exception occurred.

        Raises :exc:`Timeout` error when database timeout occurs and `retry`
        is `False` (default).

        :param float now: current time (default None, ``time.time()`` used)
        :param bool retry: retry if database timeout occurs (default False)
        :return: count of items removed
        :raises Timeout: if database timeout occurs

        """
        select = (
            'SELECT rowid, expire_time, filename FROM Cache'
            ' WHERE ? < expire_time AND expire_time < ?'
            ' ORDER BY expire_time LIMIT ?'
        )
        args = [0, now or time.time(), 100]
        return self._select_delete(select, args, row_index=1, retry=retry)

    def cull(self, retry=False):
        """Cull items from cache until volume is less than size limit.

        Removing items is an iterative process. In each iteration, a subset of
        items is removed. Concurrent writes may occur between iterations.

        If a :exc:`Timeout` occurs, the first element of the exception's
        `args` attribute will be the number of items removed before the
        exception occurred.

        Raises :exc:`Timeout` error when database timeout occurs and `retry`
        is `False` (default).

        :param bool retry: retry if database timeout occurs (default False)
        :return: count of items removed
        :raises Timeout: if database timeout occurs

        """
        now = time.time()

        # Remove expired items.

        count = self.expire(now)

        # Remove items by policy.
        select_policy = EVICTION_POLICY[self.eviction_policy]['cull']

        if select_policy is None:
            return 0

        select_filename = select_policy.format(fields='filename', now=now)

        try:
            # Delete in batches of 10 until the volume is under the limit.
            while self.volume() > self.size_limit:
                with self._transact(retry) as (sql, cleanup):
                    rows = sql(select_filename, (10,)).fetchall()

                    if not rows:
                        break

                    count += len(rows)
                    delete = (
                        'DELETE FROM Cache WHERE rowid IN (%s)'
                        % select_policy.format(fields='rowid', now=now)
                    )
                    sql(delete, (10,))

                    for (filename,) in rows:
                        cleanup(filename)
        except Timeout:
            # Report how many items were removed before the timeout.
            raise Timeout(count) from None

        return count

    def clear(self, retry=False):
        """Remove all items from cache.

        Removing items is an iterative process. In each iteration, a subset of
        items is removed. Concurrent writes may occur between iterations.

        If a :exc:`Timeout` occurs, the first element of the exception's
        `args` attribute will be the number of items removed before the
        exception occurred.

        Raises :exc:`Timeout` error when database timeout occurs and `retry`
        is `False` (default).

        :param bool retry: retry if database timeout occurs (default False)
        :return: count of rows removed
        :raises Timeout: if database timeout occurs

        """
        select = (
            'SELECT rowid, filename FROM Cache'
            ' WHERE rowid > ?'
            ' ORDER BY rowid LIMIT ?'
        )
        args = [0, 100]
        return self._select_delete(select, args, retry=retry)

    def _select_delete(
        self, select, args, row_index=0, arg_index=0, retry=False
    ):
        """Delete rows in batches using `select`, advancing args[arg_index]
        from row[row_index] as a cursor between iterations.

        On Timeout, re-raise with the count of rows removed so far.
        """
        count = 0
        delete = 'DELETE FROM Cache WHERE rowid IN (%s)'

        try:
            while True:
                with self._transact(retry) as (sql, cleanup):
                    rows = sql(select, args).fetchall()

                    if not rows:
                        break

                    count += len(rows)
                    sql(delete % ','.join(str(row[0]) for row in rows))

                    for row in rows:
                        args[arg_index] = row[row_index]
                        cleanup(row[-1])  # last column is always filename
        except Timeout:
            raise Timeout(count) from None

        return count

    def iterkeys(self, reverse=False):
        """Iterate Cache keys in database sort order.

        >>> cache = Cache()
        >>> for key in [4, 1, 3, 0, 2]:
        ...     cache[key] = key
        >>> list(cache.iterkeys())
        [0, 1, 2, 3, 4]
        >>> list(cache.iterkeys(reverse=True))
        [4, 3, 2, 1, 0]

        :param bool reverse: reverse sort order (default False)
        :return: iterator of Cache keys

        """
        sql = self._sql
        limit = 100
        _disk_get = self._disk.get

        # Keyset pagination on (key, raw) so iteration stays correct under
        # concurrent inserts and deletes.
        if reverse:
            select = (
                'SELECT key, raw FROM Cache'
                ' ORDER BY key DESC, raw DESC LIMIT 1'
            )
            iterate = (
                'SELECT key, raw FROM Cache'
                ' WHERE key = ? AND raw < ? OR key < ?'
                ' ORDER BY key DESC, raw DESC LIMIT ?'
            )
        else:
            select = (
                'SELECT key, raw FROM Cache'
                ' ORDER BY key ASC, raw ASC LIMIT 1'
            )
            iterate = (
                'SELECT key, raw FROM Cache'
                ' WHERE key = ? AND raw > ? OR key > ?'
                ' ORDER BY key ASC, raw ASC LIMIT ?'
            )

        row = sql(select).fetchall()

        if row:
            ((key, raw),) = row
        else:
            return

        yield _disk_get(key, raw)

        while True:
            rows = sql(iterate, (key, raw, key, limit)).fetchall()

            if not rows:
                break

            for key, raw in rows:
                yield _disk_get(key, raw)

    def _iter(self, ascending=True):
        """Generate keys in rowid order; yields once up front as a readiness
        signal before any key is produced (see __iter__/__reversed__).
        """
        sql = self._sql
        rows = sql('SELECT MAX(rowid) FROM Cache').fetchall()
        ((max_rowid,),) = rows
        yield  # Signal ready.

        if max_rowid is None:
            return

        bound = max_rowid + 1
        limit = 100
        _disk_get = self._disk.get
        rowid = 0 if ascending else bound
        select = (
            'SELECT rowid, key, raw FROM Cache'
            ' WHERE ? < rowid AND rowid < ?'
            ' ORDER BY rowid %s LIMIT ?'
        ) % ('ASC' if ascending else 'DESC')

        while True:
            if ascending:
                args = (rowid, bound, limit)
            else:
                args = (0, rowid, limit)

            rows = sql(select, args).fetchall()

            if not rows:
                break

            for rowid, key, raw in rows:
                yield _disk_get(key, raw)

    def __iter__(self):
        """Iterate keys in cache including expired items."""
        iterator = self._iter()
        next(iterator)  # Consume the readiness signal.
        return iterator

    def __reversed__(self):
        """Reverse iterate keys in cache including expired items."""
        iterator = self._iter(ascending=False)
        next(iterator)  # Consume the readiness signal.
        return iterator

    def stats(self, enable=True, reset=False):
        """Return cache statistics hits and misses.

        :param bool enable: enable collecting statistics (default True)
        :param bool reset: reset hits and misses to 0 (default False)
        :return: (hits, misses)

        """
        # pylint: disable=E0203,W0201
        result = (self.reset('hits'), self.reset('misses'))

        if reset:
            self.reset('hits', 0)
            self.reset('misses', 0)

        self.reset('statistics', enable)

        return result

    def volume(self):
        """Return estimated total size of cache on disk.

        :return: size in bytes

        """
        # Database pages plus the total size of value files on disk.
        ((page_count,),) = self._sql('PRAGMA page_count').fetchall()
        total_size = self._page_size * page_count + self.reset('size')
        return total_size

    def close(self):
        """Close database connection."""
        con = getattr(self._local, 'con', None)

        if con is None:
            return

        con.close()

        try:
            delattr(self._local, 'con')
        except AttributeError:
            pass

    def __enter__(self):
        # Create connection in thread.
        # pylint: disable=unused-variable
        connection = self._con  # noqa

        return self

    def __exit__(self, *exception):
        self.close()

    def __len__(self):
        """Count of items in cache including expired items."""
        return self.reset('count')

    def __getstate__(self):
        # Pickle support: only directory, timeout, and Disk type are needed
        # to reconstruct the cache via __init__.
        return (self.directory, self.timeout, type(self.disk))

    def __setstate__(self, state):
        self.__init__(*state)

    def reset(self, key, value=ENOVAL, update=True):
        """Reset `key` and `value` item from Settings table.

        Use `reset` to update the value of Cache settings correctly. Cache
        settings are stored in the Settings table of the SQLite database. If
        `update` is ``False`` then no attempt is made to update the database.

        If `value` is not given, it is reloaded from the Settings
        table. Otherwise, the Settings table is updated.

        Settings with the ``disk_`` prefix correspond to Disk
        attributes. Updating the value will change the unprefixed attribute on
        the associated Disk instance.

        Settings with the ``sqlite_`` prefix correspond to SQLite
        pragmas. Updating the value will execute the corresponding PRAGMA
        statement.

        SQLite PRAGMA statements may be executed before the Settings table
        exists in the database by setting `update` to ``False``.

        :param str key: Settings key for item
        :param value: value for item (optional)
        :param bool update: update database Settings table (default True)
        :return: updated value for item
        :raises Timeout: if database timeout occurs

        """
        sql = self._sql
        sql_retry = self._sql_retry

        if value is ENOVAL:
            # Reload the cached attribute value from the Settings table.
            select = 'SELECT value FROM Settings WHERE key = ?'
            ((value,),) = sql_retry(select, (key,)).fetchall()
            setattr(self, key, value)
            return value

        if update:
            statement = 'UPDATE Settings SET value = ? WHERE key = ?'
            sql_retry(statement, (value, key))

        if key.startswith('sqlite_'):
            pragma = key[7:]

            # 2016-02-17 GrantJ - PRAGMA and isolation_level=None
            # don't always play nicely together. Retry setting the
            # PRAGMA. I think some PRAGMA statements expect to
            # immediately take an EXCLUSIVE lock on the database. I
            # can't find any documentation for this but without the
            # retry, stress will intermittently fail with multiple
            # processes.

            # 2018-11-05 GrantJ - Avoid setting pragma values that
            # are already set. Pragma settings like auto_vacuum and
            # journal_mode can take a long time or may not work after
            # tables have been created.
start = time.time() while True: try: try: ((old_value,),) = sql( 'PRAGMA %s' % (pragma) ).fetchall() update = old_value != value except ValueError: update = True if update: sql('PRAGMA %s = %s' % (pragma, value)).fetchall() break except sqlite3.OperationalError as exc: if str(exc) != 'database is locked': raise diff = time.time() - start if diff > 60: raise time.sleep(0.001) elif key.startswith('disk_'): attr = key[5:] setattr(self._disk, attr, value) setattr(self, key, value) return value python-diskcache-5.4.0/diskcache/djangocache.py000066400000000000000000000372311416346170000215030ustar00rootroot00000000000000"""Django-compatible disk and file backed cache.""" from functools import wraps from django.core.cache.backends.base import BaseCache try: from django.core.cache.backends.base import DEFAULT_TIMEOUT except ImportError: # pragma: no cover # For older versions of Django simply use 300 seconds. DEFAULT_TIMEOUT = 300 from .core import ENOVAL, args_to_key, full_name from .fanout import FanoutCache class DjangoCache(BaseCache): """Django-compatible disk and file backed cache.""" def __init__(self, directory, params): """Initialize DjangoCache instance. :param str directory: cache directory :param dict params: cache parameters """ super().__init__(params) shards = params.get('SHARDS', 8) timeout = params.get('DATABASE_TIMEOUT', 0.010) options = params.get('OPTIONS', {}) self._cache = FanoutCache(directory, shards, timeout, **options) @property def directory(self): """Cache directory.""" return self._cache.directory def cache(self, name): """Return Cache with given `name` in subdirectory. :param str name: subdirectory name for Cache :return: Cache with given name """ return self._cache.cache(name) def deque(self, name): """Return Deque with given `name` in subdirectory. :param str name: subdirectory name for Deque :return: Deque with given name """ return self._cache.deque(name) def index(self, name): """Return Index with given `name` in subdirectory. 
:param str name: subdirectory name for Index :return: Index with given name """ return self._cache.index(name) def add( self, key, value, timeout=DEFAULT_TIMEOUT, version=None, read=False, tag=None, retry=True, ): """Set a value in the cache if the key does not already exist. If timeout is given, that timeout will be used for the key; otherwise the default cache timeout will be used. Return True if the value was stored, False otherwise. :param key: key for item :param value: value for item :param float timeout: seconds until the item expires (default 300 seconds) :param int version: key version number (default None, cache parameter) :param bool read: read value as bytes from file (default False) :param str tag: text to associate with key (default None) :param bool retry: retry if database timeout occurs (default True) :return: True if item was added """ # pylint: disable=arguments-differ key = self.make_key(key, version=version) timeout = self.get_backend_timeout(timeout=timeout) return self._cache.add(key, value, timeout, read, tag, retry) def get( self, key, default=None, version=None, read=False, expire_time=False, tag=False, retry=False, ): """Fetch a given key from the cache. If the key does not exist, return default, which itself defaults to None. 
:param key: key for item :param default: return value if key is missing (default None) :param int version: key version number (default None, cache parameter) :param bool read: if True, return file handle to value (default False) :param float expire_time: if True, return expire_time in tuple (default False) :param tag: if True, return tag in tuple (default False) :param bool retry: retry if database timeout occurs (default False) :return: value for item if key is found else default """ # pylint: disable=arguments-differ key = self.make_key(key, version=version) return self._cache.get(key, default, read, expire_time, tag, retry) def read(self, key, version=None): """Return file handle corresponding to `key` from Cache. :param key: Python key to retrieve :param int version: key version number (default None, cache parameter) :return: file open for reading in binary mode :raises KeyError: if key is not found """ key = self.make_key(key, version=version) return self._cache.read(key) def set( self, key, value, timeout=DEFAULT_TIMEOUT, version=None, read=False, tag=None, retry=True, ): """Set a value in the cache. If timeout is given, that timeout will be used for the key; otherwise the default cache timeout will be used. :param key: key for item :param value: value for item :param float timeout: seconds until the item expires (default 300 seconds) :param int version: key version number (default None, cache parameter) :param bool read: read value as bytes from file (default False) :param str tag: text to associate with key (default None) :param bool retry: retry if database timeout occurs (default True) :return: True if item was set """ # pylint: disable=arguments-differ key = self.make_key(key, version=version) timeout = self.get_backend_timeout(timeout=timeout) return self._cache.set(key, value, timeout, read, tag, retry) def touch(self, key, timeout=DEFAULT_TIMEOUT, version=None, retry=True): """Touch a key in the cache. 
If timeout is given, that timeout will be used for the key; otherwise the default cache timeout will be used. :param key: key for item :param float timeout: seconds until the item expires (default 300 seconds) :param int version: key version number (default None, cache parameter) :param bool retry: retry if database timeout occurs (default True) :return: True if key was touched """ # pylint: disable=arguments-differ key = self.make_key(key, version=version) timeout = self.get_backend_timeout(timeout=timeout) return self._cache.touch(key, timeout, retry) def pop( self, key, default=None, version=None, expire_time=False, tag=False, retry=True, ): """Remove corresponding item for `key` from cache and return value. If `key` is missing, return `default`. Operation is atomic. Concurrent operations will be serialized. :param key: key for item :param default: return value if key is missing (default None) :param int version: key version number (default None, cache parameter) :param float expire_time: if True, return expire_time in tuple (default False) :param tag: if True, return tag in tuple (default False) :param bool retry: retry if database timeout occurs (default True) :return: value for item if key is found else default """ key = self.make_key(key, version=version) return self._cache.pop(key, default, expire_time, tag, retry) def delete(self, key, version=None, retry=True): """Delete a key from the cache, failing silently. :param key: key for item :param int version: key version number (default None, cache parameter) :param bool retry: retry if database timeout occurs (default True) :return: True if item was deleted """ # pylint: disable=arguments-differ key = self.make_key(key, version=version) return self._cache.delete(key, retry) def incr(self, key, delta=1, version=None, default=None, retry=True): """Increment value by delta for item with key. If key is missing and default is None then raise KeyError. 
Else if key is missing and default is not None then use default for value. Operation is atomic. All concurrent increment operations will be counted individually. Assumes value may be stored in a SQLite column. Most builds that target machines with 64-bit pointer widths will support 64-bit signed integers. :param key: key for item :param int delta: amount to increment (default 1) :param int version: key version number (default None, cache parameter) :param int default: value if key is missing (default None) :param bool retry: retry if database timeout occurs (default True) :return: new value for item on success else None :raises ValueError: if key is not found and default is None """ # pylint: disable=arguments-differ key = self.make_key(key, version=version) try: return self._cache.incr(key, delta, default, retry) except KeyError: raise ValueError("Key '%s' not found" % key) from None def decr(self, key, delta=1, version=None, default=None, retry=True): """Decrement value by delta for item with key. If key is missing and default is None then raise KeyError. Else if key is missing and default is not None then use default for value. Operation is atomic. All concurrent decrement operations will be counted individually. Unlike Memcached, negative values are supported. Value may be decremented below zero. Assumes value may be stored in a SQLite column. Most builds that target machines with 64-bit pointer widths will support 64-bit signed integers. 
:param key: key for item :param int delta: amount to decrement (default 1) :param int version: key version number (default None, cache parameter) :param int default: value if key is missing (default None) :param bool retry: retry if database timeout occurs (default True) :return: new value for item on success else None :raises ValueError: if key is not found and default is None """ # pylint: disable=arguments-differ return self.incr(key, -delta, version, default, retry) def has_key(self, key, version=None): """Returns True if the key is in the cache and has not expired. :param key: key for item :param int version: key version number (default None, cache parameter) :return: True if key is found """ key = self.make_key(key, version=version) return key in self._cache def expire(self): """Remove expired items from cache. :return: count of items removed """ return self._cache.expire() def stats(self, enable=True, reset=False): """Return cache statistics hits and misses. :param bool enable: enable collecting statistics (default True) :param bool reset: reset hits and misses to 0 (default False) :return: (hits, misses) """ return self._cache.stats(enable=enable, reset=reset) def create_tag_index(self): """Create tag index on cache database. Better to initialize cache with `tag_index=True` than use this. :raises Timeout: if database timeout occurs """ self._cache.create_tag_index() def drop_tag_index(self): """Drop tag index on cache database. :raises Timeout: if database timeout occurs """ self._cache.drop_tag_index() def evict(self, tag): """Remove items with matching `tag` from cache. :param str tag: tag identifying items :return: count of items removed """ return self._cache.evict(tag) def cull(self): """Cull items from cache until volume is less than size limit. 
:return: count of items removed """ return self._cache.cull() def clear(self): """Remove *all* values from the cache at once.""" return self._cache.clear() def close(self, **kwargs): """Close the cache connection.""" # pylint: disable=unused-argument self._cache.close() def get_backend_timeout(self, timeout=DEFAULT_TIMEOUT): """Return seconds to expiration. :param float timeout: seconds until the item expires (default 300 seconds) """ if timeout == DEFAULT_TIMEOUT: timeout = self.default_timeout elif timeout == 0: # ticket 21147 - avoid time.time() related precision issues timeout = -1 return None if timeout is None else timeout def memoize( self, name=None, timeout=DEFAULT_TIMEOUT, version=None, typed=False, tag=None, ignore=(), ): """Memoizing cache decorator. Decorator to wrap callable with memoizing function using cache. Repeated calls with the same arguments will lookup result in cache and avoid function evaluation. If name is set to None (default), the callable name will be determined automatically. When timeout is set to zero, function results will not be set in the cache. Cache lookups still occur, however. Read :doc:`case-study-landing-page-caching` for example usage. If typed is set to True, function arguments of different types will be cached separately. For example, f(3) and f(3.0) will be treated as distinct calls with distinct results. The original underlying function is accessible through the __wrapped__ attribute. This is useful for introspection, for bypassing the cache, or for rewrapping the function with a different cache. An additional `__cache_key__` attribute can be used to generate the cache key used for the given arguments. Remember to call memoize when decorating a callable. If you forget, then a TypeError will occur. 
:param str name: name given for callable (default None, automatic) :param float timeout: seconds until the item expires (default 300 seconds) :param int version: key version number (default None, cache parameter) :param bool typed: cache different types separately (default False) :param str tag: text to associate with arguments (default None) :param set ignore: positional or keyword args to ignore (default ()) :return: callable decorator """ # Caution: Nearly identical code exists in Cache.memoize if callable(name): raise TypeError('name cannot be callable') def decorator(func): """Decorator created by memoize() for callable `func`.""" base = (full_name(func),) if name is None else (name,) @wraps(func) def wrapper(*args, **kwargs): """Wrapper for callable to cache arguments and return values.""" key = wrapper.__cache_key__(*args, **kwargs) result = self.get(key, ENOVAL, version, retry=True) if result is ENOVAL: result = func(*args, **kwargs) valid_timeout = ( timeout is None or timeout == DEFAULT_TIMEOUT or timeout > 0 ) if valid_timeout: self.set( key, result, timeout, version, tag=tag, retry=True, ) return result def __cache_key__(*args, **kwargs): """Make key for cache given function arguments.""" return args_to_key(base, args, kwargs, typed, ignore) wrapper.__cache_key__ = __cache_key__ return wrapper return decorator python-diskcache-5.4.0/diskcache/fanout.py000066400000000000000000000530171416346170000205510ustar00rootroot00000000000000"""Fanout cache automatically shards keys and values.""" import contextlib as cl import functools import itertools as it import operator import os.path as op import sqlite3 import tempfile import time from .core import DEFAULT_SETTINGS, ENOVAL, Cache, Disk, Timeout from .persistent import Deque, Index class FanoutCache: """Cache that shards keys and values.""" def __init__( self, directory=None, shards=8, timeout=0.010, disk=Disk, **settings ): """Initialize cache instance. 
:param str directory: cache directory :param int shards: number of shards to distribute writes :param float timeout: SQLite connection timeout :param disk: `Disk` instance for serialization :param settings: any of `DEFAULT_SETTINGS` """ if directory is None: directory = tempfile.mkdtemp(prefix='diskcache-') directory = op.expanduser(directory) directory = op.expandvars(directory) default_size_limit = DEFAULT_SETTINGS['size_limit'] size_limit = settings.pop('size_limit', default_size_limit) / shards self._count = shards self._directory = directory self._disk = disk self._shards = tuple( Cache( directory=op.join(directory, '%03d' % num), timeout=timeout, disk=disk, size_limit=size_limit, **settings ) for num in range(shards) ) self._hash = self._shards[0].disk.hash self._caches = {} self._deques = {} self._indexes = {} @property def directory(self): """Cache directory.""" return self._directory def __getattr__(self, name): safe_names = {'timeout', 'disk'} valid_name = name in DEFAULT_SETTINGS or name in safe_names assert valid_name, 'cannot access {} in cache shard'.format(name) return getattr(self._shards[0], name) @cl.contextmanager def transact(self, retry=True): """Context manager to perform a transaction by locking the cache. While the cache is locked, no other write operation is permitted. Transactions should therefore be as short as possible. Read and write operations performed in a transaction are atomic. Read operations may occur concurrent to a transaction. Transactions may be nested and may not be shared between threads. Blocks until transactions are held on all cache shards by retrying as necessary. >>> cache = FanoutCache() >>> with cache.transact(): # Atomically increment two keys. ... _ = cache.incr('total', 123.4) ... _ = cache.incr('count', 1) >>> with cache.transact(): # Atomically calculate average. ... 
average = cache['total'] / cache['count'] >>> average 123.4 :return: context manager for use in `with` statement """ assert retry, 'retry must be True in FanoutCache' with cl.ExitStack() as stack: for shard in self._shards: shard_transaction = shard.transact(retry=True) stack.enter_context(shard_transaction) yield def set(self, key, value, expire=None, read=False, tag=None, retry=False): """Set `key` and `value` item in cache. When `read` is `True`, `value` should be a file-like object opened for reading in binary mode. If database timeout occurs then fails silently unless `retry` is set to `True` (default `False`). :param key: key for item :param value: value for item :param float expire: seconds until the key expires (default None, no expiry) :param bool read: read value as raw bytes from file (default False) :param str tag: text to associate with key (default None) :param bool retry: retry if database timeout occurs (default False) :return: True if item was set """ index = self._hash(key) % self._count shard = self._shards[index] try: return shard.set(key, value, expire, read, tag, retry) except Timeout: return False def __setitem__(self, key, value): """Set `key` and `value` item in cache. Calls :func:`FanoutCache.set` internally with `retry` set to `True`. :param key: key for item :param value: value for item """ index = self._hash(key) % self._count shard = self._shards[index] shard[key] = value def touch(self, key, expire=None, retry=False): """Touch `key` in cache and update `expire` time. If database timeout occurs then fails silently unless `retry` is set to `True` (default `False`). 
:param key: key for item :param float expire: seconds until the key expires (default None, no expiry) :param bool retry: retry if database timeout occurs (default False) :return: True if key was touched """ index = self._hash(key) % self._count shard = self._shards[index] try: return shard.touch(key, expire, retry) except Timeout: return False def add(self, key, value, expire=None, read=False, tag=None, retry=False): """Add `key` and `value` item to cache. Similar to `set`, but only add to cache if key not present. This operation is atomic. Only one concurrent add operation for given key from separate threads or processes will succeed. When `read` is `True`, `value` should be a file-like object opened for reading in binary mode. If database timeout occurs then fails silently unless `retry` is set to `True` (default `False`). :param key: key for item :param value: value for item :param float expire: seconds until the key expires (default None, no expiry) :param bool read: read value as bytes from file (default False) :param str tag: text to associate with key (default None) :param bool retry: retry if database timeout occurs (default False) :return: True if item was added """ index = self._hash(key) % self._count shard = self._shards[index] try: return shard.add(key, value, expire, read, tag, retry) except Timeout: return False def incr(self, key, delta=1, default=0, retry=False): """Increment value by delta for item with key. If key is missing and default is None then raise KeyError. Else if key is missing and default is not None then use default for value. Operation is atomic. All concurrent increment operations will be counted individually. Assumes value may be stored in a SQLite column. Most builds that target machines with 64-bit pointer widths will support 64-bit signed integers. If database timeout occurs then fails silently unless `retry` is set to `True` (default `False`). 
:param key: key for item :param int delta: amount to increment (default 1) :param int default: value if key is missing (default 0) :param bool retry: retry if database timeout occurs (default False) :return: new value for item on success else None :raises KeyError: if key is not found and default is None """ index = self._hash(key) % self._count shard = self._shards[index] try: return shard.incr(key, delta, default, retry) except Timeout: return None def decr(self, key, delta=1, default=0, retry=False): """Decrement value by delta for item with key. If key is missing and default is None then raise KeyError. Else if key is missing and default is not None then use default for value. Operation is atomic. All concurrent decrement operations will be counted individually. Unlike Memcached, negative values are supported. Value may be decremented below zero. Assumes value may be stored in a SQLite column. Most builds that target machines with 64-bit pointer widths will support 64-bit signed integers. If database timeout occurs then fails silently unless `retry` is set to `True` (default `False`). :param key: key for item :param int delta: amount to decrement (default 1) :param int default: value if key is missing (default 0) :param bool retry: retry if database timeout occurs (default False) :return: new value for item on success else None :raises KeyError: if key is not found and default is None """ index = self._hash(key) % self._count shard = self._shards[index] try: return shard.decr(key, delta, default, retry) except Timeout: return None def get( self, key, default=None, read=False, expire_time=False, tag=False, retry=False, ): """Retrieve value from cache. If `key` is missing, return `default`. If database timeout occurs then returns `default` unless `retry` is set to `True` (default `False`). 
:param key: key for item :param default: return value if key is missing (default None) :param bool read: if True, return file handle to value (default False) :param float expire_time: if True, return expire_time in tuple (default False) :param tag: if True, return tag in tuple (default False) :param bool retry: retry if database timeout occurs (default False) :return: value for item if key is found else default """ index = self._hash(key) % self._count shard = self._shards[index] try: return shard.get(key, default, read, expire_time, tag, retry) except (Timeout, sqlite3.OperationalError): return default def __getitem__(self, key): """Return corresponding value for `key` from cache. Calls :func:`FanoutCache.get` internally with `retry` set to `True`. :param key: key for item :return: value for item :raises KeyError: if key is not found """ index = self._hash(key) % self._count shard = self._shards[index] return shard[key] def read(self, key): """Return file handle corresponding to `key` from cache. :param key: key for item :return: file open for reading in binary mode :raises KeyError: if key is not found """ handle = self.get(key, default=ENOVAL, read=True, retry=True) if handle is ENOVAL: raise KeyError(key) return handle def __contains__(self, key): """Return `True` if `key` matching item is found in cache. :param key: key for item :return: True if key is found """ index = self._hash(key) % self._count shard = self._shards[index] return key in shard def pop( self, key, default=None, expire_time=False, tag=False, retry=False ): # noqa: E501 """Remove corresponding item for `key` from cache and return value. If `key` is missing, return `default`. Operation is atomic. Concurrent operations will be serialized. If database timeout occurs then fails silently unless `retry` is set to `True` (default `False`). 
:param key: key for item :param default: return value if key is missing (default None) :param float expire_time: if True, return expire_time in tuple (default False) :param tag: if True, return tag in tuple (default False) :param bool retry: retry if database timeout occurs (default False) :return: value for item if key is found else default """ index = self._hash(key) % self._count shard = self._shards[index] try: return shard.pop(key, default, expire_time, tag, retry) except Timeout: return default def delete(self, key, retry=False): """Delete corresponding item for `key` from cache. Missing keys are ignored. If database timeout occurs then fails silently unless `retry` is set to `True` (default `False`). :param key: key for item :param bool retry: retry if database timeout occurs (default False) :return: True if item was deleted """ index = self._hash(key) % self._count shard = self._shards[index] try: return shard.delete(key, retry) except Timeout: return False def __delitem__(self, key): """Delete corresponding item for `key` from cache. Calls :func:`FanoutCache.delete` internally with `retry` set to `True`. :param key: key for item :raises KeyError: if key is not found """ index = self._hash(key) % self._count shard = self._shards[index] del shard[key] def check(self, fix=False, retry=False): """Check database and file system consistency. Intended for use in testing and post-mortem error analysis. While checking the cache table for consistency, a writer lock is held on the database. The lock blocks other cache clients from writing to the database. For caches with many file references, the lock may be held for a long time. For example, local benchmarking shows that a cache with 1,000 file references takes ~60ms to check. If database timeout occurs then fails silently unless `retry` is set to `True` (default `False`). 
:param bool fix: correct inconsistencies :param bool retry: retry if database timeout occurs (default False) :return: list of warnings :raises Timeout: if database timeout occurs """ warnings = (shard.check(fix, retry) for shard in self._shards) return functools.reduce(operator.iadd, warnings, []) def expire(self, retry=False): """Remove expired items from cache. If database timeout occurs then fails silently unless `retry` is set to `True` (default `False`). :param bool retry: retry if database timeout occurs (default False) :return: count of items removed """ return self._remove('expire', args=(time.time(),), retry=retry) def create_tag_index(self): """Create tag index on cache database. Better to initialize cache with `tag_index=True` than use this. :raises Timeout: if database timeout occurs """ for shard in self._shards: shard.create_tag_index() def drop_tag_index(self): """Drop tag index on cache database. :raises Timeout: if database timeout occurs """ for shard in self._shards: shard.drop_tag_index() def evict(self, tag, retry=False): """Remove items with matching `tag` from cache. If database timeout occurs then fails silently unless `retry` is set to `True` (default `False`). :param str tag: tag identifying items :param bool retry: retry if database timeout occurs (default False) :return: count of items removed """ return self._remove('evict', args=(tag,), retry=retry) def cull(self, retry=False): """Cull items from cache until volume is less than size limit. If database timeout occurs then fails silently unless `retry` is set to `True` (default `False`). :param bool retry: retry if database timeout occurs (default False) :return: count of items removed """ return self._remove('cull', retry=retry) def clear(self, retry=False): """Remove all items from cache. If database timeout occurs then fails silently unless `retry` is set to `True` (default `False`). 
:param bool retry: retry if database timeout occurs (default False) :return: count of items removed """ return self._remove('clear', retry=retry) def _remove(self, name, args=(), retry=False): total = 0 for shard in self._shards: method = getattr(shard, name) while True: try: count = method(*args, retry=retry) total += count except Timeout as timeout: total += timeout.args[0] else: break return total def stats(self, enable=True, reset=False): """Return cache statistics hits and misses. :param bool enable: enable collecting statistics (default True) :param bool reset: reset hits and misses to 0 (default False) :return: (hits, misses) """ results = [shard.stats(enable, reset) for shard in self._shards] total_hits = sum(hits for hits, _ in results) total_misses = sum(misses for _, misses in results) return total_hits, total_misses def volume(self): """Return estimated total size of cache on disk. :return: size in bytes """ return sum(shard.volume() for shard in self._shards) def close(self): """Close database connection.""" for shard in self._shards: shard.close() self._caches.clear() self._deques.clear() self._indexes.clear() def __enter__(self): return self def __exit__(self, *exception): self.close() def __getstate__(self): return (self._directory, self._count, self.timeout, type(self.disk)) def __setstate__(self, state): self.__init__(*state) def __iter__(self): """Iterate keys in cache including expired items.""" iterators = (iter(shard) for shard in self._shards) return it.chain.from_iterable(iterators) def __reversed__(self): """Reverse iterate keys in cache including expired items.""" iterators = (reversed(shard) for shard in reversed(self._shards)) return it.chain.from_iterable(iterators) def __len__(self): """Count of items in cache including expired items.""" return sum(len(shard) for shard in self._shards) def reset(self, key, value=ENOVAL): """Reset `key` and `value` item from Settings table. 
If `value` is not given, it is reloaded from the Settings table. Otherwise, the Settings table is updated. Settings attributes on cache objects are lazy-loaded and read-only. Use `reset` to update the value. Settings with the ``sqlite_`` prefix correspond to SQLite pragmas. Updating the value will execute the corresponding PRAGMA statement. :param str key: Settings key for item :param value: value for item (optional) :return: updated value for item """ for shard in self._shards: while True: try: result = shard.reset(key, value) except Timeout: pass else: break return result def cache(self, name): """Return Cache with given `name` in subdirectory. >>> fanout_cache = FanoutCache() >>> cache = fanout_cache.cache('test') >>> cache.set('abc', 123) True >>> cache.get('abc') 123 >>> len(cache) 1 >>> cache.delete('abc') True :param str name: subdirectory name for Cache :return: Cache with given name """ _caches = self._caches try: return _caches[name] except KeyError: parts = name.split('/') directory = op.join(self._directory, 'cache', *parts) temp = Cache(directory=directory, disk=self._disk) _caches[name] = temp return temp def deque(self, name): """Return Deque with given `name` in subdirectory. >>> cache = FanoutCache() >>> deque = cache.deque('test') >>> deque.extend('abc') >>> deque.popleft() 'a' >>> deque.pop() 'c' >>> len(deque) 1 :param str name: subdirectory name for Deque :return: Deque with given name """ _deques = self._deques try: return _deques[name] except KeyError: parts = name.split('/') directory = op.join(self._directory, 'deque', *parts) cache = Cache(directory=directory, disk=self._disk) deque = Deque.fromcache(cache) _deques[name] = deque return deque def index(self, name): """Return Index with given `name` in subdirectory. 
>>> cache = FanoutCache() >>> index = cache.index('test') >>> index['abc'] = 123 >>> index['def'] = 456 >>> index['ghi'] = 789 >>> index.popitem() ('ghi', 789) >>> del index['abc'] >>> len(index) 1 >>> index['def'] 456 :param str name: subdirectory name for Index :return: Index with given name """ _indexes = self._indexes try: return _indexes[name] except KeyError: parts = name.split('/') directory = op.join(self._directory, 'index', *parts) cache = Cache(directory=directory, disk=self._disk) index = Index.fromcache(cache) _indexes[name] = index return index FanoutCache.memoize = Cache.memoize # type: ignore python-diskcache-5.4.0/diskcache/persistent.py000066400000000000000000001006071416346170000214530ustar00rootroot00000000000000"""Persistent Data Types """ import operator as op from collections import OrderedDict from collections.abc import ( ItemsView, KeysView, MutableMapping, Sequence, ValuesView, ) from contextlib import contextmanager from shutil import rmtree from .core import ENOVAL, Cache def _make_compare(seq_op, doc): """Make compare method with Sequence semantics.""" def compare(self, that): """Compare method for deque and sequence.""" if not isinstance(that, Sequence): return NotImplemented len_self = len(self) len_that = len(that) if len_self != len_that: if seq_op is op.eq: return False if seq_op is op.ne: return True for alpha, beta in zip(self, that): if alpha != beta: return seq_op(alpha, beta) return seq_op(len_self, len_that) compare.__name__ = '__{0}__'.format(seq_op.__name__) doc_str = 'Return True if and only if deque is {0} `that`.' compare.__doc__ = doc_str.format(doc) return compare class Deque(Sequence): """Persistent sequence with double-ended queue semantics. Double-ended queue is an ordered collection with optimized access at its endpoints. Items are serialized to disk. Deque may be initialized from directory path where items are stored. 
>>> deque = Deque() >>> deque += range(5) >>> list(deque) [0, 1, 2, 3, 4] >>> for value in range(5): ... deque.appendleft(-value) >>> len(deque) 10 >>> list(deque) [-4, -3, -2, -1, 0, 0, 1, 2, 3, 4] >>> deque.pop() 4 >>> deque.popleft() -4 >>> deque.reverse() >>> list(deque) [3, 2, 1, 0, 0, -1, -2, -3] """ def __init__(self, iterable=(), directory=None): """Initialize deque instance. If directory is None then temporary directory created. The directory will *not* be automatically removed. :param iterable: iterable of items to append to deque :param directory: deque directory (default None) """ self._cache = Cache(directory, eviction_policy='none') self.extend(iterable) @classmethod def fromcache(cls, cache, iterable=()): """Initialize deque using `cache`. >>> cache = Cache() >>> deque = Deque.fromcache(cache, [5, 6, 7, 8]) >>> deque.cache is cache True >>> len(deque) 4 >>> 7 in deque True >>> deque.popleft() 5 :param Cache cache: cache to use :param iterable: iterable of items :return: initialized Deque """ # pylint: disable=no-member,protected-access self = cls.__new__(cls) self._cache = cache self.extend(iterable) return self @property def cache(self): """Cache used by deque.""" return self._cache @property def directory(self): """Directory path where deque is stored.""" return self._cache.directory def _index(self, index, func): len_self = len(self) if index >= 0: if index >= len_self: raise IndexError('deque index out of range') for key in self._cache.iterkeys(): if index == 0: try: return func(key) except KeyError: continue index -= 1 else: if index < -len_self: raise IndexError('deque index out of range') index += 1 for key in self._cache.iterkeys(reverse=True): if index == 0: try: return func(key) except KeyError: continue index += 1 raise IndexError('deque index out of range') def __getitem__(self, index): """deque.__getitem__(index) <==> deque[index] Return corresponding item for `index` in deque. 
See also `Deque.peekleft` and `Deque.peek` for indexing deque at index ``0`` or ``-1``. >>> deque = Deque() >>> deque.extend('abcde') >>> deque[1] 'b' >>> deque[-2] 'd' :param int index: index of item :return: corresponding item :raises IndexError: if index out of range """ return self._index(index, self._cache.__getitem__) def __setitem__(self, index, value): """deque.__setitem__(index, value) <==> deque[index] = value Store `value` in deque at `index`. >>> deque = Deque() >>> deque.extend([None] * 3) >>> deque[0] = 'a' >>> deque[1] = 'b' >>> deque[-1] = 'c' >>> ''.join(deque) 'abc' :param int index: index of value :param value: value to store :raises IndexError: if index out of range """ def _set_value(key): return self._cache.__setitem__(key, value) self._index(index, _set_value) def __delitem__(self, index): """deque.__delitem__(index) <==> del deque[index] Delete item in deque at `index`. >>> deque = Deque() >>> deque.extend([None] * 3) >>> del deque[0] >>> del deque[1] >>> del deque[-1] >>> len(deque) 0 :param int index: index of item :raises IndexError: if index out of range """ self._index(index, self._cache.__delitem__) def __repr__(self): """deque.__repr__() <==> repr(deque) Return string with printable representation of deque. """ name = type(self).__name__ return '{0}(directory={1!r})'.format(name, self.directory) __eq__ = _make_compare(op.eq, 'equal to') __ne__ = _make_compare(op.ne, 'not equal to') __lt__ = _make_compare(op.lt, 'less than') __gt__ = _make_compare(op.gt, 'greater than') __le__ = _make_compare(op.le, 'less than or equal to') __ge__ = _make_compare(op.ge, 'greater than or equal to') def __iadd__(self, iterable): """deque.__iadd__(iterable) <==> deque += iterable Extend back side of deque with items from iterable. :param iterable: iterable of items to append to deque :return: deque with added items """ self.extend(iterable) return self def __iter__(self): """deque.__iter__() <==> iter(deque) Return iterator of deque from front to back. 
""" _cache = self._cache for key in _cache.iterkeys(): try: yield _cache[key] except KeyError: pass def __len__(self): """deque.__len__() <==> len(deque) Return length of deque. """ return len(self._cache) def __reversed__(self): """deque.__reversed__() <==> reversed(deque) Return iterator of deque from back to front. >>> deque = Deque() >>> deque.extend('abcd') >>> iterator = reversed(deque) >>> next(iterator) 'd' >>> list(iterator) ['c', 'b', 'a'] """ _cache = self._cache for key in _cache.iterkeys(reverse=True): try: yield _cache[key] except KeyError: pass def __getstate__(self): return self.directory def __setstate__(self, state): self.__init__(directory=state) def append(self, value): """Add `value` to back of deque. >>> deque = Deque() >>> deque.append('a') >>> deque.append('b') >>> deque.append('c') >>> list(deque) ['a', 'b', 'c'] :param value: value to add to back of deque """ self._cache.push(value, retry=True) def appendleft(self, value): """Add `value` to front of deque. >>> deque = Deque() >>> deque.appendleft('a') >>> deque.appendleft('b') >>> deque.appendleft('c') >>> list(deque) ['c', 'b', 'a'] :param value: value to add to front of deque """ self._cache.push(value, side='front', retry=True) def clear(self): """Remove all elements from deque. >>> deque = Deque('abc') >>> len(deque) 3 >>> deque.clear() >>> list(deque) [] """ self._cache.clear(retry=True) def count(self, value): """Return number of occurrences of `value` in deque. >>> deque = Deque() >>> deque += [num for num in range(1, 5) for _ in range(num)] >>> deque.count(0) 0 >>> deque.count(1) 1 >>> deque.count(4) 4 :param value: value to count in deque :return: count of items equal to value in deque """ return sum(1 for item in self if value == item) def extend(self, iterable): """Extend back side of deque with values from `iterable`. 
:param iterable: iterable of values """ for value in iterable: self.append(value) def extendleft(self, iterable): """Extend front side of deque with value from `iterable`. >>> deque = Deque() >>> deque.extendleft('abc') >>> list(deque) ['c', 'b', 'a'] :param iterable: iterable of values """ for value in iterable: self.appendleft(value) def peek(self): """Peek at value at back of deque. Faster than indexing deque at -1. If deque is empty then raise IndexError. >>> deque = Deque() >>> deque.peek() Traceback (most recent call last): ... IndexError: peek from an empty deque >>> deque += 'abc' >>> deque.peek() 'c' :return: value at back of deque :raises IndexError: if deque is empty """ default = None, ENOVAL _, value = self._cache.peek(default=default, side='back', retry=True) if value is ENOVAL: raise IndexError('peek from an empty deque') return value def peekleft(self): """Peek at value at front of deque. Faster than indexing deque at 0. If deque is empty then raise IndexError. >>> deque = Deque() >>> deque.peekleft() Traceback (most recent call last): ... IndexError: peek from an empty deque >>> deque += 'abc' >>> deque.peekleft() 'a' :return: value at front of deque :raises IndexError: if deque is empty """ default = None, ENOVAL _, value = self._cache.peek(default=default, side='front', retry=True) if value is ENOVAL: raise IndexError('peek from an empty deque') return value def pop(self): """Remove and return value at back of deque. If deque is empty then raise IndexError. >>> deque = Deque() >>> deque += 'ab' >>> deque.pop() 'b' >>> deque.pop() 'a' >>> deque.pop() Traceback (most recent call last): ... IndexError: pop from an empty deque :return: value at back of deque :raises IndexError: if deque is empty """ default = None, ENOVAL _, value = self._cache.pull(default=default, side='back', retry=True) if value is ENOVAL: raise IndexError('pop from an empty deque') return value def popleft(self): """Remove and return value at front of deque. 
>>> deque = Deque() >>> deque += 'ab' >>> deque.popleft() 'a' >>> deque.popleft() 'b' >>> deque.popleft() Traceback (most recent call last): ... IndexError: pop from an empty deque :return: value at front of deque :raises IndexError: if deque is empty """ default = None, ENOVAL _, value = self._cache.pull(default=default, retry=True) if value is ENOVAL: raise IndexError('pop from an empty deque') return value def remove(self, value): """Remove first occurrence of `value` in deque. >>> deque = Deque() >>> deque += 'aab' >>> deque.remove('a') >>> list(deque) ['a', 'b'] >>> deque.remove('b') >>> list(deque) ['a'] >>> deque.remove('c') Traceback (most recent call last): ... ValueError: deque.remove(value): value not in deque :param value: value to remove :raises ValueError: if value not in deque """ _cache = self._cache for key in _cache.iterkeys(): try: item = _cache[key] except KeyError: continue else: if value == item: try: del _cache[key] except KeyError: continue return raise ValueError('deque.remove(value): value not in deque') def reverse(self): """Reverse deque in place. >>> deque = Deque() >>> deque += 'abc' >>> deque.reverse() >>> list(deque) ['c', 'b', 'a'] """ # GrantJ 2019-03-22 Consider using an algorithm that swaps the values # at two keys. Like self._cache.swap(key1, key2, retry=True) The swap # method would exchange the values at two given keys. Then, using a # forward iterator and a reverse iterator, the reversis method could # avoid making copies of the values. temp = Deque(iterable=reversed(self)) self.clear() self.extend(temp) directory = temp.directory del temp rmtree(directory) def rotate(self, steps=1): """Rotate deque right by `steps`. If steps is negative then rotate left. 
>>> deque = Deque() >>> deque += range(5) >>> deque.rotate(2) >>> list(deque) [3, 4, 0, 1, 2] >>> deque.rotate(-1) >>> list(deque) [4, 0, 1, 2, 3] :param int steps: number of steps to rotate (default 1) """ if not isinstance(steps, int): type_name = type(steps).__name__ raise TypeError('integer argument expected, got %s' % type_name) len_self = len(self) if not len_self: return if steps >= 0: steps %= len_self for _ in range(steps): try: value = self.pop() except IndexError: return else: self.appendleft(value) else: steps *= -1 steps %= len_self for _ in range(steps): try: value = self.popleft() except IndexError: return else: self.append(value) __hash__ = None # type: ignore @contextmanager def transact(self): """Context manager to perform a transaction by locking the deque. While the deque is locked, no other write operation is permitted. Transactions should therefore be as short as possible. Read and write operations performed in a transaction are atomic. Read operations may occur concurrent to a transaction. Transactions may be nested and may not be shared between threads. >>> from diskcache import Deque >>> deque = Deque() >>> deque += range(5) >>> with deque.transact(): # Atomically rotate elements. ... value = deque.pop() ... deque.appendleft(value) >>> list(deque) [4, 0, 1, 2, 3] :return: context manager for use in `with` statement """ with self._cache.transact(retry=True): yield class Index(MutableMapping): """Persistent mutable mapping with insertion order iteration. Items are serialized to disk. Index may be initialized from directory path where items are stored. Hashing protocol is not used. Keys are looked up by their serialized format. See ``diskcache.Disk`` for details. >>> index = Index() >>> index.update([('a', 1), ('b', 2), ('c', 3)]) >>> index['a'] 1 >>> list(index) ['a', 'b', 'c'] >>> len(index) 3 >>> del index['b'] >>> index.popitem() ('c', 3) """ def __init__(self, *args, **kwargs): """Initialize index in directory and update items. 
Optional first argument may be string specifying directory where items are stored. When None or not given, temporary directory is created. >>> index = Index({'a': 1, 'b': 2, 'c': 3}) >>> len(index) 3 >>> directory = index.directory >>> inventory = Index(directory, d=4) >>> inventory['b'] 2 >>> len(inventory) 4 """ if args and isinstance(args[0], (bytes, str)): directory = args[0] args = args[1:] else: if args and args[0] is None: args = args[1:] directory = None self._cache = Cache(directory, eviction_policy='none') self.update(*args, **kwargs) @classmethod def fromcache(cls, cache, *args, **kwargs): """Initialize index using `cache` and update items. >>> cache = Cache() >>> index = Index.fromcache(cache, {'a': 1, 'b': 2, 'c': 3}) >>> index.cache is cache True >>> len(index) 3 >>> 'b' in index True >>> index['c'] 3 :param Cache cache: cache to use :param args: mapping or sequence of items :param kwargs: mapping of items :return: initialized Index """ # pylint: disable=no-member,protected-access self = cls.__new__(cls) self._cache = cache self.update(*args, **kwargs) return self @property def cache(self): """Cache used by index.""" return self._cache @property def directory(self): """Directory path where items are stored.""" return self._cache.directory def __getitem__(self, key): """index.__getitem__(key) <==> index[key] Return corresponding value for `key` in index. >>> index = Index() >>> index.update({'a': 1, 'b': 2}) >>> index['a'] 1 >>> index['b'] 2 >>> index['c'] Traceback (most recent call last): ... KeyError: 'c' :param key: key for item :return: value for item in index with given key :raises KeyError: if key is not found """ return self._cache[key] def __setitem__(self, key, value): """index.__setitem__(key, value) <==> index[key] = value Set `key` and `value` item in index. 
>>> index = Index() >>> index['a'] = 1 >>> index[0] = None >>> len(index) 2 :param key: key for item :param value: value for item """ self._cache[key] = value def __delitem__(self, key): """index.__delitem__(key) <==> del index[key] Delete corresponding item for `key` from index. >>> index = Index() >>> index.update({'a': 1, 'b': 2}) >>> del index['a'] >>> del index['b'] >>> len(index) 0 >>> del index['c'] Traceback (most recent call last): ... KeyError: 'c' :param key: key for item :raises KeyError: if key is not found """ del self._cache[key] def setdefault(self, key, default=None): """Set and get value for `key` in index using `default`. If `key` is not in index then set corresponding value to `default`. If `key` is in index then ignore `default` and return existing value. >>> index = Index() >>> index.setdefault('a', 0) 0 >>> index.setdefault('a', 1) 0 :param key: key for item :param default: value if key is missing (default None) :return: value for item in index with given key """ _cache = self._cache while True: try: return _cache[key] except KeyError: _cache.add(key, default, retry=True) def peekitem(self, last=True): """Peek at key and value item pair in index based on iteration order. >>> index = Index() >>> for num, letter in enumerate('xyz'): ... index[letter] = num >>> index.peekitem() ('z', 2) >>> index.peekitem(last=False) ('x', 0) :param bool last: last item in iteration order (default True) :return: key and value item pair :raises KeyError: if cache is empty """ return self._cache.peekitem(last, retry=True) def pop(self, key, default=ENOVAL): """Remove corresponding item for `key` from index and return value. If `key` is missing then return `default`. If `default` is `ENOVAL` then raise KeyError. >>> index = Index({'a': 1, 'b': 2}) >>> index.pop('a') 1 >>> index.pop('b') 2 >>> index.pop('c', default=3) 3 >>> index.pop('d') Traceback (most recent call last): ... 
KeyError: 'd' :param key: key for item :param default: return value if key is missing (default ENOVAL) :return: value for item if key is found else default :raises KeyError: if key is not found and default is ENOVAL """ _cache = self._cache value = _cache.pop(key, default=default, retry=True) if value is ENOVAL: raise KeyError(key) return value def popitem(self, last=True): """Remove and return item pair. Item pairs are returned in last-in-first-out (LIFO) order if last is True else first-in-first-out (FIFO) order. LIFO order imitates a stack and FIFO order imitates a queue. >>> index = Index() >>> index.update([('a', 1), ('b', 2), ('c', 3)]) >>> index.popitem() ('c', 3) >>> index.popitem(last=False) ('a', 1) >>> index.popitem() ('b', 2) >>> index.popitem() Traceback (most recent call last): ... KeyError: 'dictionary is empty' :param bool last: pop last item pair (default True) :return: key and value item pair :raises KeyError: if index is empty """ # pylint: disable=arguments-differ,unbalanced-tuple-unpacking _cache = self._cache with _cache.transact(retry=True): key, value = _cache.peekitem(last=last) del _cache[key] return key, value def push(self, value, prefix=None, side='back'): """Push `value` onto `side` of queue in index identified by `prefix`. When prefix is None, integer keys are used. Otherwise, string keys are used in the format "prefix-integer". Integer starts at 500 trillion. Defaults to pushing value on back of queue. Set side to 'front' to push value on front of queue. Side must be one of 'back' or 'front'. See also `Index.pull`. 
>>> index = Index() >>> print(index.push('apples')) 500000000000000 >>> print(index.push('beans')) 500000000000001 >>> print(index.push('cherries', side='front')) 499999999999999 >>> index[500000000000001] 'beans' >>> index.push('dates', prefix='fruit') 'fruit-500000000000000' :param value: value for item :param str prefix: key prefix (default None, key is integer) :param str side: either 'back' or 'front' (default 'back') :return: key for item in cache """ return self._cache.push(value, prefix, side, retry=True) def pull(self, prefix=None, default=(None, None), side='front'): """Pull key and value item pair from `side` of queue in index. When prefix is None, integer keys are used. Otherwise, string keys are used in the format "prefix-integer". Integer starts at 500 trillion. If queue is empty, return default. Defaults to pulling key and value item pairs from front of queue. Set side to 'back' to pull from back of queue. Side must be one of 'front' or 'back'. See also `Index.push`. >>> index = Index() >>> for letter in 'abc': ... print(index.push(letter)) 500000000000000 500000000000001 500000000000002 >>> key, value = index.pull() >>> print(key) 500000000000000 >>> value 'a' >>> _, value = index.pull(side='back') >>> value 'c' >>> index.pull(prefix='fruit') (None, None) :param str prefix: key prefix (default None, key is integer) :param default: value to return if key is missing (default (None, None)) :param str side: either 'front' or 'back' (default 'front') :return: key and value item pair or default if queue is empty """ return self._cache.pull(prefix, default, side, retry=True) def clear(self): """Remove all items from index. >>> index = Index({'a': 0, 'b': 1, 'c': 2}) >>> len(index) 3 >>> index.clear() >>> dict(index) {} """ self._cache.clear(retry=True) def __iter__(self): """index.__iter__() <==> iter(index) Return iterator of index keys in insertion order. 
""" return iter(self._cache) def __reversed__(self): """index.__reversed__() <==> reversed(index) Return iterator of index keys in reversed insertion order. >>> index = Index() >>> index.update([('a', 1), ('b', 2), ('c', 3)]) >>> iterator = reversed(index) >>> next(iterator) 'c' >>> list(iterator) ['b', 'a'] """ return reversed(self._cache) def __len__(self): """index.__len__() <==> len(index) Return length of index. """ return len(self._cache) def keys(self): """Set-like object providing a view of index keys. >>> index = Index() >>> index.update({'a': 1, 'b': 2, 'c': 3}) >>> keys_view = index.keys() >>> 'b' in keys_view True :return: keys view """ return KeysView(self) def values(self): """Set-like object providing a view of index values. >>> index = Index() >>> index.update({'a': 1, 'b': 2, 'c': 3}) >>> values_view = index.values() >>> 2 in values_view True :return: values view """ return ValuesView(self) def items(self): """Set-like object providing a view of index items. >>> index = Index() >>> index.update({'a': 1, 'b': 2, 'c': 3}) >>> items_view = index.items() >>> ('b', 2) in items_view True :return: items view """ return ItemsView(self) __hash__ = None # type: ignore def __getstate__(self): return self.directory def __setstate__(self, state): self.__init__(state) def __eq__(self, other): """index.__eq__(other) <==> index == other Compare equality for index and `other`. Comparison to another index or ordered dictionary is order-sensitive. Comparison to all other mappings is order-insensitive. 
>>> index = Index() >>> pairs = [('a', 1), ('b', 2), ('c', 3)] >>> index.update(pairs) >>> from collections import OrderedDict >>> od = OrderedDict(pairs) >>> index == od True >>> index == {'c': 3, 'b': 2, 'a': 1} True :param other: other mapping in equality comparison :return: True if index equals other """ if len(self) != len(other): return False if isinstance(other, (Index, OrderedDict)): alpha = ((key, self[key]) for key in self) beta = ((key, other[key]) for key in other) pairs = zip(alpha, beta) return not any(a != x or b != y for (a, b), (x, y) in pairs) else: return all(self[key] == other.get(key, ENOVAL) for key in self) def __ne__(self, other): """index.__ne__(other) <==> index != other Compare inequality for index and `other`. Comparison to another index or ordered dictionary is order-sensitive. Comparison to all other mappings is order-insensitive. >>> index = Index() >>> index.update([('a', 1), ('b', 2), ('c', 3)]) >>> from collections import OrderedDict >>> od = OrderedDict([('c', 3), ('b', 2), ('a', 1)]) >>> index != od True >>> index != {'a': 1, 'b': 2} True :param other: other mapping in inequality comparison :return: True if index does not equal other """ return not self == other def memoize(self, name=None, typed=False, ignore=()): """Memoizing cache decorator. Decorator to wrap callable with memoizing function using cache. Repeated calls with the same arguments will lookup result in cache and avoid function evaluation. If name is set to None (default), the callable name will be determined automatically. If typed is set to True, function arguments of different types will be cached separately. For example, f(3) and f(3.0) will be treated as distinct calls with distinct results. The original underlying function is accessible through the __wrapped__ attribute. This is useful for introspection, for bypassing the cache, or for rewrapping the function with a different cache. 
>>> from diskcache import Index >>> mapping = Index() >>> @mapping.memoize() ... def fibonacci(number): ... if number == 0: ... return 0 ... elif number == 1: ... return 1 ... else: ... return fibonacci(number - 1) + fibonacci(number - 2) >>> print(fibonacci(100)) 354224848179261915075 An additional `__cache_key__` attribute can be used to generate the cache key used for the given arguments. >>> key = fibonacci.__cache_key__(100) >>> print(mapping[key]) 354224848179261915075 Remember to call memoize when decorating a callable. If you forget, then a TypeError will occur. Note the lack of parenthenses after memoize below: >>> @mapping.memoize ... def test(): ... pass Traceback (most recent call last): ... TypeError: name cannot be callable :param str name: name given for callable (default None, automatic) :param bool typed: cache different types separately (default False) :param set ignore: positional or keyword args to ignore (default ()) :return: callable decorator """ return self._cache.memoize(name, typed, ignore=ignore) @contextmanager def transact(self): """Context manager to perform a transaction by locking the index. While the index is locked, no other write operation is permitted. Transactions should therefore be as short as possible. Read and write operations performed in a transaction are atomic. Read operations may occur concurrent to a transaction. Transactions may be nested and may not be shared between threads. >>> from diskcache import Index >>> mapping = Index() >>> with mapping.transact(): # Atomically increment two keys. ... mapping['total'] = mapping.get('total', 0) + 123.4 ... mapping['count'] = mapping.get('count', 0) + 1 >>> with mapping.transact(): # Atomically calculate average. ... 
average = mapping['total'] / mapping['count'] >>> average 123.4 :return: context manager for use in `with` statement """ with self._cache.transact(retry=True): yield def __repr__(self): """index.__repr__() <==> repr(index) Return string with printable representation of index. """ name = type(self).__name__ return '{0}({1!r})'.format(name, self.directory) python-diskcache-5.4.0/diskcache/recipes.py000066400000000000000000000335001416346170000207020ustar00rootroot00000000000000"""Disk Cache Recipes """ import functools import math import os import random import threading import time from .core import ENOVAL, args_to_key, full_name class Averager: """Recipe for calculating a running average. Sometimes known as "online statistics," the running average maintains the total and count. The average can then be calculated at any time. >>> import diskcache >>> cache = diskcache.FanoutCache() >>> ave = Averager(cache, 'latency') >>> ave.add(0.080) >>> ave.add(0.120) >>> ave.get() 0.1 >>> ave.add(0.160) >>> ave.pop() 0.12 >>> print(ave.get()) None """ def __init__(self, cache, key, expire=None, tag=None): self._cache = cache self._key = key self._expire = expire self._tag = tag def add(self, value): """Add `value` to average.""" with self._cache.transact(retry=True): total, count = self._cache.get(self._key, default=(0.0, 0)) total += value count += 1 self._cache.set( self._key, (total, count), expire=self._expire, tag=self._tag, ) def get(self): """Get current average or return `None` if count equals zero.""" total, count = self._cache.get(self._key, default=(0.0, 0), retry=True) return None if count == 0 else total / count def pop(self): """Return current average and delete key.""" total, count = self._cache.pop(self._key, default=(0.0, 0), retry=True) return None if count == 0 else total / count class Lock: """Recipe for cross-process and cross-thread lock. 
>>> import diskcache >>> cache = diskcache.Cache() >>> lock = Lock(cache, 'report-123') >>> lock.acquire() >>> lock.release() >>> with lock: ... pass """ def __init__(self, cache, key, expire=None, tag=None): self._cache = cache self._key = key self._expire = expire self._tag = tag def acquire(self): """Acquire lock using spin-lock algorithm.""" while True: added = self._cache.add( self._key, None, expire=self._expire, tag=self._tag, retry=True, ) if added: break time.sleep(0.001) def release(self): """Release lock by deleting key.""" self._cache.delete(self._key, retry=True) def locked(self): """Return true if the lock is acquired.""" return self._key in self._cache def __enter__(self): self.acquire() def __exit__(self, *exc_info): self.release() class RLock: """Recipe for cross-process and cross-thread re-entrant lock. >>> import diskcache >>> cache = diskcache.Cache() >>> rlock = RLock(cache, 'user-123') >>> rlock.acquire() >>> rlock.acquire() >>> rlock.release() >>> with rlock: ... pass >>> rlock.release() >>> rlock.release() Traceback (most recent call last): ... 
AssertionError: cannot release un-acquired lock """ def __init__(self, cache, key, expire=None, tag=None): self._cache = cache self._key = key self._expire = expire self._tag = tag def acquire(self): """Acquire lock by incrementing count using spin-lock algorithm.""" pid = os.getpid() tid = threading.get_ident() pid_tid = '{}-{}'.format(pid, tid) while True: with self._cache.transact(retry=True): value, count = self._cache.get(self._key, default=(None, 0)) if pid_tid == value or count == 0: self._cache.set( self._key, (pid_tid, count + 1), expire=self._expire, tag=self._tag, ) return time.sleep(0.001) def release(self): """Release lock by decrementing count.""" pid = os.getpid() tid = threading.get_ident() pid_tid = '{}-{}'.format(pid, tid) with self._cache.transact(retry=True): value, count = self._cache.get(self._key, default=(None, 0)) is_owned = pid_tid == value and count > 0 assert is_owned, 'cannot release un-acquired lock' self._cache.set( self._key, (value, count - 1), expire=self._expire, tag=self._tag, ) def __enter__(self): self.acquire() def __exit__(self, *exc_info): self.release() class BoundedSemaphore: """Recipe for cross-process and cross-thread bounded semaphore. >>> import diskcache >>> cache = diskcache.Cache() >>> semaphore = BoundedSemaphore(cache, 'max-cons', value=2) >>> semaphore.acquire() >>> semaphore.acquire() >>> semaphore.release() >>> with semaphore: ... pass >>> semaphore.release() >>> semaphore.release() Traceback (most recent call last): ... 
AssertionError: cannot release un-acquired semaphore """ def __init__(self, cache, key, value=1, expire=None, tag=None): self._cache = cache self._key = key self._value = value self._expire = expire self._tag = tag def acquire(self): """Acquire semaphore by decrementing value using spin-lock algorithm.""" while True: with self._cache.transact(retry=True): value = self._cache.get(self._key, default=self._value) if value > 0: self._cache.set( self._key, value - 1, expire=self._expire, tag=self._tag, ) return time.sleep(0.001) def release(self): """Release semaphore by incrementing value.""" with self._cache.transact(retry=True): value = self._cache.get(self._key, default=self._value) assert self._value > value, 'cannot release un-acquired semaphore' value += 1 self._cache.set( self._key, value, expire=self._expire, tag=self._tag, ) def __enter__(self): self.acquire() def __exit__(self, *exc_info): self.release() def throttle( cache, count, seconds, name=None, expire=None, tag=None, time_func=time.time, sleep_func=time.sleep, ): """Decorator to throttle calls to function. >>> import diskcache, time >>> cache = diskcache.Cache() >>> count = 0 >>> @throttle(cache, 2, 1) # 2 calls per 1 second ... def increment(): ... global count ... count += 1 >>> start = time.time() >>> while (time.time() - start) <= 2: ... 
increment() >>> count in (6, 7) # 6 or 7 calls depending on CPU load True """ def decorator(func): rate = count / float(seconds) key = full_name(func) if name is None else name now = time_func() cache.set(key, (now, count), expire=expire, tag=tag, retry=True) @functools.wraps(func) def wrapper(*args, **kwargs): while True: with cache.transact(retry=True): last, tally = cache.get(key) now = time_func() tally += (now - last) * rate delay = 0 if tally > count: cache.set(key, (now, count - 1), expire) elif tally >= 1: cache.set(key, (now, tally - 1), expire) else: delay = (1 - tally) / rate if delay: sleep_func(delay) else: break return func(*args, **kwargs) return wrapper return decorator def barrier(cache, lock_factory, name=None, expire=None, tag=None): """Barrier to calling decorated function. Supports different kinds of locks: Lock, RLock, BoundedSemaphore. >>> import diskcache, time >>> cache = diskcache.Cache() >>> @barrier(cache, Lock) ... def work(num): ... print('worker started') ... time.sleep(1) ... print('worker finished') >>> import multiprocessing.pool >>> pool = multiprocessing.pool.ThreadPool(2) >>> _ = pool.map(work, range(2)) worker started worker finished worker started worker finished >>> pool.terminate() """ def decorator(func): key = full_name(func) if name is None else name lock = lock_factory(cache, key, expire=expire, tag=tag) @functools.wraps(func) def wrapper(*args, **kwargs): with lock: return func(*args, **kwargs) return wrapper return decorator def memoize_stampede( cache, expire, name=None, typed=False, tag=None, beta=1, ignore=() ): """Memoizing cache decorator with cache stampede protection. Cache stampedes are a type of system overload that can occur when parallel computing systems using memoization come under heavy load. This behaviour is sometimes also called dog-piling, cache miss storm, cache choking, or the thundering herd problem. The memoization decorator implements cache stampede protection through early recomputation. 
Early recomputation of function results will occur probabilistically before expiration in a background thread of execution. Early probabilistic recomputation is based on research by Vattani, A.; Chierichetti, F.; Lowenstein, K. (2015), Optimal Probabilistic Cache Stampede Prevention, VLDB, pp. 886-897, ISSN 2150-8097 If name is set to None (default), the callable name will be determined automatically. If typed is set to True, function arguments of different types will be cached separately. For example, f(3) and f(3.0) will be treated as distinct calls with distinct results. The original underlying function is accessible through the `__wrapped__` attribute. This is useful for introspection, for bypassing the cache, or for rewrapping the function with a different cache. >>> from diskcache import Cache >>> cache = Cache() >>> @memoize_stampede(cache, expire=1) ... def fib(number): ... if number == 0: ... return 0 ... elif number == 1: ... return 1 ... else: ... return fib(number - 1) + fib(number - 2) >>> print(fib(100)) 354224848179261915075 An additional `__cache_key__` attribute can be used to generate the cache key used for the given arguments. >>> key = fib.__cache_key__(100) >>> del cache[key] Remember to call memoize when decorating a callable. If you forget, then a TypeError will occur. 
:param cache: cache to store callable arguments and return values :param float expire: seconds until arguments expire :param str name: name given for callable (default None, automatic) :param bool typed: cache different types separately (default False) :param str tag: text to associate with arguments (default None) :param set ignore: positional or keyword args to ignore (default ()) :return: callable decorator """ # Caution: Nearly identical code exists in Cache.memoize def decorator(func): """Decorator created by memoize call for callable.""" base = (full_name(func),) if name is None else (name,) def timer(*args, **kwargs): """Time execution of `func` and return result and time delta.""" start = time.time() result = func(*args, **kwargs) delta = time.time() - start return result, delta @functools.wraps(func) def wrapper(*args, **kwargs): """Wrapper for callable to cache arguments and return values.""" key = wrapper.__cache_key__(*args, **kwargs) pair, expire_time = cache.get( key, default=ENOVAL, expire_time=True, retry=True, ) if pair is not ENOVAL: result, delta = pair now = time.time() ttl = expire_time - now if (-delta * beta * math.log(random.random())) < ttl: return result # Cache hit. # Check whether a thread has started for early recomputation. thread_key = key + (ENOVAL,) thread_added = cache.add( thread_key, None, expire=delta, retry=True, ) if thread_added: # Start thread for early recomputation. 
def recompute(): with cache: pair = timer(*args, **kwargs) cache.set( key, pair, expire=expire, tag=tag, retry=True, ) thread = threading.Thread(target=recompute) thread.daemon = True thread.start() return result pair = timer(*args, **kwargs) cache.set(key, pair, expire=expire, tag=tag, retry=True) return pair[0] def __cache_key__(*args, **kwargs): """Make key for cache given function arguments.""" return args_to_key(base, args, kwargs, typed, ignore) wrapper.__cache_key__ = __cache_key__ return wrapper return decorator python-diskcache-5.4.0/docs/000077500000000000000000000000001416346170000157075ustar00rootroot00000000000000python-diskcache-5.4.0/docs/Makefile000066400000000000000000000011721416346170000173500ustar00rootroot00000000000000# Minimal makefile for Sphinx documentation # # You can set these variables from the command line, and also # from the environment for the first two. SPHINXOPTS ?= SPHINXBUILD ?= sphinx-build SOURCEDIR = . BUILDDIR = _build # Put it first so that "make" without argument is like "make help". help: @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) .PHONY: help Makefile # Catch-all target: route all unknown targets to Sphinx using the new # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 
%: Makefile @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) python-diskcache-5.4.0/docs/_static/000077500000000000000000000000001416346170000173355ustar00rootroot00000000000000python-diskcache-5.4.0/docs/_static/core-p1-delete.png000066400000000000000000001271201416346170000225540ustar00rootroot00000000000000PNG  IHDRGe@sBIT|d pHYsttfx9tEXtSoftwarematplotlib version 3.0.3, http://matplotlib.org/ IDATxwVս.wH#)V",Qט(j"آQWDⱢ1^A`"XQQA,>nFok&Tъc=zt{.iܸqZlc=|N2%_5kƍg͟??C I׮]Ӱa4i${G?}:̙ڵkgϞ+{,ӦM+o{{m۶_~ڵk~eȑhޯr'If̘yU&ߡVܚ;NHSv=|̒%Kree_+7N׮]$n-Y$^zizFqy3f̘Jsm /?}7M6l~o[;w._gʪ |rP:NӦMsGiӦyGӧO4iҤm  /0͛7~VZeԩ+3nܸL4)n}q[noC9$M6SO=_ycN:ѣGaÆU*IgntI_~t!jn#8"sW:ɓ'/;N8!ofy[&I<<0aB Tb[/-UP3u͚5+f̘Q޾lٲ._HRł *r-E3ϬީSSNU=蠃%KT:thk*')W屼EݺuM7ݴ={v?EZSNѻw V^mu_I~UdMҡC̚5BIRn}ݹ+mge޼yFU9%K /EkS~L>ˊe'Lg}R{we˖W^I޽~kvmW]vIRe+V'={vyۊshѢ*X79 6~xϗ%s=9蠃ҢEg:~x~iٲef*֦N:iٲe>}ӧOҾC;wNV*?Y|yvu4j+k|glٲ*{KK&\Wg]nݴn: ,yWW)N,Z(Iʟ6mT G|A>_^Wf+ G ,HQ7o$W\qֵ.|Uߊ_j9=X{챕M1o1zky8[$ 0viѢEFk6\sMү_\qUׯ׾קP6ml_7Y{Ź~g +Tu6mZ:udٲe_;(^ju_/;S6͚5KەWөSgϞ_U=E}Jmsp-|'oy|,\0K.͘1cҢEEG}tz|?>O>dUZauuC_\V9s|:묯'XCV['|I^V/{7[oUji`_y7nnݺ{ϟUJf}vJL8qZ߭s0hР|'뮻?1~8ԭ[w4m4Ons1?~|j)YWaRV<裏5}]v|E 80uu]W=)˗/Ϲ瞛˗W欳JxU;裏>SO=s<8}Y;*ߍ`gVh lI8uYgW^yRg}p./7|sc^|ż{xG}tjժn-v[TXJ}'xʫ4+` |J˖-sgܹ9s*}/^\~+jV2pL<9]tQ_ό3V*ZqkoY~j^`#/ܹsF>;={4i$>h.\ݻgԩc=2bĈ 2$oygt%/άY2a„7=>lƌ.]1?jʐ!C2mڴ4k,I_bU[;,zYԯ_hѢE1pbΜ9E~V<ĉ;h۶mQnݢEŶn[uYų>[alUKXCoѲeˢnݺE֭߾ ӧW?яVZ/3>t cNZ 4رcQ^YfEn݊N:xW}{S󗏱(V.(?K^z5*4hPtܹg}nko*gq]t)6lX4k֬ѣGqe}ӿ=G_eUCd/..bm)6lX4nܸڵkӟ~iqu;s+:tPW_]?ϢX'~{;=[j[YQ“R9pD8H"$GI#$I]l…0aB:trڧ~z+KӦMWy{&Lȁ G%СC6l-W^{x忧*+nlҭ[\ dy @#$@ pD8H"$GI#$@ pD8H"$GI#$@ pD8H"$GI#$@ pD8H"$GI:kXa]%T\%PC\9pD8H"$GI#$@ IRgmڳGv 깶+ߙ+GGI#$@ pD8H"$GI#$@ pD8H"$GI#$@ pD8H"$GI#$@ pD8H"$GI:k .D8H"$GI#$@ pD8H"$GI#$@ pD8H"$GI#$@ pD8H"$GI#$@ pD8H"$GI#$@ pD8H"$GI#$@ pD8H"$GIp?quFcǎ~W^yӧgiܸq7o:*ͫ4nӥK4h ݻwϘ1cjpu\]\ve_;,ݻw;3믿>zSO=|;Iٳgg]wM&M2|,^8W^ye^|<3W^>/1"'xb~;6GqDrᇯCl83ѣG=zdɒ%VK~Άe˖?a&M;;s9}u>;>`g/}]#Lӿ?ŋsWdmɱ[>~7Ιg+"K.ox 'N{/sl8z$=PzJ+Q2a„ <8w^ի}7W]uUFf͚nѣ;GXhxln裏~ZjeȐ!2dH 
F3G5I8pD8H"$GI#$@ pD8H"$GI#$@ pD8H"$GI#$@ pD8H"$GI#$@ pD8H"$GI#$@ pD8H"$GI#$@ pD8H"$GI#$@ pD8H"$GI#$@ pD8H"$GI#$@ pD8H"$GI#$@ pD8H"$GI#$@ pD8H"$GI#$@ pD8H"$GI#$@ pD8H"$GI#$@ p$ Rk |յ$ŋ3t 0 ͛7OYYYF]i1J?[mU˗/_.]A޽{ƌSu^n;vl׿楗^$` ҵk'x`vm.ϯt1nmƏұM7TI&]p1bDNˠA z=\ >(nacǎҥKsiSOGI&o߾T#J9sfF(Iz#Fd̙2djM,Y$na4i͛Oŋ+2eJ5j]VhaOtO^Iի~m۶~+˗/#<#G^Sοsܹiݺu*m$oJx2o޼ mZ5 _>ҥKӨQ5J. ?l r=/~oРAyʌ92Æ ƪuIIKyYgUmذaiܸq6m:ҭm5嬳JZ?aÆU.7'iiӦUyp`(UW]={aÆe}M׮]suK.te&4l0mQϟ_ֶm<)­usMknkժUZj  ˾~L{dmkLI_ϠA?yiӦMԩS'˗/Ͻ޻V?ϼ:L IDATiٲey[=rM7ez۟~~XiamPI9oJVmu~i:I?){w[oufϞ]Z_O>?J]tQȀ8ԭ[7#G,o+"FJӧO5Z+*Q.]?9'pB&O^{-\rIyロƍT_ $C38# ,HϞ=([mUG͸q2`pxsg+ҥKĉ `XIOOӼK={v6x~_ӭ[ +3k֬w_$ɑGMfc=[o5˖-fmÇsIZ/1"͚5 7ܐѣGg7wܑ#8:[I3H 2nܸ;?W|?~y睜r)%8s̯sժU+C Y//%Ỏ'Xy#GB֤I'ڵk .I'T]QiٲeFU b5%$93hРL<9fJt)m]ujI/uN;픝vک:vP%Ky7`EQ裏i֘믿#<2=na)SԈрr 7dʔ);{2@(i).(<ڵB֤_۶mz)"Kl2eee%PSJG+~iܸ`wJZ!I&Od 6Fm &$Ip?~|Sq%o۷o^}yY|yy_-hѢp % ?t5/R^yKF}{챩_~o>N)SԈQݺu+JesIƍKFvis=U}G[ү_R%aÆew}M 馛һw̛7//P57n\N=}I>;I馛fܸq޽{Ua%vw/>j/_M74{2X`Qr8ZGѣGuF?1cTh{G뮻fwoے)%g뮻?Ϙ1#tPf̘$ҫWR%=s4lذ 0 [mU=lI_~9zk>G &v}7.{nFQGӿ  %v=̔)S;רSNiӦMԔ~hɒ%ݻwF$iӦMvq㎂Ypdƌ)++z֊V0`@}`))+:*_2gΜ̟?u$y饗rwtܲeJ`+)WPR8 g-J G/ңG3Æ +e QR8{{}]wU5pofM7]i.]2k֬R%ƍe1cF4hP5pnnȜ9s*[~2@(i).(;Cu?ӦM7ߜ(rEUKkRIh-ĉsgꫯз뮻kM׮]K*&{0aBIM6$-Z(8Rr8ZEZptm%I:ꨔ:G}WPV)s1)++zcmʄ#`JhƌIzU [pԩS *%%V%7|sx,X EQT/++ /P4kTI7M=4h [ne7o^]uԨW\~y衇ҤI ƕђ%K2p@X_|jXkJ G]w]\y啙?~uPJ G:t';/-[LFVq>(iA_WKҾ}lv*)5*oxԪ}Dge}^Ifĉp4tмK9se޼y?~u]ImI>7pJ-[iָW+++Z֚х^XMe]*K/4/^I>\z饫@MYptwC92~\haҥN:);v̘1cJ.`MYgN;3W^yeF;Ntf͚(,X 3fȴiӲtl63p5u %[pTVVf2eJxL4)O=T>$Fm*?sW^kpګ3={ZD8H"$GI#$%,EsɓO>{/rH6x,[,-J&MRv`)QQ</ΕW^_|1 .Ȉ#r'fرcsG,~x?J GÇO>}W{0JmfܹiӦM&O~u|Gyұc$;쐽+GI'$3gNꪜ~$'pBs=7vXj׮]JnѢE8p FIR~ik{oʃQb-__mcǎҥKsiSOٳ3iҤ=`Qҕ~_ZV˜9s{eԷ;dܸq埧LFk׮ƭ۷o{azJ-XG?3{w.holܹIu ޗm6ϧ~gܹiݺu*K~{92Æ uIIh뭷3dȐ 2$ 4NYYY-ZTR_?N[Acׯ_ϯ2vZ; mZ<ծXw9JWajZÆ $~iO>˜ ~qUiժUZjUrpwդĭΝ͛_-j۶mxEQ!ԭض]v5P1Ԭ9;*i?筵]@%%V.h߾}Zlɓ'W{gңG=zȒ%K2} ~~*]9ے$GuT?>z+[rHnּ[_;W^YgU>Yg#G(5*۷O>}hg㬨-,MVKP*E /XA,%4#`,D) D JT;? 
Ҽ؇3gf>3{:pFLL ]wqqqr G< 7oInԩ]obŊ0a.233>|8kצgϞe+W;3Ç{nׯoͼy7n?+I$+}wŅ=>FUB'MĤI)V˗gܹ#..mGܝ3%J^W^J*;]m$It:pTbE>C .LҥX+dʕWfMfΜy~ z\$Ir .f͚u}]$I)6mJǎ8D̙3d*UM7{G֭R%I$`Nauںu+{h߲e f#ݻ2%I$`N3G={$33X6méW-bϞ=ǹS ,87lƍVXql7@$IRԜ(..\y啜q,Y#FдiS>c.B֯_@2e"/S ;:}Y r\$I}D8jԨ5 =n߾=;vgƌdee1BBBhr-t)mŊtl$Ih;%Q^*WW_ͤI&11;wFݱcGhr\j$I} U|yvŶmBrk}ٿvy$IN t8oIHH ))ZjQ`A>>vb…ԩS'JUJ$I:h|L2֭[S@+F˖-;v,[n FfffD$IN/5G]t!11F’%K=z4 sСCiԨ͛7wެ]֭[sGq $I$E)qClڴ#Gr-k?5j]tE$&&r]w1zhnF&N%I$N3G_~ۤI>\$I)qH$IH$I0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$P0H"v$IQ#I$Ip$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IY< IDATÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 8 Ν;߿?g}64lؐYfE,I$IQvڅ4F~;z)bcc+?~K$IE]O?7`}tޝZjqGBI$IrZ98q";Ԗ7'|š5kX$Ih:т Z*a 4`…(K$I V~zʔ)ўp 6qư%KbŊcX"mVF[Wov a߲6%DX{wK믣]Btx\y\@<D;:N:sΝ;h*eeeў~ >,C sZM] o]ɡVhWC'x\ t\Y.;Qbbb)rǎr-ЩS-[l2j׮gfŊtЁ~ʕ+GIǀǵt>ܹ5kмy# Geʔaݺuׯ>)))D_|Ǯ@*WL͚5]cZ:x\>em۶ZcX)xiҤIXܹsYv-u͚5#++ICq9cϟs=ǻŋ駟⋉{H"˴nݚ>f͚vZ5kFbxGdĈ,ZO?]hp޽\2<W6zǎt҅p7sEK/ѷoߣ/&##M6c>#z!i׮c֭-Z44O ]֭[~PL)S;w%u,3K}\rI߼y#:wLVVӦMc֭L6-!u&LXbjՊM6֭KRRR6>vo#;3_mƦMhԨA7iӦ|Ziٲ%K|\wu$%%[oQlY͛ҥ ,`ٲey|a+^_YIϱ8/"6lȣ>/ʕ+>}:}PB')IPtiZl믿Φcǎ/_NFF)))y.gÆ Z *UDDg 8)SD\˔8!!ҥK(Q"b>IUR`A_G}RSSyiܸAX"Lӹj߸q!,Ze˖ꫯ݀a߻I"Ep$&&Ҹqc`ҥKO?N:ZPTd֯_OZZگU!9")){ :wLvv6ӟ"ٳ͛7{7*TQFAϓO>y:r;__NN{/ W'IУGNo޼9}aذa,\֭[SP!/_΄ xꩧرc跆 Fv+Y`ӧO38:WNjj*w}7֭#997|k([j;w}uY|<w|0a\r wq >ڵkӳg(mq}wcԩݻyOyW"H:y֭ /<@T_}~yfϞMÆ yh۶A_P!NJ~6l \s5vm\p{$@rr2eʔglٲ׏|0˗gܹ#..m{t9 /'dܸq(P ['`q:$It#I$Ip$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$I(ݞ={_$h#Ii'&&$J(A~Ց 4' XjYYY,IRٳl .L vI:IIN: [n%++bŊQLߌ%I X~=lݺhIGa˖-$IRRR, ÑtvMN'IR{{hH: AP$I' xs$?IGt$7YGp$I$I$I$ 
0I:PR%kiii$%%ʎ^mڴ)ڥ7-ZVZ.Cؒ'^z"KOOc֬Y|Qvm:wL޽ILLv$6 GҥKkxwԩtޝZjk.ϟ=_ѣ]$I ÑN:}]w+V'eʔ M[YbN+$Wjԯ_RSSy"ݻ2dUT!!!RJѤIf͚uu-\ҥKӢE 233CӧOy-Zdׯ믿>o<:uD |u]deeEosΔ.]DUƃ>oͤQxq+FϞ=پ}{DcRn])Y$]wk֬9v>;{.ejZd \r lٲSFڵk0aBh_}M6PBݛJ*ԩS:thX=;wsaذa|3}PC2`:wL^ظq#FYf,XŋpM:s=FI{9j֬I)X SN[n!''[o5/? _~/ksL8Svm rrrh߾=ϧwԨQEOl2~|.I"&g#p$!5\Ì3Xt)*T/k&;;;+*UE+ԩSr1mڴ.;--'G}ĕW^IӦMy7C222(_|;I5eV')O̜9:5jЦM[xqk/_~̞=6mpe1iҤf͚֭[>CmƦMhԨA`6nȇ~ 7=/+7iӦOlٲI&CΝٴiSדּ΢J*̞=ۜEtkm`ӦM4oޜo ))믿>8.. >X3r5jԠzax饗t%I:9NR6nHVVUTVZ5}CqWSjUjժ_Nn8رmRn]ƏOoIgD)##bŊ{+W."0(Q*xty.cÆ _H1׬Y3ә=Ͼd>l/^Zӹ˨^:#G|O6j`6֮]#Gٷ|^INd#Iyʽ[^C.]zK,IϞ=ٳ'4k֌ƍW_MN>}:-ZMOMM^Sr<׳h"-[ƫJC/wy~ɡsUV=۵kѣOwԩܹ)SX[~HMM/.s$I9XڴioիC9sA駟'%%QrW]u~ihZ֭)Z(Æ cǎa= xꩧ.]f͚׿5l{7Zbcc2dHAM67߄ {)RzX~zzzhƌ ^~yGΝYn/bĴ,mv˔$D#I4df̘AӦM[سgFf͚aצΣEԭ[%K3qDn<'&&2m4.R ΝKZHNN'W^ԯ_]RD KoΫJIMMfݺu$''oF\{OӤI."z9ʕ+ywXpaT~a~V\I(Z(}o{gaȐ!̞=;tf,55_.]PF wNZصk1&LvT֭㪫O>dff/C5gnݺ1~xnffϞMƍoa̜9z2%I:Xzzz2su s=7xA}TX1ѣG?4h (^xT^=:thk׮P=zE [צM;/8묳˗ڧL4j(HLL  Cӗ,Yl2HJJ 8ছn ^~u,^8kŋ AjՂnƍ{ |͠I&A"E"EիWn`ҥ˜={v]lYpM7*U ₢E7Fر#lABBBPRG ׿YӡW̓5kFңGbŊamv }Ѡf͚A|||PDnݺ!CeHR4.w)Hte-9$I$ G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I:L&&&&RJrHJJ:۷iӦh?6o|̗ݺuk{X\/^r8VXALL cǎjOLL Ϗv)zD $) ^ybbb]a;v,O?gdd0h ?|HLLv_W͝;{.?sgo}ygx׎ae'iӦCe/cǎ/_zuX0l09 ?~|DOqcU*So͌v kH㾎KRߵzs9'VZQȌ;+VЯ_i+Ve˖[Ν;s7S`A+FɓYdI>2ÇM6ϙ#;v8<3+Wݻ_zdee8L61c0pcaÆEÆ K3bCݺu4i]t@t1k[o;KÑv| +zEb\s5O|\|aӇãT[~=3fॗ^v)ya׮]$$$D0_~%/"bŊV^͓O>wO> @^hܸ1w}7^{m X~k1`a҉ίz%_> /okv͐!CR *U&M0k֬o….]-Zj>}:͛7hѢ$''S~}^yѩ/ IDATS'*T@||<˗箻"+++b| ;wt$&&RZ5|~7o&--ŋSX1z#;u뒘Hɒ%XfApK\z饤@͚5=ztDrѡCΝ7.+ر#%Jp\|̘1#Ϙ1caڵa_ѤIfΜIzzzhX`ʕ?~</fXb<ás B Ò%KԩgqT^=޿ ݻwXb/^^zxWÞǮ]nݺ>ӦM#;;-[޽AQre83hڴ)|Az֬YCIJJtߟ>>(5dɒ$&&R^=z뭰>{!&&;3^{;x}`0.Q={d˖-|уeO2eСW{GN| rʱtR>k-뚣&MPN.\HӦM)\0UTaҤI̞=듘H5?εkגƙgI||uJ#IyZh[t <={0h SFڵk0aBh_}M6PBݛJ*ԩS:thX=;wsaذa|3}PC2`:wL^ظq#FYf,X _gddD3>K:uh߾= dӇ ӧO|˖-K.Ջ4ƌCݩWժUhܸ1;woD+mۖz} 8{#FPhQL@ndzcnVJ,ɿ/z){-\f͚O>}X"+V:??^z3<32dC 뮣W^lذ~,Xd>cRRR([l:?2|pzMz>c\ve~wu4i҄#F{cQrenPzkwڵ_kӧs嗇7 *CO۷/իW7ߤgϞkaٲe~TX{kRB2335$....r>cر,]8QF32et~]ҥKХKƎ˝wI߾}~ر#k֬H"caÆ,XoRJҳgO233ضm͛7nRJ?ݻen[n3g^}Ua˿ 
ه\N"EBG+8ZjX{ B߄֭ =[UvcR $ ==#աC !!!XjUmɒ%Alll[GŊ=z_pA۶m=zE ϟ$''m۶ vyhѢAÆ srrB}6, YfAѢE_֠A \s5ARBW\C hѢ`{ Ͽ}m]vYPjհe@G֯_*T(߿n O> edd*TRSSC_ `͚5a5kV i&HMMvaP?OALLLvP[FbŊEԶ@лw>W]uUpgX"(P@裏[pa&hذaD5k n~y䑰??b;w5jZnj۽{w7|ĉ92F Alܸ1'x"_. /333(W\0` y뭷VZz]ƍ@0~P_A>,; Ulয়~ [Wǎ%JޫFok׮~Arrry /_>}ycPӟoצM srro:C,g$Ef̙t!1@5hӦ A/^8_5˗/J*;{l*ZnoA\\\hڬYغu+w_}DZmʢQFA P7n?;ۦo{ܴiSz-lBrr2&M"''Ν;9묳R glD|}-##ݻwӢE  mBr4j(o6ҨQoy馛0`K.zꇬ;?l:y=7&.\Hٲe@r8uToN…CC:v<-[s=ٳgLO?ŋh"VXRx kҤI}Cl޼l4i1ވ `w}u,Xn?8VH *ٳCCHrVJ {#?Knpy瑔Djjjص~ 6+999L4nݺhӦ 'Nd…4lؐw}eҹsPB ѯ_?uƼy"7x#_gZ<ECUcbb(^?HR7gV!C=W_MժUU_~9ݺucڶmKݺu?~|ĸtwo[z5dʔ)/a222}P%JiINNfApWP|A!üy4h#y Gל[d4o<_5Xj1 G|rJȴi<5`c…Y|9999y "AO\s UTv\qt-Ƥ$J,Q:eʗ_~Ν;CykުU([, k?D%&&#пRRRi׮ݻw>[fMj֬IOO_ {ʕ++F#ڀs?uV}Y}WN…Yz57xq}ccc>}zۼo8*UTDK^Gg̘1ag\̞=:ТE {9:, *Ę1c8qbļG{MwMx9s&> Æ cΜ9/3222CsTT&MrJ`M `oXr%+V<ѣ_}}/êXS˫ShcL˔)'|ў{K>;=222®N#Ir|idɒ3trf|SB99.|>u+hxA*R{T SYf?~<ƏOQQQuVUZZ455bnݺ:abbcǎqCe욙!""Ɂ={.+[HH¬ݻ7-999 ϩlӧO}ahhHJ211Ann\OIM;eRTd…;wnIII}ߝ8q[_Uaa!***3BM(7!DP('vK{e+א9G*Y/|}}PQQc;v 00[l,aٲex5sN?~?#/8wm۶ƛ;oqS=z9880UUUֶm[f.uU򎉉a;wfihh0+++޼yÕ[ÇƆx3KKK&ҥ KMMUXgcUʻ`F;w~1---^RywAE{ؔ)SX֭ 344d,11QرcY͙*k߾=ۼyܾ}0eʔzm3j(H$ޖχ@ @rrrC!7uSЩS'|WM G >f͚ͫdUh߾=vaٸ{wQ믘1cFS7VBCCCx{{㯿jp+V͛c+݋ z׭[++++W^'@CC]t*W.##Ǐ-`ffVmXhLMM: %%E̙3:Rn1˻=>>rrrDZb055-k4Lrr2&.??}6ɓ'CYYϟGBBvލK.5Afɒ%{͈/qqq@HHJJJf?C\bZjr777|jjjMj޽X~=ϟ\r%BCC1dDDDȑ# ūW_ֺѣ{nL6 RIII۷/=]r咓sN|hٲeuΜ9};wbذaPRRr ի9!D ! i CNDYY#oP%K4Qݻ@bbbS7EJyMyk׮ñi&n?,,,m۶GQRRG?1o< 8뙙8q"'BGG?T,]ӦM3goƕoqFo߾WXgaa!-[p,[k3"""]t 4h6oތy &}Kjptt:R)֮]ܻs sssYfpqqÇk_VVѳgO<{[~pttĶm۸ǎÐ!C`ll 555n9^|)+W`Сׇ,--1gr?رc!Hqŋr咓 ayfYpssakkrZ/={Ͷn*W6??.455ѵkWdddʬ_n-wn <nXlMJJ 1o<ttt=?r֭[CMM /+m/]!CyЀ+%%%$ &N}i&8bܾ}Wf޽;oyYY"##affuuu4oݻw/"7ob҂>fΜJ^ŋ[nӃ:ut^rL6 7o ?ŸqܹscƌвeKPl}~_F-x !Q:[j/{MZ(s{{{dee{Ԅ9v ̄#444`mmpح[0vXhjjjCRR\c[lᕩn^ԻsFkע;Ne]_PRR`<}qT`ĉ2 7;ox'|Zu IDATk.$0eܸq'O@AAkƠ#BB.\@>}(#22Rǎ"QQQXh&NΝ;ɓ'8}4Μ9ۜ:u ԩvxJJJƯϞ=gϞEFFFرc^x)SYf8y$q-رѽ{w`ҤIhӦ O?!66מC-™3g~z`\X̛7CĉQTTxٳu_ZZ*y߫V= eee޽AAA`!((]nn. 
'bرX~=ЩS'XZZxׯ_cԩERRϟӧ={x;.RRR+@OO/_;w~eeejjj 4hR)>}hѢFtt4'É'pYboȈsbɒ%4i:uR:u gϞE޽reeeӧ\\\wСCoaff@#G͛7ضmqۗCa9366Fee% ?SL0n8s\L:&&&>:[n3TUUs $&& ...(..Ftt45k]Wq6lѣG#0l0\Æ Crr2M)S`ȑXd 7oB$xYqrr2Nf͚a7n={Po{ezk׮!44mڴAJJ 8q]9ru:l#ѳg\/#Ξ=ÇW[ٳgaee%Ds`!++BQbruw҅[.i?Ю]z틐za+((`ծIjGC2uuuv n٥KP(d~u1cp;tk̘1L$1ߙX,fիW\Ǐ3mmm^|۾ŋr/Z ^]]]6oٻuEFF2l2~~~Yfׯ3Pbccy.\喿kƍ GUw‚Ȉ`ݻLEE͜9[?[VZZʌT*źuvM~>cǎq<==T*kgvx6u…L [nq˺utttVu3g&M+Zh=gJJJlżrYYYL(w҅999ɵ֖ 8c9r$kyx5f}ᖕ1L(+Wʧ2֭lc`K.Sk{ݛ]NN133c5&raLٙ`)))ܲ/2LII:u[o>޹aw{o_fww}۷se޼yX,fϞ=1' & g… y)OWW5J:KKKOܹs [~n{{Gܹsy+++P(dSN_&6sDSQQחz k^"ŋ˃ye333>}`PUU>|O>ŬYTs^uKt 1={(**op1[ɓyϻwtkkk [[[Ԥ nnnҥKnnnh׮ eZj%4urpŽ{SZ VRX=rz\\\꽍TիWjV !s444p޽zd 䃣޹wơC~z,]k֬5PSSvލ ߿1sLXYYA$;vd ~WVVB  ˾Ѻuk⧟~B\\ܱH]:XYY… {nR̚5 VVVDaa!&LA_GP(HR0`jjZ㰸سg޼y{~(Ԗ& _+WLLLǼkW;v@rr2O^c[r6eѪxR)*** JHuo5k ǏS **NѪJKK&222xà֭[W:LLLp1nLu]333DDD ""999ee Q]{ݿeee(//T5044H$Bee\vw 77WWTUL@ǎ2)*pB̝;`oo8}4Õ9y$ccʕxω'?Pՙ\wwTXX GRyBBxzzb׮]/_kѣGZZZ033S8,CUU;w#|||x[mmm,ZHn pի1,_W^__ذa\C("::Zn{<|W\Q &wߘyٳgXnR)7JTIyy4"H 6 666Xp\Z^$ُ7EZYY86֭֯_/f!ATױk׮(**{~ֆT*vSMB!x=eW^2Յw!rXWŋr+333hii>{l>|G{V~n߾WS;x)++))) oR\TT[nzXݻx{%%%g a{"[l߾NG/wwwkB~e﫪FYY֯_-{%ܠ#___B^cXv-y`27U!P!Dhddd{Fyy9akk׸ z HMMR߾KCC{뇣GbK.ĉ#F@WWΝË/i&XYYA*"""o߆X,FZZOqqqpqq~I&ׯ_Ǿ}U#Jٳg6]tL4 +V ::xzzBEE ӧOCCC|}͞=)))DXX$ pMsW;tGGG̘1EEEH$tU988 -- pppX,7TUU8;;cذapvv2m6 &&055Bhii!55kT,]zǎ׮]Pueaah̛7W^At5ܹM/PǏɓصk]}x{{#..}g}{qI;.^X:ХKDDDիDZZK.o߾:t(lll  :7dΑzD<~}6VX---seˡy֯_XHR3U~-=Ν;#00(..Grs&Oua8y$LLL'N >>ڦ,]1i?SN;TTT s$`cذa#G}v,^cٲeűcǸ}9;;3fݻw!JqFܺuK~NYYYػw/}II w?;0,[ _v܉Ǐ/mۖxǜʛ1Ǝ=*k۶-[f 꺪wSyİΝ;3D444eo޼TM-Cfcc Y^^|Ϟ=[nLCCbֹsg?p/]ݙk޼9 Rnܸl$ SWWgl޼yz񶓥޾voyZZsqqa"D"feeBBBXNN\rUM'Ȯ]SSScc rivvvK}SWWgNNNlr1777 ܹsYFF\:'OÇ3Dȥ-..fsevvvLSSvڱݿ+ܘH$b,((9sF.1c? 8Ry.E;3u cˋyzz-X9::rmf-beee\#G2sx3KKK&ҥ KMMUXg}=EEE1$ aΝ+))ajM'cwacZZZեС\>+՝{)S֭[3fhhYbb\cDz͛3UUU־}{yf}?x1MMMǂ着r̚7oAik͚5‚2333'WFy7/_| :ܹ3;|ϘDŽ xeYLL 366fΎ^)7i,c @ hU |233\ܿ'{쁟.\nJMMرcQPPP2i,sD!Q^ЫW/89r$FZ/FXXX!tBˤB!B@!B!B!BPpD!B!(8"B!B!B !B!GB!#B!B@!B!?$=OJJ@ ׹emڴA@_@$nYTTA5B! 
)7uM@QM݄֑#GǏNNN7nmwABB|}}aooߤm!B-dF/_ĤRoի5k`x m۶Uܹsھ;w ::YYYt?BDŽz!MF(B(6u3-55 ,m6pO핕L_B? B%s :b͚5Cxx8^zѣ:tp{KKKxzzV[9G2=acc;w*GXX!H7o.tuu1c 0xuTVVbh׮ա}5yAOO6lF25ΛnQrr2=== >7oٳ'p% 022·~˕9rƍ@ D!yBj5tPz -0i$oƝ?ټmN:\U9Oyyy6lEAYYC ÇN:yyyƀy%K`˖-m'LiӦuXx1f͚uuu_5ʕ+v:9{L6 \]]c^ْ:tXYYa̙8p ,L4 [l-[K!|h\!Vؽ{7 $$bVBDD S"99| Mrr2D"Q"--v„ \+ۢE ߿ǒ%KիWx i6l@@@ 33III ˹K._ h׮]:7n@dd$bbbW_qѱcGZΝ;ؼy3F 111Abb"-Z_~?>vڠBz! =:u*`?pAEEE~GB${| b8{,ݻ+;a5'''00anP(DNpUnYZZ"##_S'O{5ڹs'*++1tP<|{ZZZGUU;w!Bꏂ#BHyϥR)B(,,ıc?3߿lԗ\baar󓌍yutt[[^RR=/(('|==zM,>}Zj͡{\|::=ĉQ^^.Cw:Br?1 =z /ضm["Jٳg\k׮!==&MBDDDO*B"`͚5ֆH$B!|8R(㏘?>f͚eeebɒ%r0cƌ'b177G||}:tttЩS't֭mcbb憸8^E.]{n 0^m5k,,,tR7uӧOlڴ gɓQ^^7RpD!@h/! &Kܶm&nɇhy浖_|9>s\~].!}ɇGs!c H=h qIDATѣFBWauFy9كL\pwn&B!4GF)**ˆ# HW_5h~ !B?9"hl3!AIcќ#B!BGB!#B!B@!B!B!BPpD!B!(8"B!B@RR_-ٳ'zdm#G@ ȑ#MݔQcǢM6M&B!䟂#BGᰲ йsg̜9Ϟ=m۶ ˖-iRN b=z}'m#B[MB>fZ7u`tfS7N:^)..FN?VVVxΟ?իWcʔ)68ƴi\+^| UUzkXreÍ7zj׾kݺu B (8"!{ 6B֭oݓ'OW^AUUJJJPWWM屰Q /_#Z?!oA!Պ@ K0bp <zzzPWWGNg:.^777hhhUVQKhQ||bǎjtR;| z WWWxyyAGG+###U+>}`AAAhٲ% Hu1@׮]aaaÇ׸1B̝;CTT"%%C C}A, 'B>4Rɓ's_СCS<|>ģG鉼<ܾ}$]t#ȑ#kݧD"[pԩzE8w&OY#F.\,u1f̘Z{Ξ=k׮aڴi@CJLL> ЩS'/1c c iiic;>'JKKqo}˖-y&&MTk[$ ?Ç׹Bǀ#BHLMMüy󠯯{DFFOܺu Ν̘1[lA߾}Υ611kG>}`hhgggh/_Ɗ+ mmmo^z!22QQQ-[`֭󃃃TUUqelذꫯ}888_|GGGhiiǧ^BII W1n8lW\ŋqAn_OOOB >ƺǎc|7̄accb9s?3Lb F˖-ehjjzL'NDqq1ЪU+ܸq񰷷u!! VPP Ldd$fhhTTT߿?KMM;<ѣSWWgFFFl…,11`׮]уу{vZʚ5kԘT*eӧOg\LEFF7}t駟2===ZlɆ Μ9k۳g؈#D"a ;vl]ff&o<<<6D},>>[_^^ΦN@ `U~}1XTT߿BBBX֭ 344d{f moܸ 455YYxx8Ȑo̘1ܱ3Xjj*ӧ300`ؘw*l#!S|ɇ'`ym !Kڵygf͚WUZl\-[x0a/~)u)?K'd8$?msg%111rJ.ͩZ.cVVڵ?cEW\1cPT\V'I^QF5\hΜ9\zTRhԩàAؾ}{dLts,*g~Q޽ B^siCbb"G&/ӴiSʖ-K||}v 99eR\99O}TTP(Ě5k"2228p '|2TTM2jԨ\ȠxDGGSre:tkls+дiSʔ)CJYn]dl˼yr gU^:^I:IR>{]wܹs˹Kx76lRJs=tԉ;8p'|2y}]tBVV_~9'tk|G4j谵tԉ8MƘ1c(U*gΜ۹;#}{f~ٓX֯_ς xw}?!yjՊT5ju]3gݺu㫯u ТE :lBTT/M6l۶?.]k.VX#{#?.mF6mҥ [l7ޠE\z饹0ao&:te˖,Z3ftR,YBtt4*T`L2K9& Wr Kͷ~.UTJ*>~W_¿a |9333sM<9 ok׮]#G}-[<իWp^r+ k׮Ȉgff+W.SLJ*+VDW^8***?DڷmPBr᯾*<˗/-[6ܰa#}Yr]za lٲp8o߾= 7߿?-[ќÇÇh j* H{wC{p6m¡P(i{'N5޽{Éa /_vmԩϝ;7ǸuօkԨVZxϞ=㎋|w򗿄3rl2? 
x?'OT$$)M63`"P|%K:'駟B 9zMrr2/B^^j֬\B:t]曩W^/::}iؾ};)))vi9ӧORSSYboӫW/}7ndΜ94lؐ38-MYr#gܹ1#F0`N;4SJ ֭[:u*M4q|LL =piӦ:rr27xcYff:t@nrתU+~m[ @˖-sQw}77nsÄFl}r//Aꕤ_sY$TZhv$$$x~^ Uti^~e^~\۷͛7u֣e8hҤI5jиq\}A[/\K1ʕ+HKK~yԭ[YfIŊ;p@gʗ/_άYHNN+ϦL2'/͋uvB-59sa;ʱVp$IaÆ,X W_zz:k׮ެY3233ꫯx%K_5k hժ-믿 W tޝ9spI'`ȝҴiSJ(o y#%?DžU?Ñ$nݺQT)ƍ#a /ӧׯO?EVre6o|\{TP<?x s=sf?[30}tTe]c͛Y|yc'vIR"[exwŋ5jT?3~-W>s^~$&&o_>ձcGԩ?fϞ.\Ȯ]xd/3Gԩȑ#:t(gy&W]uU=G۶m3dٲe9袋x2d'|2^z)IIIܹt͛G-xwsӶm[?|93 EʕyWܹ3͚5㢋.~B!֮]… ٺu+{9svܙ㏓ŀ(]t1֭^נAرco7ndwq}?oo}:IIIGt(^~e.uƤIh֬{!--?0ҥKkѦMڷoOINNL2]? 6M)]tEt҅K/Xj׮5\sT)z%9$) 2Zj裏2yd;8ڴi?%\y.̹O< 9s&qqqԬY}$+۷og֬Y|8p^zatq̙󉊊FjՊ+WLv3<佤.11ΝG}Ė-[Tr > W_}u\ۯ/_ySO1m4^}UCժU9yhݺQI&,Y|wyO>;N:#G۠A.]ʣ>[oɓ)QիWaÆz6hp /~Zlyᨰ WsK Ԏ;Z*ɑ%IR3G6oޜcjswg:w\DIΑ$峉'/mV\Irr2| E]$I9|vgӢE DyNJJbذa <`$IR1#I$Ig$I$ 0I$I`8$I$ پ};#!!.G$ISۻw/k׮e˖TPᨏ70o<:uTeH$I7x;qN:$IVZEN"-QKN:$ׯ_H$I~#/n I$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I@.@$IYW3Kȥ溵E]B`9$I$ G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$;w2|pڶmKJBL2deeqi ;vlDLL 4`t$I:mٲ#GƙgyDnj7Æ cnݚqq 'Э[7^|*[$I1Xիa3foߴi#Gdy[Gy[nz>}0k,;< ā$I$#u8ZjG<{SNGyϜ9,i B|dddp5K$I:6*g}Ƴ>˂ ByIMMlٲԫW/G{ӦM#-ZM6ymVʇ%I$p0`W]usk֬s܆ ZjTzu֯_9&L@JJJ,I$xC)S|r^yÎ۽{7ѹcbb"ү_?v횣mժUtwT,I$9ю;2d "!!ccccٻwo={D%>>`J$I* Gbر۷5kְf222d͚5۷yƍ 9αajԨQK$I*6pߓIIJJ"));ѣGĊ+HNNf׮]8ǢE"$IeuϦMݻ7;v$)) ;r3aƏĉY&͛7/%I$>?۷Gv5kVd܀hԨ5qLnujժmƘ1c⬳7` ,Yp.J$IRSرcIOO|~xѣqqqGu|+2i$L'ԩS֭[-I$RѡYt86]VD !CV&I$ߐA$IH$I0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$;w2|pڶmKJBL2%ǘ2e:t@BBe˖OcϞ=yg^zp'3nܸBI$IYG[laȑqg9f׮]\{l޼nM2|pڵkG81~ҤIp ԯ_qq90p@z¸$I$IT.pWΆ V/欳5&**?͛GCbb"Ç?/` 6+Db֭/[n~~;$It,*wظq#UTФIc7nL%HMMGyoӦMl޼9G۪UdI$IE~aʗ/Ov"m6ldɒEʕY~!7aRRR ^I$IEF| PB}DEEyLLL 
w>9G׮]sZN:Oђ$I.͘1\|9bccٷo_ٳC7>>>'I$IzCӳgOڷoĉsW^iӦc֭ԨQJ$ITaѢEܹ3M4ᥗ^T7ŒXxqŋsH$I??D8JKK}$&&[ory\VTO>d'|2eо}(W$IR1T9?~<۷o$7k,2220`%JM6dff2hР\*Ss3GF[nk׮iӆ3uT~*UT'I$pQq8ٷzjy|^2eJ~GyիW@[ BGUW_}駟Η_~IXI$ZW3Kȥ溵E]Bϋ5k昣w}O>"I$IDg$I$)(Ñ$I$a8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$JusNƌâEd;ش4nv,X@TT۷G1;'| 6Pn] _B*I ׺ E]B.5׭-$)b}h˖-94F'>>,]$IR1roPfM?x/^>#999999]vcܢE"$IppW[ov޶rK:vHҥ0aB-3qDj֬I nI$IG_Vp6+ IDAT7~xoIn֬Yddd0`:t(/2^x!z+;wd̘1q\{sժUn1cƐYgoy|$I'VرcIOO|~xѣqqq$$$0o<h߾=zFv_|/ѣ;v,k֬ R$I$ GCwDGGƍӸqcFɓW$IT*x㍿k}$I${߾}OqjI$I*ы/ߞ-%%rQB:wΝ;(I$I!P8zGr!OHIIM6~$IsҫWiӦQZ5^uJ*yWy*I$I)Н{{Ѯ];J9svidddP$I Ap|ŋYjm۶+W.X$IT-bԪU.,S~EJ$IRA  @LL gϦq }ӧOJ*x⠧$IBQ /$IcQ9jժQO 8I$I*LG(:t:$I$@[yϞ=+ѣG3zh0]tw͗B%I$ s4j(4h)[lCߟ-ZB۶m*I$I)Нe˖ѫW([ٲeݻ7˖- 2$I$@(&&m۶۶mB$I EpԪU+p\}-'/2$I$@=Üs9hтMr)_gC=/J$IRA t())e˖1p@2331c3f 33[oKOJ$IR x{1{G$ID;Ggǎ߱c2$I$@h4o{.wyg)$I$P G.W^y!JfϞd I$I*Y!kԨuL!I$I"P8\2_!(_|)$I$P Gm۶eҤI?z)ڵkd I$I*5j.M6Cԯ_/YfϨQPI$I*HQ5Xx1s3g_|tޝѣGSF|)T$I RV^g}p8͛8 B$I8e BDGGS\9$IcN /^L۶m)S +Wf޼ylٲ;2wܠSH$IR >Zh7|C=8x`J*L4)p$ITCR^=VXѣs_x,Z(GoꫯVZ)SSO=#Gk׮]2eVdΝR$I+3G9<y5kq SkҴiS߿?*Ub… >/3gd."իǣ>JFFcǎow):%I$_Qҥs,uQ\ S۷`Ȼy̤bŊ :+2w\ʗ/@bb"}K.)Z%I$O5k֌W^y%Ͼ~ɓ'Ӳe S;vPjիWDDEEc}z F={\rK^$I+P8JIIaŴo>,mҥql޼{7_ = .믿%KvZf̘O>)[,˗/g4i$DZQQQ$''z96mW_}ϪU $I$@>lfϞ7LϞ=;SgϦA m۶eԨQ=7|3>l0>6l|7תW;DŽ HIIǪ%I$'_۪U+k,Y7|S7.ԗ&&&rsWPre~mFMj߿?w :::ױ111Cׯ]vѶj*:u!I$GْINNί_|}rJjժ@.]8x / ݻ7{J||<_$Ib!3GK,a9̙g@ &аaH0֡CvEjjjd9]_ڰa5j(Z%I$Ow͌3"W^MΝYz5wqO=T ?rgee~N?tJ*ŋsٷoK,)^$I@.]ʠA"{9J,Ijj*UT᪫bĉ7pSn]{=V\Iݺu#ӧOD4hЀ8.bNʽq;wzH$IqjQeE]Tp?RrٳgӺukT@֭#[|A;pyѿ*W[o; 7Y2wӼysZlI߾}GK.m۶^$I+вի_|%\߹s'%J∜|'4nܘ &pmrOF5jԈ>Xnvz)CV$IҟG;G;vdܸqٳEMΝ#K.O \hڴ)gq-Z?.$I$K͛7SBLBժUرc rK*I$I)P8*W/!222(SL)$I$PK`v @ll,%J ...?O/I$I&n =^{-UV\r+WUruב5J$IR t/-Z`nݚzEڟ{9f͚ł 
8SXI$I*(=C%HMM3_rEq=*R$I Zeuc駟N;wn)$I$P GYYYL2deeB$I Ep԰aC?۱c< 5 2$I$@жm[N=TZ֭ _ͳ>֭[?/J$IRA Zjٳ4h>`dy.@J$IRa؋/T6nyQڵVZ$I$gvEƍ8q"ժU6I$I:pTLV^M(z$I$Hڭm۶̙3'j$I"({ャ\k n:mۖ$I$w6d_>+V`ڴiw H$IR ̑$I?@hĈT$I$@I$IEp׿C7lؐ SH$IR^yڵkwK/3fB$I EpSNC'%%d I$I*Qr~V^MLLL)$I$P G\p&Mbݺu֮]SO=Ņ^d I$I*5jM6~\~_"3jԨ|)T$I Rpt)0| c='^z $I(4hЀye;NʠASNRJU$I$@h̘1{̚5I$I ]gvE F$Iyх^I$I*2Ѹq;v,۶m˯$I$ G xs=-[;I$Iǂ@2of͚4i $I$(M8oP$It hG F$IyRe]I$I*2Yb/`l۶-I$I*=st)d&Mtq2$I$ՅBE$ILp4bĈ|*C$II$IGxvyԓرxਏ$IrThڴi$$$Я_?Ν{؍۷/'pӧO\$I$zhٲeL6c2qD9IJJbŊa233Yz5_~%YYYq?ݻ5H$IR`GBݻw{opB>Sn @ʕ9SZnM%(\0իWg РA .gwIzzqQ$IR2p UK̞=VZq2`Xn6lYbW^y%{.O=6l'`͚5̜93K$I<2f۶mtڕ-[2uT }@/^瓘@[={6͚5;eK$IG4.4i?3C@dggٶms̡sΡ`еkWrX#G/2]t!^L׮]0w\ٸq#mڴaӥK~ibccYr%{va*T5k|cӦMl޼9mڵGX$I$Ea;vPBt@ pњ5kسg^{-7|3Æ c5[ꫯ @Rr_T).\x}JK$Io _~9s93<O?=}oBNWjjjh$INM౨#OjժƍG^:,[,Ϯ]Xb5k<>J$IʗNGyocdž3h7nLѢEiҤ 'Nd>IOOq?$IS} l 8uɅ^M7Ŀ/CF?>SL_~)sC~4jԈ={a|I5kF-"|$I"F:wLTT!DGQ:l/ ⣏>f<<裡>]ts%..{ѣGs73uR$I눒K&MRѮ%O ,8p5hЀŋ$I$((u֍N:Z$I$)bN2H$IR^$I$ Ñ$I$GpQvvC$I"ʑ#I$Ip$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I@t Nu/^;1%&H$E#G$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$$ GC%Pz>4h@…93;IOO@$IHpmذG}VXW^ɹSO=ņ x'Xf 3gΌ@$I.w}\r%deee˖ex̟?Dʗ/-ٳi֬Y$J$IT}Nʈ#r,۶ms̡sΡ`еkWsҌeeeqwУGjԨcʕ+ٳgkk/T5kdMؼysXڵk^$I| /+ IDATG/=suyjj*JʱTR,\$͚5EǭfI$I ?p\tE̝;8F7ԩS#]$I;Gg7hЀŋb$I${ȑ$I$H$I0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$Iё.@$Isw#]BntrH$I0I$I:I$3l´/.Alj#G$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$$IN "]o8r$I$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$Iё.@td໑.! 
t 9/.At9$I$ G$I$I$ 0I$I`8$I$p$I$IkP bV $qH$I0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ H%ߍt 9|8yK$IR>ȑ$I$a8$I$p$I$II>cnvUF||$o4x];w>qѬY3{=6lΆ hذ!EG%=='x+WtRy8:Zu߾}yh۶-wu_~%F/wߍH\\}L2gyL&MDZزe1=t论*j׮ @=(Y$O=ӦMcѢElݺ?s9[nUr=/G}-[:uдiSƏOϞ=Iq]SOѥK^~жTwiժ?81I_/œ9sBmvbԩt)GlFAjՈOW^oa <gu //ȱ9Zp!ڵlٲPL222MII!!!7ҦMHJJ#+++gFW\~… Co .L֭OXfM^k #&MPJ&O|@RGr]/Y={бcǰm}8Up$|ԫWW_}56sLrի߸K9r$ݻwW^y޽;f\p<}4k%O2;vЧOFE5j]v7++͛SdIx 5jēO>ѣH׺u(Y$;w$...G… >a޸q#6m }R:u|cX9zΝ9NNSN׏ xWhԨgyfXE1f^y啰Q/-Z0e:u͛yhٲ%ӧO'裏Çٳ'*UaDgffҡC @޽袋;v,}yNEiiilٲL/^̐!Ckf…l߾"EP7L(UT*U_;w{%h\{G/^_p°~:q8r$G`ƌl߾3f:nʔ)-ZMe˖OZHHH=fܹڵ;#Cg`￳eׯO0޽{oi_5i҄$ʔ)CǎIHH7ޠtӇ[ҡC/_իYl@h-d%;G㺾袋[.Çgܸq|w̜9^zQ`A#G~$%%ѤI&MĎ;ʢm۶9Y4sΦM\r/^<ü[9eJKK {KRRRX[s'J*DGGss9P/^uU5x.J*1tP~c4}efftCtMDEEq`VZuLyb8SNr-O\uU+V,GlyWrƟCʑʢiӦۗUƍIII!;;;TTT)ԩS'{޽;} f͚;UNWjj*%JpJtt,Z5kO?Qre8 <̰~:1ի~!\TXsr饗Sr4}١͛7tDgʕ^ &=aߧIxիz=w\K?@%%%kҥԬY*WʕCCKRSSIII9^(#x4h~}dee=Dz={uV dԨQP#F#A 9ra>_ohѢn3f~P{իi׮]$Jtw]Yvv6? q?G֭7jԈ^z1l0VXAf(X k֬aʔ)9mۆkhذa\s5\},_3gripUVbŊw}lܸD^{5!Ӻuk8 ^x?O߿?SL/箻"==5jн{?;"335k{n&MҥK0aBؓdub0IG /@Zx߿?є/_Ν; ?# /0o<֭ٳiٲ_`AOΝwɰaÈo .8և')U>,+K; {0@2eX`{/< e˖lݺu۴k׎vJٵk-o_|Gt$2 G,&&&%p[:vHrKRBn6֮]o %I:QZh_|1TX_|1G?s{nLʕdɒ4hЀ9sp_+V ))ƍj9s&5H"$&&r3iҤ Ү];ʖ-KLL eʔ{!###>kڷoORRqqqs9<9mݺ+FѢE޽;;voĉԪU8J(AǎY~#==cdž*U]wz=n8 u;_{}\~.\ҥKcsNHJBgΝ=FIN4Iگ+WҬY34h{a~\oРA 6=zPNmƲeOhڴi|4oޜڵk3mڴн6Ǐ禛nZj׏bŊ|rf͚EN2e ;vO>,YK2j(6l)SBϸ(X ={|[ӧ3tаzڷoO 6l| cƌ!99Ç :о}{z͛5j 6d+Vlh}ԯ_?Ojhݺ5L>[olnPC9_-Z믧}L:}RF *iݺ5-gϞ{\~իWoR$(AFX:b| g}vN2\wu̚5UVQlY+jԨAVVV[˗/Oƍ?~<5k䬳bƌvJJ SN%==ŋsWsek饥QL;<ϟOlllh`0zb^FFF|w5jCm֠ALf͚QP!ZjE^HOO祗^"99PMrG.]>u],VX0666x9 Z6o޸q@o kׂ 4UV vmUVyrիWo`僅  )R$x饗G ;~0666X|Ã׿r`QFjժ娥[nrʅڵ+8|`jՂ111ŋkժ/|+˟f̘!C6Ҷm[ʔ)C G~vv6Æ B r0y3<͛Vq%ߍt |81ǪU(PZĐ!CPBX[#T͑8q"k׮;̱lڵ4i҄7Ҿ}{zMtt4}Gfڴi|<4oj6lԭ[>۷/O<zVZtЁ жmP믿n//#Iyv!ڵkGcb\wu/ԫW/lСCy#TKMMe֬Y;6ҥ*;;]vR|駼KsL5jD"EHLL/fҤI .]v-[ʔ)=CFFF}|״oߞ$8sxsۺu+)))+VEҽ{wvؑĉUqqq(Q;~;v,W\qRZ5FYgE6mX`AW^wڵmۖŋSpaիǬY3@ |oG xwYn]hZ`JÇ~(Q8j׮ogϞ=n^~e;1cƐC}ʀh߾==z`͌5 |C?---N;Y&[&::iӦѫW/ z [otЁ=z˜1cڵ+ksKٹs'wqŋglْ7x֭[}=o'H"[t吶5yd233(Q~!#GW_ [b 
6$&&^zQ\9֮]}7p+V˖-cر~a3x`:vH=شi< }˗/'11>dJ.z={RvmY|9W^yeݻi֬ 4'`={ ӪU駟zv`ÇbŊ`TTTX%wXq@A DQADkT$آXbL3bGK R(&AcbbIDB;] Mk<<7;;;Λvg΂:۳׻oGfKر̗ϙ-ׯVQQ0X._˟/Ņ`۷og1VTTUV5 ۗޣGX6mXDDck'%%Ze׾.]]]]p`***\ydž(V\\СC>]3l\/^0'''&J٣G#c bciQCCM8/|0V-Z^]]b19sf\P!D ?lmmzחdp`iiYoތ _~ؽ{7չeGÇ1|k?}1>} 0pY?3xrY SN}ٳ'RRRP^^TdTWWc἞cccXZZ"##Ql׭['x[[}+++CEE<<<ǏsOcǎpqqʕ+\Zjj*\\\xOyR)&OƦz7Fyy9׋Σ+c̄ ܹ'N?D6mx7<~x !VCGo\dbr6d2 )˽}nnnǁ1 nnnu@M EBjj*yRUUEhh(N8I$!##B̂ E1c7o^)2,:::弹~+HNNFPPy׆7 gggÇ򨩩a֬Y =~cݍkZl](_* _G"<}Tiںђ%K0x`XYYGPP:v3 qVXXŋc(---+++F{fHT*E^^cu6nݺ㈌ɓ'sx묨wcRXX^z _l#T[n5:kװxb8pkB󨭭‚7DHA]]prr/$l~+V+*~իy ZUzV!-X1;˗}+--"X 8qw=z͛7C.sCWYY4D"#FK.j$)~)j166 a_<2***:=z@QQyzՅ\.Wz7D,CEESv._cŋXv-/ߓ'Otttxu_`9_wVX)))h󑒒J^u]kMUUHHHP⢢">>>qgk֬T*EϞ=Լ@EEwOP:lO"Jݻw7|Ր2\|bWW6mSSSnΖ9[P!DhgϞ>}:*++fkgg8::gΜAbb"eZZZ8pرcpppT*ŪU0i$899aԨQǹsl۶ 666͛7!JtSLL bʔ)077ǵkpAdff6r,[ ,k]]]\z)))2e k׮Ett4222x=c |}}1yd<|066(5Ղ ooo̚5 2 믿=ԩ0w\A&i舤$RPWWGJJ #FƮ]`dde˖xQXX$&&rsjSKҥ ի8t\r{իWP̞=0p@b0a ///8::B__N޽{111߿?{=ܹs q…Fݻ#<<W\5?/^cᰳX,FRR߿#Gr^eΑAP\zee%x=OuqttD\\/_\cc&SM駟رc֭&O [[[?cǸ9GSN͛SN 8y$֬Y m300@`` VZڵk^iE(3gjjj\Wso>deeʗɽgߟ#gL<۷oXfff5k+<HNNƉ'wB5۷0zcǎ1GGGڷo6nȅP˖-cݺuc2iii1|r .OP gvvvؘqg...LKKIR֭[7r/^<==kٲ%cLOOijj2ggg*X7//Ӈihh0ccch".G\^^Fd2 ]RR-Z6d:t`}{./;;ӇI$fhhBBB؟)scϟg]crE(Rzu(߳gsuuPuc>>>ۛd֖XUTTpyFx…ͱ‚ihh0[[[m6A>E(ﰰ0AKGfL&cDz3g{ә5H$LOOuޝ%&&*-uʻ`cƌi[n0^BywIP{y6m4ֶm[ƌ'۲e ߸qX˖-:ر#o۾w `M>}WVVӧ-[2HTgX令/${m+Jl2fjjՙ{v>###u~ɛ'b!B xyy!77f~ ++ vvvo:5%&&bܸq(((hKÛL^9"Bwݻ7o~ y==z45#V\Yf!́zy F IDATB뢞#B!B5!B!5!B!5!B!5!B!5!B!5!B!5!B!5!B!5!MHKk׮ƍפrƍfYPvUkXhD"D"vV&D_vUеkW|Go jB[HoozMcԹ ر#ttt:`}رc駟0wܷ]*_:u\v-f?Ӂdɒf/wͰ&nݺF3̙3н{wO|0a :ˬƊ+`nnMMMt |CLL ]_B^ۮ!e*`7гWdRm^͎;Yf 7o1uT󈍍ž}pŷPWg[pΈ|cƌ(--ƍ~ M.sڵhӦ y}ӧO\88,^\nBCC1l0ѣ ųg6~PPۇٳgC.#>>DZcУG.ߎ;w}[y?GHH#F@EECbƌذaCB!W~  @׮]v5ވ ?3,_}[]ݾ}زeۮRx455vU lڴ #GĶm۸@XYYa׮]8?b?~ <뙙4i*++I&AOOO8DZ g?OuVWZfaa!+᫯prb1 o݄4'zK/'''hjjB.cӦMJ<稢Ѱ&Zh7779reffx^zAWWRNNNصk6lLMMmӧO۸|2CCChii .{ƍL===?O<۱c9W[lA>}`ddMMM#66VM6DZcxlΝ:t(=z =='.."7nॿ< BAA7,P1&!!و4@OO˖->=zCE۶mSSS|x`݋/bذahٲ%`ccrii)LI&).m;F͛7yy8*xzz+** hjje˖ٳ'~Gv/ 
4:::044ļyP]]˳rJZZZڵ+RRRxy*++!0{l|7~@II ?~<uƎ hhhuGaa oS<ϟ?GVxDjrmڴANN~GZS esйsgdffgϞֆ%prrlmmqƍVZACC{.&Lݧ;wyy\1c`ӦMSUe(--y3fÇVPSSäI4---? ox;55w^TVV$0m4\~NBAA,A=GЯ_?"** Q&** +VIЭ[7̙3?tӧO]vž}O @&ٳHOOǨQ{'O0m4hNš5kp ٳѳgOaʔ)h׮ c >XbO+W,_>|8&M"Y8{l& ?lْѹsg 4طoBBBCHHo\1&M¸q`tjz?\]]s̜9/RRR0hР\ŋ1gܽ{9@WW~5q#!!Ϟ=Ì3```Wƭ[r233 Ji 2r| Μ9-[UVh9&M½{'OٳJjĄE>Ô)SеkW8{,嫨@~?Ƨ~ L<˷zjbxvڅ@m>|w掙)1h o6mlll y@@rss1sLݻ8|0nܸSSSG6P_+]]]8::b˖-pvvJJJ-Z~d7֚5kTkhVqq10j(1֭È#c̞=ӦMѣgaС믿 H+PUU̙3ѢE bxBCC W^Ehh(ڵk3fh~N>oѣG^ÖKKKQUU`9={,z pY9βΞ= Anݺ1υ2R)e*wޝKW4СCEH0B++((`u.Kyߟijjׯsi/^dbaffƎ}ԩc2Dc_~aRgϞqy3rJ^L&yݻwg΂:۳׻oGfKر̗ϙ-ׯVQQ0X._˟/Ņ`۷og1VTTUV5 ۗ^NNܹ3/ͭw{l1ץ+ .0LEE>}K?x 0VebbŠy:t(羫>s޽ ĤR){Qud| s^{ǏS1gEEEۧR:u*sϞ=rHR$''Ç =n:j[YY***၈<~{ ;v.W\RSS{"+J1ydDDD ''666 ֻ1˹^Pv]]]'&&&sN8?mڴɓ'X :wMLLо}{dddp銋!ېdB~~~zxc <@UUC뀚@^AT󶥪P8qKH$PSSCFF7T 4*T-xR)ѳgOn'|?~ d2obttt y(+HNNFPPy׆7 gggÇ򨩩a֬Y =~cݍɫ}->}ZҡBQH$L&,7G"<}Tiںђ%K0x`XYYGPP:v3 qVXXŋc(---+++F{fHT*E^^cu63BR_@Ǐ#22'Oy*++5^޵Iaa!z%gkk ~z5R)nݺ׮]ŋq:cc Σ6P]]@иc ,]D0` ꨣ#h|~˗/ǹss.]5,jabbmmm^ˍ(---|ǘ7oУG 8l>ӧj*.O>С P7M6Fڶm+H;wÇX~=֯_{9VVVԾ[]K={9`uh2---ܹsIe2(yqDiv(((}paaժUظq#o|||o>cMVUUPRRy7oĸqK,+MW8H$BZZҼ\xzz_~%ڶm uuu|[Cun~4f p#TVV eee?>lllBL8GX4\qԢE ACzͻccc_ ..Ҙ󓑑xxx`Æ 066␘(XU~ݻ… b =z{QVV֨ 񙑑K. 
ޣccc+++믯Uƪ7cb̘1JvԩIui{ yx՘utuu'[Ƌ/PRRk?{ <;SoY[VN2EZCUo2c(++$M!D@-//O,''Qe`ewwwDEEG";w1l0Ã[x]p,b۶mw}\YA.1sszŽŋ8pBCQcjj^|`ff){ ^׏>???ٳ;v9sWff&sN.ADs娪\.箑A kѢ&L &ÇpssCTTTՖmmmAm޼eA] #<<999ܹ3K.*ی3F=|Y߾}hywEE*++?z !HP]]-N2333 z5껧jk=]tDRTfҥXhs3gΠ_~\SN1-KΝn:}ҥK榪$$$(}IqQQoܸq׳WQQ5k@*gϞj^y»('HPUU{ݻu>jxyzzBOOO 6@GG W{  qiO>E||<\]]_bW'6mSSSny sDQ*::ٳ'OJY8|0003gȅ}8>}`8v Jj*L4 NNN5jq9r˖- p5CWWW^EJJ Lpڵk ^XC___L<>Dll,'M`$$$f͂L&C||/9rvvF޽eʜ#D%K ,, #F=ݻwcʕ`_}/_ǏsruuE@@Ν۷oC.c֭q}N8p}ii)>.]@MڬYW_pttDrr2N81o߾=&o&(!MPތ1v1YƍP׵{ٲe[nL&1---fccÖ/_^x[Ύ<.}Ņiii1Tʺuƾ[nŋ'a-[d'OBnݺld2d,""[ؿ"zW^'%%1777&HD"a666lƌ,''GPfFFᄕٻw/spp`ܜ},66VfDi8iWWWA㼼<&svvfuX>}366f-bplȑL&1н%%%lѢEikk3MMM֡CGwrY>}D"a,$$矂0nj1vy6x`5-W.--WW={0WWWy ]1üyiK,aNNN\]lmmي+XEEgLOOOp.\(,,,e۶mS Xͽ4zhd2;v,;s X޻wM>Y[[3DXYbb2,**1---&ɘ;w/_ii)`8iu0`+wNe(W:waӦMcm۶ejjjؘyzz-[7lْ;oF{ ӹxee%>}:kٲ%Dun72+++,,,XLL y9'O|&֭;r`}=oĉlٲeԔ3w{|FFF,**}Pu{qG@kh!!##^^^ͥ߿ʂۮJLLĸqPPPeh!H{F޽ykѣaDrJ̚5!́zy B뢞#B!B5!B!5!B!5!B!5!B!5!B!5!B!5!B!5!B!5!B!5!C"QQQxD"\vKk׮r5D"siQQQDoRBQJmWMw* dߘ]G"&&'N@II d21~xպݺu GΝj]! 9"5AAAx)vU,22{Fvv6BBBqF̙3=!Ck׮:]h>}Fw-DGG#33nB/#B[#!v5,11K,СCk.q̙CUUUJ_B? Bꤘse >R-Z@XX={ի:ut}kkkx{{Y9G FΝ ;;;$''+]_~Y`hhLx<@p9IDATp0s1+WF CCCgΜDDD_5a]Iر҂Fx"z mmmO?=zNNNC$ =B!DG >Ϟ=Ê+ヘL2@и#;;ӧ1c>)//#Fb bذa8r ̙3h 4 }o[wĉ={6ڶm+Wb^o._ 6y|r_~%fϞx/oii)N:/ ͛4-,Y2e oߎ۷ݽK!ѸBHͱo>3 J~zcذa9s&v؁O>[gǎH$ 77IIIܺ'N^^^ZBjj*D"O||g 4ڵk c֬YXz5Wއ~(aҥK:4yruDFFbٲe裏@tׯߺu | 333lٲ @V0`,^=zx*!B4c 3gRSSoUUUD;>KRٳs/ĉyCԜĉ4X]ʕ+\ZRRD""##ۯ/vyy94kQrr21|pܿ366%222xuttx uuut֍Bi:jBdii,ˡ Faa!?p]g,,, +++O255}mV^ZZ}.((;&M*>|ؤꓗ,--ahhtݻߦM!BBhժvwwwر|u+ڝ5 ++RH$BZZz>׵ͱBg8"4(//|TWW]vj~5 Xr%݋ɓ'r|0x\r9:&YYYի W c \oh !Bau[y͚5piAAA(--EHH=zZAnݺsyy9tƯ\nmC c тe{` qehcҤIa8pu X,Ftt1Fr;B!ԍz! 
z* ~Î;0j(tٳxw_y{VVV8q"N>VZ믿ݻwu@޽ѻwoOu1^eĈqY{033Cqq1?b׮]\.Dze˰`\v ~U`ʔ)ord2lܸH$pvvB!GA!!;,^χ*BCCg cܹAk֬9ssss|wPUlݺ;vĖ-[0gk׮pqqipe˖O> PRR}}}tàAT Vzڶm~5,PSSöm۰`L:غu+5!z%)B'o-͈Btt4вe^>]&"G!iɛGs!͂1-[W^0"Bȿ #Ǐc@VVD!JqDy-EEE5jd2>WC!O@sy 4B9GB!jB!BjB!BjB!BjB!BjB!BjB!BjBނxD"\vK[Sc=z"G}Uy%?n8kՉB!?aaaЭ[7̛7=ڵ _}NRL$R)zꅃOF!o4i*4Çt0alllP\\cÆ 6mttt41{FOB]]IJMMźul yyy!881\~6lݤm5͛Q]]FA!P$?D"irب˖-[PXX_...e导gϞA]]***lXYYa̘1!CW~#557Z>!oA!uH$ŋ1j( pe :D׮]~A.\@>}6m`ٲeJ{)9Zf }}}tv޽`T*68x _ gS̻Qٽ{7-Zhkk9G'O!HбcG^@͜u-Zl7ob„ hժ444`ooZƍD"}<\OٜݻwJС?BB4l0XZZ?c .\+LLL0|H$$$$IIIܹs{Fee%/66ZZZ ns͘5k0<{ ϟɓ'1jԨ:333CUUoߎc֙o…(++Í7j*),]u:r֭[#,, Ƹt80֭[8ro@YYJKK!ywE! CCCaĉ(//>}}Eaa!f͚wy۷oO?ඏ9{}ʕ+.]¯F՟B7!A:uxzzO`pssüyʕ+QTT'O[ncҲm///7gϞ̙36檪֭[#332[VVV8rHۺ>c(,,ĢEPUUC-\UUUB-SN{g(@KK EBB .c IIIc;߇79[51eʔ"c9r'B qDi9|0CCC_dd$޽{ׯ+%np̓uKKK̘1kۺuklذoFNNbbb`hhŋc˖-*w]s]2Ƒ#GpAnד'OtQQtttפc 6;wѺuk\|.\CmY X#G[qxb\O>pvvɓaggPRR&2ڵk?[ۡ>M4 %%%ӧڴiׯc͚5ܹ3lmmt|!FyemW㍉dXQQ`YAA fLMMD^ϳ^z1MMMfbb–.]ʶlWrzz}޴iswwg-Z`L.9s氲2.OFF"##yۛ3g{wSUUe[fÆ c'n=bFb2`fffr#oŲ ^/¼.H$cǎl͚5J6sLfhhD" ͘1C-lݻlƌm۶LMM}X޺ׯ_g bڬe˖,,, ;v,1nj;Fm0ن\ |:C_ dԩB!pݮOS^_,zƽ'99`;er  8H"#$Vs#<k2k#XaZ`cZ3>o9OPJI5˲$H=˲֚R ,Zr~4M)0l=Zk}y{ď#X1Ga* $GI@qD$GI@qD$GI7=huIENDB`python-diskcache-5.4.0/docs/_static/core-p1-set.png000066400000000000000000001303541416346170000221100ustar00rootroot00000000000000PNG  IHDRGe@sBIT|d pHYsttfx9tEXtSoftwarematplotlib version 3.0.3, http://matplotlib.org/ IDATx{uۛaf5aJ"e"dWT1m%.t@I+%Yi}SHkٴHbQTT Cc1לv\z>^o4w 0$It+H$IRA`8$I$ G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I :P(ŋB,^P(СCگ BP%a8S_D6q< xӣG/'ӣG;_*UD}v "))&MGwyI*bIRѣW\qETٳYv-۷N:Q}_qtԉJ*Ugロ$233ټy3 ˖-7W_ aʔ)ߥH: $I'W/kҡClL2{G*U" D 5kK,YfW\RX1jԨeH:KNrҥKi۶-+V$66s=FeyǨS /^ƍ3}q=zyF-ʯ{~( qWk.nVʖ-KBBM4aҥ|w 8ʕ+KZx嗳k4oޜ$Y&>(G9Z[lI(bڵ'1cP{7Ҷi&zMjՈTR\r%~ٳNZ"W{ィ?=zBѢE)[,;wO>rK'7mij>Kڵr%_m۶%99XRRRh߾= .r pRLbcc98p {2J*TR%YR%bccV?8p82vСTZI&E}'N9z%xHriݺ5%J]vTP46lq2dHd޽{iѢW^zz;v йsg>#}Q:tKef͢~b޽\~Wo&--\s5,[n4ڴiCFFӧO禛n"%%FE[o_bŊ\$%%|r~a|Mx bb~?iݻwgK5*KIc4hk9|07ofq.]:gt<8ӱcG222h۶-ժUc֭`ѢEԫW/˱n.]J֭k)\poȐ!<#/^:yw2e 
W]uUdljj*CTRiӆd>|I͛Dze(QD\s5l߾VZٳy8|p;Ww^FͯwȲtdN$)Gu1 ׬YoQw?xTC\sM8 W^i_hQ2$Wj?Qm/bc ‹-j@n gffF_z0.YdM6CEz0СCԱ^|0<<ϞϡC‰eˆ322v.\p^z1cd{d%;+W͛7G[.oV8IIIҥK?裨~a8!!!\nݨO˗oڴ) ,Unݚ˖-?ƍӣ{ׯ ϯUVQɮ]‰i߼yswVf͚זөW~$)gi9{aʔ)\zw}Q"KMbŊ1rH tܙ=z4qqq7TR5kDg /dL~aJ.ԩS8nFvł LBff&ݻw=&$$<3 :~]ҠA:u]o~^z%Kjj*]tQ/bzիY~}w}%k?0j(*TbŊ?3{q=zN:~cƌLi߾=;r^I:IRҥ *]v7t͛7/%'333+222ذaC^Wί~… SlY;;,P+V>x k׮L2<3''66?=zs1i$Zni4iEsΑv1x`˂ k˹袋Ny;G~XBWxqj׮M׮]#c-[ڵkOظq#wSÆ O8 Blgk\lE_>_=wfϞ=QK VZ)))ܧt땤$)uؑ_QF /0aׯc=W_ xsAbb cbb~IOO'{nfq4iByHOOdɒ?aݺut!^ʕy:t(Ϗl½]wuJso޼g ;x~r܉{ ݻ%Kԕ={pџ86qٹO+IqY$֭[t|MG}D6m"KYhQ~J:խ['?vD9ݺuȑ#̘1Nf͚̘1={rJFcǸg]'Dfw5+))t:tR,Yg?ʕ+;r+3IR.JHHE"---G٭[7 *ĤI")S&j~-gϞ#]N;q2 ̟?ƦG-'yQHr[o4]v?lT?ܰޥKV\ɰaN͛#/ ꫯri|z'|nMzz:Nx)))hт˗3zhvMΝ)RHԸUVo߾,ǜԳgOHMM#cǎ3NmۖmW^l߾=˗dɒBE.s$I9쮻b۶m\~TREj*Sre:u;vX>S?1yd6mJٲeپ};6lg].B*Tw)Bʕ Br-إCz+Vbܸq\s5TT46o[oEϞ=?~I{,\G^ɓ'3a6mOɒ%ϙ;w./ҥK3sL:5jĕW^IZBlٲe˖g>|z!}Qj֬yѮ]xiԨQW^y%#F`РA\p\{TZ_d6mzRWSxq.2.]J.]^: ]vԮ]J:$) Toȑ#ZCQB:u=_S+p8LŊO9U*I$IPs$I$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$>6{dRRRr$Iڑ#Gزe ͚5#))o8 `ɒ%t!ː$I#gϦ}>Q)))~jI$gѡCptժUVZ\$I$oyqCI$Ip$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$bI$FCw Q^%,#I$Ip$I$IH$I9$IRnv.!:#KPʑ$I$a8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$I@G>wqj"!!J*q7qƨq=z  eQFc;v'xUGڵ>}z^$I,& ?; 7@ڵٹs'cǎ^z,_/82666>Y2bzE 3g;w& ѩS\?'I$IW G `ڴi-Z4vM7q%0bLik׮?ym۶1j(رc@f8p 7p Ν$ITeuM4 F\pjbÆ Ygffl7g222ӧO- ?[lٲ+^$I ڵ2eDˉӒ$IT1?o߾4nܘݻG{챨q:uz<̜93¡CrܸHO7nAOC$IRuF,۹s'[&113g PB,\0ϑ#G=|pӇuE̞=4F$IRAT۷VZw^.]J=.]H[rXhp8jiݎ;~$''YH$I* ÇӶm[6n믿E]tRoo8s"muYv[bE_$I٫Ln&-[/Lƍ9|0~maÆiٲe})RqE0ǏB 4i$wND$I.{xh۶-iiiQ}ڵ+;wnݺ|ԨQ 0oC{=-9>Ȉ#ի 4`Μ9tܙP(DNT%I$6 0iӦEnK.#F0ew}ǪUT 6ꫯfĉm۶1j(رc@f8p 7p 㳔$ITPeuM4 F\pjbÆ W^y6mDUW]EE̙CFF}􉴅B!?uV-[g#I$+D0vL2Wk.,c6lի#W^MBB5k2x$IW]Vw"SNe۶m<#رre[\98rرe ؾ}O_g>;s$IT1?o߾4nܘݻp!bccO)ƍ#5559H$I*ΈpsNZnMbb"3gΌl#GQcOj\v 
7gѡCS<I$IQGUVݻKR|H%qǗ؎;(UTjQrXhp8ji'Lrrr$IT0 >L۶mٸq#:]tQT 8sXre{ԩS'N:G_<%I$)/R8NkE{%I$)/V=}]nZ$IrEpGRxq8p@%I$)/ GFB5\C?>)I$Isӽ{iӦq2k,bbb8v =XB%I$)7 GG!..._Z"&^tE7.Xѐ]BuI/EeuUVe…\>-[FwEŃU(I$Iy Еnロ׳uV*VH6m"j \$I$@;$..yQ~}~HKKcΝ~9R$I$@W^+K{RXreK$IRȕJ$Iҙ攮h'B曧>I$IK;F(j۲e 6m"11;͛7w^?|RRRrZI$I%/^ߦ]vc +)I$I-І |Ad)݉TZ?0$I'3c =ѣ̘1B$IDeuw~;5oZj|駌?5k0nܸ)T$IrSpԻwo .̃>H޽#;مa9ƏJ$IRA({\/ʕ+s饗Fv$I.GKLL 5QF9q8I$Is9֯_ϦMHOO'g֭[NL#I$I&P8ڵ+ C@(2I$I*n?gy7,Y2$I<(; <;3$I|!eʔ!111j$I|(~L2̜G$IEeuիW'33_z뭤Pp,:vdI$IuM7{1P+K$I @hѢE9U$I$@Yf9U$I$@֯_ϗ_~ @ʕ袋rВ$I9s0`⋨USOѮ]SH$IR y>|8fb֬Y >p8Lǎ?~*I$I)ЕaÆQvm.]JBBB]vq4mڔTZlPI$IM}t=*@=L!I$Iy"P8#---4L!I$Iy"P8jѢGfٲeYVX1cꪫL!I$Iy"=GO<7iӦ4lؐ /O>{d)T$IrS+GUV>஻"==3f0c ӹYv-UTɡR%I$)~Qrr2O?4O?tN#I$I"ЕGlѣGL!I$Iy"P8뮻hҤI_~9sO)$I$)O Gw]7o^)$I$)O G۷oB /_m۶B$IDpTti>l7l@%L!I$Iy"P8jٲ%&L`Y׿VZB$ID iذ!ڵVZ[s璜̰arPI$IMQYr%<sa֬Y(Q.]0|pʗ/#J$IRn r1i$0ws! .N$IJpt\("66ŋ$I$qmrJZlIb(]4K,o},^8$Iw}M駟ҵkW;+S c„ $I( <5k~zyX"$I'={{*TΝ;O`Ȑ!lْRJ 8qbq=z  eQFǎ'jժQvmO~5J$Ie!C"Em۶Qx>7|#}"mP?lݺe˖v$Ile]Ƽy֭s{&33yQv)֬Yç~ʱc8_~= \rw}ԫWcǎ1|ƍڵkYx111?;([lʕ+۳믿fQm}Y$I8WNԩS;%=XN:Qzu|AfΜhСCfy\\\?;ƍ#555$ITZVf͚,`/X\ve=:PqAߟB pH[|||+Vu]_v-w^|IW_}W_}]D6mx74iTVÇsf g#FdɒL0'r0e:wNI$Ig@GeL<$&NHٲeؿ?3gΤo߾ /~vɓOx bРA 4(@U$I~iŋ3ulnJbłL!I$l%dQaۖ.!{,CT)T9yxI$I5vK+zIٲe)^8ŋlٲzQ I$IRALӦMٻw/W_}55k֌K̝;~ /0G$I(=*TիWs%D[+xYf*R$Ir[euK,ᮻ.b/^d I$IQFF+V SH$IR֭?Ͼ}߿oԫW/$I'sJ˖-Q={z|'L4={%G $I(hтy1p@FWN&OL(I$Iy!C`*V^Ν;#5\2{n$I$)=G~?s=..`$I$sXbl޼P(H$IR[]˖-Y`AN"I$I&P8zٸq#r o6۶m#---ˏ$I$t6dUׯgڴiَ 2$I$@OI$IECeH$IR tϑ$I$R G=uɶnݺB$IDp4sLZjm^ˌ3L!I$Iy"P8ꫯ8Z*_~e)$I$)O GŋyfL!I$Iy"P8+0a۶mҷe׿Ҽy SH$IRahذ!j}䡰֭^ 3lذ)T$IrSpttRN~騾35k*PZC Cw$@v,YoM6pyQLI$IR^ +SH$IK/-B(9ݺu;$I$)R8ѣPN:QhQz B#I$I)͛7PhѨג$It;pTr|-I$Ig@$I_սۼ lڴtpT(bڵA$I\(=S 88.BJ*SuI$IR F_ܹsILL̩$I$)t`$I$(5oޜ?0j$I|(=쳼<䓤TM$In6x9(QDԏK$I$ m??TPK/ $I$(?֭[3{l y$I\O֭ F$IxRM6mXtiN"I$I&P82dׯO>ZݻwG$I @]xY &d;.3334$IV rI$I7СCs I$I_n3'I$Ib8z8p)O~{S~$I$S GӦM#%%>}xh!## һwo*U+I$I96mO>$Ǐ'66/URdI0l޼u֑%\رcҥKn$I$vJ( ѥKtի={6˖-cٳҥKSF ~ڷoOzrpI$II[]ݺu[nN"I$I$I$ Ñ$I$#I$I G$I$I$ 0I$I`+۶mo_5_=+V$33}H…sbI$I5a @ժUҥ  
`ƍ8p*UH$I#G2zh^x p/11;+.R$Ir[psѭ[7N:u׮];r%I$I @h˖-4i$d I$IQrr2[lɶժUTT)$I';2~x6mi B_bĉp *$I<(R\9ԩCnB<4mڔVZQvmSJ$IR Y|9w۶m#..%Kw^ ҥK)VXN*I$I&C`y衇x衇rI$II$I/E+G_~%&MbӦMG=~ؠaΜ9A$I\(M>ݻsQHLL2u$IT G F̜9իTM$Is7p$I$]9ꫯr4 Kby5]$I @WyL̙3sI$I]r%SN$$$PbE .5& v@EJ$IRn ƍǝwI\\ w v[w Q^.A$I@h4i҄_`$I$螣}ѥK$I3^pԬY3>ÜE$IMp,Y'x={TM$IstEq1 ĠA;nu TNP~WpI$ G_=PA\$INMpwaRX"UR$"])D`1hhF$ V-` PP4&HEQR,,,mDŽa^f}y3{f5>gƍwʐ$I:k$I$TqXg^z%zA( ?>={~e$ItV8JOO' q5OzzA B#I$Ia~ǒ$It;pTB>c .Ljj**T8^uI$I u7d袋1cE$IbQǣI$I)o-I$Ia8 BǺI$I# G^{-qqqSaA$Ib−KVZ걮E$IbQ^޽E$Ib2H$I#I$I G$I5GǣI$I)I$I#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$P0Hkmr.!Jc]$Ȇ$I$a8$I$p$I$IH$I|6lڵdɒB!ƍgovڑLɒ%ѣׯꗛ˃>9Cbb"uW_=["I$)hÆ {|f7oβe˸ywhݺ5;v{=0h Znرc)_<ݻw^;ޛ#I$)׷.]4uY̛7 g˖-?|4l֭ؐ[3n8 ڵky衇y'ӧ-Z;K.ŝ$I3G uYӡCp0hժUVe„ ᶷz;w2`p[(Y>n$IFGbڵ[GMkذ!??>EFQL$ItzEFF{޾J./IHH ##3egg+111'!!!ϓO>Ɉ#Eɒ$I>%%%}i۶m蓔tH2`tѶl2:utdK$IWNpgHܞu{ȠdɒEKf̙A1nϼg}~דFZZڱ,]$IR>rߐL22o޼isΥnݺue֭|K$I:=ꫯfԩ^:dɒpW\q ' AO?M2ehܸ [$IR=lܸ1|')Sfnf+wĉ袋[bԮ]޽{UlYnVFΝ;iРo&g_ `%IXGcƌaʕǓ'Of\{+Vr1k,?p]wOy衇N?%J30n8T޽ .I$IKG+V8~5k;h 0x`|I$I:I$I2I$I#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I@X )Y[\KRfX IN9$I$ G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I` $Igtu(I$I#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$ ƺIN%\1>%D$$#I$Ip$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I"裏>" +履~JӦM)\0gu$+++FK$I/ ƺci4h r/XK.5jfƌҥK6mډ.W$IR>rJf͚ѹsN)Q})))TX믿6mڜR%I$3İm޼]vEoڴ3fp׆@Ϟ=INNf„ 'LI$I)zMJJ \tE̛7/rJ ꫯ.3`ѢE3f͚駟 J.5ҥ={ױn:֯_Ѷlٲc$IbG7q;vsԩS3}tHHH?111<}|IFql $Io(/+W+`䐔ۣn۶-<} @.]"ږ-[FN]ђ$Ib Gʕcǎlٲ%9I$I:ٜg$I$h$I$ Ñ$I$#I$I G$I$I$ 0I$Ip|ϑtz.!JX I#9$I$ G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 
0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$Ip۷3h >lhԨ3f̈uY$Ib G3V^$I$iϟOժUIIIhoذ! ,EY$IjX]FFKj?wuֱ~ElٲcXZm Vĺ(Wmu ~ܴ&%DYsgKob]Blx\G<*q}q}ۏh*egg՞?O>$#FsZNMb]@c]ɡVXW0>Dx\|t\^ .;QRRR)r۶m3`tѶi&,YBڵ ]ҁ,[N:oRrX#N='۷zjZhqDVtҬ]6=##>{󦥥~uZ\25k֌u!kq}83F{V7d[.K,aӦM%I$NpԹsgrrrxgm۷o^QF+W.I$IjX]Fҥ fݺuT\_|+VǺ.(otȓ]v%;;SyfN琺'RX1Zn͆ ?#999|w>;vp7zHl† hܸ1A 7Yf|.IӪU+RSS)W\s ɼ)SqFuYd z+{o^gt .QF< XiӦѯ_? *1}̑jՊW^y[CΝ-]L\κuXr%UTZO%ZϪU:t(ovԵLIMMh+QD|/ UV`yTVvx饗2vX..*Wȑ#;INNƞa8{۶m[DI߱8aݺu ..?̚5ŋ Q3Iн{wzr饗Rx>/繌}Cʑɡu/ 4իSH֮]Kzz:zvkذa qM7ѻwo+[.? ))&M@mܹԭ[*[*U#C-ZDFF'TC^s$@rr2O=Çwtڕ?EM۵k7nvo.Tcǎ%pG}u9|AcH:>S&O{+n꫙:u*W},Y.]ĢTIhrss;)\pԵ:9xH:^zpz-ׯFbiӆB tR&Nc=FΝ54j(:te]6mgqQzu*UڵkIII_")V\I׮]ر#gu| O?4uԉ wĉ袋[bԮ]޽{h $p[nm۶Qn]v+ܹsy#"H:~iի3>CyqG}_~ аaC>È[{ #I$I)9$I$ G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ .@:ڵ_, u9$vBɔ(Q}{# |7' Xr%,IR ڵ .L B.I')IGadggSX1J.틱$I1dffyfRRRb]NR^s$M6f0$)FBiii6KGp$;wR`AIc{ܹ3֥$f8B(a$IR~P@o:(9N:Z#I$Ip$I$IH>|xЅ+~XHOO'99Wvlۆ b]qӲeKjժ2$IwŖt< c{aj~|r|Af̘?H||HVV?|D0ڣrr-/_|1iii$$$pySO=-ZE]D…)S >`T۷3l0*W޷wy'۷o?6Jt̑Zp!mڴ!55Çk. 
ƙgyΨQӧ 6dӦM̛7/֭[9N۶m_>oVZqquQfMLř?>ӧO{L8[ҿJ*ܹs;v,k֬aĉu|W4k֌B ѷo_*V˙2e #Gk׮s95/{4xp#G2dvJ>}X~=cǎy̟?ŋwM2s=ƍI=SԬY;R`AL€o ;믿Ү];*vʤI4hkK/ 77;2gK5Xp!<K,7<%I:YFX:b={nN2\yL>ŋS|y[j׮MNNN[+VH˖-7nu֥lٲL:uNOOgҤIdee'peѬY3^0Lʕ+yG}Dbbbx wΎqgnVXE̟?:ܶﲆΈ#x}*>]V\IJ{k~0bĈmڴbŊqWrk۵kҥKY|a-[2k,^z%z;PM4aҤIbzb֬Y4m4gynO>Ý$,$)''{N:E5jжm[xq.]z̜9mr%0yf̘͛뮻"C䷠l† hܸ1A0|֯_u]=.kn!qfٴi'O&77]aÆYgE*U9s~y2-z}133 6ТE {233C_\{4l0vQիWƋ/(Iau~zRJԴjժw{w+URV-ڵkG=SNDm۶Ѿ}{իDŽ (X0%i}'ϪU:t(o6kĴ=aϛC~}T%RRRXt)A *eyCO>aذa|gQ>effRXC_e˖ L%J૯ ?^t)~-y.cݺu\$I'Ñcy,_z{Gy~>}%$$pe[o1}t:tpɡu/ 4իSH֮]Kzz:G qqqyJK(bڴiy=ܦpg_R-˗/K.z<Ô+Wx}]y#ƃmڵkٷ\r^I3Ñ<[^C/^|K,I޽ݻ7YYY4oޜÇGP(/W\A.]6m-[ OT:ʕ+繞 d^|Eznx{Ɵj 9J*sUV=;t>g}ƅ^xSLagv(pTT/K.$ϡ$jHRh۶-o&V {xLʕs||<'OA\~̝;7}O?E-mk333y"9u8vڵk_5-;;-[2%I2Gm۶Qϙ#۶m;<-[={F_|dggpNs=СC2GEvv65:% Ę1cׯct֍ йspoz/(#IGTyC ^z)׏uΝ;+㏹ #9ѣGǨ×yc]Jrssٱc.%—_~_Wz,(P mw~'PBWVG[nGO>4i҄o*Y\\W_}5/C -w~+i̙C HLLRJ<3y;w2bTBbb"JiӦ̘1[`lْpihѢE%%% +Ϟ=.]P|y(WvQڵ+$%%QZ5瞨~7n$==ŋSX1z֭[?zꑔDɒ%kXzp<\|ŤH͚5yg-[N:1k֬_l2:wL%(\0^x!ӧOs XfMDv4mڔ{˗V\ &_3dȐ`PX1>(<'!!yqXh]t3 ))ի_gϞ+VŋӧO</^|ň{]6ԩSɡUV;wdذaT\D8 5kƇ~իWӱcGIMMeРAFyhܸ1%K$))oDٵkP[o^z;>`0=]D zͦMիeʔ!!!ҥKөS'VZH 8.];p8ʖ-ŋÿk{9jڴ)ue4k֌… SJ&O ̙3iРIIIԨQQ\f y$$$PV-ƍ履~ u{D]`^{-< 999,X?ěoɮ]0`@- ѿV\ܹs#nݚ˗_:X̑<-\6mڐٵkÆ 3<ÇgԨQӇ i&͛_|A֭?m۶ԯ_z$`9]w5kd/^3}twĉٺu+TR̝;cDzf&N^W_}Ef(T}bŊ,_)S0rȈzv9èQ/xHKK9r$C k׮Ӈ3vX7ǫ?q?ԭ[;R`Az-G/b%KЭ[7Czz:=={~TV }I&l߾o%J0n8ڷooAǎZކwO?Ę1c(Z(o6=z8eM0m۶q7RdIc?ꫯ-X͛@~P˖-:J*?ysgF<#F`Ĉ\s5Ӈu瓒~JZZeʔXGFM߾}_>|̟?K.$oΝiӆM2f}|A*W_cqUW;v+pUW1m4ڵk^{-ʗ/Onn.;v>T^_޽{G+%Kp7SB~'}֬YC:!q}ǫYxAq cǎ oJ.}y~g.rwNn_Bn?~[UvcRt˃˗wzcs:u$&&+W -Z( }PBЫW??h߾߫WH"A9s}m}6n-Z4hԨQ1nnn[nZQP(Q̓EFaÆ@puE+RJX" Fo…A /@?{k.䒠jժmeʔ O> edd   n馛 mAJ*_V^3f@0{p[۶mJ*EYv}v0ym  k֬ 5n8(VXTm{?sO}sgyfe˂ <@D qqqoFEXf+8w hSN2۷ojԨi&ܶs.I x#7n8A xG\r%eeee˖ ~wx@Ue&M 0aBo @nw"M~*SL?GsA%¯Ucƌ ^ ٱcGРA %%%oAK.Zw~A`` o-lmmS||X~=lR.hAVދ{ӧXz5V^x{-,,S3Ր{˂4Ec'QpDip...Gjj*<8,[ k׮=[CMM HMMEzz:S}EEE>}: pmMv-PpqEEmЪU+Fttܱ溨GMm:XYY!++ wޭqnHYY<==QRR3f (((1c>( +GM4 d͍wG\\ǥ6OFF5k***CrrܶS???޽Yxb>|QRR0ŻԸsΜ9hݺ5quooׯx#Gb ˶o߾NmiUmny8~rYJ-Z3PRR›?IG9Lnrj;ZOOF&/ 22 
lݺWWWnn~vvvCDM6}nf6mpu5T LMMW{7ow^ޏUX{xeh%c5Ew;v 11SN]֭[DLU&T*Eyy9R)wT SI&=z4FOVYJJ 455b~ZabbGrCe욙!""r ?pYBCCf=|W=lyrs*:Dz" !PQQ!]&&&ɑ)3UY]>SСCL,XgϮ\m#!!999'Np++((@yy9{F & ݻy)}/]j~Gx`ff"$;w={6/^,7^vWvwVX+lذA.M@("**Jn{<|/_V :`>q1y~֯_T !;WLaqH$ ,X@./vNǛc@tt4oCCCtqqqri}߿?j>~(,,~޽޵!J^5 PRR]zWx /~YYV\+ ϕxm9s&:T_gV-^vEEEf̘]vWWUZCSVV?>7nݺ+--ELL b1u#x) D"i }ZUxդ/_~P(䵕1uؘ%o$!sDQ(** ֭BBBPVVnkccWWW888@OOOFrr2]ػw/ѻwo9rvvvXlƎ GGG :8w^xM6 R}6b1RRR3>?~:ۇ:T b̙~:k׮a׮]?~<"""+WDTT222x=c5򂊊 |||0n8<}044(̙3///L< y&vݱn߾=1m4B"iWDDDb>>>PUUŮ] ''' <NNNPVVFvv6m,\055hii!99kT,]z׮]Áeeaa(̙3W^At5ܹaaa2e O> _0zh^ppp.N<ݻwsՅѫW/|gw… ]tADD^ KKKоx"zAB)))x! •{9G2Y/QΝ-/++ *âE JahhXT]|8r:uqEEEqnф ~z1'O p pCe˖1֭[V@ {ҤI }"++ ۡ|s9r;vqe.211ɓ|r~عs';~jxۻݦMJMx|>GaLUUiӆ]Ku]ٻ.\:u$ `VVVlѢE͛7\ʩe>|lll!ٳuڕihh0X:u~GnŋbM6eƍcΝcƍyfL"0uuufiiɥf yRo_v<%%9;;3HD"bʕ+rufddW9"wfvvvLMMʥ622RNI.qnn. `:::L]]uܙ߿_n\Ԙ!={6KOOKG6d&HEEElΎijj2uuuֶm[W_s岳;DL__3gȥ9fϳ~yqeyU|ǎɉ>Cf^^^eg\[ŋYii)WfذaLGGGϚ5K.esll,333cjjjښmڴI,wxx\, 6ikk3DFN>;<`!!!ҒD"út’Y_U...fkΝ;wLKK>T۷Cgy=6qD֪U+ +Ě6mTUUYv͛455 ᾫ*_eee,$$5mڔ *zEu)+[y3WVV.\Ȍ*}W.g``"##L><c1j@8ddd999o {쁿?`cc! _&Es!Zrsso~ 6Ē%K0yF isDH=*B!߃_&E=GB! !B!GB!#B!B@!B!B!BPpD!B!(8"B!B!B !u @[ֺukթ hii5`>|M!0{l<~ٳ'&NXmDm<$&&6j;>_~Gc7|;vW_} OPpD! 
!!@ߌ3yu*ח`޼yh׮mb̙{?;r~7L6W^^իW}҂!|||_w+W͛N{ 갰UjW0uThҥ ~7r=z4lmm33*묨ŋajj uuuoIIIrOhֺ/n!} ʕ+PR{-c055-kּDar۷ocРA0aqy"55/^lVKK=# DGG#00(..ڵkǏu\-[D`` o;^| 55j~ڻw/0wsժU Ç#,, ^—_~Y#F@jj*LT G~ʕKLLΝ;'y9}t|wv܉CII   44k֬isB"BcAz;6v3>RѣGy?`ѢEXti#޽t7vS7oM7on: 26m۶m{*JJJz9̙~q=3cǎEYY0vXTcǐe˖aʔ)@b߹~-6n yyy ,((˗smrrrBDDnB͛1g݄4$KGGGC*bݺu ˽;稴QQQ077:4iggg:teffB__x<-- ݻw6b1m6nѣG1vde IDATp@CMM Z/_4hKKK̚5KǏD5 /^+hhh@OOC ͛7=κ; [[[ʕkٲ%p{uVyyy0`tuuO?2qqquos;qseh9sF r>UVPSS1KzJnۋ/bhڴ)444`eerqq1Dc*.6m{۷oݻ-/--ży`ffuuu4mݺuï*7oo߾҂>O ^%Kk׮Ӄ:v]vʔA `ʔ)ؼy3lll_~PTT.F'OȵΝ;9r$͛rex55k[nhh@ :ٲeK\r+w EsaooLt 077Ν;pttuЬY3 r߿ѣGsS{{{lٲWyQ>|8֭[r8w/_Eqq1BBBxCCCSU}rr2TTT0vXnF?7x4k֌~GEE!** C رcDGGĉ8{,b1w `ddٳtR?;vDII N:gϢG\RpA|033øqr+V@@@ 7o`۶m@ZZz}v}1qDXYY!%%F;I`bbuϞ=SKUU;Wppp@||<:w ggg!** M4Ȯ8ej#bСϯr.F{_~~~L]]ݸq[vE& ٻ_&&&lȑ3j9r$D1&{WL[[uܙ|}EE/^տxb&xwqqaڼe5o<=Wߟ5i҄{}u& ٢Ex岲wmܸPWcѣ-322b؟-{.SQQaӧO疅1ܲfll̤R)w.֯_7os!=z[ŤR\;۶m;g5Qt ,`ݺu[ֵkW#׶Y6~x^___֬Y3u^^SRRbK,dBK.srme؆ ]vru{^~ͬYϞ=e  ˼ xv-[0+,,dزejzʕ+ޞW̌TXZZ1ץe.\`;u|߾}s(####޾ tuu`۷oʼy9::2X̞={Vec,77WnL(*<zM ,է>]]]6|pd,--yן̹spdc6{l & ٤ImkM/R9")//ǁ=kkkxyyan/HpܼڲEϞ=}vr:Obƌr *9OѧOVn?)--;l2n;ڶm^n(вeKYGGZ[{/ݻObXzºRSSqAaٲeXv-otӧ*//'0}tXYYA$ ^[BPrُ㊊ ),Ɂlmm?UVPUU?hcuQuBVV޽[|2xzz3f̀455QPP1c|Q("--M1W4i"z8^Kmޟ k֬!TTTdmGieݻq̚5 /Ç%%%PSSό \tI9:VVVYvVU69ÇWX}ujKC|7j6wyx xWchѢں7ode5m_UǏu1d!PpD#䖛+ʕ+COOF&/ 22 lݺWWWnn~vvvÕM6}nf6mpu5T LMMW{7ow^UX{xe <~7|M]~bǎHLLԩSmWff&uV.,Z JQ^^T]#U¾}֤I=GӧOZ G,%%HOO Z~}011ѣG2U}v\rl G\Pÿeee>^CCCD"TTTe'| rrrz5LUVtA." ,ٳӧOgϞ\'O1ƭ=VZgϞn8q@EY }|M(//zP(7!DP(vK{%8p=z{3332TUUsN8::חgϞŋWÊ+x 6ȥ)~BDEEm㝃 SWGѱSW8vajjׯc߾}̬JXp!fΜׯڸvvڅ#""rJDEE!##3V///ƍӧO CCC|]͜9IIIɓ!H7ob׮]ӦMCaa!$ /veHIIADD UUUڵ prreeedggc۶m000… akk SSS|(((FҥwЁK~58pU[œ9spUD׮]Ν;=O> _0zh^ppp.N<ݻwsՅѫW/|gw… ]tADD^ KKKоx"zAB)))x!/9ܹ3Ǐoʕ+plYYTTTx=OUqpp@\\-ZT CC:}oő#GЩS'7(**#Gps&LcĈ8y$LLL'N &&ڦ,[ 1n?Փ&MTTT3H$aÇcXd /˱h"=zۗ1m4ܽ{R7nĭ[甙{xs3С|||Qd666Аrúv444X,f:ub?#ŋÃiiiMqq)f7nGvv6g3KKK6gn yRo_v<%%9;;3HD"bʕ+rufddW9"wfvvvLMMʥ622RNI.qnn. 
`:::L]]uܙ߿_n\Ԙ!={6KOOKG6d&HԽEEElΎijj2uuuֶm[W_s岳;DL__3gȥ9fϳ~yqeyU|ǎɉ>Cf^^^eg\[ŋYii)WfذaLGGGϚ5K.esll,333cjjjښmڴI,wxx\, 6ikk3DFN>;<`!!!ҒD"út’YWϟ?gƆihh0D|}}ٹsx劋I3؝;wX޽/}xU۷o/WJU޽{lĉUVLEE2/W.((5mڔvڱ͛7ߟijj2===}WUXHHkڴ)Uk2 Xtt\gݔ/^`_|344dSNСCr>cƌ+[VV.\Ȍ*}W.g``"##kDNN}Ȟ={,4vsȿPrr2_eR_4B%777אa(0"UZd &O\c`DHC#BPB!2/9"B!PpD!B!(8"B!B!B !B!GB!#B!B@!B!B!BPpD!B!(8""## ~:uӧ?߸HHHEFFB 4^!rc7ٺ~o&|>h;v EEEH$ܹ3FFm۝;w ???7j[! 9"4#F˗011iټylcڵ:u*={c۶mUn;{l|Ν;Bff!1#BH  ݌:KNN1`l۶ ***ܺS(--r{eee(+/!oC=G*\| X,F&MW^w+^^^U֯hΑaoouuu`Ν ?0ydC" 88o޼Ǐ]]]bڴi`ꨨ+жm[C__zӧ=/ś6l d7U՜D888@CCzzz2dn޼+ ;;;\xnnnԄ[Ç5jܼ'B!ȣRAիWXx1x;4ms)`yѻwo,^8p :$WvҤIETTX̙3(//_ ggg,][lm;fL2Z’%K0c 㯿m/_|lUYhann~SL <~WzB ӧOGZZ?[l-[`%B>F4R#SSSBCC!zjDDD`4i7p$&&B$Wbpێ3 <==ye5kC  $$yyyXt)fo֭[cÆ  ddd !!'OƊ+K.]h۶m*7nypB|WtW-s6oތ#Fx{~LLL޽{Yfݻ7ΝO?TB!#9"(44zҤICGGÏ?駟HThkX@={3f oZΝØ1ceB;vիWe)))7oKh^;w Ç?CCC###W^KK𨪪SN#BHQpD9T*7W(008z(_~233 P,,,@n~1﵎UVrˋhѢ6X xiNnn.c077>ҥKx|˖-Ώ.!Rw4Rg0Bf͐$&&-UeSreeeʪw]2HKKSn---몎!B#BHrssajjʽCEEZn C"!!K,ݻ1nܸNӝ/n%JqթN@@ IDATHMMŊ+m c \Y}U74B!Ѱ:BHVZ{ݻ7lĈ(..Fpp0={V$w]O<͛aooCC1(u{` pe(yYXX`̘18u5k 6ظqcC #F@tt4rssѫW/TTTѣpssCXX9 Z,,ZgϞg}P}lܸڵC||ɇGs! 1xtޝ#B!'Ѱ:BH<{AFFM"By/BꥰCD"W_}^c!B h!@c !9"B!PpD!B!(8"B!B!B !B!GB!#B!B@!$$$@ 2WWW6Zj8|pc7(jPPZnhm"B-(8"|t +++hhh:u3ܶm۰|MGddu'ѽ{w۷i!BRn1mԪ7 r;vē'O0zhXYYѣG8<֬Y'BKK (;;SLu...x%TUUԮcժUUH c 7n5k담4xyyi_u~zTTT|}B!BD ^o]ǣ'v[ɓϫW %%%7DSy,,,0|puacc+V|HEEO!Wа:BH"##!pE :pvv\| ѱcGٳG .hٲ%.\BќBSSر#mVm! ѥKub l\]]o>ܸq&w#}v̞=FFFē'Ost x{{CWW"ڵÊ+ӳj*!tձFӦM/=z45k555bÆ rnݺ???D"?ׯ)s}v888@[[bm۶厇BXQ!F9k0p8993f̀H$BRR{憲2\ll,444j1yd 0xΟ?'N`СUngbbrlٲ#GܬYPRR[naٲe Y`TUUׯ_Wt!͛7Gxx8 q%ݻƝ;wp!lٲcC*߿]t@ @XX1cɓ'/_G(((ɓѢE lٲ[>t>3K,\t 'k~B!俈#BHڷo1N:555@HH1}t.8Zd q t 0rH׸};vS[Ge˖!((| \]]ooop<==addbp^zӧOW̕#88͛7Gff&$ 1OaaaCU1̞=0`ܬYP^^,4i0a|gDpp0444$%%aqơ}5}A, 5'B>4R &p.**oAӧx!>|G }6ItFaÆոOD[nԩSujkfp9L0Xv- ,X Xjcȑ5r={׮]Ô)SxU}}}cǎ_1m4|\RRR w>|///̙3ޞ͛+MMM?ƶH$<u !Grc ś>o޼ynܸҲ}N>ZZZԩ?Vm޼9֬Ywʕ+>ΝZ*9AvvvW~СCطo7ŋPRtaa!?~Xs?j(soff&܇{F˖-1zh!VGQY2*{ָr ݋t`՘;w.jU@ ,,,ssslݺcǎPZl 76m0! 
ëOծ]z8pҐ7"006mwBȿG:iӦ e?bbb\W\վD"͛7E0s:nӦ tuuq]nY]UE0!;;Q}cٲe={6!mmmggg1wmϽ*|}}닊 `ݺu3gNBȿ #ԉ\]]n:^!SXXN<[u#kUUU؀1R/pe<|+w <\'OѣGae"%%%5:| LMM|r<~&ٳ-Seee|嗸tRSSBGJJ y߹sܲ/^ 66}{)E !BlժUpvvF۶m1n8iq-;w0m4lٲzBxx8ϟv={!ЬY3\t +W x0oy1Pn]~~> dLEE>}d^ϳݻ3uuufdd,Xv5\Yݹ֭c...I&LMMIR6uTVRR•`ؼyx:u*OSVVf͛7gdgΜٳglСL"0ĄW;[.##?`L[[D"֮];í/++c&MbL _Xhhܾc,22RngUVLEE=zX޶7n`}eiӦ,<<7rHc,99ٓ0UUUfll̂ݻwB->ɇ'`ym !399Y۷|||ԡCb1cƨZjZjoPu;vLUuOAH݈zF ,6m.???S~~~Zz7n,__ոq+((H;wԔ)Sj*mڴIJ}ٳUB=ӏ?_]k׮՚5kH(66/IL}0`<==աCj?+VTŊe˵rJo^ ƍCiҤIjժ6m~i׮]Zb<;wÇռys:tHM6UttΟ?+W*::ZfRoXKպuk}7ڵkjԨП5k֨N:^$??O ɓ'sN͞=[akDa?CPÆ .]Ee˖$effEڱcV}C?fϞ.]Ǟ={V-[Trrj׮}*77WW3<={hĉ֒wyG5s=#G?WV}vUZU/kҥZ~zey[]ܒT7RSSW^5tbH2~? IFFLٳg_~١RJFJ ۹sg… }111$shdDEE~pww7*Wl9}ѩS^ލWWW#11~Uuֆ$ߘ?q}5$K.uh2l6pBLG1'Nt= ,0$#F;Vvm_?uM3 Xnu_uCqn3$xg|/1$ 2^8{u^ Io0ŋF۶m f$''[$c̜9Ӑd<cݺujkMMM5$z*plap#H-8q!x7φKpЩS'C{i)Sơi caW\1J.mԫWϡFA_6$+W,SNqܹzo9s IFӦM%&&q۶onH2vZK.5$o .Fppq! ծ](Qqs_O^ШTcѴiSCb,Y>V?C |bYYY7ӆQn.Fo}>h/_6܌:u8UznIRTTT_:|CM>L}Y._SN̙3*]t^pA;vP``{xzz*%%k{ںuk'OիڷoԩSQ֭\rT`yރ_M/-*?裏IҶm۴guYݻw׈#駟VTT}Q)SsÇL2ҥFƍK SPPP[*77W͚5Sɒ%oX֭[uӓI]lٲ̼ἷJ^)E$|A̙3rʺn̔a:uMSXgΜ$MHs̱9sHz0v T||}{lɓ u&Y ؞Ga{nZ`pS{WX777]zs +{kܢ_Y'Nx.OJn:oZnzk_ox}[n̞=Pkܸ|A-_\gϞUNN.\@{VϞ=?̙3/կ_?}wj۶mjvkcǎzaÆpuTq vڒ~R޽jn3t#ח;?PcoU,SG x!H-޽5}tjȑw̰a$I/Lϟ׏?xӹ˗/o߾>{*33~WҥK$_ }|]KdݺuմiS-YDqcvڥ'O={ΝsJy֭[W?ys(Qs֭|nL2z駕W^y%?++~9lPPwﮤ$M0?+55]{KPpܢ0[1bjժzJZzΞ=5kjΝǴjJoƌ|PO<ÕÇkjҤݷo_m۶M\ڶm+*##CԧO͜9aO>DӟTvmYfj֬z!}۷"""*U(''GGц TL-拾X`Zl~W 秴4ܹSw֦M l jѢ֮]+777ըQvY>>>jذd6lؠ[N:jݺuQ/>C73c ޽[3gTbbڶm+jZ|e3foh޼yjҤʖ-Ǐ+%%E[n… ~klѢ\\\4f޽[G ) ޴`VZht8vu-7l`t 1ݍ@G1 flݺalA~YbѮ];L2QlY^zkf8_/FPP}k1;w4zeTX0cڵko=Y2.k4?+0 ܹsƛoiԮ](YeO<1k֬n{ͳoʔ)N:ᆷoDFFovsng02&NhԨQ6|||jժ/Rmӳӧ52J*exxxF˖-iӦOη> ̛7  Y~:utpKcǎ#HABCC%j{ہԩS'f y=t˷ݰDR`A L"HI)0 &$DR`A L"HI)0 &9uRLL ͦ~X6MSLןwyGR͚5pϕh( @>N:UKP9u:}Ə=#:f:ru_{55JmڴUbE=3OƥYf:p+/T6mteKPn$$$D VRRիw'O5j(;S^Ќ3$I=4rHuM8?^۶mSŊ%IW6mjN =~ѪZzQ`e˔l6yiӦM?\۷(IjݺTO?VnAʌ-[hΜ9zd %KZj׷K}suIխ[79ׯoԗa2dz)5jH*p\zzʖ-/hH?nwmfdd(;;[s|R8pԚ8"H%$$h׮]Zx ]xeϛ^WlllX)Aܹs3fFVvvvK.0c 2x`u͡ԩ P< 5e]|YO=4IRff:rC!!!Zn p/RrIKگ~%IAAA 
*p>~#G(33S Wxx6m*-õw^IRdd.\sl޼/I˗W2eo-[7 5tP}?f͒$[_|%I;v3g|jܸ'ʕ+uQ{ڵko߾|8}3fٳg;X~ސ!CTvmծ]ᘼK"""KP^~eMld-YDԣG:[o%͚5K z5|=3BCC~z >\GڵkS(w?aƝ.^gU^]wVDDĝ.Y#4DR`@Q谴ݝ.!坾%nHI)0 &$DR`A L"HI)0N4Y}K'֝p7)0 &$DR`۟@q7v+(wn+R`A L"HI)0 &$DR`A L"HI)0 &$DR`A L"HI)0 &$DR`A L"HI)0 &$DR`A L"HI)0 &$DR`A L"HI)0 &$DR`A L"HI)0 &$DR`A L"HI)0 &$DR`S,(::ZlJHHpuA*YW'ҥKU&///=>}z;?SRԱcG3unw)#$DR`A L"HI)0 &$DR`A L"HI)0 &$DR`A L"HI)0 &$DR`A L"HI)0 &$DR`A L"HI)0 &$DR`A L"HI)0 &$DR`A L"HI)0 &$DR`A L"HI)0 &$DR`A L"HI)0 &$DR`A L"HI)0 &$DR`A L"HI)0ɩTVVbbbl6%$$86%%EQ@@}Y:u*߸\; ux IDATj֬ Z:'{۝.FN>ǫbŊzGXิ45kLSVVL]vi˖-}[oW-[gyF6MO?-ũTHH$իWqqqq:mۦ+Jׯ6m(!!A $;vLSN /3fH{9EEEiȑ֭\]]Mǩ/TppM}j߾=HR֭UJ}e˖)''Gl6=JKKӦMLǩ*cǎɓ[nkժU*YUo\^&ML 'Ow/Ձ &έtI]{!!!Pvv<==ef'IǏ7}΂+66 uEI*0xyyxzz8,խ[7SNZV샔$);;;_ߥKx{{z\aY z l0.˻Z s'N0|$\r STUL%%%۲e"#####u8ۼy9{}'|R+WѣGmk׮վ}Uرoo3 C3gTոqcpq{f̘gw[b$IC Ǝ>L-ZK/,M}P^~eM|F-kNSNw/[o%͚5K z5|=3̜ԡC 5.""BW83Fcƌ)sH &$DR`A L"HI)0 &$DR`A L"HI)0 &$DR`A L"HI)0 &$DR`A L"HI)0 &$DR`A L"HI)0 &$DR`A L"HIE.]:8-[=011Q˖-?{ŋ%JZjjܸ:u͛UL͚5K:ծ][=z PffRSS5|TFv0<˗/W^ڵkpmg)..NSLѡC NT;vz-OOBSԩkٷT 8SAj4- l~e?v8KAO>Ѱabcc#???uYYYY gc)HM:ᛧ7*66Vm۶հa_7ߴ\$8[~$ի /BnnnI&Y.oe7oVZZ X Ro%IIII:p|||UNҥ}K/{*--M*TP?""", R2dj*թSGF$)##C'NРAPp$_$s[ w3SHl6Mk׮5}8+SA*77W6͡ѣ:x|}}uKRSSuYU\YEW-8SA*11:裏>R^ϏrfϞQF)!!j`iW^yE}Q~O͛7[*&vi ڵk)X RʕӢEtʕ|}W\ѢET\9+Sӱti߫AaÆ4hxI5sLm߾]ER(8 KAjruukw3 Ceʔ̙3 |X/g$OzRRR>,IT֭k&Et԰aC5lذ(NNH޽{uAeff0|={,i)X R?z-[$fU,j׮]zԴiSU],~AcǎՐ!Cpz(__ߢKAjРA?^ZTӳti_*UtU=#۷BCCo\.]LNRzꩧ+8f nݺ KA***bR޽{uaIRJթXR˖-u!pС)X|ժUz'%Iqqq/_(..NaK.믋Pp0aj֬ 6dɒ:_T&Mh˅Ν;իW/dɒݻvieBۿ~iUPA%JC=… 6nܨ&MD СC|5jʕ+'ooo5h@k֬CYFKȐ) ѣ_|}}/* @6mRLLmۦe˖Io߮VZZjzw)Shꫯٻwo-^X/|A%$$'кuԤI۾&Rjٲ)::Z5rۼy}=c ,yٳ!I0`rss5w\eff_cǎUT)IRXXo^-['h SիW׫75p^.{w&MQFݻzFqo]T^׹s$Ie˖uh << :T%KԮ]tխ[XEFF*99ޖ*U8.I_.p M6MӦM+znItt&L8-_kiĉtI}K{!!!ڰauzzuIoXɓ'u)r5 u]p!77yΝ;%Jr^05kLO>J./Rqqq ֋//J<==e/^w\^+66r81K gС{}Q'|h߾}P$K.ըQEޒ~.]d$oo뿑[nmPN- St_]^k׮Zj) %>>^jղ<:tЅ 
l,/k\r!!!'alA^d)H?~\˗nrt1+S/Wkɑ%իW\|Y۷oWdd-22Rg~.KAt駟۟rR*U}9/\P...Y|}}պuk͟?_c͛,KvW?eggkjРBCCo8/KHEGGk֬Y޽jժG~a{n#GꫯRӦM/tZr+=sK|M5nXQQQ0`4uT=cA֭ƌ'OМ9st!, &믿VաCEDDHvޭ+V(((H&L(BoYfڸqƍx9sFz7ꫯծ][~FaÆS~4iҤ|;w^u͛7OYV\f͚pn60 +'HOOѣl2=EJRNwӍ{Qյ{n{"1v+ȧ.!ZY;]ܳ~nO!!!3g ð?L2ٜ(ٓrm6<==CpWk$%%%)::Z%JPҥ~zIӧձcG%&&Z qF5iDW=k ԯYfY. 5vXUVM{U\\\-Zb w KAj֭ӧ<== /|:qℕ)X Rޱcce p:TÆ xΟ?ٳg+**t,X%%%]vꫯ$I;v?թSGN믿^$ hժUzճgOI҈#$I+W֪UTfMU@ޖ-[꧟~۵~rʪSpWDFF*22NN=R۷o… V^f͚Af8pFԫE_sJMM$ >\~ X R;vP&MΝ+WWW%''kڵfΜiHp&ԯҥK_ZJmڴQ``$M6:p X R!!!JII$k۶mzYYYrq48KuQӧOץKyfyzzs;v\$8KAjĉ:u͛'???%$$lٲsizPp]/--M%J28"{ $]xQ-)X ȑ#ӧʖ-+lٲ۷>\5S_5iDgϞU6mTZ5{ܹsb }Zj R=z\\\5j8޽[Zѣ_X*K֯_C QTzuJLL28KA*''G/QrrrLNRU_w9TڵLN=RC=>}J*~Isљ3gH ga)HlRVȑ#[o9EFFj޼yjѢX~ o֭'N؟UR%[.-#uթSG3gΔ$AjР! ]햃T%*Vӳk_ttV^]T@`)Hڷo}Y}:v222f{jwU+Sx pϱƍWDe@a)Y R_yZj)66t,ŋnO>>7 J28KAy5k;ѣբE +Sӱ T~}EDD_~޽[ Є PpTժUa 2DӦMsk֬}UVRl,)IY֯_ӧO_gd9H $<' RsΕ$=쳲l7ӳgO2z-ͦ~ZݻMl)wSA*55U%TJn "˻}uAeff0 ~ͦ;vX jȑRժUPTuӲ&OG}T+VoQN=R.\P Q)T-k׮KAjZvL j*SJ,RJ9p&x *_֭KhpOfΜviҥrqپ ˗ծ];B{Ծ}{mذjbR޽{5x`m۶MNRFFFXGjժ۷k֬YwU+Skf+ZXƍWDe@v{` 5i$eeeܹs4i R ,Phhn"oV PŊpB30uΝ;`M2E3gΔWp0 eff*55UwVNNjԨ3f{k 2l6wݻ+99YK.զM?̙3ҥK롇ҨQԱcGծ]w-WV-ժU(kb]$w5]G"ɑH1,ᦆ5OUp"VUJkUR\C~UZ5DEI~9_'-ǽg^{0  D{[%&&ٳg/tTrr|!Ww,^{5/_^/^{5IRSSU\9͜93O "WAjԩ1c M6bXyyySNZjU$WA?VDD&M 5kִޡETBB4hpJII) UUBBm*SLnNNTN4go6$Ioh"BgrƏ/???)""B&ISLQHHZn5kjԨQyU+ R^^^5|p%&&M۶mӥK4vXm߾] ʫZ _U$٬zKו+Wj̘12yQ=۷o:t ooo*THmڵK!!!*Tz) {OGՆ l 2D*UҢEԦMmٲE!!!{x\'OjtE/n.>vܞRRRm*&&F |mԨQ*Zn*OOOIRrԷo_}7jѢ$iϞ=4uT 6L@ >\vz*H}ٳnܸ"EˮO*~hĉ*P\"lRRRi& :iСZb5HI~nnnӧF?qȟrȑ#UjU믺pႎ?ns͛DUREԀ.I:pnܸ:u⢠ ZbccUre%InNk\r^s_=7nW>}4ydmݺU3gԥK駟*))Igon}t~t̙;sY;wΦ->>O RөSWU:uk׮iܹ0a$IvYKRZZmeoٳgk=[M>]K.ULLL^s_Y?aޭ[7Iݻ}222OOOYl6߶߭绝6?k֬10"YHըQC'NT׮]ҥKɦdO?"dɒ:tJ(a+Ix*T I)~JJJRɒ%c݉?#5{l[P|}}UX1ooJj׮-Iv'Y&Y{s5߿_AAAֶ )%%Ŧ?`ɕ 5i$5h@gΜѾ}e˖pI҂ lϟ/ggg5iD^^^j֬.]˗/[|'JMMUXXsԼymZpի.WSK/Q3h IDAT<޽{nܸPmݺU+Wȑ#S&N (44T:}}hBZ^z ȑ#uYUXQ/։''OThh8Wʜ9sTL-\PWVٲehȐ!>j͛5b :T .WK,ѣ'ŋY֯_ƍ?aȇLr;'$$u֊P>}TXr! 
pt9'L-]g>tt vK'Vn?3RիW5rH]6?y-WS^|ELPxr-ZGe#WSIdԒ%K$I=zd@>e(HEFFd2k׮rqqQdd]1L)CA$$1ʖ-N ʖ-|bM6զMD-H0,˃,W2Ly]<2+Hu]NNNlh= +4kL+WZp_AgϞ֭[^R`A 2TVVփܑR`A "HA)0  DR`A "HA)0  DR`A "HA)0  DR`A "HA)0  DR`A "HA)0  DR`A "HA)0  DR`A "HA)0  DR`A "HA)0  DR`A "HA)0 =Ajĉ2L ۶k.PBzꩧ4x`Ј#TdIfիWO6mz uiM4Iv߯{NW^մiy),,̮oddM^zI3f̐ڴi;v>>9IKK$msssn{~vhE||:vxx$<Aꭷޒ^y1͒n.kW}ocݎ|}}vG>H=zTmܥ:q<==*))I%KSbbb$yWKLLTVV[~ũ|0a쬽{5߿_AAAֶ )%%Ŧ?`@^' @eʔիէOyyyYfZt._lO>Qjj3M;wVff͛gm… U^;O+^x8dԭ&N (44T:}}hBZW4rH={V+Vŋu -X @ߑ2VZڼyfyO>d 2D|ׯkjܸ*Ӈ1 SLΝ;5k׬YTV-} $>}Z7&MT{:p#1|M;۷֭kת[n2Lڵ x,k˗.]FzwtRIҤIt*S$)88X͛7עE-IJLL￯Ai֬Y(_WXX(Ծ ؄(IT/XVZvYC$5kL+W֊+mk׮5p@kdҀti޽@~XܑʉbI72={VuԱ:66VV]!!!=ٳgힻ_ l2%&&j„ $I]_???]pAruuURRJ(!dOΜ9ssϞ=[Ǐϋaȇ 믿jРA_z)IJKK$wssquuf; | Rڶm+///X0͒ }m{w;A&%''uֺt6nܨ%KZeO˞w$y{{[Bb'%iSܑR`A "HA)0  D]O/ut v9xDpG "HA)0  D'8+FGB|q'@>0  DR`A "HA)0  DR`A "HA)0  DR`A "HA)0  DR`A "HA)0  DR`A "HA)0  ##C#FPɒ%e6U^=mڴep0DFFjڴiz饗4c 999M6ڱcK@Ύ. ڳg>3M:UÆ $EDD(00PÇ׮]\!GmI6777GwVBBHۈUʕi,Iڿ#06gמv̙{Y;wΦÒ<>usrr%ع|겣Ks&崣KsuG`ҡC.㺾'\{322kmծͺvfϞ縭cǎySc x$lvGh ttu}o5GuBBjժex?mizzu 8Paaa6m)))S5r hǫcǎZf*VrkuhPBBBCCkm)11Ѯ=))ITd+___]x"UXQ.@?\׏&n#((HqqqJIIi<Rѹsgeffj޼yֶ -\PՓHLz),,L#GٳgUbE-^X'NЂ ]r7n8G_o^iiiZtV^-͟?_=K]M4KGdX,.%<# DR#\r޺uL&n갚4)>-ZH&I&I;vnX/ɤv9B˭L&rzXǫs*Z *mٲ%c/jժ<<<=zܹskHA\׳fRjRJ^ӕ+W֐B^ ܴ|rشo۶MO=ƍ&zw7a/_^ڱc>#}W:xOկ__NNNz EoոqcqN>ƍK&MRjj{=8p@{zGw}W;w֫Çk̙:t萾k R6mrJ}rvKj]Ο?@_@=s7[V:u$IQQQ*VMkjǎt<*UHUjСǙ4i\Qeʔ$yZh'T^\III6mz%KX]re+Znڷo\ajK?jӦMֶk׮)&&Fݺu럕ӧ+ @nnn*Q맋/X,zUti*THM6աC쎗3R۷oWXXʔ)#WWWkСJKK722RJLLTǎ! 6L|g<쳒ǏkzglIRBԡC۷OGZJڵ(Ij֬*W+V<s?ݻu uXٯ?쳇T=A ȥrʩ~Om6lPrr_ԯ_?jذf̘^ziٲejٲ_n7f=Z4uT=jѢ=ͥ^r^h̙jٲfΜjٲ+{Oz5o޼|Gd;v$XbȐlSP!I~rgZ?Uppbcc`~댌 I~x0ݺuȑ#&٬e˖)44T%Kc͟?_˖-[մiSjJ+WTnt9j۶֭['$Iz75iҤ3eUbE5JN;==]]tѣ%IWZ` 0 W INN畞;wj„ 2j׮Km߾]/_V…d/V( I;.\%ξcsN5mo6hÕZ~}V\)///5o\ϟԮ][U~6oެk׮W^(I2d=skrΟ? 
bivm^7jH= i֬|||ﯮ]CWVR4`]tI]tQll4dݻWSo7<_xp⺮Uի)Sh…:q6lؠ~`\ӏ(HyG͚5uUeffsv=dxgJN<)ITy-zzN:1c/{*99浛|||lڊ-j׿ʕ+Y%JP*UT,[n3g7PZ$I+Vĉ5|pyxxHB*==ݦ//k泏]tQ޽%INNNz״m69r FHnԷo_jݺ)b'++KZlYkj޼.\#FjժrwwWbb"##e)pSppp6e{իW/rqqQPP,X ]M˞w$y{{3x⺖RJiǎ:z~wUTIO=J,iG^xO7>*T͛հa;~\lYI7`=sNсŋ,.q몂]׷޼yf6l(/[>>>֩AڳgZݮ[UT:JJJRdd*yg<ᡏ>HƍwA+33S?cƍt钤 ,3gbXL>udau?Ţ3fo׮]էOyyyY_|E_^ ֶoVqqq sDUP!ghzyׯ&OE*X=+Wjƌܹ&OvکM6Ն Tx;jժP DyzzjժU<8ɓ':詧ҡC4gլYn5QFiʕjڴ^}UjԩQz+#tZ|٣ŋ۬GA x̙ڵkkܹ5jU\9uf o-777͙3G[lQz7ߨm۶w<~n: a%I H IDAT%L\|Yiii򒟟q׮]SRR.\ wwwyxx8$}F?~*U$777+VL!!!ڴiϵ~I&JMMoذA*\<==Un]-_ܺ} S2e* :Tiiiv_.fUREo]K.)22REzꥫW[tj׮-,ooouU w$JMMՂ lBT+W_^p}YUիWG}~e;|6mB TRzwdddhرX>|8_x x,qG @8pZh!7N7nرcUD;7n8M}Z+WVFT`AEGG\r:v֭['kڷoϟ/___M2gĉ=zsi̙jܸbccUH۾G֭O? 裏:Y֭AY%I/^TVԩS'+&&F#FP5ԺukIRVV:t;v(::ZժUӁ(..Nk֬xT,|3Po~~c /7ȑ#*S$_~Q5irʩI&Zh$)((HKo{H(55U;wT6mԨQ#Z:U099Y^n*777źr`ZZݢ F'NXk Ullseee)<<\ϟv9RJv۪TN0A?*W@jJ=zP͚5mm۶]VX!ggۿ;N:1c/ŋmepߟװUhQI7yzzѣX,9?T`St{Evܩcjvj%''/I*]]*Z~gG_~O8{= 7ncǎiڵo4|}3g\]]զM]V7nTv +33S͛7ׅ 4bUZUJLLTddk NNN9gOgʒd҆ r{/Tɒ%u{رczTjUM6MrqqW_}>xI7XF M6-Ǿ @~Fk٫4=ȑ#w[zR^ƍkܸq6Ad2iٲez 6I&*Tt󹣊+x(..N/VDD+fϗr7*TbQUrekNݻU~;]n222_)Ժ{yP~'=s9Nq3RrI-[Ԛ5kt)k/6=<cmiiircqG @?~6nܨFiqfΜgizjҤj׮-oooݻW111zso6~z=jݺmۦ@yzz>PTT֭nݺhѢ駟tU-^XUVU 4l0%&&SV{VJ>CVZVu }ڿB z5rH8qB;vT…uq^Z6l$i֬Y?~lbVB-_\]tQj@]vMvʕ+բE }ׯRSSWIII֚2GZb-[aÆԯ+V믿V:u |8vرc.ڶmvO?m3geر[)[gϞo%88RHlTZ2qD˵k׬}ziqww9-իWs9rD 0aʗ/ojҥKmWf͔p_5o<]Vv@gԩjٲݟ쭷Rzzz3k,.]Z6>sׯ5f̘<;ɓz#F{ァ~vեK(P@;wԩ >(OqZn:u8zSmO8QSNuPu%%%iƍZ`KQVV]&777Gb㧟~1c<_ (ƝܹSe˖$թS4}t꫚>}$)**J 6԰aԩS'iNNNzd=n*80#uu?^*U+mڴۿ|||ԤIZ7lؠP.\X[/_nݾ}vL2ruu4s fT7|ӮߥK"EKzիW-]Tkזlv;ӈ gy+]:vm۶-[̮o||:wEPB_6nhg2L:}M_E _cǎY&VXQb 7nܐdҐ!CdU^]ڼyS]hQK)))v9sF={TR*???uQN{0t:r䈾[k9=# ߿_5RBTR%}璤-[nݺ2ͪV؝ӊT%@-ZȮ޽{[Ӡ }'6}n_]޽ΝL8syvk֬э74p@kdҀtIٳǦu1}Z+WVFT`AEGG\r:v֭['kڷoϟ/___M2gĉ=zsi̙jܸbcciqd-/ngVPP:t ggg]VbQ~lS.]H͟?_ST"] *##C-Em۶Zz:tpךo5f?{I .,I/$I=zcXB4hkƌ:s>SkqruuU~TlY\ɋ/ *waQUð b(n_QADTLwQĥod,j ((h).",/Qy=<s瞻0s>O>ӧemۖw#GbҤIsq 9sbp1Ԕh\SLA=PYYSN̙3߿? 
VZCO?%&O[f 1zh<;wDpp02220p@޶:]vq uuu2d?iӦ)))?~1 BAAf̘sssܾ}µk`ffxa婫sJéSfhڵk#l׮]딗# FBHH֭[$&&b٘6mF+Wbذaz*D"ϊ+TUU1c "==ǏÇ x˗/#22;vDRRPUUӧ+y&>۷otEEjkk-G$qߣJ9楻p{ɥ;99ܹsԁ7BވbV\\p̿e2MMMv.… L(fرc]veM?vX&cL,3S..suueO<_WWc/_LWWbY-b؄ xy! Bl2^\*[2 _ )ڷ3kkk^)~7.͛LMM͝;KdǹJfffƤR)w,6oW򶓕GriL*ճsμcEd&صk׸4777'Wq 2e /O@@k۶-+V0PKٳ'suu:th6zh}Ǽ.]ȕqxc Ҫ& ŋy򻹹1lǎ1zսWy=|oߞ0w5ma Kwww%%%qiϟg ;u~ޱa;Ԕ5l0}WZ`v<9;;3X>|hcPnL(*<zs%K4z544ĉ]"___;h^z]] lƌ-Fki2!-A=RWR[["00+ vvvEzzzK$?j2ovv60`ڵ ܲ,۷?y 6wMMMѩS'dggs!J!H"nXcc^E6<1ܿY6ʤC]]-UUUDFFرc\H$a̟?E7044ܹs]uH$@сT*MtuuRWWTwm"99999puuEzz:LMM1b.fΜP=zT'UڵE=]h,{#A!Ey%eeexFM ŋcС#Pt҅ӧh>]ii).\}~ 4"ދ-}}}jb1 k֢4lѣXhN8!7Gאzβz7<&ӧ\>;;;+WZ!%qƍ/))… FcK Σ6 QWW@!˳d ;wƠA*WG}ݷo-[gٳg\kJQ+Wڼ\ZZZ1w\W^͛y־}{:t ;nƒ~z_^awP3՚<<TE\C5&&;vJJJd %%%077?;-.`ر3f¼]vU.otd]]VkĶkǏKQwx1TVV{OA )B+E+,,[ ?~<71@ ~CbȀ\K0\`g˼!SN\YA*1 &潊}ؿ?Jsbff^x"{~߿7N~ `HLLĜ9sWNNrAD 5&ZT*Emm-R)w4P&L xxx 66ECJII6233y\7o2qQnLc]KKKDEE!** ֭>s.:F|QWxLeCJJJ]]]\|iii2e _~%␝qk/ɓ'شiLLLaRʚ?>닙3gB"`۶mz*Ҹ;]v3>CA"B7䄔DEE bPWWGZZ|||CUUyyyعs'tR888>JKKdnnTCݻw_|~5K.qx|2RSSٳg P&L'''ɓسg2í[$ϟoQAAAٳ'p% %%EG 0p@1 HIIݻw1rH.̑5>"00KG1NNNHHHe JabbgJ~)9L<vvvw9r#5uTl޼8y$͑'N`ڵ:c`cǎd1cx9R{Enn.999s9}wɓc1fnn3g/gT;v =/<;PkީS' }NLp@Bȿ=#GSWWg:ub7nƒ7bK2&He˖-cϟ?4 .s]fooLLLXaa!o>ƴX,f..._py{{3֦M6ydvYmݺ<$ d666\xf,\˗y)))ÃD"&->}:ϗ+3;;[!ٳgsttd‚ZmڴI.4r Ypp0cՕ˭[XX444 fr!ȑ#D"aB߻wEGG3GGG455YΝG}n߾cc"pʅfsαCEqe+**x5}ݝw###y!c1___^ŋ3W;;;|rV]]=z4ӓ; , si&fii444۾}\>YYfɕXgiLWWI$6vXviޱs`666L$1===ֳgOWX 32nܸ tttx! ޵kW2}V;nbӦMc:t`jjjĄy{{-[7nkӦ SWWg]ta|ܹܶÂ6300`wUkEDD6m0@h(te4ƾe?s555lҥ̌3GGGp|,662'`%7B& BȿCvv6|||PPP@Vo>!77o:%''cܸq(..n[_&HB!A߾}ѷo_| j1zhjDKX3g|+(BZHН/B!/WE=RB!$jHB!B!E!B!J!B!(RB!$jHB!B!E!B!J!B!(RB!$jHB^X^Zǎ1n87ntttZfCw}U! 
::o iӦ)Θ1c HZ.(**@ @bb[?B _}U!A=Gj'!E!s۶m@ P7oۮRJ,Z]tйsg̟?7o|5}uG??mWoׯG׮]2K|7X˿cŭ^͛akk MMMX[[cݺu-^ӧ3gyhiigϞeffb„ ppp ,---˗/455ѵkW$%%;w.QVVTv!=o }^6Bp^ŋaaaKstt|Ky9(**̙3ׯ_Lj#0uTܹsشi݋ .Z+WWy0L>ظq#<==qq899)]_~#,,ޯ_?H<}|A뇆b޽={6R)mۆȑ#ի/11v5Yܹsj*   ӱaÆV=&(B )Bo6 4=zxx-r/[l2\-Ny7oDff&l򶫢P]]?MMͷ]<_}F۷sΝ;_!~]=z111:t(3i$ ..&M^;v Xz5fϞ  Ν_~駟b֭PSSQTTR|5k NBpp0wN(w7|儴&-Li+ TJaHUWW#..VVVԄ!<<} X gggܹ[~Q >fff@ɓ'r۸x"F###hii ,w}7zzz?~<?~,/11NNN҂FW6زe ccchjj6m׾}{ȑ#s-**°aàmmm < v/Ź(8x mڴlmmީ@XX H0i$yqԨQ~:/Q[[ ooo^zuu5-ZKKKhjjM6ݻ7~'\zC 0w\Xnnn000z聴4^̞=| p=n1~xTUUƍ;v,LMMv!00ry3<{ m۶奛@ @KKK2۷o|Oܵ&;Hyxx[nA޽ +++ ---)v57m۶m6|oƄ inݰc^q8wm̘1ꫯP[[#駟PQQ^ddd4~rr20i$.MKK Ǐǯ;@MM:ٳ555: L6 W\ɓ'y}||P\\f&UP!b022Bll,jjjh"Fb4i\\\PUUӧO?uN:___{~hm۶o?|H$9s5j`x1MCCCtv3333aȐ!8~8M[[[`r<((1cqm:t׮]Ç /RWW玕.eC\\ y?[jڵ~67t5jBBBn: 11gƴi0zh\Æ իW!V\]]3f?~<>|H=}˗;")) aaaӕψܼyz@mmmD"{̙3 @3g`ȑurqqc 999͞ EebX[[˕)[޳gO.]փosJm0BQ\\̊. H[^V`` dW\.\B!{kܜ;{ߵkWdcǎe"1دb1gO>ܿ2WWWuuuߏ?+L tuuyi/h"M0'((rKJJP(d˖-er/ںu+Ր}߿?楙2~n޼ܹsH?~KdfffL*rb z*o;YYY ;z(ˤR\=;w;fQK,a]vKsssczzzrukx,X)Smr [b/_NN ={2WWW::88C6oGfKҥ\/gϞ1;;;6`.`B]x?99`9/vc`WnQݛ{ߟ^~~>֭/%+((hr{McKwww%%%qiϟg ;u~ޱa;Ԕ5l0}WZ`v<9;;3X>|hcPnL(*<zs%KPX>3fe2666Oٳ KHHP^c e/~1V}EGGP(d3fhM_&%GJjkkqArwHOOor}DϣVVVMF@@ ]vA]][`޼yro8LѣGx Ù3g`ff2/5ko^,Kfԩ{FZZ :1ףdbb+++dgg(\u6p*++Q]] ///ѣGsҥ r%.-==nnn;b'OFLL akkl[j Eݝmjj[nرcо}{-=??~ mmmnװax:uBvv6RTn(**j2B^D^iQ[[ }@}YoLzz:yRUUEdd$;ƥD"!;;ƪ[xCCC{X Í7' 00GAe*D Z`ooHR^+p:"44uuukɁ+ajj#Fpy0sLѣr=b׮]-!lx->yڐ 5CQ@ D"GS׎RWRVV'O(l4ېZx1 kkk8::b E.]x>} 899!))In@qq1ؕb…طo***x*++RK#P?F,F-#ci*ѣGh"8qBnVee%!benxLJKKѧO|vvv+WZCJ,ƍ-_RR bǖ^2MGmmmNa \C1&gɒ% :wA!44T:::rP?$rٲe8{,={ƥ+Er LMMK?s΅1z#,,7riJuu5___^Kׯ:w>Ln8о}{:t ;nƒ~z_^awP3՚<<<^GKKw-5f5A2s޴p-dQ QCVyzz{šCիWcƍ޽{+Z޽{;w.lmm!pu7Nn"K BuuuP56\PPooo888?G~r\؏͐Enn.n޼̓-QZZ'( p6 =зo_i&|gHHHdgg#00^^^ذaLLL$''˭2?`B`` كbX|9>lQP_%"[[[X[[~{zTcǼ%;cƌQk׮Jե5>S@hi׮?{O>;4YVv>MyCee%o)!5!DѮPnY~~~000㹉ٞ5[ :ÇGFF^FL梠۷o=[:uj R1XXX494U۷ϟ?y?*~333ŋsss빹>o@ ݻ9s4Yo傈&h RJ5[[[8l0a&L-PJJ yannGreZZZ"** QQQGnsO0E 
}6ōjԴxzS&&&D"sss4jHt]."K,Att4[nӧOc\'O1-oLnݰn:<|wĉoTܶm xߡ2_Sii)jkk>B^ Ny%Bسg/ _6~yy9n,-- QWWGjj* y;`brsdwewga͚5FFF_-ezl! '>cw ޽/* EVQQ{.p1q~!6o T 5X>FauH$BBB`oo%Kȅ2P~)׺:111By|wҢثW/]?/^ﺺJjP( ҥK\Ė$SSS/r+KKK>|dee5jL0]v?uн{OC]kMUUAAAHJJR貲2o???\vcX]]kB,wLP8tP$]v|4Iܶ6l 4K}_56l=y۶mKHB(W_}333nޚy#EyeqqqD޽]8w\ NNN000ӧ̅ ~ߏ~aРA8r!zjL4 5jqY<~۷o-R)pub(?)S`aa8p999JTKb())A`` tuuqzIDATeaʔ)|嗈Cvv6ǭ9PSS?&O`ӦM011+kHJJ/fΜ Dm۶իHKKwQVVD AސRRR'''bC]]iii񁻻;BBBUUUaΝ066ҥK (--Q Bwޝ cequ|Y555PSSh5 XlR)LLLL)O?ő#Gɓ'#GpsN͛7#44'O9p ]^g```^1t? P3f̀7Ը`/3GJ$aŘ5kBBBÇc׮]Xb/_|e˖ѣܶ?7oބT*֭[q5e`oTTTpϋ޽;͜9_|={'''رcy!߁N:Qsss;rsrrbSNlƍ\x^ tR$ blٲe\e޽홉 +,,ܘŅ}w .0oooڴi&O̅ݺu+oyyy,((I$lllXLL \eeed/_KOIIaL$1HlmmY~~\r5 Ȟ={#`lժUlӦMrMMMvww ]XXȂd,==]nB֯_?LLLXtt4̔ \UUF$  ޽{,::9::2mmm:w>#vm._^^ׯDȈ?S.44c;w :w-Z[. ^QQ[wfɻcϏ/^̜ٱ˗j.ѣ1^`\M61KKKŸϚ5KL?KGfL"cDzӧO;wXDDa"鱞={de*ѣG,663---&HX@@;{,/_EElnq4h\o,y׮]PYix޺uM6uЁ1Ͷl"oܸqM6L]]u҅}7r۾s bEDDpU ڴiAЕqFfmmՙ%#/311aŅeeeɭ/)zM8-]1uuunoؘ6*c/14Y$VBヂܷ}!((!C7n}(<_&HB!A߾}ѷo_| j1zhjDFX3glEHk)BE!}eGB!BD )B!BQ5!B!DIԐ"B!%QCB!BD )B!BQ5!B!DIԐ"B!%QCB!BD )B!BQ5!;m۶A Kر#+@ `۶m\Zll,۫!B+@;mW_ÈDZcp=H$b~uq6mڄ@tօB')B^hh(Ļヒ;w6ntt4}еkW׷͑9tuMMM#55Uế+fΜ ###H$> }}}|`ʨÚ5kйsghjjӧ<.111000_kD69ϫ9Rprr 0rH\z p mmmO?>|C "BbԐ"#FӧX|9)Sw99u 0f AaPUUÑ%wƌ(,,D\\ M6!&& \;v;qD̞=:t+0oj诿tYj̕+Wh",]}ݻc7noAhh(cnn-[`РAh۶-  W^/՘%B)BH>}:3СCwq Z| H$Rz{baaa8s nݺ;qD09WWW00qD.M(Gt@EmUUUЪQÈ#p]ebb+++dggGpqq!B^5!ʊ^*BEER=z?\,--3 7̌^OOСC }qq1y(U7X xR51XYYȈ믿p^}}}B!>Bkx___mDbb"LLLX?EM k)[[[@nn+%SWW@ ol[c!!Ei5;vP~ԨQضmVX={`/ڼ1^m*T޽{JJY[[{Ś5k9/[,,,WDB!4BZͺux׮] 4h ÇƍHKKWUUoAn`bb618e {vJKKqE8cҤI[СCؿ P8^%[\lnڋ!4"˗/cȐ!8p ?D5ݻw1qD:u m۶_۷oc֭;}"44(,,QWWGo߾P?ȑ#NHHrssl29sQ^^LOعsg"JtR̟?%%%\h˗/#-- SLATTR'J!HqFB$Օ׳H!yԐ"o |# b޼yPUUEdd$V\)/,, ~Kڵk1g}}[nE.]e̙3zzzѣܚ]wҥׯaܻwٳ'݋!C(UyWz:t(]a?>NlݺRBH3fFBNw-פ"..eeehӦM׬Y%%%r!74GF1ưeӇQBǢ}7ѣGطo{*B!4jHBވ25 }K!B9R!4BU)B!BQ5!B!DIԐ"B!%QCB!BD )B!BQ5!B!DIԐ"B!%QCm6pi^^^zkujÇC o*/EQǍ;:B!'Ԑ"UVVYfZZZ066 Νrv܉/)== K,O>8p!BGmW\70~mWYjrݻ=z &L-q9lذӦMT^^fϞ===+Ut[ƔÕ+Wa ##JmKY7oF]]k!OA )Bk#DV/WنIclقRopss-z<}PQQfkTcƌ޿ǚ5k^{CJMMO!>B+@ 0j(pE 
6 D=o>2Ο?~AKK ۷ҥK~(#vZ888@[[ѣvd! ѳgOebkyyyr 7N6OH6h׮])QUU'NDҥ ֬Y~Һu5mڴAqqܲׯc„ h۶-444Z.ߵkHcccx\>Esv'''B,sB!f#Ei5Ç>c0py̓H$BRR [зo_p6m--fyf̜9Æ ìYS;w'NQ]رcƎh ׮]ի'd#** Ϟ=k7+++ Fv0k,௿1k,ƍŽ;wDEER)/ٳ'"##add L8UUU0'O(--̙3;`ǎ矛vVV{=+V_0k֬՟B!tڕ 333:u xxx`ܹ\CjŊ(++É';v,ݻw+U &`7n>xyy~~~!u =}OnW[[pk999H$2W^FVVVۺ{.c(--Ett4jkk1l0^ 044L:bcc---lڴ HJJ'OF׮]=X,! O!>BH:u*{?cĈx޽w(,,Xٳ'׈###=mJ$\v NRm۶ٳg1uTTTT`ƍ5jdqcǎm̙3|2fϞkDhv^C[lѣGOCJJ ݻwJ}vx 1mmmL2ٺH$!wF )BkѩS'!e?cnnBmK$!$$!!!x9l2̟?_:u>n޼ɥ)31`yyyMeիW#::AAA022.jkk[tm^]]@]]"""W_!&&UʄB #W_}kȔqɓ'y˿fS^^{{{{0P]] x1.^wrN8GɕwI󆶉D"_{qTV7vJS"t0ZL #`I)V@#tC`/AX/[( JW/~myfa 3?GgݺuK>|(jz>֗oo}x^]vMdR=$y<9sF>ԫWmxUMLLtoϽ.Mx#෹}:zi޽JzKItq ?P0Tkkvءd2q8qB@@p֦h4aIR,ui577BdRSUU c477zZZZTSSNGvΝ;Tcc.\P(Y~ZO<)%IW\ѱcxtܹW7n診$I7oT"ק(hzzZO>U&yqŋ Bb?=/*ȑ#ڹs466F߿`1%J,J"${T*e=== Y8'OZ</733cHĪ,ȈMNN${m_$H$R}];|mݺ*++m߾}688h˅>D$Y4-oppКl˖-z- ٳgmzz͛7$+ԺҖH$=zUWW[CCڳ٬]|jkkrחiI߿n,3ui]v`0h611Qܜ:u~m۶w±q۷[EE޽.]d߭$}2s/͛7;ʇ2~s!8DRA "H%"'Wqr\(33S.*w)R@TVV*iii0@dY-..*˩`ー@ykmmMG'a)+J~᰼^o+FEJ(+NǏ.%ϧM6)@ 1G "HC)p !8DRA "HC5cp%IENDB`python-diskcache-5.4.0/docs/_static/core-p8-get.png000066400000000000000000001324531416346170000221050ustar00rootroot00000000000000PNG  IHDRGe@sBIT|d pHYsttfx9tEXtSoftwarematplotlib version 3.0.3, http://matplotlib.org/ IDATx{eifiBaqLNʱbIE)t-&rv[kik[mc)9:B S4a~3qVus $I+W $Ip$I$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I"i&z*R RZ5Um„ &LP 5I3IIZJJJr_a%%%~^ϩ| <8{Cڵ뮻غukA.$: IRᐔĠArmڴ_~UP||$KVxqEr߿~$J*Eҥ袋2eJy.̢Eh׮6muVjժELL 4k֌!Cs֭}ݜ{DEEQB:vŋf6i֬%K$66ݻ2w\?Wo^I:IR.1b<˗gϞ-[/%''3x`bcci߾=*UbŊ5Yf`ʔ)Cj4hB8OaB+yw8p "66G}Ν;/dee׿5yfϞu]Gff&:tf͚lݺ7||G4j脵tܙe2ydFIb938c v̀B}zbʔ)ԯ_[nm={6_~yDߟм{nڴiҥKiԨvGaΜ9уիW>\ ,駟e˖vmܹ%Jd*iժ]wg͚5 <'x"t\y啤sUWquױsN~mZl[o5\61cxwر#[f…L:˗l2(W b„ l޼9ǭ~t땤0AIR/Ŋ VX1W_ڏ9޽{?׿@. ڵ+G@^j`ժUl> lݺqlܸ1{왣gϞA Xj֭[Cv VP!Xd`Ŋk֬ edd֭,QD믿˕+PBp9YreTR Ԛ3g k@pŊ`0ܽ{w07n<|p;wԜ Ah i&oP{n98p xUW@pҥ?' 
8vذ<VZN4)˖-9jQF0***c\jjjJ*:+38#v 7ԩSsn:g<#W$)M<ÇӿB@aÆQhѰcyQ\}z"))I&my' >>>\rtؑswRnP_TTݺuСC]6+{nW^ׯ_޽{tR֬Y_~9G{ZZs̡aÆn`( Of N;?))) <ӿիǿ/*Vo8q"M4q|tt4Ç' 2y'%%ѷo߰3gi&:vH=>п/n:Ǹ*Uv .[޽sK$JѼNrҥKhٲeX_ժUIHH p+Tř6mӦM ;СCرo/ 4iVJ7n֗~ X|17X~=k׮ OGkѢkf̙ڵ?<'٩2eСCfΜIRR_=o )Y 9s熞)Q q<0xbrlcYfǜO?ꫯכ7o>yhoU;֟uvsHꕤ$)ٳʕ+rao$''Yc=g<ωC=Ⱦ}N={2p@^{5N+Iŋ2uTɓCDGGӥKFu?c4hqw˖ŋp3c:M=~|?VP ɹOG$JѼNrQ2e믏eR|y Vf!j'e2Snf)nҥ\2[0x`֯_W_}ĉiٲ%'NK.Pk裏Ž=N١ǻ3f8GT$)5lm޼-[7oޜ]vzꓞhѢyF͛7`޼yrڴi… B!UBB7x#s̡f͚̟??t"4k֌"EZ߿{ǒ]?U_Ñ$=zPX1?B`{옿fswl۶-=3B ر[)W|ȑ#9-z饗2e +V}9ر+W߳o>+*;TTo%K0dȐc|lܸ١CU;s$N:QF 0k֬co߿?[D꫈Γ_Ju#IE5j੧ .[nsbŊ\ve 6{Zjq5Pzu͛;w.-[d9Yx1m۶UVDEEqСC^rPӧOky\ve$&&زe ,o%##yRL?I)^x14h@BB{w%--{38#ѣy'yWiٲ%+Wf۶m]ŋ3eW~R+QӦM+G7͛ڵkC/-^8o&W]uڵE$%%QdIlŋ/پ}imJ.cڴi\wu\s5PjUnS:O~+p$Igeq\uU1+2\ҏ=#\|<̟?3fPlYӧO&ݻ9s&1YYY__;g͛G%R mڴ?,Y]K/ǾZj$''G}Ν;cذat=Wv2e0w\^x&OoAFF+WVZ|֭[r8ʯz%:jnIRڻw/+W&)))$I*<|Hrَ;rlC pa @FF^{mU&IN+GƎ˓O>_NBBf$%%'SeJ̑$ /-[;s8p psҹs.C$Iҏt锏3E !!_fF$Iu۰a;w~ GȾf͚$&&p5$I~ $I$ Ñ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$I(VH$I:5 ]B-]Bļr$I$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$8^]rRdI*VHV9sfصkҶm[J.Mll,7|3;vwFAՉAL2%?#I$+/ݼy3}={J*߿7x;2n8֭[iժe˖eС۷QFrJ-ZD%B8p Æ w4mڔ3fУGݻw/J$I*`0XE,7nLFF֭_~L0uq9pWQT^>}0zh [fƍlڴER=W~Z\\$It| ]B-]BĿꎥhѢ$$${PoAC/vڼ롶3fI~Bm@;3[`Y$IBVٳwy{nݺ?\ ohҤIq͚5c֬YK.TRԭ[7l\v˖-[7|ӆ N{]$I B q(R]w]趸۷v\\\J*y\$Igo۳gy,Y$lܢEHJJ }NJJb]6Ǹ %I$zp7߄eff+Czywٲᇬ_>3B:ux3& ;v,h"W#I$+ۗ{ҪU+IKKcҤI[gyҥK3m4.R^ȑ#9[C;쳹9r$4mڔ~y1iҤS~$I_BuK/_W[8 7nرch\BBsGDk׎gy&lwaÆQ|yƍDŽ U'NGSׯŋ/eˆs 6޽{ӴiSf̘A=t=$I$ `Aq,| M4D??.]0qD+GӧOg߾}'<_jj*իWO>=`0H֭ٸq#6mhѢTի_>V"11W(I$.!L|ꖂ.! 
mu-ZjժEbb"k׮ ޽{{3fI~Bm@;3[`+^$IN G 믩XbSLʖ-Kll,wuWؕKRT)֭YfoXzu 6ª$I$c4iV"11ˑ$IүDj|BA&>uKA#I$IO#I$Ip$I$IH$IÑ$I$x+oI;JNW$I$ Ñ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$ŋswHR8soƮ]mRtibccٱcGظ#G0bWNtt4 4`ʔ)I$I\.xL׮]iРiii=FvF IDAT駟R~}nJV([,Ce߾}5+Wh"J(:6l{iӦ̘1=z޽{A-U$IR!Ph<ɓsnݺq3l0&NСC9@f͸+0a} 55gyѣG֭[CѵkW-ϫ$ITXZh#ԪUD֮]j{7h߾}(\~Ԯ]_=6c 233ׯ_-pwuV,X$IT+G ILL~7ФI͚5c֬YK.TRԭ[7l\v˖-;7|ӆ N{-$I U84i}zhㄘ<vLFFF1111'5x*UDJN$I B!C={pW{nfϞM*UB}ٷe^c۷o'666t(..4`8 y%I$pAX~=.ϙgɒ%KŽ]hIIIIII߿?Nw . K$I*(++nݺ`ME]tq_=.[l }ᇬ_>3B:ux3& ;v,h"#I$˵g222t 0wy:zkn iӦq饗ro>Fϭgs}1rH233iڴ)o6cҤIV$I;pŒ3YfMh%KRn]ZhAΝKN˖-`̙̜93?;%$$0w\x}QJ(Avxg‚ڰa(_<ƍc„ ԪU'ңGӪQ$I/G x'ɸqxgٴi4jԈs= ٵk7n/TZзo_/kwW~Z*bZI .!L|ꖟ${}l~~JWj֬ɡCٳ'oiԨ ?aڴi :QFiӦS.P$I)^zsE7q z+~{Γ+F޽Yn< .HI$IkmȰbŊЭtRzuV\$I/" GUTaԩ>|8L:*UD2$I$勈n{ᇹ;h޼9wq5k?gر,[1cJGj|D%>uKA I~" G}hѢ 8>}v y晌;/$I&ppӳgO,Y͛Z*M4 ^'I$I]bŊѼys7o$I|+h͚5|ڵ`0-4$Ig" G_|7t-:f(#I$I^Do߾\?7|U$I$嫈?NsI$I*bŊ-[6j$IQ8;8q"YYYU$I$n]6YYY\pvm$$$PhѰq]w]$H$IR(u->1,I$I*" G}Qn!I$I*pԺuܪC$I TD֬Y͛Z*˭SK$IR8͘1xM6h^:>,;vt I$Ism=k,z[o[oСC \wu̞=;W $Iѕ!CРA͛GRB;ve˖$''Ӷmۈ $Iѕ+Vгg([Rի+Vd I$Ihӏ۟Nttt$SH$IR(iӆ?O,X o…<\~L!I$I"gFE]D˖-i֬w}-RJ >k׮L2L!I$I"pԶm[ƍҥK /pWG2$I$勈2dgϦYftؑDVZ̙3TC ɕB%I$)/ET’%KxG1coeʔodСTR%W $IK`x ر3<@ qq$I_"GQQQ.]`$I$g' ,YB۶m)Y$*T`ܹܹN:$I" G| -[?禛nȑ#+gƍq$I" G?8ue͚5 :4K/e…L!I$I"pxbnVQ||}1{ƏO֭#B$IED(99%KЮ];{=/_΋/HƍٱcO_~o/R$I$奈Q6m5k=Æ їīʥ^Q$I"~ _ҥKIKK רjժuY'I$I崟9ڿ?7fرuY\x\x#I$I?;J,ƍ Y$I$vk۶-sɭZ$I$Dx ֯_7IMM%===K$I 6dHLL`͚5L<㲲"F$I\D'#I$IR$IT"zH$I~)" GGRRq6lHrrr$SH$IR(M>\s SNd I$I5jzl޼9)$I$)_DJ.}qF#B$IEDK.aܸqmٲ^xK/4)$I$)_D!Ch֬~®ZA +J$IR^(wy̛7skժ? 
D۶m%0a„qWSN#G0bWNtt4 4`ʔ)]$I_\ې!/ܹzs9 .㎍_VlٰqdذaݛM2c z]P$IүSh/d׮]@˗ֹؾ};guK,iӦ[X1n/55gyѣG֭[CѵkW-zZJ$Iy;ҺukN޽{B 9bccOQQQuY'=>++{ƌdffү_P[ ;d֭,Xk$Iѕ#Gr3sc–O2eؿ?˗n`.]:4fҥ*Uu8Yf-[| ;vѶaÆ\^$IQ8ڿ?7xc8~a5jđ#G={6cƌa夤Pܾ};+W&m۶1fn$I TDK/eʕUi{s|޽;kfL>= ;>:::$I&я}wܹ3<3Ԗċ/ڵkW^}…~I$INcҮ];~m"i 3338#G!Cm6֩S'~ƌzQ0dرӢE|]$IRQ8:tڵ`4zhvIn̙lݺk.6l 7@:u3gf͢m۶t)t>#GIӦMy7o&M$IүXD}̛7}V=aF͛C|M|Mn&ʕ+Gyyʢf͚ :|0, 63n8&L@Z8q"=zȳ5H$I*" G [n׏os9W_bccO{M6W_}WH{1{ӮI$I/OD`ٲe7㲲"F$I\ĻܪE$I LDhT$I$[$I S GO?4;IO?}I$IR~9p4ydׯ)))'h!33>>}p90eʔ$IrJXɓ'3j(ƎKTTקz/_`0Ȯ]ظq#V"33ޝX| f cidYRdK-$]E:A[ԑEV R??|}ƍOشi7pŋg@TTTvF$IN,Aʔ)CN{Xjiii.]#GfK$It2e)1g}}2{l`hYBBmڴaʔ)Y.R$IN,W^y.]cQz ˫UI$I,Khԭ[ؾ}{Vv!I$IDQRRׯ?/%Kfe$ItJd)iӆ^z>3f ڵZ$It d) 3>6nHLL se۶m 8y/_U$IN,lll,> >`v#I$I#I$I:Sdyhݺu;~-`4L6-$I*K7ޠk׮۷ I$IRNpԿ*VɓPBv$I$I\9ڲe zH$Ii/KN:OU$I$EL3<øq!_|$&&RTU$I$R@/ٳgZ$I$)b;Q$I$E$I$N0C$I"M7DTT1}}\|$I8ҤI*TݵH$IRĜP8ڵ+;vZ$I$)b| $I$a8$I$p$I$I stQ$I$E#G$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$;Ht&yq.!k"]$&9$I$ G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$9<1p@ZhA… 3&Ӿ~--Z >>… ӹsg6oޜ߁>|8eʔ!&&jժo#$Ipey[.۰a 6d͚5bժUa ]wݼ IDATuɓ^x! 
y饗(Vu=uK$I9r=G?<۶m =Inlذ; I˹;IKKcĈTZnݺUxq.F޽{Uo6c~$It'`ݺuSNeԩtM$$$PD Ν=O޼yiٲ%O>d=*T_~1cP|yƍGǎOqI$IYr|8_ʕy/W\ߟg2I$Ig$IthvHAK'%1Iu"]BDI9#V'I$IYe8$I$ G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$I$I9~KtI+̑#I$Ip$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ ܑ.@hvH%$I'ȑ#I$Ip$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$I>c@_}YXO?/_>;ԪU+\r_t)W^y%*U⩧bÆ <^3gr%I$ gT8jРm۶=PP!>c (@ҥ[h֬٩*U$IRsFL;Ԏ;طo_۷3{lnP0ҥ L8T)I$)9FuFZZQQQ4hЀ#FPfM-[ƾ}Bʛ7/իWgɒ%GMؼysXۚ5k$I$E 7W_9Ê+x'hР~)\r )))-Z4Ee޼yG /OJ$I"Gu֥nݺ׭Zm۶TV3k,ΰ~LLLhݛvڅY֭[gH$I3"e\r\wuL: ݻ3ݵkWh$%%tRj$Iygl8(Q{M;8P)))IfhѢE@$IR#G[f˖-}ӧ1_> ,8I$I:ݜ#G$IU#I$Ip$I$Irϑ$IiaP dbV r G$I$ Ñ$I$#I$I G$I$I$ 0I$IࣼuivH%$I9r$I$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$;G .tqt$I:i9$I$ G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I w P6t$Icȑ$I$a8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 8 ݻׯ>ԩSٳgG,I$Ivօdz):uij>KTTW_}5Ϗti$I"(w 8-ZěoɈ#۷/]tJ*w}|P$IRU#G'O&**=zbbb_… Y~}$IIgU8Zd *T@ak`ҥ(K$IRpVMKIIhѢχ]wӦMl޼9mŊY&:r5 ^*'\ߗ޽?Qzz:cbbB^`.kݺuxƹ5x3/%Tt >cux]1AK{*f"wZ~8{]vam۷ogժUTZ5%ɚ5khݺ5o6ʕt9׵t>޽ӨQZ GEeƍSRR8nRRIII/+PgrQrH!)y]KgljtV=zZ۷? 
-$Itv:Q۶mٿ?F ޽^{:uPDV'I$)Ϊiuuԡ]vߟM6Q\9Ǝˏ?ȫ$I$EPԠAESk%==q[oѣ+#]Bqqq4nܘH")x]KgG  #]$I$EYuϑ$I$H$I0I$I`8$I$p$K&999?&G&IH:Ę1cϟay0DT(T9A  && *p/YmRP!G3gNoiѢ.\Ν;ySuHYd\?<*U"::bŊq=￟CR6;>V:V111L0뇵ϝ; 6}R߰aCɛ7Iݏ{G(S vb⋼{,_[reŽK\\͚5㣏>aÆlذ c=FZZO<˖-cѢE^)]u~>|8m۶;dŊ9o?Ge82qW3i${9rd„ ԨQ-[ʕI檫f͚tޝ"ESO1m4ϟ϶mX|9^x!r +V/m/dɒԮ]M2fzqN:KeuSO=EΝyCۮPwqӧOk=,qZa̞֭=;Զg&OLǎ3?p< +W&&&s=={o </^|q7d^f͛7vQdI)Qw}7a&''ƍiݺ5$&&ҷo_ߟ3#+~`޼y\r%7PUV|嗬^:>eP0hҤ *T`ĉ$ep"… ٷo7xcض~7OQN#)K.7͜9 ?zɽKzxg֭Ǐyݻ7桇/fĈ\p4k&O4;wҫW/FI9r$]tw4oޜ"EOШQ#|IFugDAk׮H"޽ } ƍٴiS/Շ]6K,9K:w :0:vHIOO'66ӨQ#?~gь?>lT/EL4;yfN˖->}:@x{ 6,p=(W য়~ k]С=z+^z)*zy6lٲ]v`ybcckxw7o;v u>eƍo-aE_e'~FIȎ҂ C͛OGh߾=̘1;v0cƌLM46mʖ-[B_5j >>>t?={pw]wuL~wlBݺu [o {ݠAcڗӤI)Q7x#[+V^zm6:t%KXjwu/M{= N츮{ ;t7 @TTsseʕe#:v-_* ,ρHJJbn!D߿M믿ү_?*VH\\7n$99>%vڙ+tNnɛ7/իWW_xjtB…R'Bq]+VzjR|y;CիwĿ*U c .o޼#:˖-cժU;6 >MORqe^R^=7Pi9ZhիW?eJ:6GU|̐+VBrr*U{#_dРA۳~eXo>m19O<9`03#A ygx$|~)SN+ n3f~PG}ĪUh׮]$Jtw]فȗ/_{uzpH:]qyFٳ'Ceҥ4k֌K۶mC54tPj,Y̙39s+RlYƍ)PSL")֭[Giժw| /ժU0i$.rN1bUV[n:Iv-[qyaӇC__|Oa(PE믿RX1<`%J0w\~K˖-y'HA纾Kxg?~~7.GN  >>B ;oouAI', n:Ӊ$Io>O|(Y$@ %4;9) vAzz: -Zƒ$E@0$%%TvA"]NSs$eHJJ2I!@$~7K'p$e޽{ɝ;$IRt3IY ɕH W\>IY:)N'IRdeH$I0I$I`8t 6ut$''6ʲmٲ%ҥ47J*.CGlI'Ӡ0y7dk׮e̞=yRjUڷoO=t$5 Glʕ>k׎htB*Uسg{oaԨQ.SHREGGG?7HRMѢECn6֬Yû %I:^IG4|jժELL e˖_ݻS|ybbb(RgGҥKILLqƤgΜIFȟ?? 
VZL0!|޼yk׎%KM%IOOϰۓHll,^x!<@~۶m#99 @nعsg~ƍFRpanF֯_>|8iiiarqw^k\q$%%E]ċ/鶏vZb_~9Xb >1JtqHa-[f͚ȠAطos=z bСtޝڵk}v/^̗_~IӦM3]?yԬYiӦ3f 7|3+W,X%K0k,:vIعs'zH",Z#Ga&M_M ȓ'=ztҬ]ӧ3dȐzڷoO2e:t(_~%G&))aÆ 2zӽ{w6oȑ#iذ!K,`=GӧO .nݺG&/"+WUVΝӧӻwo8mw, ~Eiӆ3ydGժUꪫ8pZbуJ*l2~iVZo}LKta}\pw8p3k,V\Iɒ%oZ*} yҥiܸ1cƌz/^3fvL<4,XW_M 2eJh^jj*%Jࢋ.?&&&&~0 =1/===à q ?QF,Y˗A1x`nf^}P6m'bn:ʖ-#<€B/_%\}v9`dv-Z`լ]Wƍ;w.:;w`Ϟ=*Uz1ydQ]2w\ׯ/̭ʂ 9IҩpQ8NRO֭ÂDJh޼-X | W>~̙C͹+:ujKgfǎao!S ;[lnݺA,Y͛OÎ:[o {ݠAn:u*}lٲ%uyQ|y̙sc>cLMMe˖-4jԈT@||<7tSu޼y]v1"WR%*VvW\qQӑ$ejͤS| .B{î#puQBTB-ܹ3ժU k.ZlI58q"sH:8"r駟xyw– 9@*Tc:ZXz5`0'On@رjX`d…}JMM%!!@3B _^^oLiӦc_ӁHRkذ!k׮eڴi|=~^zݻEGGsW3m4f͚5\sڿ?M6__~TX86nHrr28cʴT9sf} gTڵk+X"O=%J o޼{<'tG;>VSO=i%J~%I G2uinM[rQ/\0ݺu[nѰaC Ǐ뮣]v̜9ƍ-[>ree˖j*Ǝh* IDATK.]B~2HlٲAʔ)C {kQFpB.#>}:ww c9_ǣlٲ|W\y啙N=$L=G2Ey駟B~-ݺukxʕ+ԩSU^{-- -k֬gСڵ+l#G@^z)=zL2㏼,]MٲeyG߿??#[&[у}3x`̙+[,&LCTT.]PJç~ʤIBլY3˵^KϞ=IKKW^!))PMrGΝ8q"z+s̡^z߿'Sfڦ$I9ZP [vmpڵ.㤚;wnFy/K/8p`J v5G ֮];X``lllbŊ!Cӵk`\\\ؾl袋w^pաwy'Xn`lll@ڵkx+V4is9[n%W_k˗`111 /0C<͛7k?>eʔ`qqq`Ŋv[pʕ9gΜ wժU[n%Xt`޼y֫W/8r]AjՂ111ҥK ?iMG;_5 V\9C-]v *U*mϞ=aÆ+W*T(XF!It6^IY)Hs{YY=G$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#IiРAҥK|\INN&>>>+o˖-.EYضm[oYfֹ馛(X`r<֬YC `ܸqd ̟??ҥj֬ɀ"]t Gcƌ!duG2n8{.OMMeTVxbccZ*'%%Vus7w_K9믜s9~?믿L3fGmm۶(Q@ @ݏk0tPʔ)CLL _|1'NЯ_~}2,[f M4aƍoߞ[oܹs_3j(MƊ+"P1b͛7=ӑ=ڵ+y)^8]t k+HOO'::: f̘ѣyᇳmC%==:u%O(~֯_OV'11~q>Æ nݺ.\Xj֬[og߾}.^u."?k4h@!CӾ}{ʔ)СC/=z4III 6,gȐ!?otR6lHtt4={TRY&4nʖ-?ŋyW9sþ߃fxtޝM6s%KP@>S(VX>|AFA=Y&|,Y+2o޽4k֌O0|pʕ+-ҦM:uĞ={0amڴa̙h"l|of蜕,YЪU+.\H^X"SL[n_ϪU;(U |6ldɒӔyAo9ƍcʕGƑ92t+Zٺu+^{-;vCC7^zѩS'FA۶mY~=qqqJ:uȝ;7wqEޣ[nqӨQ#~nvJ.ĉҥ ۷on;ݻ7)))|nj; loѬ:xTP!vڡ_B5jZj 2AI'lڵkvygEDn:\n]mŊt*U*صk/8زe#nk׮`0 Ο??X@`˖-v ٶm[0:u?p@wܙaC 6l̟?X۟5p \"E^!C-[,;w kLٱ]y *+V,,XjKII ɓ'د_P . K,,[l\+A ~̞=;͛jk޼ylٲZj9;̎o[07lj[n0!!!Cm~xcâ8? 
bP X],X`"`1͈1b,1HQ% ͂]s=<<7;;;Λ`&Mc-Z>1b ^ &y]ve΂:۳ֹo#Gdا~Ko߾̗ϙ-ӧV^^0X.]˟Ņ`۶mc1VXX+W6ݛޣGXVXxx8ck'99Ze׼.]]]ϥ?`***ԩS\xdž(VTT>]_2l׮]\/^0'''&J٣Gj#c bc[dIQCC?/|0V-\^UUb1>}zTP!D?lmm퍔:חd8صkչeÇ1o<k>}1>} 0p+fΜۧR+lڴwjJhC֭isq<|֭úu딖}=JTSrsskBKK w+iʆ1(O!ɹ#??{šC+WbÆ wkhhh{EZZmUVV Ř;w.lll HpM3F0ٽbtŏ㪪*D"*/Ɂ'_uPWWǏ?hWƨGMC:( 33o߮wnHEEPZZy(((yHMMU5G͚54dgϞ=&&_}bccy=. 9?֯_ccc!66 u_wNIXXgWSSSҥKh5eOkݻwc={v@^^v#Th r5R8p@f͚aܸq7n>|777DFF6h8ZMFZZoM\; T޵@XX;믿梲M6Mi×ݛWPPlT1Ç~066D"AUU :̐#)랪1tIIQ%K`…k;"..999'Npk*((@ee%{F &bx{{cϞ=/^ضYGG!B HJJx!a]]],_\0^WwVZohhwww|0ůҳX(11>.]4x][II U#lڴ rzh\ a% 6 vvvXd /P='IMپVUU!::1\\\+3*qРAPQQiy֭ ׻.r>b***˗/s/^ޯ5kxΞ='O`֭\.GXXn޼ TDß&M9^ ##QG.cҥ?>^ ʕ+HNNƤIXf 7닉'Ç17Dϟxx{{cƌdב=С0gB&iDRPWWGrr2aÆΝ;addKQPP$$$psjRKԩʕ+8x p\|{ѕ+WP̚5 пbO7n ///8::B__'OĞ={۷/>ܹs q]",, /_5?/\}bСX,Fbb"߿Çs^eΑAP\zEEx=OqttDll,-[\ccFSѣҥ &N[[[㯿ѣG9G'OƦM'O 8qV^ m300@`` V\ ڴiQiE(ӧjjj\Wsw^dffʗgߟ#{nL8۶mXfff1c<HJJ?Bսm۶0wӻ2ѣёm۲ 6pkz9ҥKY.]L&cZZZƆ-[xS3366f\} bRu҅} .0OOOÚ7o&NΞ=-[𶑕L&cښ 5? y)Bo_rܘD"aذiӦlAjVfϞ=ihh0sss_Am]]]sssY`` cٙezbؘ-\ Ç3Lz 2455Yv'|޽bzb 3عsyXDD\ʻ^m!w\]]y144w2Ƙ-^999que˗/g\#G2===1^` dsLL `֖mݺUO{̙2FtuuL&cGfO{S2kkk&Hڵ+KHHPZ-wII FUonbc:::ᵅС eJmΝ;lʔ)uLMM3OOOyfA1cư͛3uuu־}{w }=:u*]U`SNe͛7g"ְލQWHV#/slҥԔ3p|FFF,22*ɛ'b!B wCzz:C}ٷo ;;]0f7M_&B! 
IDAT ԳgOٓ7t9F+V`ƌoaDHS#B^="B=eB!B@#B!B@#B!B@#B!B@#B!B@#B!B@#B!B@#B!B@#B!B@#BH#EFFB$ڴi1c41c@GG k4w].\H4y})SΨQ ɚ.H۷z ?D"~]t|ۮGA$)7oۮ^l߾ѵ./--EDDڷohii]v?>n߾?;z(~̙3mW_֭Cccc?|2׬Y k~,^ݴilll +++]>{ g{---t _ZZƍ{{{¢2|rCSS:t@|| ܹsחW+@Ȼl߷]?Ʒ z*/^ sss^[ͫپ};0c z ޽{1k,ršo߾8z(u۾};e˖u9w\| #0l0`\@L6 ׯocB28"wЯ_?tmW(//G@@믿~@e_5۷͛7(UUU/^@SSmW/^ƍ1|plݺK vJڨ+ &QQQ0aj]HHHʕ+1k,@pp01w\\?[l틼\222`hh044Ddd$***!Ldd$/_ &K.(++ӧK:N7:w{r?χL&Ù3g#Fvލ'O`ʔ)h֬N<իWƍؽ{7sΡ{PSSäIЦMIJex:t(ͱ|r߈VXYl1tPL0Xz5q̙M/--h޼9u֡cǎ0`TUUw^1z9996l&L1c 66ܹ3T~>}:___$''cֹEaٸ{.K.`߾}4D||<={iӦ'VZ[nedd 4hr9>3>}7oF-x;** QQQ>|8&L{!::'N3g JT?=722 o ._|I&s(--ũSpݛW^^>} _~%:?8q"oժU ȑ# ܹHMME߾}y>tv3SSSTUUa?0e 11cǎ`033ݻwq!ܸqG)mL]];Vptt͛ 777#** ͚5nիWsW߰"aĈ6l֮]aÆa5kL#G/quH$3TUU1}t4k )));v,=zPս2=z+W6m >>(++ôiSN۷q׭咒TVV[D"GϜ9^j'''D"9sÇ3gFDK.`!##sLT +++A]v=;ڵkרm(Y~~~}߫gڵk\څ X,f/uѣGs;t|}},L"0ߘT*eٳg\0]]]̞>}['O/_D"^ݙ./"""6n8^֬Y3իWX,f˖-dmٲPW}ݻ7⥙0n߾ܹsP\Zii)355er;6mby9|0;ƥy{{3\.gvxǬ>uɒ%L$7npi...LOOOPq 4i/kѢ9//+Vedd0XKڵ+svvޞ 8}9r$>S^ze|?lmmY>}rbv%^}׼... ۶mcB\Auw޼Yǎyy,,,XNNN۫`;).]]]ϥ?`***ԩS\xdž(VTT>]_2l׮]\/^0'''&J٣Gj#c bc[d < QF)]`mmͻΞ=XݠXcG… yUUUL,ӧY_&>sD==[[[x{{#%%e2Ο?\XZZ֙7==~~~ӧvuuunÇC̛7O0Ct?~O19s(,,į3g&Oܽ{w$'' RIIIСCy=?ưDzzzBϮ]V䴦VZZrxxx <<?r@"˗JRL8ΆMn2!GWWWɴ ܹǏ?FVx7777^fÃPYY 777::WP!%%m"44Ǐ$ Ԑ !Uf ߬Y3gT {{{tz­[gǎAe.Lƛoggr^/3pJUU*޵퍄ddd)))011СC}֤tiMP|/׷~S) 5 䍣!DO>Uڰqxb 8VVVppp@߾}={ pttD||`\}~~>`ѢEطoJJJxJKK󣧡^n@"J Xرc'sJKKy묨wcRPP=z]d#T[n58իWh"߿kB󨭭\TUU)m4cl5kLА={|W4X~=X$$$}5{`,_Gf6g@CCk|ŋ kջj; .`5jҼ:thT]^5d]]]I˖- Ϟ=Ã{YV˖-LV4LJKKys2 yqDPDr,nP;v,7yƑH$Ž;0p@ 2+gee:\)33999غu+'/Gk۶-WVS`ܼaqc߾}xP_Կ*=.]kʞw1{:땑<ر @E5r9*++!˹k66668p@{Ú5kqaܸqx!٠h5%&&B[[iiiaP6mjpfff8v7tP{aaa Cvv6:v숯6m4Q_ֻwo.Zݻw(_^^OM^cccH$TUU  999^sO@NYd .\ر#ӧɓ`qkӱcG]==9q7e!''(:#MPބX oooٳŋ8x`>B uuu$%% ~~~}􁮮./_.cx«x\;c V744;[AWY X,FTT`}ܿ.]R .ޘyѣGشir97J a% 6 vvvXd ,/P='IMپVUU!::1\\\+3*qРAPQQiy֭ ׻.ryØ")|2!||| )TTT`͚5|O<W}8|p5Yk.^NB^^:uk" J_R\XX7n앗cՐJ޽;W)JI$TVV^ik׮=^lkA~4U\ӧOWWW9X,Չ17Ԕ_o^%!oBBZZwSW=Ν;Wvvv# pi$$$po_W^ׯ= HR\&LF}}}={O<֭[acc\0ܼyRJ?EGG >&Msss\z@FFF\.ҥK1|\zŕ+WI&!,, fDEE!==3Vooo'NÇccc|c͟?ƌ3 
ׯ#99{bݡC899aΜ9(,,L&Ӯ #R)|}}dxyyÆ +TUU;wK.=:::HHHդީS'.$+WpATCYYY!** |2+W )) K/X~ ƍGGGɓسg^c"::}|;wpA;Ο?ߠ2еkW˰Fbb зo_ :vvvHLLya_eΑ3z͛7͛7f`̙\ފ zjX,[ rƍ?ѣGѥKL8(.._Grs&OM6!(('Oq ^f``@\1i?ӧO'Ը2H"`Ř9s& ///9rvŠ+x8,[ ǎꊀ̙3o߆\.ǖ-[p 222~ }f:u/3fo舤$?~?/|9Pc޶m[ M޼MP´T*e]ta=… ӓ͛'r!flFVV `2ijj2kkk-W_aa!o=E+WH$L"06m4-(3==]P^pٳ9880 fnnξK#mbb4 qnn. dzzzLSS9;;W^LCC 4A8Ⲳ26|p&AbpB&k׮Oݻw|YYYW^L"0CCCoAc;w8p +By֫-Dݻ+P:t^x޹sM2nݚ1ccc6o,7fּysڷoϾ;ݻ6300`SN徫j^lԩyL$ֻ6l:`т<{O}uFC!Kֈb]FKHHŋ1x`ܹjjjܲٳg(//u}UUU/!oC=GZ)\t CT*Ef0sL<{ УGtA|esβLIDAT:;BSSvvvHJJRo3f2 !!!x]q [^HۻyS9ھ}; >ׯ_.\={B[[&&&Ϲ,;}t"**  @LL J|pss_|m۶?~#BHѰ:BH-Z۷o.ES^pdffvY UUUDHMMUZo)BGzܜ*i@#F ..+V={0qWӝ6_\.Q\\ܨ#+++X[[c޽Xjuaܜ!{]u $Br4Rk>^Я_?.-((%%% G^+[}.++w};˭iРA`!**JfLAA.]["L0:7.ň0PTTs^tG!Q!^W\зo_ؾ};F{QNݻw+o ǏǩSТE |{.lٳ'\UUU8vzP=k 6 XlΜ9>fff(**BZZ~gܹuXt)ϟWra¯\dL4 aaa?\L 6@WWμ@B!Q㈐7(dB~E0o<"44_| _pp0̙ʁ,--zj̞=077?P e_Ŗ-[о}{l޼gφ:w z]t)zh_G׮]w^ 0Qu7orJ7uӧO555lݺɓQQQ-[PB ^B^"tr۶mrMތHDEE͛77Uի(rBțL4vvvXjo ! VGUdd$D".\#F@__nnnK.a000&:w} 8\߿ pBTVVb| ,@ee%233ѬY3ɓ 22!!!BLL rrr!C&N:{ 8TB,כByWа:BH&O >\ܼy@u]r #044ȑ#ݦL&Í7pԩFյE8{,&Olذ#F,Y5Xbr9sW\Yx #i044:wsG}a!11~~~`qFii)oǾe˖ƕ6&MTo]d2?~Ç7BȻGzsc 044EDDݻv^"kkz9w\K.Ĵi7-[qmdgg#::Xh6oܠ2~F1'*3p@>|z=y**|Mc{ A!~ԩB~ЪU+7iiiB #ԫf"BXXXQ,,,^{D[-BTTTD|}}aii;v`„ Z!sJV A={"00?~ԨQΧj߾k8x RSS-[ 88[n} !+jBm۶?+~ mK"`ذa6l^x@,[ ot8mB__o36 YYYun+$$+W… HCCCꢲA>++ 1޶z???TUUaԩظq#ÛK!Ѱ:BH7n54 ?ɓ'ywQvxaggOCzz:1qD١7~'Lf㯿B˖-m6hkk׻O&L@qq1zVZڵkXz5:v[[FB!?B^Y~~>xc"""VXX(Xς1SSSc&&&,!!ܹsGLSS%K͛73ʕ+\=z=zp7nYf͘l٬˓fϞ}f``TUUY˖-ِ!CۣͫG؈#L&cݻw [,==o1///$ k߾=[z5M>2Hj~`ӦMl1"##ۻ{.6mkݺ5SSScƬw,&&k;Q0cKXP#|J%% FQdț{ƒьpn6z:rzooo^knr}XoY2ߢ?8zMc02SsqD$GI@q9>s2S#iR`f֔R4>o8OLu]J)A Lj!t]7p ,L0cn[gڶM۶Y,s +FV}oG08y<y^q4MnFL"b@qD$GI@qD$GI@q$_ZIENDB`python-diskcache-5.4.0/docs/_static/core-p8-set.png000066400000000000000000001361601416346170000221200ustar00rootroot00000000000000PNG  IHDRR YsBIT|d pHYsttfx9tEXtSoftwarematplotlib version 3.0.3, http://matplotlib.org/ IDATx{|uw؁e6kl3,"MrcNi.\ 9$9t u9D"DC22-טp׷mx 
_ݻw+***߳{;^3Fu˵yf5iB-xV.>l٢o*S<<>^5k3jJ۶mS~6m(++K-RΝz93hܹ*S:v%J/ 6諯RWO_=n:͟?_SN?o>ѾqF${tt =Νh$ƍ%%gt钣}vIv___{6mo.4ܹs۷;w.y睫OFF^T){VVS_rrPB5k:ڦOjOr쉉N?.7v=%%^D {ɒ%?{ڋ+fQS{οSPPСC_n]=44ԞѣvISSS; :4kݺgr ~G{bb]Gy|ZǛ4ib7S/\ [N> vzyzz:E}7hѢ}}_tl3fHN\eʔq}钤_%Jp׳gOU^=zNI@@ڵk?#-J-ҵkW-[LuUΝլY35lVItҥ|ʒ$%$$܌ \Jt=8*THJٳgu}zOppox}9޽[zw\?={׼yh7o).]8ڶmc׺uԪU+5lP>u=hڴi.oVjԭ[71f۶mݻwؿ߉)Ei9N8!MwUqqq0aBX/JLLtٞ믿ވ]u=3y>(55Uy5o\MSNK.*RӸ;w?;^zDl)I6~fؠA$I/;mÆ $GǏ5ٳ+___lߌz}F nرcjذʗ//wwwܹS_ʕ+'|1v̙:p^y-X@5RRtq%$$~ТEwc\'H"*Wl6~;v3<;wj֬YPZje*%%EoԫW/͞=ѣ֯_c:^Ղ 4g5jH*T~ZJ:thc%Kjҥj߾իG}Taaal:zmۦӧOc}g:J*_omQ˗/ѣdnWF %I?պuk͞=[ׯWZZTlY9Rݺu3CzuUZ5>Sw};w*##Cz' /jժuxN}Qe׭[-[]AAAj޼:vh< T~}M>]Wٳgڵk{NcG jںuV\)o߾Nw6^ ,аaôvZ-ZHv]eʔ1R7^w[H)"H!"H!"H!{i ǭ.effѣjҤJ(a~MyfEEE2Ɋ+Ԯ];nI*VxnTTTtS$g;_Ŋv /f` R` R` R` R` R` R` R` R` R` R` R` R` R` R` R` R`.[]B.G}qK\'H!"H!"H!"H!"H!"H!"H!"H!"H!*| yE%PVWpF tJOOWtt"""'ͦ+'++K>l6L?;;[oBCCjժiѢEyΕy{{OO?N:U6A㕐~3c K5jZl3flٲҥ>qIIIjܸ&Mhȑԩ *$I4iΞ=;wlٲplR۷o-ۅKPҥyѣUreu-+W*++K pl6=sJJJҶm}ڴiQԢE UTI~u ;K);vмy;f9&>>^ŊS*U3W'OTڵs[nנAԹsgկ__s\rrJ*+hJ?MIIQff<<<\ɓnJqAc {jҥWgtϫ/H͚5K111~n+}:sƌ#G*$$c?y-c2`uɩb}n}2e.\Ν;;%%%IRSSua]ڸqv|AAAK_N%''/߳Q9>綿įT)44TzG$]yhh'I^Ν;9o藤`{j˵ގ;ܝn 5x`-_gΜ9={j $kNEѬY={ՠAG{ǎzj=zѶa߿?׶=w7sL9j*ֽAf͚Y{r9]TL :T'OVVVԩ+Vh˖-裏㕤cjɒ%j֬ tM4c կ__o4.==]͚5͛5vX(>>^M4ӧo1p}ouWd.]ZqqqSN1oՠAG[>}T|yEGGkÆ jѢ$)##C/"##tRlM0A}$i֬Y:pvXu֪ZNI&\J.}1N!*G%I 70`^gϞ_|h[tԩx=Ox\:HYoImڵk;UڳgOq_~E]ɓ駟~S.d[%IqqqNqqqv顇5.gOs 8 ;"H%$$(22R˗իv׼yswujwUhQEFF:ڞx Na_'wFj̙JKKsQoժUJJJ$ 4HnnnjժRSS5rHgAIR T~}Ik0ayuIZҖ-[pBsoz#FH"z뭷TT) 7"RSLё#G-[e˖Iu&I:z$iѹߣGG."Ehԩ~[C qz=ܣM6iذa8qմiSۺ{ 8>\>H>|cvќ}Q>}:L2Zd|w5Rp3A A A A A A A A A A A A A A A A A A A A A A A A A A A A A A A A A A A A A A A A A tJOOWtt"""'ͦ<&$$(""BO?SN7|STjմh"Ks\ǫlٲzᇵiӦ<%%%qѤI( IDAT)Sh޽ڱcc_z%ӧԩ+WK.lz'kNwRJNNVҥ:u9nҤI:{vܩeJղeKƪo߾cǎiԩz5sLIҳ>&Mhȑԩ *d4'KoPҥ:>S6mGZhJ*O?u\RYYY0`fSRRmf<'Kǎɓ'Uv\}Zfu||+*UߨQ#9r\R|M´nݺsssӘ1c4f̘厸F n&"H!"H!"H!"H!"H!"H!"H!"H!"H!* uyefft _7mڤ+Woվ}!I*ZT (**JM6-Z%,͙3Go>,???լYSݺuvRSS j*W^xOEQ7QX.\=zj֬y;wԒ%K4i$M2ER+ 5vXS4VZUƏs^WjT~kww~/r .ٳ7bj,O>DÆ 
sjJ(+==Rj,S:yVZiذaZv^}UE+HI/=z8^*]/_… +;;[}^{5˅tF*33Sպuk.|9=JJJV!KA*44Tׯ$p8qB*cik_~4d۷OIII*SڴiofHp%ԠA5k֨VZ5j$I)))Կ)\ %I}Q>}r)..rnyNftFy l6mذ}ઌTvvl6SѣGu!$%&&*--M*TPHHHU .(Hmڴ֭[նm[ѣQ/^ܹs5j(T,lbĈիzng$[=zP\\9"I*Wj׮I $.\XSz b:piۧC)55Uv=W bp /nݺiǎy(Il)wKA_~ڻwy=#-eY R~ƎAT=,=_>>>U ,k…tRA.־J*ҥKz3($$D 5CVb)Hu#F9fq RڸqcA KAI&U6,?۷o9"I*W|\ rJ >\vj [omZ]\۟YF;v$M4I˗/5i$vuAk׮-BUX:#5aUVM[lQbm۶ըQ#(""r*,ڳgzr+VL={Ԟ={,.RTJJJ)))KAy6mmۖo>}Zhae p9z7U~}5jH\$֎;7x@ WaThh٣+55U/ŋ!Ch*_| so.zY:#uE9s&3gŋVc)H TzKIII2e89z쩥KjС\7nTFn1p]ۼysڵk8lUPAjպi]`ҴuVIl͟?_رcM6x⒤˫O>{Lc}'ooo}G%%%hѢV&#Gԗ_~GyDTɒ%zj}zg[^}U5h@M4Q߾}SSDDcuSN3fN<+j޼y:|>~<\[^ḣ|||TH\"O7w}Zji֬Y:t~zwj֬KÆ {ァ޽{kҥ? hիոq~<\3R믿*::Zk֬.IWddU\9E^pY5jo<==5ydM< pQFJKKS˖-UJGj*mݺU+W.bX RGC9zG5zh-_RJ,]#yf <8WUjڴi%X RYYYʷhѢg9RQ?w}Y%XF*&&FzԫW/UTI?k޼y:}H*,͛k͚59r^uիkj֬X~T-~MG$+WNK\FܹsUfϞ-I*]֭uѮ;H-ZTlY|]t2R,W^yku,qPpt܍,^z5jPLL%X RK.U֭ǵxb+K˱~WUP!P9rr,)oo+DyzzZY\ մiS͙3GǎwQ{j֬%X 0Q~v&LP *W-[hРAz7nӧJ* Wc)HIRjմyf:t$YR9 O FAj~Z6jwn^( ճgOl6=rwwWϞ=FpG1 R$wwwp71 RʕkXz /܍,ߵo֭u!n;l6޽22,zK#G*W,?? 
\ 5yd5lPVOA.5RΝS׮] Q*Tfʹwނn Ԍ3aM2E)))U4KA*$$DѣuXb*^i,lW^ѫ`ծ]`)H͞=[Zbx/sEFFU,%6mh˖-U ,h۷O Ν;u);k*W,Iڵk̙K.YY\ln Ըq }p=0d^{5/rkWd>chڴio"׫o߾*[-ZdXpFHٳGLٳgCUVUhh|}}eەDC=3gk׮72 R6M]vU׮]+Vh۶muiIRɒ%hԨQj׮j֬yC [WF ըQ kwC)0DC)0DC)0tݷ?cǎoɓ'ձcG)SF.]!*T `錔n]jڿ$)ػ#H$Dj&1cU%\5SVoQJr E%J5R)Қ$Hİ%r"^#{kYG7y>II*[̙-@n ٳgkԨQڶm,uvkfHM>#GSLQ@@5kZg R:{4hpJLL) RٳgWҥr uv_L&I֭[tRgBe"""뫀G&IӦMSPPڵk5kjܸqU+ Y R^^^o4zhl6k׮]v&Lݻw@U+ YB^777[z뭷4#O,H>}Z˖-ӯW|)tׯi ROԷo_ݾ}[ ]@^ 5vXUREQQQTRvZtLTRWΜ9]!KAj֬YZbtT54ydC*UlL&Y*r,yW^lV3}j5Y RSLQ qFBFJHHK/DTRjҤ>]!KA?Ԯ]4}t]|9j\-KAZj4vX]6?,da/L&SvO,KfSҾ~PΝUH(P@l۷OAAA*Py >\IIIvJMM՘1cTD ^zڶm[N @.fhFj咤޽{d2Y_?H>}Wf֭[թS'=s?~<<ѣo߾\%OH}׊ҬY%&&j۶mի5DIwV^mmímfY u;ZJKK+0ըQnÇumթSǦEEGGRJ6K%Y"wZ.\/ڴ)ھ}{%Iv|}}{n'Iϟo-SDDCɒ'˗okδOrr$nln{~w^jXuDAꭷR"E+ܳ;5wG>>>U;'N8 j֬Y6KRRRt-:uJeyK%JX_*...~lx>>xNy&HSNUZ5%K>>vϾTPW2d#k ~r9;Zjj׮K.=˗Ofi׮ԩ#I SѢE5sL_^{ѵktU\Y4p@URE#Go=Δ)Stu}*]$)00PZҥKRq]k̙ݻ/_n=vJ+hÆ ԩSY> ڶm͛RϞ=k֬Y^f/Aի6,y*UJ Pf?/{vޭ`.]Zȑ#lohh<<<.]C5jҲh޼$~ݻsYْ st kڵkձcGk-[RJZzu G߯۷oG6x駟PN) ʖ-O>mڴI vaJҠA믫aÆ={맕+WM6u떵oo"##Ϫuz͚5qㆆ 9sM63gc7--MmڴQѢE5c 5iD.\ 'O$-ZTrssS@I~r .X?[``cQTI~x={رc,777\RM4Q%lٳG-ʕ+mf5kmj͚5ٳ.^ӧCڰaL&$7Ԕ)SXϴilW 4n89sSu]Ǐ$ }ZTbE.\9sFo {l^fy{{۴.\n?S*U/ʕ++_;YkNsoZjI*Tɓ'kB2-%%Ŧ/;kνݻwW%INNNz״k.?~<,#H٤gϞ8p~wkN 듞.\2c5<4jJW\ј1cTJ+..NJOOs#000{2ׯ~'( @/t]e,[||)²> eu-I%KԞ={t Xy(Q¦) 4h_}ʗ/۷aÆDL2`=/>pÊѲel.qS8ׯo}}vaÆm]t( jpt]߭bŊ'GU||BCCsTd#>CM84ݶ۷oڵkΟ?̙#b3k֑֬1t~Eg64߾}gi򲶿ڸqΞ=kmꫯ`G !t=Z WOflԷonoҤ SСCjݺϯ'Nh͚5={uf.Scǎj߾i&+VRʗ/QF)..NZv-<viszg?kY8Ǎ5k֨YfzWHըQCsW_UJJt-ZJвel'A aWڵ`7N*[zewޑlcիWO[nU{kÆ >|N*٬^xA/=OOOjܹrJ,Ç7ߴyl$i׮]zoE:t{Q@.b~4k,\RS``+ǡbܽ@# DR`A "HA)0  ۷uU%%%b8*&IL&Kd9 GX,>}Zrrr3cu-IR"EC#79 JNN|}}yu% =R@ILL$>\\\+6 [nٙ%}8ϯTG'A !EqL&,: ܁U)0 QlY:Fhhh|}mۦE5jPHHxjǏG_Zτ IDAT*88XӧuMٳG~g-\e HQ.oG*SXHR X} 6{Qݺue6U|y-X_u"""TbEf-ZTAAAڶm}u!y{{iӦJJJoڴIM4Q驺ujժUwVppJ.-WWWiȑJNN;/y{{M+W֛oiڵk UB~ƍvVXڵkMEQ=tSO$-^&DeP^}U%Ky񑫫U?0c?pQ5kL Pɒ%5}t>0a*T`}oG^$fdÇuĉumM0Aŋ~'NԩS@%&&ԪULԦMթSGׯtR_իWرcUP!EGGkٳ$i͚5qㆆ E3gΝ;5kXO?QFʟ?UlY4hp??Tչsg9;;kÆ :t5l0ky$իj۶vEEEi̘1Qڵk'IJOOWΝgjժ:|}?xR,|3#~WIҳ>k1SzzBBBt%3<+jǎs1 
,xnw1!!A.]R&M믿*!!Aÿ_^zY_(00KtgjժR7o.I#O"Ȳ/*99Y+VVre羓&M?J*_m۶U޽UfM~)))Сj׮իW}љ3go/իWmeߟװUpaIwyzzĉX,??{S>T-w^M0AﷻW+!!A^^^~IRRU…O?Y_8qBǎwǸpC 7n'Ojںu-Z_WXXڷokرsUVrƌ*U]qqq Uzz#)2LڴiS}eÞ*Q9PkԨ3gfyR,v-;~/R~)))I7ĉmdʕ++88X6mRӦM˗//}G*T<VLL-[>}X^|X,*W*Udx;j…ڿׯ߾6lPjj .{ˈT-2]@^=RImڴ3gXۏ;-[w˗/ۼP 2}d>3խ[W:uҁZn jԩJII/c$cfŢٳgVƍf<auUNNNbؼ.]/b$orwwWXX5=eD޽zj n?ze˖Kb,I%K؜ȑ#^xRP!lT\2~x]xf%KX$Y~7kZ,wwwK*U,Æ ?~;v{cbb,-[b)XaÆ9sXRRRlރ5kZflٲiӦYeZӃޯ&MXWnWK߾}-eʔiyeڴiի[\]]- Ԯ]aIHH;82/G !|_.#G "HA)0  DR`A "HA)0 ౙ8qL&M[ٲej8ʲG.]RoL&]nݺ bh^zPB^2LZbCxo.ɤ={8^:u4n8G<2rK.d7ptyXB|='$$h„ Y<<<5jhرJn׮]ѣG;'ʕ+WTX1L&}|sjXYqFM4)[SݺuL& ퟞS\r2իW3f>]x1Jr }G I&c?Ǖ/|ݶXlRqqq 쬟~I .uQTh"##զM?3[o,gܹ*Uc޼ys%''5 6nܨEζcN:UɪW#}1f͘1C Rڵg{ʗ/uf׵kW 6L~aW~yqvکN:.㱸u^x]|Y_ׯo}ɊtPukZxKTzzn޼)Rl裏o?_˗ƝݻWeʔ$ՙ3g4k,꫚5k$),,L 6ԨQԵkWiNNNz|r?n)80lgխ[WfY˗ׂ 2{nݺUXQfYEUPPmv:tHjڴ6mR&MT`AyzznݺZjuݻҥKU~~~9r/($$DrssSʕovBCCUP!yyy_~q]+VvrssS"EԣG={4bj޼|||d6Uzu-\Ю_RԥKڵlʕv}cccխ[7.\X Pyf>-dҹslz/JPPl٢'OZ&VPAzj9rDǏ QwyzΝ֥G*]'әG*88XŊT/W^U>}B ),,,.-[fسgOٸqԲeK[ni„ Pf+F髯;ٳgչsgyxx[cƌQzzMiӦA*RTN[Φ۷e24b-_\ժUo.Rq.\XSbb]=ϟW߾}UdIW]tљ3g>Ç+888(U?ZƟEfH) @RFT@UXQ}$iǎ[TjU?;sŋUZt]?C^m>޻֫W/-X@iiiq:;g3(>sݾ}[CL& 2DOցljJ'Oԑ#G0# >֭[['N۷5a/^N8QSNUXX~PV2S6mTN_^nnnKԿU^]cǎUB͛7gϞ5kƍ2d-hΜ9:w֬Yc=O?F) WٲeuImذA'O'$$DʕԩS?hѢEѴiӬ}&O+$$Daaax̙ƍ+::.``bŊYyΝ;YׯנAdX4h bbbԽ{w)44T-R>}TNU\YҝY *55U .KCZn:wo_3fH ,(I/$I{~c^Z)))6l)oFg'X:tH7 2e(666J^|E/_^<ŋx6P= .>з~hyzzJ',Yo"##:u(!!A}բE k[nu Ҍ3uVM>]*TfϞ]꥗^͛7j*uU6mR۶mmνuV}tJOOWΝػ﨨e ĥ-(( (H,b7(EA%ƘĈ5#1)jP1Xވf." 
ޗ.MQ9{;wܹe;3=qf̘+++$%%aĉryyy={6LLLp=>|7oބ1ɓ'+~OF||MVXqTSSc'Onpy}r9~w`K,0PfϞm/#Ey-8t|}}`mm OOO6D" ssFfff ݻ-ǏpB1u׽S<NNN`ٳ066Fqq1~7̝;O˒>}:}߾}rb$''F( qFuݷ2TVV x)wv '''\rKKMMX,ԩS\XYY5Y(//zCytvvc YYY022ݻwqq|Сoǟ~ Ϟ=&7kĈhddΝ;#33WRRT* DlpRŅkZ80#TWWEnxPdA( UUU޶Ǐsi"***䆱*hѢf=@__O?c aaaMH$^ hiiA*&:::YArr2QSSû6<==,8::"55FFF5jGEEsA`` ;&ד:vݬ¦Ŗh({pY@DB HԐ"b<\a#ҲɆ1l0XXXF`` vWQQooo#!!An@aa!aҥؿ?JKKyRs#olV#X Efm`ǎòepI9ZeeeT:]_~rׯ_oX,۷ڵkXt)8yl5!yD~~>jjj65cryVX???K.2dꨥ===#** ΝË/tEהׯ_455y\OmۢO>:t(xCxmmmakk+/֭[y֡C:v(;wǏi&lڴIaP{-,,S3՚\\\Zݻwe :ECch#QCNÇkb˖-g Czz:mUWW>DXX p-L0An"s B555HKKS56www⫯Bǎ~ rT[@͙!ceelܹsɹ,UUU@YY.\+++hjj'O~Q("--M>mH5z=&&_~%bccy=99?6o CCC 66rPb޽8t/^UVȑ#܍2:SSSԩ\\\p5ȵk`bbh7t̛s]1n8yu֢g ՜u[۾}{8qB.]F3PVVƛI?5!E.??_nYs'aĉlWWWDDDR= #G"-- nnnrY/ANNNT;v-S?B`ΝZT*c {˗/q6X9|2E ~o @={7Z,炈{MTjHRi

&MI&pqqADDDՕMMMnݺeرcE>fff Ehh(rssѽ{w|W\tYf)XEEEUxLeC?~o"555rQ311A^^\LcZ=zETdŊXdI{C^^ɓ򺊊P]]OB !E({AtjA%%%ZZZ033 SQUUErr2z^AA[[V s,\>c ֭700+[ίcPHcc\|Ya(ڷR.׫yɓ'غu+R)7K`߸|UUU CD"XB.<2P;JCOѾ ::!+UáԬاO]?wmmmHR{SB!x=pW\" 6^|{\AUU6l3ϕxuU6 IDAT_h222|}&تU{EFF.\WMkZkmCBBFs{yy͛J_b}P%%%g ¡"ռ@޽Y磡˼p 27ؘc&xs6 )BkDzz:틙3g ׯ-Ο?666pss=p$&&r0` 2Gb1֮])SW^3f tuuq9<{ ;v쀕R)BCCq-b$%%) 6mLMMq5R+WĢEp5B[[W^EJJ MP L^[S<==oooL:?FLL aR-h"$$$śD"A\\nܸNxnЫW/,XŐH$u#)) X,7TUU8;;# PVVFNNvڅmbʕ)>#AKK ܨd!{z*:k. DFF"<<W\tU$''#$$ :B?3&M+ũSw^nFtt4?w\pYew ŕ+W`ii$/^1j(@(")) <ѣ|2GJxK___. ***#66QQQJ044lg%>s=z:u*CѣG9RӧO֭[SN 8y$֯_ Ӄ?֮] :uꄟ~Ia@Yٳg***\0W#o>dggN=חӷgL:;wc&&&3gkxHNN?³;w$ì=z3UUUֹsge.~444X,f~_x3---֦M6uTv9m߾$ SWWg\xf,\իWyIIIŅD"&5k͕+333S!ٻw/cjjjԔ},&&F.4r󙿿?aёʭ Ԙ![d KOO \^^F$   Çlɒ%Ύijj2uuu֥K'{qrrr؀H$b,88_rclذaklٲerYRz u߳gsvvǐuc^^^ӓ|r֫W/.lժU3vX#w/^,:&&1555fmmv!O|ܹre2VY;v,f?9sw,߿fΜ,--H$b:::w,11Qa祥 7n\eܾ} 2iiiB7[nre(4t<޽f̘:vTTT!swwg۶m7a֦Mʺvʾ;m߿1MMMfΜ}Wսƫ̙3Y6m@ h0zK4ƾe*rJfllTUU{nm۲׮_&oW@i1Y$VB233ၼ<ٰܷy!)11&L@aaa2y]4GByדcR#_b՘3g;iDG|B!."B!!B!5!B!!E!B!-D )B!Bi!jHB!BH QCB!BZRB!BԐ"B!!䍉@ u &LhQ9&LV+֬u*5,Y=j 3fhqA"z]Zo? 
@]W={'|A!A (|-\]WE2,[ ]v444ХK,Zwy5}}Gů UWWcӦM֭`hhooo矯\ wߵb-8˗z[naaa76{݊ ̟?444лwor1i$BII fff YSSUV֭򅅅!::ͮ/!J]W|]WFnn.ΫX|9LMMyivvv6&>>3g֭ܲ[5jOeee?111طo.^jj֬YOOOsF?Att40k,b˖-puuʼn'`oo27l؀: ((>`<jjjUwҥK[̍7"$$#GDhh(9TTT?nr@۷̓T*E\\GO>\x$''G-3,, _|aood@II #Fc֬YؼysB!xy ={jCII ~7ޏ-š5kQZΝ;HOOǶmuU˗//_oѣc.صk+5→O"<<Æ z|L*DFFbʔ)ipǏ#11k׮żyAAAEXX~7.۷CEEFAA2_cܹ믹:9;;#44 :PÇCxxrBZ&z:R)ϑDdd$͡}}} ##eeennnx ~A[[bz®]ǎȑ#all 555t}?.˗/cԨQ000,--xb|=„  H'ٳgraoo aѸqF۶mÀжm[111r:t___=zwξ{1btuu>} =='667oןC(ʓ5@GG+W9r#F@ǎccc|Ǩ[ŋ9r$ڴi XYY)S]ZZ @"`ʔ) ;v1cp-^J,[ fffPWWG6mзo_/r۹q>hiiaaaYz5 )))hu-]ǽ{_P;$9PQQYfAOO'֭[۷o~eeejjj +>|8R)>39s۶mCvx;22=z4L#::'Oٳg!ޕo۶-xXd ֬YiӦgϞ(++ӧqY 8WYYA_|>?fff:u*oݺuرcKڵ HKKy>|0v3ccc>'0c XYY!)) 'N;~~~ٳabb{øy&O͛  4w*,##?… ?#_wӧOs8991g0w\>/Kf}EJJ !5ףdhhsssdff6+\ƍUwPYY 777ӧsڵ+r.-55NNN;bSNExx8rssaeedjEٙmddwѡC=?={MMMn׈#x;wFff&RTn(((h4B^D^iãGP]] }@mYoLjj*TUUyRVVFHH?ΥD" 33ƪȢEx}}}{X [[[ ۷gǎ^e.D Z`cc---HR^#p$''#00555kʂ#RSSaddQFqyTTT0gرcr=c!{->Zu5CQ@ D"GS7RR\\ϟ+lYZZ6ِZ|9  a D׮]y*** {{{$$$(,,t",]Gii)oYYY@jnD-]]]jb1k؜9@0ƂM;v ˖-ɓ'hR,wcRTT~島\~Rbonvk׮aҥ8p@籹ׄLcQSSQ@ dYb`nn.]`Ȑ! \jDFEEܹsxR=022&/~KCC~)жm[CEPPo-lmmӘJ 0Xv->`t_~p7Cr ltQ. 
w.޽ǏcӦMشi²߿x[XXmg5x ޵TWEEEsZ_֠{9o{ndQ QCNÇkb˖-jjj¾}Cx[Ç+++D"ܺu &L\BPatMM m ꫯбcG⧟~Bttܾ5UhJsfXYY!;;wirDUU<<|kWTTѣGx-} &Kkj&MI&pqqADDDՕMMMbmݺeرcE>fff Ehh(rssѽ{w|W\tYf)XPYYfO]o7"555rQ311A^^\oIcZ=zETdŊXd {3g`РA\SN1-oHݱqF}XkkkC*681BJJJ+Wpˋ{LUU6l3ϕxu_h222|}&ݻy>}ѣG֦ ???$$$(|`tqq1n޼1!ѷo_PRR}(:(P]]{ ݻu>CGGGn[7o ¥ɾB1Ҟ?88;;R/B!N1|7066 8p !CpQA,cڵ2e z1c@WWΝógϰcXYYA*"44n݂X,FRR!XpqqᅬiӦ׮]բ#JrJ,Z׮]/qU`ڴi lذ5***ԩScАR-BBB<==1gH$ƍHIIw z‚ P\\ D A^= {{{bx{{CUU)))3 eee`׮]h۶-V\ [[[⣏>BQQ͍KbG\WСC܏沰@dd$qOW^Err2BBB:B?3&M+ũSw^nFtVIDATt4?w\pYew ŕ+W`ii$/^1j(@(")) <~9R߿?mۆGn† sryj=bccT CC}Z?ѣGS>_=z#5}tlݺ8uLLL'Ob:===cڵ`SN駟O={6ݡx9R"˗/ܹs9rwիyFkDEEرcܶ Ν;Jؾ};n޼),8p@MRyq=z􀷷7ڞ9s믿Ƌ/`ood?~?#/;P߹sg }N޼ocGeLUUuܙmٲ ^W+WdL"0 feeŢ˗/|Ȗ,Y옦&SWWg]ta| w/'' 0D"f``_%1Ο?φ ƻƖ-[-?/--PX={0gggy ]1<==yi˗/gzbmmVZ*++eƆihh0D|||عsxJKK&Cp3۷ِ!C/zCϻu&WJCݻlƌcǎLEE2wwwm6|&L`mڴak׮モdzzzl̙wUk͜9iӆ CԖ-[SUUefff,::Z.S?LgfhhՙȐ[_S|8vK,hn߾HdeeB!6#E  -˗cĈصkTTTeǡCPYYPViB!z!M65jb11w\TTTnݺ)\ hÇѽ{w 1g@" 88/_ģG]]]b`ʨuХKƙ3g=.÷~kDxzz6:ϫ9R񰷷0zhܸq vvvx"MMM?9rzL8@n!B!Ռ5 XjiӦwy9}40n\g# C U#G"##C.ٳH|Axx8|||P]]O?...Xf v[wɘ7o:vիWc…PWWǟh._ ___hkkx 㫯¼y/=---ѭ[7|嗰BXXX|9`ڴiعs'v WWV/!oEcF!̚5 b6mBhh(Fٳg#>>}N||c.]ҥK!ׯ_Dze˰rJ|'\?zM6o߾j mۆ!C]v2d.]>}RcBz!f֬YgBGGÆ ?5@?"{ǽ ٳgq]^ɓ'9::1ɓ'siB={ĕ+W$,[Ln&///VJNNFMM Fp/CCC#33_KK8RUUo!j!Ei5RJJJܦ رczLZL1caar󩌍yutt;vK/--ރ^&?nzc 000.]wAB!Bޘ?===Ѯ];044KCQ76dٯ]LMM [KK}n#B!Ԕ{_PPt @1c ..W޽{1uWm^PP6_T*šCJYXXúu9ZLMMDB!4BZƍyׯ_2dRɓ'HIIޗCahh5|p0)nNQQ._[L2UUUr>|hv]! )׫CIII˒MB!)BHz*> <'N@||<ƌ{~T=`gg={+o'OӧѮ];|wo߿?| <5558v=k ;;QQQ8{,>C_k׮fE*bʕXh]ƅVz*RRR0m4hR)$ lmmmD"8::z !"R wtR,\ 5kadBׯ󑛛 SSS㏍>Ul߾]vŶm0|gϞprrjrݕ+Wb͛Cwطo>e…ڵk^;bРA-. TTTc,ZӧOGUUoN )B! 
F y+d!;wk"""biӦ֭G}k׮E#Bކev)B[ömЯ_?jDB!BފObDvv6D!ʨ!Ey+1fH$|'4B!HXlB!/Es!B!!E!B!-D )B!Bi!jHB!BH QCB!BZRB!BԐ"B!!o+..׮]\G@ #GuU^O0:uzgu"BN!Es΅444жm[888 ,, O<ڵ _[Sjj*""".X,F~pR7B!!e]W`tƻB>*<|={Dyy9&M+++ؼy3f̘--- ̛7建PUUmQRSSqS c ׯ_͛ヴ4xzzh[-uVԼmB!Ԑ"O>H$jr[0iȶmPTT?NNNe导 BII QU 7{?|p`ݺuo!F'BIhh!EDD@ ŋ3f tuu|2F===gϞؿ\.\:`ʕ {?͑Z~=lmm ]]]vj΅ ݻ2X5pA\~R''$G{n,YFFFDyyysN< ///B$k׮Xn9H7nkkkirnݺI&]vPSS-[|7oބ/D"ڶm>/^˧hݻaoommmbt҅B!ߌz!fȑ077ǧ~ .\ggga…DHHH/{.*._LL 444֭[1g1sEEEΟ?'Ob̘1 gbbjܹǏo0ŋQVV7obڵ YbTUU/^4؛C};w. q%8psEpp0n߾ ܹ}2B*ݻ޽{C  $$HKKɓQ^^ S|9"̙3v܉_mgdd?zjK`ܹͪ?!OE )BH֭'8}43g΄ ¸իQ\\'O0~x7̓{iQ]'Mkb„ WWWxyyAGG###UQQ3g4𫮮Fpp0ڷo,H$nc ЧOXXX ##m=x1aɒ%ƈ#x/^jdggC__0}t|ᇈ@pp0444<$$$`ȑS[nMÃB,СC M'BMhh!L>Ç_1j(<~<PRROOO֭[j,ݛkDƎ6% n޼ӧOڵùs0}tb˖-3f ڶm+Vp?~|ggϞիW1o<^# @ڶm жm[ ,X?\w|8#Ncm$&&riϞ=CLLLۮ암.E !"17n t邩SsθwN87oܹs `Ν}BZ0EILP(dP(ymmۭ "=<ޫ덏gnfkVYYi^jkkׯvss5俼.#;\f]\\Hce{!8DRA "HY!XV) KJxpcfT*%˕R,)((P*a I&Q*Rqqq;YNu}}'nnބ%ft:d2)I| qexR@iYt:rxTRR" M A b8DRA "HC)p !8D~Cyg⇁IENDB`python-diskcache-5.4.0/docs/_static/custom.css000066400000000000000000000002711416346170000213610ustar00rootroot00000000000000table { font-size: 80%; width: 100%; } #comparison table { display: block; overflow: scroll; } th.head { text-align: center; } div.body { min-width: 240px; } python-diskcache-5.4.0/docs/_static/djangocache-delete.png000066400000000000000000001017261416346170000235400ustar00rootroot00000000000000PNG  IHDRR YsBIT|d pHYsttfx9tEXtSoftwarematplotlib version 3.0.3, http://matplotlib.org/ IDATxy|MwMd##!jb)bHtZNvժRjjPZEL즶!jIr~s{rȣ{{ 0(4;]7)0 &$DR`A L"HI)0 &i޼l6󄇇+<zU^=W!C_~q8o***JJ5j;͝wquVmV*Ybbb?:-Z8|wݏoSjW=+Vqx7Nj׮}v%$$hڰaJ.}ùYfbŊz'ﯟ~IVZ+WDR\\.pK۷WXXۧZt~'UT[vK|WZlڵkTRRoUVjڴ;vhҥ:xo.7;CyJKKSӦMsiٲe̙3կ_RBnZ~vء5k:gddhʕ[jԨ!I5`Ot m߾]fҠA,?a Qе233հaCsrssSr$IYYYjѢmۦjժO>ԁ4k,u>̙3jٲRRRTNGZbyڵK'NtZKrry5jH=>/BZ֭[UZ5IСCxb]V={Q .fnJjjaW;w6$oCѨQ##++ˡo֬Y$cС+W6*WN:ϟw苋3$C$#&&&ߵφQJ#==ݡ3܌;VX{{{ݍ5kۯ^jnڐd|q}1$/vh1l61|,6Ǐp=3$#Fp{w IooSi/NO{~$>4O?mH2h\zաq={~a܅ 
6m6HIIqZ$c֬Y̘1Ðd2e;m6wSmvZm޼٩ĉzݫuKzʗ//I֛K[zz-=_"ޢ¼>>>>~?$mٲEvRNd߭[71B=z)GQٲeo8:t~oP%TlYuY#FPƍ%Y +88ة}UfTTָyf]z5$)''GRu~*W;ͲR/A nү*I{>~/$$)H>}ZW\[TVV ɓ'ox:}$i7w?Z޽=X`ʕZres=ի>#͞=fϞ-IٳÇ+((Hz{f)&&F'O7<$&&M ɷ=ocl%^o޼9'?߱%JիWo8ͰR/M;9~x*W|yk׮}]˯z"=_z`֬Y:_ƍ諯ҙ3g+((գGO:}kWڴi^Qf>CnVqsB?lذνz]b*n=Rpԩ#~:rS{Æ ]vԮ]Y  J֭[wuw=ٳ.^ 믿֩S3ãc룏>R^jn3t=rsssjKonWܤnݺCӦMsxMnnF\c &Iׯ_f:w~=|p]|Y}SYYY{)S&p'I/<<<4l0ݻשw/SӦMh"}ٱcN8QsCnnn3g̙#IYzuUdBYX7eꩧRFF^~evvr`uMɚ0aBAJMM5*Gy>|ynWzK#FPڵOO+VЙ3gTV-m߾VZ魷ѣWDDu!]VM47|sݹ-[(11QUTQ6mTR%eff*55Uz3f8駟Oԩ#5kL͚5Ӄ>?X}QddbccUjUZnʖ-tS˖-շo_jР۷kΝڰaC-',,L-ZЪUTD լY3vIjذe֭[͛7nݺjݺuQ/>C72}tܹS3fК5kԦMyzz*55U+VW_}evӵo>;VsU&MT\9;vL{͛5|EDD[h!777=Z;wT@@$^3}Q/b6{Ӽyڵk^^^FPPѭ[7ѣFLLL[z[ڵjxxxAAA?l 6ؼyγtRm۶Fٲe \rFW_}سg_~x駍`qqqconӨTiFUV{t#+d]h?+0 ٳoaԩS(Um?1sn{sε/!!!1|ѱcG#""1(.{ ĉ5k>>>Qzu㥗^r6ҥKƴiӌFK6<==0e˖ԩSSN9/?O(sڟϙHf&FpER`A L"HI)0 &+Μ9k*,,L^^^wv%9rD1117}\`zKSd=3lzꩧլY3iҤIVBBvءM6ӳ(ɝ.ɀ%t P,t UFFBBB_w'4~x+;vSѣG5e >}$SLLF]]4i$;wN[lQJ$IzGj.}iBBB =~ԨQVwoӖ,Y 4f+==]6lj׮=DIR֭UjU}g7w FʌM6iZ~l6[cRRRTT)U^ݡ=::ߤI=zT'NPz˗_'N8K~3 e'|R5RZZZ222T\9*I:v}ܵK.+y ;vh…w…|ÏhlAAjРAڵCձc@qPٳg5zh9RaaaK.9_x? 36? .;.Da$$$z'4K˗/KǏ0 s]W|yk?600o}:|EDDiӦ~<""Bw$EEEڳg96nh *lٲJNNvoӦMqM>H 2D_~̙3%Izҗ_~IRDahƌP7nlo'l29r޶j*ݻ'Gj:s}GK/mڴ>c=5jhʔ)4iҭ^>Hyyy)$$c<==BTN:Ico[zN>A9}t9} ~TVgz\:HYqqIRPP-%%ET^=u֕?77W۷ow'I:p8'k.[^җY;tzmrwwWppXOOO)SFǎ$effҥK u:o^۱cTZ|NLLT|||Q-+ԤIw)11Q .3cu8K w 4H]vuhۿ:vhn!\],X^{M}?˗/{ŋc'I.]wܵc]uʕ+գGmV3fp իWu ˗/*_$)00P^^^p:G^[X&HmܸQ:uRzgD /ۢ$I͵f͚N?@qpW={m۶ ײe e˖ |dɒj۶K.ڼyC'Gj:s}GK*==]4x`M6ȑ#%IUTQF$v_ӄ  /k׮jӦ֭[O>Do 4H}ڶm_~YzwU\916+r C_/ZH-$u]tIҨQٳ=HI$M2E_}4uTKК5k4l0M8Qj޼Ne:.n80 Sׯwq+Vn~w=Rp;$DR`A L"HI)0 &$DR`A L"HI)0 &$DR`A L"HI)0 &$DR`A L"HI)0 &$DR`A L"HI)0 &$DR`A L"HI)0 &$DR`A L"HI)0 &$DR`A L"HI)0 &$DR`A L"HI)0 &$DR`A L"HI)0 &$D? IDATR`A Lr 8*00P6MIIIݳgbcc@=:yӸ\;jժ[:'{K;]:uJǏWJk͚5KOOWfI&);;[ ڱc6m$OOOW_}Uokɒ%zgdSO9[\:H*##C!!!JNNV7i$;wN[lQJ$IzGK=)S^%I=bbb4rHuU җyyy)$$ kx$u֪Z>3{ے%KAl6ykÆ HѣGu իWϩ/::Z˗/NIIQRTzuqyM41u8q^zM\[R~ BCCK.K*Wl68I:vs'11Q7(. 
.HR>3{ 4H]vuhۿ:vX5pm>HH.]wE1>>>Ws'88X^ť7(.ǻVFF2 i$/_9{}Pʖ-dM6)**:**Jϟמ={mܸo=>HIO\ׯڶm)S\r̙3x@GVnݜ_sƮ]TF ܹS}fvNdw-~Hj\ddVXqqnnn=zF]dpo+ۉ &$DR`A L"HI)0 &$DR`A L"HI)0 &$DR`A L"HI)0 &$DR`A L"HI)0 &$TdAŋtRQ\V=p͚5Zd~޽[.\$,YRիWWƍձcG5o޼j`*Hh̙zw@թSGݻwW@@ PVVRSS'Wʕ5b 0@jpۘ R._={ԩs[lI&)!!AiiiVj`*H3FzW׭[Wu5k֬*\ 5`WsK?|Ν;w+N w 駟jذam:ulKLӏ?xiFÆ 7|7xrJn9Rtz޼y ї_~%J(77W_||M˅ԥKmS峇zH*c)HEDDモ$%''k/V!K 0@/vޭtUXQڵ\$KAjUn]+$eff8p` R~_~N큁JNNzzp9䁼p73T˖-MO`ٴj*2rsseڎ9Ow$)55UgΜQ*UVt 0֬Yzj߾>#+Wh֬YzWTTK/޽{o߾'-QB_ >\7nT$KMl߾~9_~"""c+S˱ʗ/ ʕ+N}W\т T|y+S˱ti_ 8P 6uKۧ3fh֭JLL,BUX R^}U߾a*[f̘z8$o߾ٳu!IRʕU^=.~p7)SD 5lP 6,K+ {nիWpy.Z^~X}QXXݝu4R,'|/rcl6XX RW.:ذbbb(6,k޽[$U\Y=PQ\ d >\iiizwվ}{SKOH&M/R_~&M$0Թsg}7ER( KHM0AjҺuTT){{/I&WllBUXFjٳCST)K۷o2KA[gffr,-[6lԷqFjݺ)XGwQFԤIEGGZjY6mRpp~")\o"""}v 2DYYYZ`,X,KڶmËTp #SjԩEQƏ;դI,YR!!!2dw%+*_|||ԠA\kD2e?ؿg.]rEGGO/ai˖-Zd$i֭jժWw}WJHHо}᜽z… 5tP=JJJ?իWI&|M\ 3g[n]C}ڵ cܹ:s֯_HIR9s(++K3ff{ W~$iӦMO5yd˒=zF_ ti߄ TtiEGG'رc5vXuY 4&LPT(or9M:{V\ݻ;|K֣G>-\P߿[}Ն tȑ["R*_3hժU8q&N֭6oެ+Uj޼$o߾ںu9 >А!CTT)رCW\QzTTTRRRm)))Zeђ~DyCCC5{l'OJʖ-+fŠՄ 4i$}WW_}U'N$eddP[:##qol\ω'Er5\ fK5D WfOL25i$_ԅ $I^^^Nz{{%… 뿞D[Yf9H%''^˗/oU˖-u)WÆ _zw|߿ks+~Z>>>~.^h$_ϠA6ؿ:vhna\{ɴo>u] 믚9s"o$11Qkv}:RRR]w /_:44q'88X?upMԘ1cTzu޽[&MroѢ6nheB_tUIҕ+WTF (QBc._[***{w̓kX R7oV޽}Q*TǭLQ(UVUJJ>|VZS֭'}ܹsp)^.]tU}K.i֬YjРn.KHyxx8\{G) eȑ׿M_T2el2_s=g7Pƍ+==]SLGנAuUG։'tkJKK?[7R 6… ;wf͚+SJf?nݺJLLСCuԩN>>>6l>C75̙3GCܹs5dhٲej֬-_fxĨm۶z%I۶m'O_/Bo$::Z˗/&M~8oooMJ*[y0/jT(pO֭[5|+VYfjР׿Z*\ E ,NMMUN*I>|ón`)Hm۶MM43gݕ7K.1c"X Rʔ)c|r= $=ڿ X Rڳg$)##C[lG{vv,M.Ү}:tдitEmܸQ^^^ԩ۶m, R8qN|X{Vr+___+WN}ѡCFp)&M̙3zGUzu{9stR_^ժU+bX RFRRRTfM;wUV5jKKE+tiڵk5d%I5jЋ/5kX\ #K,+S˱j׮_Ξ=S)XG*>>^zջwoUZU?k:}I*,-[j9rz-(͝;W-ZT mݺRRRtqs*W+{Ο?ujƌ5h@ 4 Dt*YRSSeيpyv틍Պ+(,_]{ճ>ѣt&"##%Iwּy wU+KƎ=R9Ըq㊨ (>,#"KA^STTTkV||)X R .c=V`? 
X\ uaUR:tr,)__Ty{{[\ ռys͜9SGu;r>Chr,m>aEGG+22R}?wΝe&LP$Uui:uC_fz Wc)HIRZvZ:uJ$w} \"A*OPP =T3g$gf=z \ իW/l6=ST^nxf#H R$OOOp/1*W|p/@^Y޵ouAeee0 ~ͦm۶Y\ jȑVjXTu˲&OGyDK._Q.=RϟWnQ)T-cǎKAjڴiZjYT5K4`5Je˖URTti.pرcoB W =R1cڶmŋ͍g7XJ?/_V۶m Q)PvnݺK'ԠAԷo_UTINLnt N*=rK'Tj$I[n̙3 wU+Kkf+ZXƍWDe@v{` o*;;$gϞ՛oi8pEԼyAi͚5D"''G}J*iW`۷k޼yJHHЌ35j(""B2 CYYYJMMΝ;5kjۭ֭ZVfSnԭ[7hڰa~'>}ZTL=zWԡCթSwMWvmծ](kb]? IwWqqq sޤI RGU-KgϞ-))P ѣGoUj r (v {1-ZHY,IҺul2EGGB(f bcc0WEӧOWdd:v&MhҤIU+  R5a]7oӧꫯj˖-,ZX(zxx_/\@W)( <"u-_\ҩS>W\@Q _~t*V(___>@iQ +11Q!!!UkzFĉ6l! @R _~j@AjΜ9Zb ԭ[7UTI [oeg۶m*WѣG+33X9998qᡈ_.@1Vg7nS'T">E TXnv{G'O~';vgzՠA͞=[ǎӛo]xWbbƎujٲeԩ6nܨ~= hԨQrwwWڵj_Q8{Ν;+11Q]}mҤIӦM#IQu֩]v;w>̙35~xIR߾} &h۶mEsa8hBk֬qZ+WԩSU\9;wNvٳZ~{9[.{NVDhȐ!~8p&MGjժEw=#u=SN Qa(55UՓ|||4|peggK٣K.Yfv*,,LIII$.I ty @#R[֞={ [K.G5m4mڴIӧ+--M``lb{v~믿^t?~ܮx*PZp:v3fh; .#: f[Ӆ xbkʒ$9n.IYYY엿z,X[[5lP?bbb ///}Ŵ?IғO>i޻woIm}rrrζm?޵]yk1bkzj+PhDb)ZnYPPۧۮ= @t)ծ][lS @^_' vnOԲe {~zbeWhhʗ/]vgϞ>.\ݻƒE IDAT´qF={n;vض( 4A]?/_^>|}}+V?yh[[=%KrrrtREDD9PH~ZFk^{G<ҥKjݺ6mڤئM:U-ZP֭5d;vLfRvԡC"""N:Z|>=jʕbQVV\]]>'X["oŋK_Ujرvnݪ'wܡ={jڴi;eggkZbN:&MڷoK۷Oڻw5jt (=?%8+%8N=B(H9rDTzu7߿,#H usRn|n4z/)2x6mh(L@(JPn)HY,®J[ R}M}/o{rRص@pKA_~ݻwa%M!"H!gnGPb0"R` R` R` R` R` R` R` R` R` R` R` R` R` R` R` R` R` R` R` R` R` R` R` R` R` R` R` R` R` R` R` R` PrWuv S:P0"R` R` R` R` Je:u,BCCm۶MTʕ5zheff:ĉ$EDDhEQ>bcǎ)..N^^^vޭ~Xϟٳ5h -YD}ٳg멧ܹsN:i֭Eq.?^rssu m&M6m$IR54x`[Nڵ$ܹS|fΜKPM0A۶m+ڋP/R3gögjӧ-DIVZekKLL bksww}v=z^b\5J RƍٳG.]Rf]]]$[[RRBBB$K|X>>>iyS @^$W[__l"55Uyyy=zj֬iڱcUfMk Uk./\ݻw+,,d={֮;l]%>H꣏>rjԨU>Hb {=effMѣrssd[[NN.]UZH@Ru]W}(6uThB[֐!Ct1͚5KڵSl"""N:Z|>wy_bďHhڴ6l =sZdDƎ{OGŋfj (NJԵlڴ푑ꫯnfΜ3greJ25" R` R` R` R` R` R` R` R` R` nE՝]OPDC)0DC)0DC)0DC)0DC)0DC)0DC)0DC)0DC)0DC)0DC)0DC)0DC)0DC)0DC)0DC)0DC)0DC)0DC)0DC)0DC)0DC)0DC)0TPt֓.pC)0DC)0DC)0DC)0DC)0T9YtqvNˆ"H!"H!"H!"HR9R5U={*99١աCy{{RJzuq~yyy1cj֬)www5iD~Q\b ( ӧOW_}h5iDϟM믿Vhh$رcjժ|}}Lڳgv)WWW1_z%}ݻ,x g]*bTqiʕvAW^jܸx XBsoUj$Ij۶-[!CHRSS5k,=쳚?$iРAjݺ^xEGGť@qQ*h.DIRݺuըQ#߿K.%IQQQ ѪUlm.^#F,cǎijw"H]jKQt5k̡oxxl 8*S&>>^z$Iiii@PNNܔ[š$=wzzkP ?gUկ_?IRVV$͡w= 
,Plll@UoΝ;WE!<<<$I999dgg~2bEGG۵uUקOIҤI6mh̘1̙3ոqc 0ֿJ*;vfΜ/իea@W*ݻ%I~>SAjժڼyƍ_|Qܹf͚Jo!???-^X˖-Sݺub _bTM6tF?ar)&&F111 @iT*DC)0DCb ^zźZ=ήe#R` R` R` R` R` R` R` R` R` R` R` R` R` R` R` >Kpp+&H!"H!"H!"H!>G NaD A A A A A A A A A A A wv)gWpb)0DC)0DC)0DC)0H@IPRe ?p>0DC)0DC)0DC)0DC)0DCĉ$EDDh. ={z)͝;W...ԩn8QygP\ܹS|fΜKPM0A۶msrkHLL bksww}v=zԉp&5$%%)$$D>>>vᒤݻw;,S!--Mm5MOO~I [_yag_pv ~={%88pKppz>gPo .&oJqſ%T999?An~- ,PllUu޽p ,U9a8!4@ιI_KGiӦ4;;۶ZFhg*99Y7j@޽V^:u8(}K=zT[ RT4IRPP5 P@@C{ @IuQF]B} >%ǭDck SrrΞ=k׾cveAz\-Y֖K*""BUVubu}h(==]uua;.L2e(v,XB}C?KC|P^^^.@!JbZ.JC)0DC)0DJ5jכ6mbѦMV@YCnѲedXdXuVVUUVbQ.]P!rE ȑ#{^z~zڎsoUj$Ij۶-[!CeTaiii={~ichԨQOյkע8Sz'uI_v%&&w4g5jH5tP:uʮj믿*USmڴѾ}wgl٢hUVMnnnZ{9eeeۿy{{+55Uݻw5~xCI~gmٲEs-ITnw)%%K.%IQQQ ѪU8zt钞x cz&P@5jP֮]3g8)IC /-[jܹ0`վ}{]xW^ɓE3gTZԮ]Kk7oڷoyo߾}sssվ}{yz7պuk͚5KK,w@~Itw*''G}<==%T~~p%%%Ɗȭ999P0({VLLxnZAAAvnݪjӦ:t蠄[Ǐ׌3Թsg}駲X,^zIqqq7gvX2DuѤI/;;;[zɓ%IÆ SӦM;hz_̙3:qℲW_^t>L[l;öOb5.O@*##C999K}?bW_M6~[l뇒)SYYYZfY뫶mĉ{W޶U~6lؠ .hԨQ%Icǎz QΝӉ'ԢE Y֫6{ذavx:tﯪU'>#k:}zꥤ$%''kرڵk$٦jA)y?Op}ݴiSEDDhZt>kjСPt ňP+WU=̙3 q%IG$խ[<~~~7_~+O>٫3gؽvww]~nVHHʗ/[SrgٱcG͛7O/6m*ISN &[~?Jv}~q_K}իyIƍ͛7Ea(0PHz~SǎUbE>yyy P||U@s+rssնm[eddhĉ_+//ϮK >۔oȑ0`{*,,L˫wIҗ?JiiiT"T$k֭JIIour 뇒 G}TC__WSvmmذA-[oW.VZlǏHў={˗-.q媂K͛7ްa<<<ԲeKIM Ν;Vd97Tn]ی~Aiii߿QB3R@!… 5eʔk~DϞ=.]ӧOK<B 7oVϜ9snXGӕYV͝;z~۶mOך5ktQ[_|dEGG;T7Zi„ txV%#R@!ׯunZCմiӴ{nkN*TPJJ4w\YNӦMS.]ԩS'%%%iڵ뮻{v?~RSS?g';rznݺrڷo-Z&M89i$%$$M63f2335sL5nX p3z̘1VXX.^+WjΝZ|j(9R@[h^-^X&MRUF n 믿.www-ZH7nTDD֭[Ν;_*TЧ~ѣGkڴirwwף>#G/<@͟?_ ѣK/-,IUV͛5n8ruuUΝ5k,{ќ9sr)<<\_|r(Y,+ngA A A A A Pyg(:VUΝٳg#PFX,G^^^X,. bP&XV+##CTB+<]xQTR%hH@q9edd0 IDATdSruuuvI(c.\4eddK. 
n2ٳDӸ*00P>@IE2"''G*T D\]]UB8(VgP,X,:PTPXd@i@C)0D ˖-bÇ] sM)ϭLa@"H!Z`5j$777gӧرC:uԤI͝;׶/.][z%I{C=$///U^]+Wt8ӧ5vXUZUnnnSO<[Çb7oZjSڵѣGeZ׿UUTyedd܆wp#<#(LXEEEi:p.\oF_}*T IZ~t@3F+Wf3v\uQZҌ3#GK/z)=cZh͛f͚ϫuJMMСCUZ5m۶M111JKKӜ9sjׅ 4j(eddhƌٳz!mڴI'N5o<?^?H"HJǏkڴij׮֮]r.O_F+VhСCݻwbŊcX _dggO>$[AAAzgW^m۪~Z|L"I={~'%%%nݺC*((H3g?UΕJM,ڵK˗]g||.\(77N}RgÆ pƎk Q4x`>$%%%رcB$Y,4}ŊU^=yyygϞzbŊ:t萭-!!AǏ;oڵz;C;w־}^ZrwwWhh>k@)>@ҫW/5h@o>3T/^zHӧOW||ƏOZR^^u릭[jȐ!jР٣oJNNիαe}'zg%IӦMS.]4a-X@#FЩS4c =3cwҥ5fU\Yך5k4f[Ci\ۧ%Kh߾}eX$Iui 2DWjjuy;j(W_Ç5g9R׿l}{=O۷uy-\PJJJ-$n:=jذM'OjRm3 (UõxbIҐ!CTF =6m&N(Iz'jժV\ 6h͊+44TÆ ӶmԢE [?ڂ_]ɺ;$I6m>5j(77WCU``vޭ+ڎiZmߏ1B?uz'uV=oڱc5kfkON[[o̙3UffFAiɒ%z)..>qD}ںu|}}%I[VvTz}ReРA]\\ԬY3YV 8^bEիWO$%$$A_N8az衇$I7n;?lwDD$+ϓYcǎ QlG<<i(""iׯ-DIR۶mհaC:bD PTVu]w9?sLOO9$jժWm?u$駟~tyz222>gΜ$?~\gϞᱮU]m)))d #IUn]>ճ=(RRڤMSƍ5{s@ntճgOm۶M/>>ڻwop$ٞs=g)@׳gOö,;wPӴiSլYSsӧ ykΜ9v˕+ݻO?ծ]e: ־}{(..N/^t؞Tz``´|r4C3V?9$cD P=Zj 7e˖Տ?UV?ʕ+ k׮ ӀQ.jJ3fŋuv8^\\֭[֭[ۖmOKKSBBn갠h…zմiS=/>L-[%]^sΊ3< ͛7O5Rfff'( RIױ]Ӕ+WNW7裏Zji̘1 )so^7nTllf͚<ծ][YrF~[VUڵڵkdw`رC'OV||Ξ=`uQƵ[AAAz74sL(88X< `סC%$$_VLLj׮K?֦Mnb5HKpתUɕ"ҀgA A A A A A Xl,>lk{:&Pyg(IϜ]MYnK8#RRܹs帮ruu- )@0eY,ݻ)IգGUTIj֬>c۷O=<<lRzUV{裏J~7iF.][d<<##CԳgO:qN8'O}JIIQjj$~[$=S7P1P\LSBǏWڿN:>g tYF?P ,+PVZ$I*TPTTuV^])))syyyW^ի.\{LSNULL͋kLZzxb9l?~N:믿Ν;<'O{ꪆ jŋ@qňT{ƍkU~wm߾]ǎ_I҄ {C3fmի￿9ڵkʕ+e˖~͟?_;wwQ (b)@ְaCڵKZlN<s=zWlqF5JoN 6LAAA8pu1tPkT*U4zh˷NbZVg:$ EH!wAQ_`j!.ƑK*fEL9-h:hMʖ],+kiBVqT^F r-bFT˸ s/j| {s>sfg|"HR!x "HR!x "HR!Ȑ`pbE׸pn-] )-]at і.B9G2W hs|||ZuZ]v)22R2ZfM6pႬVBCC뫮]jĈڶm[c:tH&IqqqvmUll:v(ѨH}Ǯ;wիW/(((H3gTmmm1~gd2Os=רݙ3gdXN:)55U555}ᇊt颇~Xk h;RVH 2LȐfӂ Խ{feddhJKKSTTΝ;oϾ};C7o$iݺu4i5w\ӄ $I٪ԩSյkWݻWVolV5ydѣںu-ZVOJJŋZvuLWEWJJtiZJ111:x@[DϗΝ;իW/I҃>6?ט1c[o]8w֘1c 6 ={V3fPTT8Nϙ%I'OVHH͛'Oj>}N8&IK,iTӐ!C;WVVwq'NhZp͛j7n8 2Dv@hv󕜜:nV%&&67 @VIIߎ}v%&&jԨQq[om6UUUgu Q~q:***4|p9N>^y_7xdXp8ڵk)CAM/~0BS횚wȑߥK*55UՊQFF[2 裏tkU\\kl$O ir"/z5qDKطo_ױ,ө>}(,,2Zv)11Q6mɓ']ۋlJ Q}}}7prrr{W{uKHHPǎxbչk3p7O8N\ҭdRLL}]7nڵk'ڨlt հZStteٴj*7mݦ8EDDK.ڿ֯_iӦ5O}F$}0`FHM0A;w֏?߿ffϞ2FmذbFCjӧJKKСC]٬ jܹ*--Urr:vǏkƍ|nF %''`0 bإ2 Z|5ߑwKp?^Ro߾. 
J.Ae^qߣGĉz&Izᇕy hN:%I pm7k=ŢXIe0wΜ9zJAAAQHH233p8l޽{O駟bo߾U=4i$UVVSO=`[nׁٳGw}:u:(66VwnT׮])___fYope77e꥗^Ҍ3ݻ76.USSXiʔ)ի;͝;WZb[>@UUUz'UWW+Wjȑ***R%I۶mӱcǔ=zz뭷tapOh6mn6UVVj׮]*..СC%I_ KF;w***JTTTL&eddfiזt:[?رc}6z_#7讻Rvvz!I=j_uӺu$I .Ԓ%KtA͝;W˖-RG~~~*))Q``$i޽6lfΜW^yET[[+???>=#ڱc%5 Gի<'ө~o߾uZ+$$Dx@yyy:rz%I*..e_uwE0Цegg+::Z;wVEE5zhvرí}rr+DIRTT /µUWW y睒6m/ @{dmRII&LJWmϟרQc9v+99$[oUbb] @hJJJTXX(X4U0}駮V>FϞ=yҥz3F'Ntݩ)))$=cgϪ^M֯_?6R6p(>>^O?t<>fJJ;͙3GnX(::Z7nTAA-[L())vٲenMnݺ[nqalU]]ݨӧOLLLԦMtI& p#h̙-[hرX,UTT׫T7t}HHFS^+VP׮]]SFbbbtR]pA*((ƭRϞ=CiРAח_~}_$yyyiڵJJJRxxRSS2m߾]FQ[n$YV)::ZlZjUXX/]Mh;RIWY꥗^Rvv>FjSNn'N(///XBNRTTV^o?kt*!!A[MOOWAArrrp8_]SNu_|QWVuuzaÆiʔ)v~5k,͟?_={jUyy9A ||w+,h X#"HR!x "H 0 lt:ee0Z*)h#eU^^. l*//nKW/6fL555$oooyyyqg8) :tP``[2r)hCNt9]pi~ o^FQ;v$CC)A E<IENDB`python-diskcache-5.4.0/docs/_static/djangocache-get.png000066400000000000000000000724151416346170000230570ustar00rootroot00000000000000PNG  IHDRGe@sBIT|d pHYsttfx9tEXtSoftwarematplotlib version 3.0.3, http://matplotlib.org/ IDATx{uifqЄHɡqIJJ͏(E&-CEZaKɡ()d CVNc|~6}9v~_u}h^}pI$I+H$IR~`8$I$ G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I[P(D=TR*Udi;v,PcIM~ Gt2C!CǟyXINN>_(3x~tl QF RSS\wwHRS( $ <8KۺuxW\ UTUV UZh_ mx1bo&~)ժU?0K;d8$?,mɼTR%r .9眓 .,czz:W^y%~!3fL%IR?~<'..rq 7iӦ.%={6W]ue˖%&&jժqsȘ%rׯgYi3?GzGB!֮]/̹Kll,UTaСa&OLF(Vʕ;d޽/G$$$PHʗ/O׮]/7xP(=sST)*T8p/קTR-Z*Uо}{>cp f۳gO>$+Vŋs0qĨd2d .M6.]P(ĺu"RSSׯguqqq.]FG355;33<ʔ)CvXhQeL2FQhQJ.M.]ظqcdlߗ9sd;yW ~}hz%Hs$I駟f*Uݻυ^xĥgIII 2ҥKsWS\9-[3<̙3?>%JJ* <^x;r_/>i۶-Z桇.]|:pE? d9ϬYر#mۖի?GQ~ҡC0aÇP>}:;w=z0qDj׮͍7H\\6mb޼y̚5.,wI˖-III~|:tٳgӵkW>s?Fk<4k֌om۶QH/^W\AZZ͛7cǎٳ+W2dyyЪU+Ҹ+ر#۶mcڴi4k֌SrUWE?b~mڵkG-X`&Mbҥ,YJ,;v,ׯϲxϝh%,I6_uPBeˆHC]t _O0 ;v7f̘0W\9\rL}Qhc֮]ݻw޽{0\r8555Ҿcǎp2eE -[6rH߾}5k )R$wE%K )S&ggbŊիwLԫW01cFTUW]˖- Ν;áP(ܠAo۶,Yvڱgnvj֬鋉Z*ksN8svٓV\tW_}5K-[={6գN:Onp Wf2ed !C۷/{.׿([,=۷ogܸq4lؐx 񱱱c ֭[GvڵkT駟?۷/-Z2bŊ<lٲ&/eٳ'l0W~euRRRh֬YT_ʕIHH pL<ɓ'GwnۏaÆQm+VAQ}A[ϟ?KvcիWjժKM6F̘1;vPT)222<;UD ڶmˌ3HLLk墋.q-ZΜ9s")Rn6  Ӌ-"###˶?_jԨaO+z˥jμ;v'"HK#IF?˗?l9x IIIG=ݻOpt29Z_f#޽j޽;=o~;ӝ… GY4iO=&L<KNxgg}8>2uѢEGLpzivl?k. 
չODz%\V'I٨D|w?\{||g۶mR oaߓڷoOj̙3{gϞcp2~Γ[J}#IFժUcРAwy\wuqylٲ,\z 6rYgqUWQjUv3g͚5c֬YYYh[ypyѶmܾ\QLL5\C&MKUP 60|oξ}\s %J^ ==}Rp,c6nٽnݺ$$$k.ylB~8SryꫯxGyi֬˗gӦMZE1qDVzL+R'OUVtڕQFѤIǪU#/-\0?+hӦ M6%11Ea-Z7|͛OhSL^z)'Ocǎ\uUQren:On+p$Il~#mFҥ96l]tɖk%J0gF̈́ x뭷طo˗笳/?s6lؐ%K0l0}]>N9Wc=elݺuYt)=cƌ@TPzꑔl[Y~=oO?4Er^Ip^-IQv|$&&F$IIR6ۺukme]FBBiiif$&&'eJ_#If7Yfl\jUz! `0$)Α$I$3G$I$I$ 0I$I ܹ9s搐@LLL^#I$߿ 6ТE J,y3g:t2$I$̴ih߾qg8 !!˯^zW#I$Y:D~O?^2U^Zjq5$I~ $I$ Ñ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$I(H$I:>+%u Q*mܐ%#I$Ip$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$Ip'xP(Dڵ>5kFѢE9ׯw~ @ŊqƼQ$I| G :bŊE-YK/={sq뭷2zh:w5Goڵk'EI$I7N;G2e /BT߮]x#~ =ŋ7ߌM2 ҫWH[ll,r gÆ 9{!$I|222۷/z+uԉ_|9aÆYڋ)Bbb")))jԨ%D4jiy$Iߧ|nȑ_>7oB Q}*T`ܹYiMX֭[Y/@$II!_۷裏#pꩧv޽{ꋍg=ҸpFARRq/I$䑯?Lҥ۷?mKg=Ҹpݚ5kС/B$II!߆ѣG /dYo>Yn%J,\^s7obŊ*T`ƍdKʕ\r'|=$I|!ƍ9tjժ zjVc=Fڵ)T/rXd DV^ͮ]]`A_$IS GkfԩQ?j3`ԩr-se1n8Fٽ{wp:u"##ѣGGϘ1chܸ1 z$I|lٲ}']G?{'hڴ)-ZW^ҪU+Zn׸qc:w^:*֭W^$Io;Gǣ~|q=0zhnL5^__~;мy<\$IR~ .dSvmVXAZI$NlqC^ĝ#I$I p$I$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$IP^ I$ej7M^Ks$I$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$I|>s:w̙gIѢE)[,͛7gƌQcWZE֭)^8Kn`֭Q:O?MժUnݺL817.G$IR>W( 8ݻSbE[oEv5jz 55͛СCٽ{7< ˗/g…)R$r·zaÆѳgO?|ON׮] Bt%.U$IR> .XeddРA_|@>};v,_|gq|_~yqFVJ^xôhтkײn: 
,x\|Ԯ]+VPVlRIߧvu Qϼ.!J y]BJ7u ?Ϸ`$$$sH[oW_ F]v5j7ߌM>ti B~2ܹI$IRG?#۶m믿wK/~Ӱaè5jDJJJsJJ Ŋf͚Q2%I$~g2ߟQFP@:vYyf*Tu\ HKKcİyfʗ/O(iӦGmf͚(I$INGw}7:ubӦMdddp @LLLq1111<ڸ1bIIIE$IR997HVh۶- , ..Go>Șcw$}sY֬YC$I$G>RNݻ7W,\^s7otґE*T࣏>"gYZylŊ:or(W\v]$I|&oK~*Uĩŋ-\DêU[`A_$IW G}T[zz:qqq{\{;l}?CV^e\)\0#FaFIJhڴi^$I..ݻ7vyTT-[0~x }Y/A;ݰa(UFbرuY7]I$Igm8ҥ ]t9jbٿ:@ 8-O$IoL}H$IrH$I0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$ Ѿ}ؿvN$IrU=099ӧrJ @ѢEY&M6C\|U$I$ G5{ IDATuQtiׯ_ORرk2n8^z%*WLݻ7 Ω$I@+U^н{w?P~Ϙ}:SNDt֍CRbl)T$IrRVPW_}p8֭[8S B$I8e BPxq$IN /^L֭)Z(eʔaΜ9l۶ۓt I$Iq'|Bfꫯ9tPlٲ5*p$IAQfMV\СC/,Xd I$IѢE馛9sF*Ub˖-A$I\(.\8R_ڸq#ŋ2$I$@I&L2}?#cƌEA$I\(%%%xbڴiû ҥKF غu+3G$I~!CdS$I=s$I$I?LbbիGRRR)$I$)W GSL+ /3f]5I$IR ў={֭H$II/P8KX|yv"I$Iy&P8ӟć~3}guVҢ~$I$) t`ɒ%5222L#I$I9.nuP(j$I<( 2$ʐ$I6s$Iq'|ݻw$v'<$I$)W80a Ӈn|@^838qbb%I$)3G˖-c„ <39j׮MժU)Up;vvZVXAzz:u_[n9u $IqP(Dn֭)))L6駟}vʔ)9Àh߾=ϑ%I$);nuգ^zY$I$w$I$ G$I$I$ 0I$I`8$I$p$I$I@672qD^|ERSS --:E;UŊ3Wj*ZnM)]47p[nw!~iVJll,ueĉ'T$Iߖ~@8s?S|tܙue^~eׯϧ~JڵHMMy3tPv3,wq/2z+-Zs,X I$I'@׿r72tP֭{epǢiӦYYgEZXjUꫯ#.5joFڦONzz:}􉴅B!nvRSS? 
)I$!Н 6дi#+V]v"p8w}GZ}4l0jlF9sfsJJ Ŋf͚Q25kvĹ֬Ys"I$) ʕ+dž nalܸ{ ͛7PB*T --͛)_Sx6nHll,saΝ ڶmիyw8sWTSO=ŋGp,;%&&gϞ,;,X /I$+p8)\wu̟?ɓ'svܵ^;3ec?իWgyީ}.\#FD0#GRJGuO$Io_g֯_ϫ7|Î;B!O~߿?o6m۶%---L_= b\r%u]޽ÇSNnO?ロÇϴiӘ;w.Ǐ$I\p4qDw)Y$Qc~uZd 3f`ƌQ(!!9spRHڴió>;ݰa(UFbرuY7]P$I~;r90ejԨ]5|ckժٳu\8p  P$Iߢ@m۶n-ۃ$I޽X3c0ڙJ s9J*F SCWL+)֦6mR*VT+K$9ý3 1su}?m.=uδ GueէI$I G#G7>UH$IRX=G5j'[n!&&%Jd{[ o="%I$t;p4fzATT˗?$I$)78p4d Lb0$I=G[nn3I$IN*5jԈ Z$I$)lN*;Yf1|p6nxj$I3\+0`%w$Irz Í7H 8UH$IR؜T8?~)*C$I뤖I$If^unv@q_$I$A r-ϟ G$Ir G+V Y^K$IRnw\t_@$$$PtU$I$Q@&MGZ$I$)lQ`t!I0lpE@2-I$I`8 I$I  G;v$"">={$ItƝPriڴ)*U:յH$IR؜P8;СéE$I2H$I#I$I G$I=GI$I +g$I$ Ñ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$I w$J+^2%dSL:f͚w,}yOf=z4JC曧t$I$`9iuIIIShQϟOڵoȐ!رߔ*U :uЬY3ƏO.]HKKcĈ<tܙFѷo_ڵkGDDę9sel/w$IʩrQdd$E=jI&ѦMP0hڴ)*U⭷ {۷/{Yv-_|ũ=I$IF9:iii[K/4۶:u#zPjln_aYn]{/_~K$I!r}8JOO,$6mĞ={$==?@ g̘1%I$$>ڵ 8﷢B}"##Ch׮]˗Ӷm]$IRΑQtt4{ɶmYDGGSILL$11$I32K.;Tzz:E %%%/ (ViV$IRNQIHH`ٶ͛75k^׬Y;wx,Ν.I$ܔ7Ȕ)SXfMO>aҥYȗ/cƌ A^|E/W\qF$Is{ylz|ڵkѣ &55&MЫW/oSO=E5ԩSX%JwC=Diݺ5#Ft'|xƍX"o:t8%I$)ghʕԯZjL>À0`IV&I$lrVs$I$I'p$I$I$I$ 0I$I`8$I$p$I$IH$IÑ$I\* IDAT$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$Iy]$Igq׽i$)pH$I0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ .@:pMp IgApWp)pH$I0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$Iwa $qH$I0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$"0> w$I1sH$I0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 8ў={߿?Ŋ#::uG,I$Iav΅dyn6FEDDZbΜ9.M$IR wgҼyx7yꩧӧwqիW_~|aP$IRS3Go6t%w_|5kX$Ip:т TqqqYԩ7|$I$Բtl[giX|) w w p w ,ٷ/%deѢp^輮si4x]p]|Ϟ='9vEddd3fRRRm۶Np}p;T u}4^:>c5kPV GчMwm=wڵҶm6.]J5#Y|9m۶eTP!H:ug֬YCFNhs*%%%===bŊ$&&fkO]:'UPjժ I׵t=Nds 5kdҥl۶-KܹsC%I$ΩptM7K/j۳g*u֥dɒaN$IR8S֭Kv0`֭B k\W^y%I$I A 
wg5\î]x7xwIHH_檫 wi:иqcbbb]SZ:x];`0"$I$)Ω{$I$$I$ Ñ$I$#I$I GRPLC?S~ij$I:C?@ @ `Μ9ٶAJ,I M6aPҙr@(*UD|rn&)Pg̙=ŋiٲ%)RoןSy~Z*/^|;vS)vNXXEEE1qDׯ}֬Y]:~Æ ٵk?H:e˲{n̙رc?ƍˉo߾ꫯҼys>6l:ڵkiذ! bȐ!l߾~ 2o|87tz~`,Zӧ u GajՊT{9e2qD.6lpZϓ'QQQu IꫯK/sΜwy<3{̙3-[SreT]veYAL%(PM4aѢEَw{fϞMv(U,Yx]ve799Xh۶-$$$ЧO222NtWb fϞ_ @\{|,[,>i$ڴi FM6RJ[g$es"_|[7| USp$F2ej6m[n @׮]۷/cԨQtԉ &ТE c裏rESOQ\97o~LkSSSٹs'{/GE=;#[ߌ ZhyO?MF1b/ #駟8سg(P 洴4֭[Iԩ NcŒD={d~]\V':0`vEtt4&LQF+V,K9s/3a„,JM4e˖ҡC֯_iݺ5|@Gy!CaÆeK.TP~իWgiݻyG֭jW^{=E:lݺ 6{n>3qiӦ SNe,X0$%%e;~RR6mbϞ=~FIg}F&MBfϞrgѾ}{vŔ)S_2eaԥRP!5kƆ B\r |ݻ=z@޽Cю;ذaW\q`?u֭[ 4>$OӦMIHHdɒr-Rxq^l7̂ Xt){fe<\9xoJ:}Nu]V-֭˰axWYr%ӦMk׮˗k:rH 4mڔ'sN222馛[l[n%11YnVbŊƉ?j=Wv/֭["!!!K[|||$ /@Jț7/>+W&O?[=z4=jB <׏X?8 PwGw*k8p/7]w@DD> fbɒ%gt GtЁ{_~… g듙Ibb"&L81~RNDFF͚5cӦMߟ*UCZZdfffqcJ:N:WݻөS';O͚5yWO-;P)R%ut*kŋ3g-[/BŊ)Z(ŊOH:믿]_}ʗ/LzҥKfʕ+j_~Qgt.\ҥKyײ<ЧI .?hի*!!!,Pͣf͚gVIh*VZ?Nrr*UIGرc4hۓlϖ-[ѣ >#GKҮ]p*u[׏ dW3GQyGިQ#vСCoh޼9cٲe2j(n:t(mڴUV,XiӦ?cTRӧO҈cҤIC$٪Uh߾=^{-EeѢE\xٞBJ&Mի۷o穧Ft)Lg 鷎իwf͚۷'2o<^{,OUa8N_|K.q7o^ʔ)CǎL<(^|EfΜIݺu1c[>|@Ϟ=:t(QQQ\tޝ.tGRR?<6mxٓGy$#J,ɬYxy衇ȟ??[fĈo$ s]_|Ō9 &'Oԩ'|]CH$I9{$I$ Ñ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I o $yfoN0 w9:bcc'o^/*IZ0dժUڵA~222(PJ"$I:a*Iܯʮ](TIII~s3* ֭[_ wIt¼Hrm۶h0HLL(IHr}7o^)l~۷/ܥHI1IR. ɓ^ya r=7n~ J:$I$ Ñ$I$#IR7~x+W w)6٠r oԗ$|I$I#I$I G\h̘1TVH+ϖ-[;w.Z">>.BFڞLll,WM6Rxq^x.\ȕW^ILL Kfĉزe {dɒDFFRB Ffffʕ+ <Ӽ +W мys֬YC0O%J ::뮻M6wNt$s$IU DJJ M6{eɒ%;>|GѦMիEeL2^zW_MÆ >|8&L{#pmq 7/rwpSlYvIFHKKk׮*U?ȑ#>aK=شiÇ}\y|ߟ˗3zh_3J Gc :͛3m49J*tޝ7xN:A׮]IJJop¡cY{n:vȀСŊ㮻o7|3͚5J*k 4gy~ PbEvJbxꩧHɒ%Ccl2 *fCe׮]̟?ys„ ;NJeu\?f޽;:u* ,`Ŋ;K0?sΡ .Lʕ}ʕ+Spa~P[jj* 4 >> 6>6mJFF׿Ӯ]P0[.;v {%--IqHkZ 8T?~ʕ+O?Pz3**,m DقTBؼyseкu벼.UTYfm?t,Ig8$"""ey4k֌~oJNXp$I5J. 
%K(W\}޽XMP|yPP|yo~Zǐ$9s$I56mJy̪+lݺ֭[PV-ʖ-ȑ#=Tƴoߞ/ӧg۶eƒ$~Ir @JJ -[keɒ%3ڵkӱcGرckY&:u"))E6̜}ӦMKرc .fʕ8%cIN?Ñ$} 4yx)B.]2dHwhт3gˆ#̤|s= 0k, Bjj*:qqqTT,O$|w{JRv҇ރ#i~J:xϑ$I$a8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH\iϟJNNL2}I 7Ñ$I$y]$lp)-]$IG̑$I$a8$p "tR:vHBHHHG% f:(Z(#FȲ={8p *T 22%Kү_?ٓ_ {r_… 7n*T **ƍrlΝ;VZOLL ^x!F mHNN\rDEEQhQ.6nܘXiii}+VHʖ-˽޽{߃>HBB111\_>MF `nݚEe7ydWNTTիWwI:˸N+|TZ'|S2x`)¸q+6l&LO>Ԯ] ɵ^˜9sҥ UVe…<,]ɓ'gcټ :6mЯ_?ƌ}͛>|8wu?C~Giӆ$zEѢEYx1SLW^>?3:uhѢ,Z^zE_C:uزe ]tJ*osNG3p@V\ȑ#޽;C}׿rwҢE Ν;;v,g-̘1o .CqF:uD%Nߩ$4#IRPNƍ@.](S :t([oXb/aÆL8?YfQ~бWNnϹ+BK, xvYt)  ##CrJʔ)CFF]v%))o…  Cw}r^]vz+s̡A 0_~sr饗>Ypy1cƌPc֭*T۷ӳgO:wK/;r 2$޿?|̙CBhԨ͛7tw%I$IBΝCGDDp饗 C rRjUT† BW^y%3g2UW]uo C`VXA޽# ZC޽ 6pe_ɓkC4h@FFVVmٲ[o5{AݺuCAzz:| wyg(4k֌ . [t6rH+*U*B lYl/&!!\nQ(Ya7o O?:M6›om[~zmvc^Yj[l@(V\\@(LUX1[ʕ+›$ G\!""K233Q|8x̘1+Vd;ސ!C1c5 =<==T̙Gرc۩Ur ^SR^=y[nM뮻شiGZjl߾'I Gt2EK'NG A7oδi(VXc/^s裏2amFŹ)Pq֡C+ƓO>SO=Ş={(^8 4SN~-[$55c/_W_}{O?I-㝟$(']\0Ws_s$I$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$I$ 0I$I`8$ƏO `ʕƍӸq$I /Ñ$^FIDATI$y]$vrpmc2cƌp I #g$IΎ;NqOO˱%I9H 4@ ?@~tM)R(.RlXhW^y%є(Q9=z4ժU@s饗2qrrY$)Wh׮+VdȐ!A-ZDz(^8=111[mۖI&q/ФIK/}1?ӳgOn&zݻ;w.:t8ݧ,I: G\ᢋ.2cӴiSJ*W_}Edd$wקp4l0֯_ܹsSwy'+V( Y>uXjag*W|1Oll,uԡbŊ|g$I9$I¡|B>}hѢaWPǬZ*K,aʔ)|L41ccrǗ$,#IRS\9GӦMطt,[,[%Kinfnf 7O<:%I9$INbb"7fܸqg۾~Z/d޼yYO0lܸ1s ٷoI$)'rH+ ԯ_5jp=P\9_vZ[_WZlI^B.]4}h޼9E^z,^֭[S`3q3p$Iʕ.ϟOJJ Ǐgƍ$&&rc%%%1sLzO>yGn(Vw}ڵ+&Lga(Q={$Iap!I:q?3ppP{$I$ Ñ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I G$I$IԠAeʔ!99Lll)< 6I: wguo1%HtT#IYaɒ%I҉3I .A#6IR7gj׮MTT˗gܸq}BŊΣ~|GGo!!!ƍ}PihԨ $..ڵk3qٳgӮ];J*Edd$%K`׮]i߾= DGGSreyllBrr2 PBtԉ;wfop%M"E[XfS3Gm…4oޜ 8p 4hCsԩSm۶1|k5kv}+Zh^{Gtt4Ǐ箻Zj 0… `>C:t@jj*;w{c޼y=kגᆪA˗.]PL~'>x,oߞe2tPk^~e6lXO<>Jܹ3ׯg4lؐ Ppz%\d8$h=`ٳgST)nFjԨqNJVx饗i>VZѠA&MZuVzI:uO  C6,tB xYzu=z CmO>d.b^y7+ѪU8p 3fLvIґNcedd0}tڶm%HTZ-Zq… h"-[vqfΜI-ꪫxwܿG믿Ce F@Gv +,X+Xu- qFm;Cff&۷gÆ ERbEfΜys$Hc_]vQbl*W|}qlBJQ}oݴnݚ/zgO?Pz#zj)R$$$ШQ#?|L:*>>͛7l2 +V$!!!ŋYn1#I:euRÆ 駟x1c/2>,/";wUV{|ᇴiȠYflڴSJbbbHKK#99:ö\ΗI `ڴi$d#IRuin[dɒ_H:uDNؾ}; 6dРAYQ `„ \wuk׎iӦѸqSBÎpB.]kwjʕ+:֩P|y e˖RJ䘒t.sY$)NJEL<իW/^ӏƍB 
ٳ'[;Pvmּ͛ys ,СCٽ{w9 `QFe韐@Æ _|~ﱺ %%%`0{ I:2g$I9ZJJ ~! 4c=jժ.7n%\B"E?>o6ݻw?lhL•W^W_ͬY^:qqq$ӟȗ/UT _||ٓC_Oݹ袋qE_裏2vXvMҥi߾ ?=*Ug%%%%KҼys:$Ǘ$v~):t6#Ioc81"D@b EDڔUkYmU.ĒpjKg2 ٪AK{")294$;;ޯs9:;:~w$ Ñ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I GK\{{;7|3]v@J{zzl޼"uذa9$M$I#Ofٲe$&&eioo$IH&̬n}w|<}]$I GKX__ӦM7eL%Is$I$UWWSVVe6Ǐgdee ϧh4:f-[ꪫHJJof%11 ~G zjrrrBL> _kkkcɒ%L:dkϞ=̛7DxK&I$ԐƍyG7oc_)++ٻw/k׮ѣ444ߺu+bʕdddp^y8_|l> ۷o뮣={ܹs裏 yי?>}tvvh"ذa###_>ޓ$ bx7!I:@nn9&O>n&.] R[8WNN3gGc O?ŋٹs'Go$IΑ#IҤfyN)**ɓtvv}vzzz+r-Za+奦r뭷i&N>Mff&--->|x~5kK.o$%%>v}Y rJ233?&55w}p8ݻ)--^x ^MI0I$wО蒓Oٸq#MMMlݺT L:uT 444Gqq1̘1#^o#/Xhvb̙[[[KKK ;v K/ĪUu۷'|FȠx 7@ss3?8֭c֬Ya=j8H&8_F߇&I$I#I$I G$I$I$ 0I$I`8IG5|J G4A"N5nbH`Џ&6I D"bDBݎ$]V&h4ʑ#G"!!x3#FH$=4$iF FǻAB#IH$IpΑ$I$#I$I G$I$I$ 0I$I`8$I$p$I$IH$IÑ$I$#I$I࿾zw]PIENDB`python-diskcache-5.4.0/docs/_static/djangocache-set.png000066400000000000000000001011241416346170000230610ustar00rootroot00000000000000PNG  IHDRR YsBIT|d pHYsttfx9tEXtSoftwarematplotlib version 3.0.3, http://matplotlib.org/ IDATxy|OW7HT$4!)bhZKk-XZiҪRPeJQJiLB&D|xmb9n7G|9ύ̣y=^n prmR` R` R` R` R` RnܸqlڼysN͛e4n8 fLQ))N8K^dd %3ڵ]K~u5N!sssS…UN׵kr*C pUys@ѵkW5lЩmʕڻwZj/ҥK?RsApº~bccW_iǎڸq/_幌PĨhѢ9] GA pײ:p ݻW[ՃE ]vqKʕSgyF}х j[ v\ܵkS_uUrwwWbԡCϙؾyq͜9S+WV3]a?󓇇ԪU+mذ!Ӝׯ /ECe˖Ո#il``Kʕ+{Ov1vܸq $͟?g822RҝfŤ^H@YnUP!lR%KTBBbbb4{l;16))I7VttVݻ+==]ׯWtAM8QԺukI7~mР/7_pIII[{1UBB 5kL;vP޽-Z(--M/K/ժUiݻk޼y*Uڵk… ԛo7oU޼O_.]~z-X@SN?|Iu.>>^5jŋ /]vrbccpB_E=7Ƕn:mViiir)..N˗/_M6jժ4hnݪp ʓ'oر?~Ժuk̙3ھ}-Z&M8FDDhܸqU-}iʔ)ZvvءB 9f͚̙3j޼ͫ+WjԨQrgaÆJJJ:~%e>z^-; Gm.ɾgϞL}ϟwܥK${Ԟbo֬f٣6mK;ԞUmͻ嘱c%7m..޻wo ,K[haOIIq}vI֭[;5o<${6m/_r?瓒b+V̞oϓ'jժ3frLJ2e챱Nϟ.v=!!^pa{"Et~{UTqjw*Q_~]=((ԩSwvIڵke; <8k޼ٳvoooիX${.]ngA?zs/rX3|?х h"U^]8OOOǶ?z?(P@'O'Cʛ75}tyzz:{9jϞ=NL>]yէ~{oH"XٳZ~SߢEtuu%qY;,X05n8ԩjԨiF=$iJJJRDD~i+U={*::Z4믿6w3gJN%Kf/U3f̐$}*\Ӹ]*$$3f8}OԪU+YnIV[ak䐎;jY^z%5jHuuU~']~IbbbDٮ|zǜɣbŊҥKz'2SdIܹڻw-?0u<<<{ԵkW}ǚ?W|ԡCG[˖-5fOׯWfTn]=>ӧKK+WVNԧOǘ;vHݛÇ%c rQ6Maaawqǎʗ/.]jW^um*W\;}/^䐶mj͚5:u>S͝;WTZ5;jڴ$9VO?駟~|;y޶*%&&nY>T:uT|y_Rbb|||GP֭)SFvҸqn:#4|p 8h;˖3v\V?ŋrlRR||| څ tڵ;~eȸow\^kp;lNڸq 
EmYabȐ!ڴiSNJUR#MOΝ;+55UK,j[_Ŋd]pAQQQzwAO>Ɇ3tq{fU뭮.\XJII}||.S&^A \@ոqcM6McƌիW7H8$///JHHȖ9;w,777͟?E:myZj9r+W̖znBnkݺuw5611Q̶(;D=)! gϞt! ҍ;v쨨(M0!_,;XI(% :TW^Uݳ|/Pbb|jܸ~GM>]ϟW/_>qw?;fnݺpŠn)IJOO7~g؀$IÆ ӧ36dIRϞ=u̙Lc/]h?f˖Q/GH@8pN>u*00Pڽ{;)SF/cYt[ZpիbŊ̙3O?ŋ;VB,YR_|˧2efW^yh/u]wٳUlY5kLKVBBbcc߫[n3g]٥KmذAcƌq| jܹWʖ-+;vLWm瘡H"ZlڴiZjWppl6N:;v… r]?I4qDUXgj۶mUe?}]=ZO>^x)99Y'OԖ-[T^u+^^^Ynݪ;|ʓ'Zlʕ+ !H@3fVX(mذAnnn*]ƌ1PBڲe>#}ꫯt+VLO>>)ۢVXQFiҥ'ݮz=AJy3g6lؠ$t1b:ud4_۶mկ_?]xQ*U_۷kJIIQɒ%/kذaTRvBk֭rwwW%Ըqckx &vښ1c֬YK.OիWWΝƎ9RuՌ3m6ZJ*Yzd{pB 2D֭ŋeUT) hMp"H!"H!"H!ⅼHRRl٢yxxt9#-55UNR Tpa RȖ-[Ժu.MV\VZGz@$*W\W<ڎ=֭[;~O7Ez@2+WN9\ I| C)0DC)0DC)0DC)0DC)0DC)0DC)0DC)0DC)0DC)0DC)0DC)0DC)0DC)0DC)0DC)0DCysUs[-2齪SNWA A A A A A A A A A A A A A A A A Kd;VaaafSddmIKKO?-ͦ)SdOOO￯ yzzrZxqs(,,L^^^+gǩ\:Ho?~bbb13g/r7xC#GTӦM5sL.]Z:t_|4...NѣG5i$ >\_6mWZ:/[ޜ.vŋ+**J5jԸsi9rzLOԩSկ_?͚5KꫯA1bڷoCl,DGG`XS{hh_qܹs^z9BCC<\zkݲ0`^z%ծ]['Nr\||+)hKΜ9ws&$$(55UYsܹL8z9p]ElٲێKII2xzz:oNcofϞ?JR/^ѣ5bvlʕ+Y۷ڷovQnr}2e^^zɱ/..N'NDrww6m$/c+_%$W$O~~~r~\O/D+((HAAAz$xyPP:$I ˗4Ν;TdI=㊊ʴޮ][G 4"H!"H!"H!"H!"H!"H!"H!"H!"H!"H!"H!"H!"H!"H!"H!"H!"H!"H!"H!"H!"H!"H!"H!"H!"H!"H!"H!"H!"H!"H!"H!"H!"H!"H!"H!"H!"H!"H!"H!"H!R;v+ͦH1銌T˖- RJ8q\弟|*V(OOO=䓚9sfN>/*\ *VZ}rRƏ=Y|uϫO>cǪyNΝW_}U9sj׮sFi˖-3f""" … 9]+>>^ŋWTTjԨi~թSֳgOjرڸq4i"IJIIop-[16==]&LP^#I={9]v9m޼*USjҤI("ŋvSЦMIRLLmӦMp4_~t钾kG۲eTF SOח_~yOAʊ_UThQG[tt$zNcU&777Gzzۗi$رcw˵ϝ;:}=z9p .ϊ_ Rmʓ'ƺH":s$)!!A4oFۙ3gTB,מ={"""T2HM4I6lٳUpaG{JJݳ6oޜ常8կ__ޚ4i5e߿_v풻cow}W={T5j*uA6M/= A_*^TF,M4I.]ݻUtiIRhh6mHKtiM:UӬY$I4h#F}ʓ'ќ=.Cŋ㸯J-ZpIjҤʗ//Ѷj*o߾6ͦ^{Mqqqڱc=.ӧu9U^=S_hhU`AUX1Ӹ~9Hel؎wx:kӦMN2-QY󓟟=W6QdI=㊊Էk.8>ˊqsNG=>HIRvf:uѶqF>|i]V/_>͞=f5g,YRu1͚5KIII'^Zqqq[cƌҥKըQ# 4HɚsW] c.vu,bcc(I:xm۶]:u+tLzz{=͝;Wz'5zhu1w;8x*U(88xgnE9]B&Wu_:q] 8MGѣmN)xR` R` R` R` R`(ۂԕ+W]{n޼YV?C)%%ET@UXQuQ֭հa\QJKKܹs5m48qBZ:u$v%&&*66V-Ҍ3TL 6L{V|ycʕ+WK._UvݻtRM4ISLщ' .(H3F]v]VUk޼yT  ջw{ZWs_~U]t~L 9R/4dyyypjӦ-R:uӕ۷+""B͚5Ӑ!Cn:ۖWrcǎK.ϟ/+V(o޼JOOW_}wyr*,]JMMo5o\ygO?U.R ҆ $IQQQ:zgϞ X׻wo 4HR\\J*-Z8[.\ 5`yzzjڵVFK믿O>R( KAJz쩞={fjUTTܗTƍlڸqqઌTzzl6S۩Stqy{{'$*))Ie˖U@@@U 
.(Hm޼mԲeK}ҥQ׮]Ӽy4rHEFFfW,=lb֭z}T\9Iґ#G4gٳGgΖBUX RzR|xcl6W+K˱<==pyzzZY\ ոqcM>];vԷsN͘1CM4KH]իPUPA?k׮]{w-tE*((H%Khɒ%JLLԠAw^fS,GO|>\+R׮]ŋoE]vr,N:[ fe p9Ժu/޲_ڵk,.R:sJ,y%JVc)H)RD?-cbbTP!+K˱4w\EGGg>H͛7K?0a֭[PlRhӄ PpT%QFiժUZb$PBر&M%JdK*,_ϗn%I?l6YRl6<<}dw]RJ:ps jQNIUrx ~~H(P@l:J6gW-+X Ro>W^yE۶mӧ &6СCo9.w<@ne)H[#c)H7.=R()$$UTQDD%X R˖-So /hɒ%Vc)H/*[-tI+K˱nbccie p9TÆ 5w\>}:SߩSGQFVc&LPhhգG z8O?Tv]&LȖBUX R*T֭[5`}N}׌3TbEK$rڲe~7?~\OhѢWd9He(Z( #(H-X@+f9>IΝ+ev*ͦ_~Yڵl) +Irwww  UL~GSmۦO?TǏWbbvSf޽{..R6mF!OOOUPAU,KAjɪ[V^-oo \{._;ooomV_}";?>XA*iZVe.kZaY$iLX#Im4'#%% \@g<K纯>xs]}()P_^z)!!A6lhWQ u!5m}||t ENTPP:t}*W\!)Pz'5w\۷fX$IW… S )PWppիW/Y,M4IQQQj׮6lcVP$(Hoѣ.OOOmذA'Oo7ۻj"^{MZaE^f$*h"۷O'NP^'>ӂEFG}޽{ҥK*[?\ET\\ԩdVMPcǎiРA(%JTdd~ªӧkɒ%JNN.z+=R 4Є OG+V]Ţ*ٳgkСT5>p5 R jڴV\IPbSNg%D(Q Zh;wV-P,(H͙3G6lɓuª ziSPP|||gŲ?@xꩧdX ( .\XHe@Q}PH}ᇒ={b^H^+"(Hb駟bccob!Hp)FAj$wwwP*U*Uܮ2~D˖-f͚Q  Ajގ:pǟ?c=r[aaazl޼YQQQV5l09sX9993fBBBHfHX,]G^Z:u}ݧqW?>lc=裪[MÇwў={j**99Y#FPZpBo^֭STTԝ>=E-=zG7bҥK2M;}z:(99YJ]}mر 'IZիWu֒m۶?֔)S4j(I? +,,LG͛o(n)HEGG+44keK.o &TR:{ӧf9iȑZl-H%''M T߾}5vX:tH*Us'H ջwou޽kek׮չsgG={Ի+OOOܹS.]RƍuwwWxxRRRlm))) \!L=zԮm޽=ME-fϞ=tqW'N5sLJ8Gs=otZh+))Iqqqx&LPӦMբE 0@ԩSպukmvH(..NY-Z85%K)I;w*W hR}]1֧QFZvƌ#Gꮻ=.>C7N/։'԰aC\R͛7XV( RSS~Awv9"`K]pv pGsG $"H!"H!"H!"H!"H!"H!"H!"H!"H!"H!"H!"H!"H!"H!"H!"H!"H!"H!"H!"H!"H!"H!"H!"H!"H!"H!"H!"H!"H!"H!"H!"H!"H!"H!"H!"H!"H!"H!"H!r 5aY,9lۼykذa:sC3F!!!Rdd֬Ys'PĹ\:|mǎzGu9M6M766VӦMӳ>3fM۷צMi(J;6j(=ձc춍;VZ~$IUVUzjnZm6}ǚ2eF%Iի4zhm޼Ξ"ťf+99YӧOwviYF=z(r@ղelmrssӀlm۷l٢CP̌Tnn~AwܩK.qv WJJ-%%EvK"""$]^"XRk֒GڵݻM.ΝjڵWݞ!I v7V?I_[ٳӵ(^\"H?~\ƍ9$am{~kX2x`XݻW;w(\"Hk*Wz>^^^.?m^ߕǺ Tb٣kvKuE8p@~~~eyK`_$JbԾtiذaVk֭JKKSjo*,,LKpvءp[[xxti[nmPr ˗;|կ__+Wշo_+::ZK,n:s=M]tQnnϟokт y'p}~i=sՇ8ԕ&LME0`>Suj۶_ddbbbLլYS-ҁP)5ڵk契#Gj۷~1b/^aÆŋZr7o%~FZ֯_(}7SSLє)S 2]@C.]:p0#R` R` R` R` R` R` R` R` R` R` R` R` R` R` R` R` R` ps=%8igH!"H!"H!"H!y%c+:8K"fA A A A A A A A A A A A vv@Q؊.A=1#R` R` R` R` \"H}2dׯ/U\Y]vUZZC]vm۶UrԳgO=zԡ_^^&OjժS 6G}t'N@WI&믿VLL6l#Gh֬YjԨI>͛_ :syܹS۶m혯~m_<>Su]EO?N@A_ҥKPnԠAZd$)!!AgϞw}ʕ+K"""ԪU+-\P $kԩz4k,IR~ԢE 
ˊ>KEK,kڴ]Zj~ڵkO>Qǎm!Je˖>S]xQY,=:|lr@Q3RWcZo~.2effq}#"":%%E>>>[CQQQ;33ᾫ{(Z\6H%&&*==]o$)##C788XYYYɑ222tb8_cϞ=[q R?^x5iD{$?^+&&Ʈm޽ܹ(r\.H9rD:tm$8쓝m]KPPn$i.|NRvtITHHm[%~WPrlP:r䈬VC?IvPLVN+W^zv+T@m߾am۶)<<:<<\Ν{$mݺն@A*77WݺuӖ-[&M\SO=+WСCJiiiv4=*SfϞmkZ;w*TM޾P=R/>3uIYYYףGIرc-[j:s挦L O>+VԈ#4e]xQ 3R` R` R` R` R` R` R` R` R` R` J;x+:HaF A A A   G*i (0!"H!"H!"H!"H!"H!"H]GNNƌyyy)22Rk֬qvY u6m}Y͘1Cnnnj߾6m8QigPTm۶MLQFIzꥰ0=Z7ovrkHNN `kT߾}e:tȉp&5(44T~~~v;v8,EK!##Cm5ѣG~GI޽{ [Kp/;>:H\7p]\7(^\CG999?AΟ?/vOOOk={㯺s΅SKPdq] k/\7#La.:FG//lk45h {sZbj֬rkp]/999:tZhqK!88Xk &M^(j֬; p=\ǭDa4>}ڮ}֭J&5tE?-''G ,Pdd*U8K!22R111Sffj֬E]'r?~xgQTuIϟג%K|r}QgG?|||] Bu bZ.C)0DC)0DU*66zX,Z~j(iR-Zp,,6mjRJX,ر*p\bBCC5do~{U.] oooEEEiݺuW=]Զm[\rٳ=zN (nu=k,խ[WP^|E={N  .]( 67o\ϟm曪Vi&͙3G??jҤ/G ,P֭W_y>|X͛7t;ڹsmAu]3F'OV.]4|p㏚9sRSS_: qR@o^IIIzT.KױcnJmi׮7n,IׯnM6M~6mڤ'O~Pڵ%IW:u4rH}w$$$ٳTreIRDDZj jw0댌 M6M={ԇ~h;vhh?\:u'aiP@<?5k.\duݡ^^OS{'NZz뭷TbEy{{e˖JMMu8ڸqbbbTreyxxRJ9rΟ?ooll|}}Ν;W5jrss xG$Iƍu}~ؒ$ooo=c={?u$)::ZZlٝ;n޲e.]~X?;T= A (UI&裏lmVҩS_Vf4c GjӦ.^hkܸqӟ)SzjݺMNJJҹsk̙jӦfΜ^z9U6mtwwQ-4uT͟?~gIw߭yyy9$oӕi """r+p#r]HC?C>t]qqq:-Z($$ĮߦM׿UvU-[T۶mݻѣ=z_ԩS:v옲_7ߔ:v/B7n˶Oj%]^$IVVVrrrn.+:믿V˖-m6nh 3R@!ڵΟ?+Wʕ+/))Ijժ;fk{ڵku :$iĈ7Uϕ!ٳ:v옚6m*zf4C=}X'::ZT~ijPyUI&k׮5k&[AWڶmXn΍+ժU˶TFFbccT(D#___͙3GǏgAtUҥK:y˔)3gjL>u0]jՌ3yf+[SO=+WСCJiiiqFnҵ?ѣp2fBԻwnoѢ'jǎjݺʔ)={())I3fP.]l4qDuQ۷WJJVZ{cԩSG5jШQ.???}'8յkW=c*_RSS5w\5licǎURRZlÇ̙32e4h>}8 u=|pegg+<<\/^ҥKm6-Zi(>R6w\7oƎҥKjժѣzK;w֭[H^Z:t˔)?\Æ ĉ'xBC џ}zO5kTB 6Lc%RJڰa^|E+rwwW4uTӕRJ)""B_}QXWH!"H!"H!"H!"H!"H.pXV={VOVNNVKB aX!???b8$(@`Z,IR2eT pgŋr)((0XcF Jg*++K KB sedd(++K>>>uvIpU$O$B]+999*S ! 
N2e(''٥@Z"bGw R` RpBY,8p٥J>G p"px@(C)0Dٳg~PHH^x'|RsU^ԤIUVMt9hB8p*W͛7+..N>}]퉉pႆ,M> }bU(}=C3w+)0DC)0DC)0DC)0DC)0DC)0D-\PE۷ocƪjժ}( R`  K7m]7)0Db(--M=z5n8YV:tH?T|yM:nY<<}|JMM󕚚oFE믿*""B'OԀTN+99YΝmܡC* @o8ӧkȐ!nxb[mڴѤIt9͙3GQQQJII=HbzꩧT^=M8QǏW>}Tbg EA R"""4of{ڴi%I-ZP֭UJ⌥}ү_?nnnjܸVkk/[j׮}ITn]թSGǎ}=#uٍ裏=;22RSOBԕ㤤h1b]d 8e>;;[ǎӃ>(I#rZb:udvk׶=pu)Kqss6ԠAM6usv͛7_Vxx|}}m*//X7[[q/^;+]J(jԨzG8?(::}N8Jzm3F~Pk fml@cPRpڵauBQFVO'Om˟ʟ9,^*UJ;w۷;e: ֦M)!!A/^t؞`kѢEe{~G18cF PS˖-ӠAn:5kL駟l2}W}RJiΜ9ԩէO맟~RjjKy>^SNU^^jԨ,]TC/ժ֭[kժU ;V uV7N:}*Tv۸ݻ+$$DoLUPA=c׶m[%%%^S\\jԨ O?ob5K^+AIE{A A A A A A Xp,8`k{;&Pig([%_8zjgp2f.ٳrww-)K?~,~Gu]$Oҥʕ+'OOO5nX}1RSS#K+V[o<~WGj̙_ƍkҥ\>KQZ ժT5kL*T+"-[L;w'|'xBtlR.]?n8_ 6L]tÕ^[nUo) p)ӟfUre}$ -DIR``}YlY>|X~mH"H\JjlݻWVUƍS``o!I̔$Dg믿P.z꒤2e(::}T={8޽Qnԭ[7]pAO>&L8yzz(XpYAAAz5o=XVEn}I}Cw+)0DC)0DC)0DC)0DC)0DC)0DC)?~,uժUktXrenǎsv)PvvaK]~%pSRg*UE[G8.P8kރ??+(b h8rnt)10&+mT6Y]tqHbeVj91 2G\KZ&3;Þٙ|ζ(;v 44f3k֬iTs/bX Ã=z0j(ml_d2Euus{NNtHhh(6lp/,,dܸqۗ;nj3moFBB& OOO+Ҩٳ$&&M׮]IJJQG}DHHtޝ'xtGJDDZbbbb0LaZ7ozj]ZZ/&990Ο??޽{n͞={宻b˖-xzz2qD5k۷\Ə@VV555L<=z{n233?rQTTDxx8۷'%%>֭[Ypx߿?/f޽]={Yp!s!!!dNʰaXzviH`#>>%tvm6ۛPZZl߾X^][m۶*^~e<~uN:ȑ#q8۷'ORPPĉ]c5xg]އSYYnS/پ}ߞ?)iN:t <?Nbb"ݻw Ddd$]-#G\ѱ\u3g(--pdrypĉ+GDD}""EDDpalB~~>k׮%##~dg]ǎ;v,[l!77xl6ќ>}^zAѹsgILLn_9m۶ S v;&k?6,"_ %""-BS횚wm߽{wHJJ\`?fܸqo6_ hb:ĺu0asO0`X׃lpп1EDZ3Mm۶Ʋyf?^RRB^^^m+++]{yy@}}}:Mhh(> wvK.,^:v w] ʕ+]M&.sy+裏Ҷm[,K5鎔\IMMjIpppk~DEEBٴiSLiӓ/ѣG1dddLhh(Ǐ[n/԰n: l^ȧ~\t7xQF1|pRRR߿?eee|߿߭kc6Y`f͢xtѣGHII^p""0iS7z; //sҧO, iӦ矓O}}=c̜91Att43гgO,Yi߾= bƌoߞ[2m4/^<SLaС.} :~9s[oQWWG~HHH/DFF???bbbx衇""q5DD7"7>"hDDDDDDܤ %"""""&)7)HIAJDDDDDM R""""""nRqDDDDDDܤ %"""""&)i1ȑ#ܹ3x K?n ˯hiii\v7z"""ŋ7ndddЩS's%""-PwoW;vw}dx ӯDDDDDE8qmڵ];}Չ5R""rKLL$22qa0AgϞeѱcGHOOn7YA~$22_eQQ 0|||8q".uUUUL>:vHϞ=f޽.uvb̘1tڕN:Ν;kǎlf͚5{""rut""rӛ4i,ZiӦJ^ !22r&MD߾}5kX¥~TUUsQWWʕ+=z4 m۶q;8p~}Y6mĔ)S۩dǎ0|p[ a޼yiӆ>ѣGSXXHXX`2HKKj2o$MC|ꩧXjUp88p  ''jkk & |yrss9x }!C`/}gQDf}""ҪeeeNn8uu}a(((pw(0FW_}viԩS}.ٵkgcۿ??J.\KAAvF^^pm{mHDD}""ҪRTTdjrC,\zתAPP|ӧX,lܸQs9^t)O?4~~~0vX&LSSZZ O??9멭mrlt 
y""r}(HHfۉ_lrPPLHH~`̙yxyya3f,>#??e˖Nvv6qqqe˖qw6ٗnQDDjfj+oKtC3goX,̝;v{&55TN8Yp!qqqfFc3Lε[;xGkDDUKHH$//ѾgbZ]m޼rݻwk.h۶-@;\?2gϞz;L!!!f/_Nuuu< :///kk@6mXv-qqq/l߾֭[X,Njj*VL)**H %""=zf֩S'{-ZDVVׯh4bk׮.&LM6X'NƪUݻfÆ L:7|ALL 999z.Ovv6vV^ɓuQQQ̟?UVQ]]#F`ҤIκ;ReIDAT<yΝK>}X,TTT(H;R""~G+Y@kDDDDDDܤ %"""""&)7)HIAJDDDDDM R"""E%Pi% VUp`0 7z(""DAJDfQQQjÑVjRQQfFGDyEDZ Jyy9555k׎6m΀v;uꄯ/ڵ#z R""ᠪsEM`}Ft/"7=)7iDDDDDDܤ %"""""&)7)HIAJDDDDDM R""""""nRqDDDDDDbr܇IENDB`python-diskcache-5.4.0/docs/_static/early-recomputation-03.png000066400000000000000000000671641416346170000243040ustar00rootroot00000000000000PNG  IHDR5sBIT|d pHYsaa?i9tEXtSoftwarematplotlib version 3.0.3, http://matplotlib.org/ IDATxy\r-7 r WxfGijRj~#%-MJ(3H>2+&jey孨xq% Ytawg_ǃgg; ;yg>A ;s@DDDD aHDDDdc&DDDD6 aHDDDdc&DDDD6 QP(2eà (ڵF~~8MP`ԨQfm۶ 0w(D لKBP^éSE6m~~ɓ'1e\ttA@ii)&OѣGݽsuL2Gur5˨R <==/ŋ;m4<3իcj%zݺuCݺuXb9;4uThM[)1cƠUVw;#99'N@@@óHӦMC~лwzɓ'(k_Mٲe Μ9ÇWz_yG#::Yf!22G:t͛7GLL <<|86lk׮a(,,Ю];b̘1eЫW/_^$&&'N1g8::٘2e o,]!!!4iwڅ5k`̘1P*oЭ[7߿>˗/Ǜo֭[JCCC_ԬY.]¼y'O>,ƌ |5j⿏spy5 !!!Xn;W\۷ocĈP(K/ŋptt:b}i z ɓG܄Ai}w,w^ߋ֭['vܩ>22R_Ϟ=[ XBV\\,mVpwwA#deem7o,l٢[pqyRСCի sduچB._,N3ZFF@:u:Ν+N>m`+w ׬YSXfζӦMiiiFK$M;w.ׯ1^㵋lCXj"##ftHLL1tP5kPRR^{MRVC|}}|rKdGsnhߩS',_* ?^'R})_~A֭3ψ1|pɓXU6mj*۴i %%%ppx5Զm[DDDkժ_|[lAiiq߻wyyy[.TÇ_/2W^9::b̘1xWk.S7`ꭺf.Wʯmhgg 6_ ]b8|0ڷoo0۰wAv ԪUK\ofpYR_FV*Z}l\jowƍ5)K=333Wx@)[6zO??GV;uiذ!Zj$1LJJO?-I&CǦMzjiv=w`РAzbzeiFk2˗5>yyyT*j\~WֺׯBdddn;w 11K,k 8/77yeԫWOwRv[R' ْW6沌mC;;;`…Xpe]+W0i$Z1݆իWGՍ.,urh?Q~Ȳm qrrJ:uB科s"9bHTƞ={ЫW/̟?Û<== % OOO8q:j׮3ghM?}8ߔ̲Ξ= WWW_){.N@Oׯ_Aaƌⴻwj-<ՠڵkرcPTU@So RRRФIRJKK #q={˖-Cll8]ץ5k`Ȑ!ףN~ФI?ؿ05Xp!%,{Tl޼]t+fűcv7n9̎d^6g*z`]x?7ob͚5ⴒ̙34 )"""߆۷/6lؠ2}~6_~}կ_?8p@s9s8<ӧquAA bٰauv9tcݍOdX$uV"SVvPN3gDnꫯ"==sEݺu5+G=&Lkb٘>}:ߢ{hܸ1 5jڵkعs'<==e믿"22ÇGFp [T'bժU޽;ƌooo,[ )))ذaV']j a bӧƌB̛7H|1sL"$$mڴAϞ=|rxyy!,, {۵.ѼysO?Enn.J%:v&Çc}:v܉6m`ذa CVV>۷]BCCQJ̟?pssC6mаaCbv<==Q~m,Z=z舙3gFF!22Rܹsܹ3  Ċ+5$Ozz:;#G;N"e*(sw JаaCaɒ%: 9R00jEEEBժU///Ν;bWn:󣢢OOO!fᥗ^|||R)Ԯ][x嗅;vhBll+(JN:ȑ#"ͅ ~ UT֭[ ?ѡf3##Cޞ+V-t믿 ႓РAaŊ:GO}YE  - 2DV.tU8}Pvmac-Z$ԩSGa`A:99 M4BE= 
7ڸqP(+Wh_6LKKF) B@@ЩS'a…6o, +'O:w, ժU &=zT1#55Uׯ) ={Ν;"##C>|аaCMprrի';VcS7o*Cd 豔 00/;scQ FYclUii)/lZh(̚5ܡU$D?2224:=Ns׎ٶmΝ;xsBTX$ñcGZjZ}Ո@" Q%7oz-7w8DDdX$"""1&DDDD6 aHDDDdc&DDDD6 aHD… 1bԩgggxzz}/qsGDd?CT"66(.. &… &@ȤRRRдiSԬY;W1;)'SXXWWW ݻpqq1CTDDKDdR}wi%Pn]1+))G}P(J>@QQ{ѳgOhݺ5QNUq R5k"66KBPҥKKNNB@rr8-** 8t}Y>ЈZl ,X@|+x{{#&&T/ɓ+jԨ>Lsݽ{SLAի㥗^…  /|F5l@"2-[N:h׮Ѷo&&Mz fBdd$ׯ{9̘1UVmѡC̙3]t_~?8}4^X֭[޽;7oٳg#::Zw+x_yO>WfΜcbǎxglt ͚5Ì3аaCغuئ={DBB"""0c ;ʼn'P(ka֭X-[^{>?ɔ@Dd"_4ȑ#7Ԙ>~xᅨj׮-v-NKOOJ&M$6nܨ>J% ,YD h߹s@عs8-22R ̟?_ky꘶mۦ1ҥK'hL?~1]^VTT$}-^X ̜9S:s@7o^z b;""AVdF/8.7:}}}ѠA\xQa4k }ZB)4)J 2D缐tUcƍR/#33S @zsN9'''nZsUV Gׯ6m ))I[bHxzzn߾m˗aggujL@*UpejZFժU-p'tjԨ'''BBB;w ^z9u5׬YS+94h7?ݺup=>+CD&@8qB{VuN9n^]T* nݪ3fwwwצ\q!)) |VX-[A^@"2={b…ػw/ڶm]ڵRp94jHԮ] 5|VZnxBCC!BBBP~}-s߾}wF=?ٳM /I{pssÛo4.\_~3gzQuGŦM+jݻwJKKM60K/{{{$$$hUA[ʽ̾}"33_ּGɓ0auQMD Thh(V\QFO믿n: <  "''ؿ?-[޽{k "Մ ~zo"""Gfиqc<ӈGVVzjLjǥKлwoxxx %%6m1~r-366=~tؾ};~mz[ݻwI> @"2^zرcϱyf̛7JM6Ō30l0~:u`ҥشiɓ'?zݱgL<6m²eN:f͚b$1ӧOG*U0tPDGG38q"ׯYf!!!.]W^^=~|'Xr%6l<3hҤF[''' 0| o "(8""7n;ܼySc눈HF޽+Vo߾LH/^&"tl߾ׯǭ[-HN<_}\b""]ư aHDDDdcT*_'"""oF`` l \~AAACjjSl 'ihS_l˦W7 g9ʓ 8n>e_OOO&D2枧S5\J>p8 }GS:ۄ~ JAPd{!!!!;iyyyطoڶm}J?D$9PIB*l+8:%%GV.$* IDAT7jժc?Fz?ݻ&"Kf@Ƀ(YuZ0F~dAZиqc?9s&x sFDFRP bc!2&yf9s?otbĈ4iC#" e0y.?9Yቍlٳgc,}$G@A>DD% ೀm@""#t-I֢<As&DDҢ 2 I@ز)^&klHH"ArRst `q mΝ͛W_Evv##"[%I K"%`fq#..۷fٳgQfM3GGDLa Y q@'6L*s΅#֯_yF[[nfls:#m0L,XRRd,Zf͚e[eoPn-?Utpp;""a`HL<}[I~,*֭[1wDDZx,$9QJeQƻヒW"""nnn6mjȈV zhTFBҳP ,.3FP(  JKK8*/ "x,`m_˗%)))HCyz%`fq `ڵN]derZ~,&X|9ڷo@\|0{ll޼̑-ⱏaP?-_Λ7qqqx瑓#R fϞmu]v |||&M&]zvLl%sElǏ7zѾ}{8::b֭8y$f̘UlD$R..`66ޖX\hBkRDAA駟"((K,lD$?,m,#GhM߶m5jdhٲ%???h-2ȶ<|k%d4Y\0..#Gݻw!ߏUV!11~sE|`̘1prr AtH1uyyy&,xpB_ 0kٱ7߄ *_DJB˖-1m4@-p ̟?_o@Dց?F=q45Eneq`8wqM\zC5:W0i5•+W'>>OjjIc""fԃ@*)'&Ki)lٱpժU]]]'0aӾ}{9sFcٳg DT*CD'Y8IVB% Kreq [o[jM7nVXa77McʕXp!Fiu<0#9Rذm0)) qѣvZܹdiժ6mڄUV!<<}fϞlD$/ڨT544ʗңG|7ի~7|wؼy3v܉t]={DϞ=ML")(ڔk hȎ%ꫯ"''۷/vڅu;,"qRr<(VbT,"9O=q̙3++,""`dm(8ȍE$uE^^8P"Ư )ws/HMys >}Վ}mE|=888ĉH8 Yjg djBiU6t$Gb@3AaQ >dee;"" 
!Qpd]-ʖE,믿ڵkMcÇ٬rKd-8mwH'CHN>&Y\8yds@DAh|YAFB[ײcq ڡCp)@ƍѢE 3GDDХ2"kex-W#&&ɨR ''Xz5|}}!ٚb_)#cqw=oƿ,deeĉØ1c48ID*۶mѨQ#qZXXΝ.]12"U+edeȏUU*;::BR!""$%yVFVFʳCcǎxwpuqڵk0n8tɌr{x0|-_~Cpp0BCCaΜ9lz 摒'6c1}SRR >|۷oӧ5BΝ!:>1DRV-[vڈFǎ{aoJ6YL#99XjQNtш$"jl/)S 99+ԩS ]Y7@^&Fe1 ={PzuHT:W^ÇH|WQ6\|F~,pNN.\WWW| D&M0j(_&]_jj*y$%%Y{%4&"lRqȗ$nnn֭O}!33}\]]gf͚7:t>>&4x0ٸ uJ*(((@@@1k,DEE!44ܡ0\ @I2|YL#::7[ f[7Y.I)dekIGa Ϗ@m/ RI A YIqǖ&DDB wkbHDdDy(~ڶ0$"ೀ+/ J GJ.Olxj#WL$I[@"" J on/&DDFGrT7 ;L$v GJ.&&DDFHP3$?L0IrG@""#$]}p`ވ0$"Нq/&DDFY$gR0^-?L$2JH_@""#RWQWE-7L$R a@bHDdqy$+%e前0$"HҁGJ>Zl:LLLDV???gΜ1wXDda[W`$DfZvl:ܵkFݻ.]ܡ28 %de19ɐ0m۶i^t)p!=_R[u"sxmm8;V^R;}i*(6.=%+]7u6/=UMkVY/Jj[ wkXr_m4ڦ+%_ eM]_w{؊KT)!Eq.k^Զ;v66T{VH5NNz|1pť ];#%%*+:1뢶R7ߦS=>KWč!@ ܵn?Rߋ9ݵ?l42̲Ot^u ~:"Y(._{.ަJwD Ⅶ:ۼlu>a}JU%N=gc^Jۥ;ފ #װ\1lSn@#[L-0 }#jl@*R2 ,TW*ZԪ5R1^:L|Ц;ԫ3M>oHۙzz'<"IUSSo'{]ͺR8So(4 z6:T(ͩy}6앞D5t^-KP.6s.,wcuW>~m!L7X8̳:))D_Ke| tH{]&,c: x'O澋Ѷeh3[6T+Qp=F+sVu1%`V l 7&4= <fhj9+RTKg,0'[J]joe&ZRd%$À 2=&N_KO.Z^b3WUXb }KڈjŇI~p),S,OZn"wR weD벾A\}[1p*j@K7WT#Qi+[M%[jD<7XJ6 -2 xؖ_?ymLs"88hӦ o4ݟg9NjC>1API~,u4A_)qYOOy2%_Ka\N-d1(_6Yqqqf͚k׮HOO7whTR kʮˢyI98PU~wz>]ȯ'ʌVZe7C$?e|$2!NgΜaÆaȐ! ŋ;4Qխ(#p֩q [xdRU:'I}RW='8@,iTXȇ"gСC١sػw#ÿj3PX\R Q~`CgZ \MQ%e"}X Qit@#]ԳJKi<>\ZٶcY<p)R?91'ßq۬H +Ym߳3RWLqb8<<[I/+2TIzX wg`w?# qJ C1z_\+o~;*jz{mN F?QF _h۶8î]o>L2 &UTԬYaVU{{{iѤ! @{'VTʂi!G^^ OO [-ⶭ8ܶ۶pV,Kپ  4[ f """cnǎ5j(J(JiUTX<==TAm+mᶭ8ܶYon6@\\ -[u֘={6 0dsFDDDTal IDAT:0`2220i$ܼy͛7Ƕm۴n !""")SL1wԺuk7~! 
fA+mᶭ8ܶ۶bqZ V@l@""""0$"""1Ll @"hK.Bh92e McHD6 L0$"V\\I&!""^^^pssCsNͥK HHHBB@!>}7ѲeKR_?___O>Њm֭<==ѪU+\0yd8::|QJܽ{H @"jyyyoO?SLAFFv#G|}}1o<@>}|r,_/O?SNaĉ1cлwolڴIkGѣG1yd[زe Ffҥѣӧ_%%%Xf~z&^DDȂ-YD 8p@HcZvv/ⴌ 0ydetIhҤp]qJڵk'ԫWO+Ν; *J>n8^ArrrM6;w4U}m۶ڴi1ƍaΝzѓc=* YYY())A˖-qa_~oFff&233q-tΝõk43|p( uPZZ˗/~7ܾ}'NԪ}_ll,ۇ .Ӓo ""[l6m ggg?3rss~!|}}5~&O HOOxOZ4^WZ bBnp RDRR 77?( "z+VѻwoL0~~~GbbFeMJ?~p̝;< 4iaÆN:HKK޽{qU=z\zzzb֬Yx7ѪU+ꫨZ*=B,[Ll舘|װ+H"&Dd=ʕ+ǂ aaaXb֭[طoGƸqP\\ɓ'#<<aaa8x tRܺu ~~~hѢ&MX:~~~>}:>#8::aÆ7nVX|ԩWX#"*P^DDdRGE_7w8Dd-Zwww$DDd˖-8y$.\QF!%`""3 FZZv˗!%s"88hӦ ߯ҥKP(4~u"L.];w?0#J%p͚5ɓqa4k ]vzSYq~'&9s& !C ,, χ+/^= ⏿%FLDDDd B||8;wz@Y]6T*z)L6#PTT$VTʂNDDd%A۷;;E `ff&JKK*x8}4h/FӦM/ڵÿ5k|Obb"L?UT|E8ڶmmۊ۵kFa裏t'>>qqq\ԪU 𘉈!((ȦoEXZ5#--MczZZ$--ZQ*P*Z===Y[% NNN;i* ;vШRZZǏ9DDD${@\\ -[u֘={6 0d^F $&&N~uENN>s\|o9?QM8`ddd`ҤIy&7om۶7\rENl 6 7oDժU aaaDDDD{yyyBnn.Y ecHDDDdc&DDDD6 aHDDDdc&DDDD6 aHDDDdc&DDDD6 aHDDDdc&DDDD6 aHDDDdc&DDDD6 aHDDDdcdΝ;pvvF6m~I[z5 z]l5k ..'OÇѬY3tw%?:tHK6 ̙31l0 2aaa?>\]]xb)--:uTbDDDD#BΝivvvܹ3ݫ}SNZaYs` (--t?,*{fDeP$lW4W-$mKS,V5.&ii?QKkɚ٢(DJ32|> CCC222'NDXXrrr兾}<ZId`ZZN:l 66։!UUUG""""2b]2 PT]فo7LDDD  a$"""r3 DDDDn0@""""7HDDDf  a$"""r3 DDDDn0@""""7HDDDfϣǑ[ HٳHLLĒ%Kp}aʕ#"""C |GP8~8y,[LpuDDDD#<666K.[bܸql0mXBbb"a;wFll,}~DDDDCt`FAA4 󑘘<ȑ# j? /zOOOlٲ BjjM"ق:7!"DEDtd7n}7l`WD 4˗/fgAff]8p Fs`0@RAzE"*,Ɯ͇Po0kt4"2"Q<~ :R???hZ߿hZT*w %%&ˑk>b@ȑ#;g4a0lD$]xj]M}ZWjA!׮]k3f<|( dO?mw*?}4L&jMZFEEE hBkᄏ9993g]5k3-Z;Eb 0g!DrOYf On P(hfv.]PVV}a޼yh4ؾ}{뭏'N8>"ߕ,M(s^QDDD$K.={ilkP(t6:+rÇ#''wuWJ%J]5ko;H?"Dx?cǎYgݻȰkVرc4Oj> CCC5DN8aaa|=_||<`4gwʕ+MI @ 5VV5/ CDj@\^x/uVLNKKéSƢ:1 ro<644駟? 
ooo ֭CZZZ1"ri FGu6!Y9\uI_gzW%W|SeC"52Z\aDtxu:}Q AD$Jay5V `vVr@"rYO?c̙3SHx$"W%<޽vBllRgn+?^HD $")򐙙~It)DDV\L)ഴ4466"** >>>ԩx%"r>HDR&<.HDRuo"jj ]¬ѸoʈFFd2aӦM8|0O>3f $dLDNѣG1rH\qQ20{F=WFDL!aLL^Yf11!\\J[3kMxj])g=z0Qa2[ɷWɷx 5]wG3_8~x,XE… q ڳfN'<ܹ#Gl>bܹS@EDDL4]IxlkN:`0Iz+ CAAA#::Z@EDD@-]qˠ~Dz+@$3gbܸq8vZ-?\x&ŕuoBP%'xq.Zli;+(:vڮ~`Dp:pشiϟ>߿?K :TtyBq&"qߊ+///$&&;WƐ!CеkWt)))Wljk5_}y "q&JooYIv-:vƁPG!ٳgo_D]]R^QVмVoCŕu%~Ξ ߎ `ѢE8{,`Æ ʲ{;K.O< DGG#??>>>XfM{=<ӈE^ol6Cն\H9a+!j?Q>vMNxh4x?뷩#G{ .)))6\ٵF\xmh4`0<k5e0H}!`Nl<p߾}x'[ƾO> ZmӮVƌ3j"/''*k׋k5Ξ̀;:;(vRlu$Ԑcƍ6^o}8q!$D D其y6pBdۣDtroFr!jߧUx*̰ۜT$<3׿pEL&CUUf̘۵@( tt:_ŋ#77[nEWT F7/xY;#!k. ]bxlq¿,Ys!((ϟСC___̛7Ϯmxzz"..f IIIm>o…;w. =7+`d +u`AxbH>_Hޭ\=}C@)mڃŭ,K{n8pΝC\\ v]/((@zz:^u$$$ //|***V1qD!''`dggGrru;BYJ^@ H x'k->x4)#wG'N EEE8s ^';v .Dcc#ƎW_}J[jSN!;;555EaaubHUU߾W\ .`„ 6ۙ5kfϞ>o(2fH0O9r2 -$lpĈ뮻0c t-“O>١/"""klN_ Xz54 -[>@TyDDDD%,6ر#F|򈈈$K:zP(6uuu  g^RZm}nK@""""7HDDDf  a$"""r3 DDDDn0@""""7HDDDf  a$"""r3 DDDDnFRpŊQ\\fǏGDDd2X)8 h45kJKKTֶڿz+rssjđL\t)x ddd ::5kZ?h ,Z> J%"""G… ())AJJM.#%%EEE+#"""x ZmӮVQQQnc4a4? v6HbYrrrRp%]7I@( t:vN׮<뭏'N۶Eqqqj6 Vv{R ???5hx$$$ // L8aaaM20-- NBvv6jjjBĐ* x/0`ŋc:t(o򉈈FfX,pU* z\HDDDf$s ق:7!"DEDDVi ˫1g!T뛬m!*/{Ƚ09Eay5ZWjF߄֕ZPeDDDd`Chm9dt"""g`$+k1w% j}+Wc$o;H?"""9 pA]ڵ@rBTK1)22/cbB 0Ù|՗yj&""r@rk8 șx' 7^*/&|:.ˊ:@7tW8~j}btƎ"p0QS.JϜE_9%D#]}:q00(}6eϜ*9ADD< .꯯*vp%V\Yg/r@d2[FrٷI Lf +P[߄.͋sJorco0[,[y IQ0n܏n}Cqg$rE??.G]k[f=}99Fay^ IDAT5fj Fk[UWcCm.ǵ|QtB~܏n}xCGrb7`0@RAi{](!] @Wob& 7 aV&;-دs$ r s'.F>mQyaX?.abl?kK "SS?F:_E]2Uv|A54h/_03<Ѐ-[XX"!x}PᖈK4qԏ$Opg|W 2~ydmJ .YYY6\"h4Tlڴ10[~@h 9ے=DI<> =^ޖDn$ӧa2Vmj5***Z}NMMMkjj|̙3E{xx TMDDDR6R.@YlF f3Э[7d[`0 <<'NpۡjGu[u[(bjM000 :­:>'88RRiwތ\???~!9p:p:VGؿ:w$f{zz"..Zf6jsl_|f"""" h4#>> CCC222'NDXXrrrSNСCd5 ׯj*ogϞ-зo_c޼yXx1CϞ= <<<0vX7zE!77:o&Q(뮻!ap:p:cqv Y#k~ DDDDn0nŊQ\\,$AK. 
رcqeIRnn.d2M&8y$yt ׯ/,g20sLDFFQQQ;w[+FܹGFhh(d26md{łl)))U;h4̚5 Ajj*jkkEv؁I&aϞ=/pE > K}_GE"+ѩS'šCdtUti.oXr%/_ÇcXp!^}Uѥ`Ŋ~…Xlw^thjjrrt`4h/_&xg):8ucy睢ˑsax/#66yy_]v.Er^jֶ֭Xkdظqu ^łP}:@CV㭷ƒ>(\… ())AJJM.#%%EEE+^\tL4 FOAAA0`V^-,I;j[޽#F\TVVAR!116'2ӧa2Vmj5***U%=fӦMCrr2+IX~=JKKo>ѥHΏ?+WB_ľ}0exzz"==]ty.-33zBd¼y.MRjjjcߑs0[4i˱{nѥH‰'0uT|]fc 7>{g}AYYMP[$P(lu:U%-'OƖ-[m6]$<<,̟?<j*ZJti.oј7ow>}oҥKMti.ܹs8zJ! ݻwǴi/nCdd$fΜPLar uh{OOOKBBeϞ=KryZ}]Vti4tPԩSE!7oעT*-zZJtI`0,SNte[-/h4.l۶tbf̙3-jڢT*-Æ 9rDln^HDDDf  a$"""r3 DDDDn0@"r[۷oL&ٳgEBDT D$I2쪏ٳg;@uu5T*rw!"I|g(((@vv69bm҈ IRppR l|}}[~뭷-[gϞ ؈~ڵ+Ld}-шӧ#,, ;wFbb"o.]:F,[ ׯG}}=ƍg}ǏGrr2'OơC~zbƍ{pAvmQK DDWx"V\( Bm۶!-- UUUXv- >}: vZ̟?_!"j |||j5"""lTըSlݺ?{,bccq5e%%%9s&&M5k[n!66Ǐ7Zey& WWW8;;o߾z^FDDh}CިQPTTEc,V s@dJfB``Fy ML<mڴGpI,\III8}4|||EO1p@?{,e"|:۷oDž 0n8s-! -[4j[Oz"##>666ꫯǏãmԭ[jej0rH̝;&MD"1~Y*&$*={D֭mB"e:uU7ncʕx&fkkk-[;N:fؾ{\t hӦ ߐ̙3~ipqqZn?Gbb":w\؉&z—_~: ƍ5)[%$$YfJعsFDH$lٲEcٚ5k Hp ٩S'+W4:t=z􀋋 hԻy&ƌ___HRb(**Rչz* www888]v_նVX:7j(@ZZz-4nA._ DFF@wb̘1Zh+Vչv$ K,^AAAJhӦ >_Oz!v܉]S^^{ ސJh֬.]ZJF˗ۇA^zJôiPXXX>cƍhӦ*hҤ t뽝byڄ۶m{x޽{jeDp_>}`(**O?A_~A^믿~zL8jҚDDD ߿ڲ}eSϞ=3g ˖-CΝo>x|x/,, 9997n4i7obƍ(((-222СC`+ЧOlܸQcaooӧoacc+++dgg#66/_@Ĩg['OT*=z 99!!!UV_GXXPiPP1tPԭ[׮] g?<&Oo|6m TXX\|'ND`` 6l؀QF!''SLQf7ހD"_~W^:z(j]Ofdd]v;v1c 0uT4mfBLL ƍѡC PPPoq lذA\.G^^^ԪU 'O^Ө?yyyprr*s{/^#퍱c"&&Fk>Z$U[,[LO*-((P{\TT$;wV+ XYY gΜh0sL3fRTQݽ{WQZ=mҥKL֭[Ν; DHNNVU(BÆ ݻ Bm^xAU%XYY hSԩS¾}T@! 
@())Q1$$D(**R6l H={m}F9*KKKF ̙3'?#Gj}9A8x@XrlÆ !11Q~xxz>^H$BZZޓJa֬Ym̟?_ ?^^{M6m$\Rӧ@¦M0x`6"޽{wOHIIAǎˌt磰:t 8vիY涞<,J5)ؖwT{BNN/^ŋkݻwm눉?s>]6j׮]JS&ڮC:ocɒ%صkFOLb}O>xߣvڰe˰f”)Sp r?^y檉CAAƎ{N`)G Y&n"t*וĔEtIIۘ4i-[S}pqqD"СCU}hlO'ʄ9;;u֭p{WuPxgFII ^xdeeG&M舛7obԨQj}XXXwRL!Jqm:ʲ'/D?;;OXUELJٴi→~ZlY=tPDGGcڵ(,, ۛ={6lقO> .Ie% ggg>}6qϫr/h{r Н,nܸ#GĜ9sTe>nEFqI( Q@CU&Mh޼r} %%%eF)\x+V@TT\ۡuaeL~мys9rDΡCP~rOFyx]<:e`JD"QMvZ&TV-WFBBz:iaX|9ܹ,_jEff&_۵~*_|E$'']&??/F@@^+HIIQ=NOOǶmЭ[7ՈYPPrssqIU۷ok̎dZcoEtttxҋ/;w`ݺub|Y&݆>BCCakkyCkkk 06m@t(݇ 믿؞r> 8V .௿R]G~L&8|,>cULOJIIQL$$QٱcjD:~իΝ=zW^ݻw1|4h@-xZQQQ9G޻ヒc޼y={6?gϞh֬F:u͛HLL3o]2cܸqhڴ)n߾ 6`puuӱvZ'O;VXTlڴIc[e{j08UCG1yd`hԨZn8,ZFѣG77oSXicggnݺa׮]5kr}pHLLD۶m1vX#++ )))صkiPP\]]pB899m۶E&Mwy7oބ36mڤuz K,A^;s΅7~mM6Exx)))6l  [l0n8K=zYYY۷o$r1w)җ  RThҤl2 L0Ak{x20Jr\pss\\\BbW^beÆ ZGDDj9v/ T* wV[7--M<==T*ԯ__0a Uu\" 8Ppuu섰0_~+Fe?yefffʔzjUjJ%X!$$D7n,^ZstyKdgg Gjժ%ԬYS޽py_1K,ׯ/X[[] dddkkk+4o\*|u͛7 D~aFF0aO|||.]/Vm6!88XQ{ٳB׮]5k jƎ+8qBecFzz0p@YYлwoҥK=W^ $vvv*,\PHJP^=ˈ+ <,l"z*K/q H$0aBNcI3\L>]bD֭[6q1k,̟?ۜQٖ-[Ȥ8Hdɓ'GVZsՈ#DD@"X`Ə///\qHd8HDDD$2LD 0$"""&DDDD"Hd @"2+W7@agggggt_5 0wDT4hR)"߿.Μ9ŋ;L""@ȠRSS3Ϡnݺ믿Pvm/_Ư)S))((F x!Q0HA䯸} J>r\mGXXP~}UiTu"** ,_׮]S[/)) IIIѣx>@-[=-ZZw =1tPٳg ԩS~=|hԨPvm˸r A@@@u=ˈHAm߾Gʭ#&&>,+#>>Cը{e 8/̙7775 gΜQy:uoݺu_7|ϟǍ7j߿={e˖7o"##U.\aÆ^_-[>DEEaÆ;w.NݻwGNNѣGhsA&Mcǎ:%%%ݻ79s`ʔ)ӧ!HꫯbǎR!ꫯ>Q5%Hnn@۷ou?.^uwyG _2w^UݻwT*۪yf l2<11Q $&&… 5iΝj׮]O>DԩSB5ʕ_rL. >>>€TeK.sչ_.\ ,P[ާO! @UH#Dd02 Tn~ Vox|"Iiԩ걧'7nW6mڄ-ZI$=BT*ѣ. 
D6o B޽{?4ljk֬6:gkk0U&Ms5jm"!!A,++ ;vßzzbHD +nZZРAr"--M^zpssCvv+W4TNj]Qv% Sܹs{Zuj$gqƨQ 7DEE۰a=z#F赯D$ 3|}}qiwdZkP jDkyYgj[P( Hc1׬YS iӦ!!!|V^֭[qUoLȠzŋh߾zP(t6m*@NN+vPPPɧh  DF CѣGYzBBB`޼y&"z_˯\/"h$(sիm0'N-[4)GԂ{U-+))1؅_~eX[[#..NcOܿ0`ݻNcٓm1gϞŻ kkkgTq *((k֬!CдiS;ذaF)S`ȑXx1rrrdXS܊}]lܸ kPdee… ѢE 4k ڵÌ3wwwO(..6ǘ1c]~ زe ƍwyBیʕ+dt صkz- ذaz ///U/L郓'O/m۰`HR<33gƎ P~},_[lf̘3g>U5kľ}0sLlٲ+Vt邺u%$$7ٳ1c 22/AhԨ+Э[7ӧ۳oO>k֬M{͛7Wkkk!CDoGDTL6 ?#ܹuDDHDT<|WƀN<LDT ܽ{vƍq}aHDT ={Ç}@""""@""""aHDDD$2LD'TB[ 퉈ȼA@^^|}}ae%α0&p-; """z jw & >d2Tb}U1b%DDDD"Hd @""""aHDDD$2LD Tp޽x饗 D[-111]6ѵkW\tLNMѢE ̟?_?| .\CݻwÇM)iU;={ԺL̛7зo_ʕ+퍭[bС Ȥ`YRSSqtUU₶mf`YܹV+V-F.C.d2HDDDdD|ZpqqQ;$""" eP+P-fƌU5N""""ce޽[U&p!o^zRjDDDDUM\|Y855Ǐ;իS?FÆ ?ׯ&"""2j9r#Gbxq!''=v ;;;sLDDDdAsQUd2 77~t 1$"""&DDDD"Hd @""""aHDDD$2LD 0$"""&DDDD"Hd @""""aHDDD$2LD 0$"""&DDDD"HdD?D`` > ;4""""a> b 4k Gѣɓ';<""""mo߾ի k׮Err##"""2.Сvލ/N5 IDAT8gϞ:בdjDDDDUhGOL&M%%%O0|p#..΄QhGׯ_Y)))XbKXB:3f@nn/==݄Di~~~>}:&L*㏱zj?^md2 77 ",(([[[CP)"""""^z | իf͚رc;w.^{5sFDDDdT=?[lݻwaÆ!&&zmCDDDUE_@DDDUE<H @""""aHDDD$2LD;wbG˖-+ ;;یUdSNƋ/TDGG9:"""`Mлwo|HII/h興>EAA`׮]֭]52HDDDDOF;vhtXnŋ[#""",npƍ`ԩScU}u/bYݺu)%HDDTFkԨ7|rܡU[@XX;f0-; 䭷o7n 44j˟y3EFDDDT=X@H$%%%fJ;! 
""zm#Z!UkwZ ;v/ömQgq  _|99997oAۺy&^}Uxxx͛7Ǒ#G oŒ%KVnN2X;ر#lllc={ś """D7055Z(J7X;}l2UY``ODDDd,n000Ǐ(߹s'6mjv~gn Zj%KlDDDDF1a<| 99k׮E||<~sU|>ɓakk#Gj]G.ݦN&,""""S A@BBbccq/0fakk֭[VM<8r1_Hᅠ-0 >.]ƒpܸqàԮ]jeM6u3c )X\vZz;… je/^,BRjDDDDU%Ǐǎ;4ʧMիWiӦ~˗/c͚5Xx1&L`6,% 6l߯*4i֯_DӦMlٲk׮EHH>#̛7Ç7XDDDD"OYf &N??#mۆD4jܡ$R""xxW;{A Q` `ttrOOOK,ODDDdi,fs8t7xk֬Aڵ ЫW/tQ""""۷kVO?!%%֫\.\.W=d h,pNN/^ |gE1qDlܸm/==SLABBZ'>>...????DDDDd AsM^^ߏD$%%ĉhذ!N>moݺVVVj#~~~4QU#"o9$GGGnnnQΝ;gwNR+=z44i_#T Tjb@B#G )) 8pQNDFFb4X{NNN Q+sttF9Qub1 +H|W@PPC#"""V,&/F-$MDDDd*H,20DDDDdLD 0$"""&DDDD"Hd @""""aHDDD$2LD 0$"""&DDDD"Hd @""""aHDDD$2LD Ȉ:G6m///.\0wXDDDDF%pϞ=0axu|sFDDDd4Asa)233={/L& rssl 0w$77u\.\.W=d&ȐD}4BScǎ Z'>>...????GIDDDTy<cǎؿ?֭@???Q!U5<C'_~޽{u& J!JM:&M–-[@sDDDDdtN'L5k`۶mprr;w...7stDDDD!9Dke0jԨr"""qKDDD" @""""aHDDD$2LD 0$"""&DDDD"#VweoSz61+ ndZLL7s 1{y uSfgo{k {}O;2ΐ=;yh2ipu5F(.Q`/gv@w.߇=ת] ߣ ^7sq?ւ@xGcyEY}V󬏋yE>*u]ݭq%95 ^[v?i |#Oջ}0gJ=}~ܗbEׅ}!7xO-xYŽ2y;KR3Ԕsj~e$f\.{1jڢzv6Vҥ}yeK8}ێҫn+?Wh\F¡z5UL[SYhJ݂}5ػaSq#֭lXZU 4v¤. m=t{.fbX=:q^u5F{%gooL oo#H[ vWݽ3\b&C/?*=Wmxa0]lJWM#Q-|/~W{>tw" +*VBJX=u$_Ѵl(LSW{DHkq 1u N?gP u']|!m'RqL=cPnY7Z[OFtź |ߨ}\CYm_TR~U-ݛh3?27s۞!>#JF{zh例9qûݵD|ybYT:?Zj,=|gMд3^N{>6G˲u.~=yۨMtZ8ſpP_dѴlvʘmtt}IF9GU FLgM1)l5tp4W3Uhlvk9yܒNtCi8v=t9u3.7U =窳ΧC~QIU<7u#z/ ϡoCn/fihufwGߖuQ>|tȓCqTw \s4-3OgMP^gl>v>0jݕHt]T%ϗדyU ē@,-#)cr-TY$uMPT(DDS.t zFsVCwe>{W_(k_ {qRg\Lsx}gv &bhz$6Nnj_6OR:T}}(̘"V}B-UɣW%eg46L߲~8;KR 1,z7BEͨ?P?L5@Ach 4}>Toz+_SNf/WcVʾ7 5zQ9Ct2ڳ2kҞ]aS ^⇪>Y}7 `Qև*i1Y_k=,Rk/FCm׀FKb]諡FyqbAEsc\;a9AJV =h8cs`Cڔ4!`FMG%@)Xg=IIʏ5ՙ_C^n0šҪҡkCQ픡"s+˪a~i)7QrF`0\hS{I^#k)}^4/1>z:L-~#m+ctozH>I@_,{sF$HVM1Ңha~^'yd@sF6Wm9F A>W4s}&4ůt?VÓj'*S;zd$||V~ſ}8|m۶ErrCR|Xh^*M[ZEfTUaR_~f xHY1ٵ2 NKw`#L1GQd}Zun:DGGc̙HIIA-н{wܽ+MAqu vb+4Y|,*VV}lrD?>=UnM;}F=\XI d2%|"M1P9hyDΝ;cǎѣ K.5wh*oCG:55 5`Tn;&y> 6Ԇ7"v$H B T y:ۑB!/s8p.hS pffmԻAhfd[MK[PNh߾Þ={p!ubccg0HQn]saUV̘1Ѫ YYYFmЈF !Cܹs-[Ν;5N !"""Ncccc9aڴi1vX jmmԨ!(طþ5o}k9D{0XN DDDDbHd @""""aHDDD$2LȢ-_Gv $FDT1$"Q(((@\\@""0$"""&DT!&&pqq#:uDUk׮DD7B:<www١uR>p GGGرprr3ڴi5kfΜ 7x!H@"d2~DDD>Cll,233ѽ{w?~ 
UVaժUxgΜAvp9L>ś#-[h9i$8q3g}vL8QѫW/deeaƌ={~0bcݺujaƍ0` _DDȂ-[L >|XbA.egg k*3gF.]͛ >T) CBÆ 5bڵP(TӦMA!''Gprrڶm+Uz m۶U[yfG*#DTY[[P(bn)))実 F^^ݻ{޽;.]7o3n8H$N:iii?yyy>}(^p!\rEU???W3+Vg<<<_{e?j3gܽ{Wmz=vssdgg* )!C@*"!!_~ÇWK  "իWcԨQׯ}]xyyj#k( ;#ݻkӠAZ Pлwo$$$ &&7n\.ǫZU@"6n܈cjf;%]#jؠk׮)((piIQQQ۷/>j ͚53HDD0UiѸңo988rrrʽEvt NNN׸˓={DZgaϞ=#" U K.U]Bl޼G^ "88 4iiӦiԍw}.]vOQEHZ&"":qZl+WbĈDslɒ%Y$DDCDDf}v={/ĉhH$xLݻcժUprr2wHD$;;;m:._D7^'"Sv ֬u IDAT uV&DdR&\n1sLE޽-Jsvv۷UxUg&;w.ƎѣG#88 ..]sD՟ #&"""2jqHQQ=3fʬеkWCPgŧ~Z-r9rB@VV<<Z$jՂ52223226lllЪU+\|YgT TQjqؽ{LP`j|e)))SxN""" DGGcȑhݺ50o;v,ܹ777Fppv$x+JdpqqAnn.U&sHLD 0$"""&DDDD"Hd @""""aHDDD$2LD 0$"""&DDDD"Hd @""""aHDDD$2LD 0$"""&DDDD"HdU8|m۶Err^OH$ׯ#$"""2j[ј9s&RRRТE tw-sk׮wANL)yUpܹ;v,F`,\XtuJJJ0|pš~&|EXTTGk׮2+++tԹެY1cƘ"L""""P½{PRRooorooo?^:Ǐ?Ǐݎ\.\.W=dO0UÈ#dԪUK3bDDDDQ-FkժkkkdddgddG+Wp5K2BQ.\ f̘hcL$j"44wV]EP`ݘ8qF&MԩSjeZgR'J!J DDDD&T-@ȑ#Ѻuka޼yѣQQQSagg]]]@6 !Cܹs-[Ν;U'\~VVHDDDF"` *L-ҳČ 0$"""&DDDD"{N?A. ,byI5ە.Z&=/MX-kK^ҵM/6+ӇԲEQM432ܙ&Ѐ3̙̓Ϝ=| P| P| P| P| P| P|oQMMѣG͛7 {^oV ӧOjjҥ[j*x `aa*I?!ŢG^вe N} /555j߾$i;vuѣ>={jÆ *))ѻᆱ#FHfp:cx={OYV%%%I:4p>ǫX{ѦMÇ_KZʕ+`YV_t_]Сڵk8⋗9ZHRDD"""</i999lZzVK.,G}T{V``6nܨtuE)))?d~.o:v/7h9ժXBt:ɓ'+##A4hFAWVVl6ns"oNf#,,Lڳg͕lnTPPdw\u_tZUVVz<ZCN[3foիe2$IC[ɓ'p8dX<-8pvEEEV&IO?~__tL=c R> dڵ>}Idfڵk۷o"޽[͓fӶm.̙3eݏ& WP^<8 ٠utY&IeeeeeeL..SO>D]>((HAAA R^uv҂ ޠuj՘1c$I&58TmmVdEDDhҥ:~$k׮z衇6x=6MiiiWbbT]].&LPTT233%/>>^o_|QVj Ђ^vϪ)٩:qfϞRiӦM!%պ_*$$D{K/`-!Gևw \VV[ dxq~ {Qqqf͚]H^c}3: O0ptt ^/(>SQllڶm6mx detbxLKK3:O1JІ 'Hjr- ^?35JǎS^$h[58!w1|)SP޽Lbt<c۷kΝ wuI ,А!C L ? 
Ϝ9@x7 M7ݤ?ڵk\.\.vܩ?[u /˖-Sll` 2D={SO=et<c5:to>}>}gϞ'Noٓ ?3:t:v쨎;*99oNK'=؛9 .K(O7=#$رc ^GNNl6̙B 0@)))*//wm۶;֭[h1޳8}.IgBa)?X2/tACӟT\\^xAZJHHЊ+$INSњuuu*((Prr{_k:jjjt9_tZUVVzoϹLgx[?YΝ$X3fиqΝ;d2cL?%Khڼy\6((HaaarcZA0{s0?@vƏ={J:~;]v ZjUbb/_.$nݺiҤIh"͛7OK:.PyYui/G|Lib6eرCΜ9kᗴͦ4+11QYYYVzz$i„ Rff$i…={^yĸ Uhhh~ȟ理NF^İSN馛n$Çh"h̘1Z|T8qBgViii&ĐbUTWW{gΜ9;wn|HȰS#G 7ܠ3fHݫ+--M}ŋu}2!dZN)**8͛D=3lZl^}Ux- _q߾۷kȑ Mv_fXX,:r䈤QXX1Jmڴ1*25J4sLmVCu?VllQaq;VÆ ShhyvZ1¨x^E:44T&cBar#7s`wyQ| P| P| P| P| P| P| xU\rbbb,ժ.GƍSLLՌI50''G6MsQaa ׻|MMz (""k O>JOOW߾}zjmVk׮w-^Xwq9-qթ@1%''+//d-Oɓ'p8dX<-8hS[[ZF[7@s#%33Sf6:%عsgL&y53gn%%%n000PVnn{t*77WIII>AAA x6^q $l6)>>^Ruu%I&LPTT233%8~;"gϞ}5055U'NٳUZZ8mڴ=1X;W_iK,ђ%K4l0m۶4?2:DkUYY),`Z ^r c(>c(>c(>c(>c(>c(>&5"Z{Om PVnt7g5ϛuEmW&-[:z [^3'לhS^sւJF] `+鳍LiBLs8]?R>Xe3:RWnrbEtiS~L:sEC{W̐mw~=_N"‚5_f|m#*o)U=?GGUH`,uuż/tFWI1 h RO:tR/USPBLҮAS*#X6ǗڼLߜs{zdT]LI~RyYUTשC@nús\.CV2Ͳ km Mt`1iCdF[ JFTr=E Ol1:rJ(88XVU?<^S޽n?݈-FGI8SG l"^Ssrrd4gjJIIQyyy_ҝwީ{W}ƌ1ch߾}͜at܉3u8Sgt 5ZV%$$hŊ$өhM{m۶m۶鴤mrTUUHCs+ `Νe2TVV1^VVz_qIKRPP<:tp/]XXh۶m۶mNKٶz[^1 800PVnn{t*77WIII&))cyIڲeE^qPl6DeeeZ钤 &(**JSjذaZtFlٳGk֬1c49ܹs1O:tмydI/^zIz)h̘1Oӧ/^ L> bg!&I7plۦöm:lۦöm:lۖkkp@C1@ClV\j*??H^ffԾ}{uEcƌ,X ???M6(^رcoN:)$$Dמ={9͚5KݻwWHHbccw^_7|"## 6xriڵBBBCwQ[l6͙3G0`RRRT^^ntVm8qvܩ-[ܹs1bUvޭk5k 2Dmڴ;CkҥرZ jժUZb>-\P-ˍTWWkZre?_h-[իWk׮]j׮RRRtfN۸ L fZ+VH:&њ}$nbwad\NJNNv+99Yyyy&>v]np1qD=/~󟊏mݦ.]hzg:O?$֎;4rHy#G,ʾq+ɓr8X,E0(q:6m ~+dggPw6:ϵj*l6=#ڽ{L@UPeez-$áy鮻2:W)--zm ̓6qD۷O;v0:W())ԩSe8Nkj߾}Zz5'zW/W^W_"M6Ml[x%NP;wdRYYxYY""" J]&M7j֭ntPPPr 4H ۵l2pUڵ1֧O{}(Ь(>Ν뮻NǏl6:.4+ W;%)''Ggc 5"#RDDa61z){N:tƍիW/mVǏWMMyĨcǎ2ejkk5}tEEE]vZڶmA~\%Ѳe˔*;VoԡC5n8 2DI&iVdd֯_oQ{ՕW^i' Q;Ν;UV)66V4~x*++Shh_ںuRSSU\\u֩XӧkӦMZnϟozQ;ڶm.dXqbQyy$i޽r8ꪫ[lyϝ;9s 99|AQ`oHw}:M4@4U3ydt8u,YD9s~~~*}2d @;wDPPִ QMׯ:tP-mjɩZ޽; "n޼9^{5^oV, stttDdA<LT|ڵ+쌈lڴIL&ĉVZAP`ǎ: ɰyfi֭L&ÁgW^ՙv!pqqA=_ԻuƎ ( ^CQQXڵk:t({ V;=bccѰaCaȐ!ΆJ”)S:u`̘1PTZmTܞ͛7"""gzG9s@&iUVG ~:ypvv7uwʕ:t( **Jl#111wرcQ~}899m۶XjVdd2|XlBCCP(бcG9rDg:FzzVL&?3<#G} К?'^zzH >>֚PtҥIV֭S~ٳg+V@Ϟ=w^t pmt YYY?~W\_~ 
!33srJc֬YZ޽6lɓP(W_o߾8|0ë}֬YW^y:u#G`>|85jd,^8w\\\O`/0sLlVVPPH\r'NDpp06n܈ѣG#++ Vu!''*d2>c>cǎPv|Yн{wb`gQFl';;EEEP*&O^~;w)ל~V7֪)W:޴iSe5k HKK3Ê+p- N~6_M'UE&|ѣٻw/y<ꫯРA888`ŊXnN=IDGG_͛7RpA,\P[F^ B~~>ƍ\Lj5O>AvSNdddH^nU*WLҗRIb L2]td2.nfmBD@Z~~7( |Ŋף6lbxd9ɢ>>>>pwwǙ3g.#00/^)p8ݜ4=]t ...PSSr`8YܴiFye:_@:u jZY"r2L7)99&Q^=k׮E|||8~m <'OF~~>/^f͚\;wbGpp0:wb͚5@XX8;wjGvA.㣏>Bvv6 z ___ǏKb8vi&_X`̺,y 2Ic60+Vu[iӦBZh!XB-F&Lл… @|BHH ˵n S60 bB֭>[P~O>Dg }n#vH駟Ʒ~kpL&Ä ta p Ie!::ҡC@"2СC8uΝ"=DdŋkWt8DD8ư0$"""1Ll @""""0$"""1Ll @"W^ū899ݺuKGDd(8"2_~CB@tt4QTT}aڴi8{,-[f0lBDf6mڠQF?ѠAW\/_B>| B8;;[ *")`"2?ou?hҤ`ܹ B@PPfΜ J5OPP}SNprrBHHgeeaԩ B@Ftʕ+!ɐ5_bb"d2ŲHرcx'ₙ3gjoCpvvҥKy׮]8;; ÇGJJ25;wQQQpqqAÆ Waa!̙f͚ 4s=WBg;^}UiDdYm۶ !!!ڵɺ f͚_УGa:u\!C੧¼yPn]=gϞ{/ѻwo| .͛>C~Ю];,XQQQⴋ/bĈxꩧ矣]vhڴ)ϟ)S`׮]x'~ff&틶mb޼yhѢ~ml߾]SZZ"667o^udgg̙3dx}vdddhm6(JDTK DDf-}YuO8!^y7|S bY``@سgXv]APoX6k,?,OV +VIIIZBBBX֣Gd41رC<99Y￯U~i^\ի2J% ?X|r0|uExb<$#"A` R믿bbbx eTݻ}||мys\vM,жm[ UVaРAZ[jڴiشi_~O?%Km۶hժ{1̘1w}{g3f 99 yf?ofڌիÇ{Ν;W 76n܈~,ED @"2gyN'|[bP(hӦ ͛q`ʕؼy30c ̞=[Nݻg͛j*'DFzxWcǎETTz)ѬY3|gwxgܞ\.ǯ֭?ooo(,,|đ=ZI C~N ,?<իW~زe (C%"""zjm1IIIsz%yxxs8p##"""~И;wׯU^~}q>* *J|T*'@"""jd=*.._@@C""""2LZ婩4}f̘l/%%Z$"""6v˔J%:.]OP]돈csssquRRN8///4nSL{gM"881h FMDDDTjmxQDEEcbbFʕ+[o!//ǏGVVqرNNN 葐 X:JT<LDDTCmcl@""""0$"""1Ll @""""0$"""1Ll @""""0$"""1Ll @""""0$"""1Ll @""""0$"""16wApp0sBKFDDDT-|GXx1VZVZѣ3f <<<0ydKGDDDTml6ܿ?}Y 0z)]b׮]tɓطogpJRGDDDTlӡT*ѢE rȑ# G%lx[ǏǪU駟bժU1cſG1y5 ӧODŽ IJ{k׮Ņ $T*lWWDDDdF<~p`~~>W_.CV[(""""Gf>xѸqcj 7ϟ_~ҡU+=wy7oݻw#F`֬Ypttjm84~jmx bHDDDdc&DDDD6 pǎطozѢEh׮^xdffZ02"""iӦATN>7xGRRbbb,QguOIJJBXX~߿#""";ww///gv 111֭> 6.]FY8:"""z-Zlڴ /FÆ ۷oG߾}-QgU.))uлwoY:,A""o+T*KBDDDTkYU:um0j-/x ܼypuu՚ަM EFDDDT;X@픔d2 J?! 
""yx$K@DDDTY]hj5֬Ynݺׯ_,X[npdDDDD5%/FLL ,q̟',X`eݺu /"֭[ѣf]/_\.;tӧOm9֭}v;wCݺuͶ """"kducо}{rB<-磏>B@@VX!}""""keu=8qN;вeK-租~B0tP}>L0ol˹v8p̙8r&O GGG5J<*J1uJl=*Vw#hǜ9spU?bcc1vX-:tŲɓ'ȑ#8py̙Xr[$QMA[)`9r$._\ܹs7o4k 4@XXVY˖-q ̘1_JJYc""""z.\~oMft /^*tQ+ k4Vkؾ}NԩSvZ-gԩ8x >\r֭òe0a-Y]#F`߾}b٤I#!!lر#6oތ#<<sł 0rH-YE ֭ĉo֭[f͚Y:4-DJDDTm^xYYY֭|||{n4ia V-/|WbUXDDDDU$&M@Te2٣ VМwqVupqq1qKBDDDTkYUƍҡ  IDATZVa̙Ȱt(DDDDUh…r WWWǏPdDDDD% tDDDDU> ĉjPرc8<UVh߾#"""".{.Dxzz>>>f'M={3gT*1ydKGDDDTY@ܹ;v*?|0z, EcjPVAj.V:n߾-ݺu SNœO>iȈjK.\R "44P*/-Qg5W'%%!888~8v܉ .Zl^zY8B"""jP"** ={Ddd$z)KEDDDTXM"11X~=={"** QQQ_$"""f `dd$̙Ddff?#py=hժU-?L&Ô)SmDDDDjz+rrrBϞ=#** ۷oҥK1v,]mڴ@QQكXDEE "))ȑ#_nݺfoXM`Ϟ=q!GxWn:4hРZ;a 0z{W""""Vݻ 4ѣu}?#GHRRJB#"""6Vs 8++ ˖- >#u֘8q"6mڄ4./%%:$i8xxxfQ X:}rrro>$$$ 11'ODӦMqe <r\,+--L&T*4@`@@M?LQ*՜^^^Bݺuaooϛ'|O*3f Zh~['BBa,j@ZG"11 믿 "** -BTTٖp2WWWx{{&Vzzz"//~~~g}HZ:4"""ZjO>QQQh֬bHLLزI_}UK@DDDd60DDDDh0$"""1Ll @""""0$"""1Ll @""""0$"""1Ll @""""0$"""1Ll @""""0$"""1LlM'qqqر#AŋZt{nL0byyyA,HKK/vލ'xd}R dggDHDDDoXl* *J|T*I\DDDDdӧ+R՘2e upu!<(Okaطo5jB&""ix 'N?={L@P@P<ȈϦ@A0i$l޼tHDDDDΦ &`ݺuغu+p-Q12Lo+0zhs Q㷍pKDDD6!"""1Ll @""""0$"""1Ll @""""c rT+2Zs ֿok'^-k/}39Nѻ|*.\U z41Vp=W30 rxMP\6:oPo#T,, T%MJÍ|toZvn\00M &zu#|wmRloP{ nاof?/^4{Te!><򋍶᪐SG|QN0:\qC9۽I(1qn:N(.Q\Vf`vV6{.w3/?oOx:bfHW WM~g7W=j*A춳HUS@δ dx:w;J,2 KӾ'j&),ƻnuqS`apwryʼnW%wa40p$9ߐ?م'hញO#".\'wNg+;;^NGoJE}7LzNO'n㧓>/C0nBo`;&}) #;Ms)I[>/ay^4YvVffJ*iyEj WWL ɛYXsxndMewLj"= +./=g;/ffJbR/ዩ9XXOToæq5-_%^EC!z{2on< V} $q/ =W|xryo ߸siǿ]@RegH\n+.K7DOfn> U% QWG^jN!>Q8M>u:u( ouV jM J n Mb3`:mT}7yĺC7mYK]NRA[NF4c!޺u[ΠDS3_xO urNjMv%|jWu`h4;@o ϝBGy{ P0嚺g yvDоAIE|Jתwٿ ,߱H[7';cG&ݿQbhH4STIڡTj|9<() &.@u$fR c5*'b;Ycx06 m(d&r`sex۔ЪW| m{ ~-ueG5uM*&ZӞDtA?:$:)?* I?4i1dYqz̵ϧʉU ~ X%%-izg`ld(5C901U4[Ze59譐ҳPy=LV#P+SIT0\ =YɊf13ig7ILHq*eGk剚vl3.GPR-}U?<dXӿV+V-[ WHnyle{ nϔ*Bx1֞pX6i4 JE4tRK8uZT$Xn٥II}]mjwa7y4# ie>sRu|DNk>o.c0y tr;,}IdyLSne؞_YSb`UNL2Ψ*=qVPrjz&U9Htnb*=^ƮI%eVi> !bzl{5Ltxԃߎ/ۑ[[>eU|LMIKM00}gOՋ 6nEw'a$j;+]C;0)P;|?c͛zn˓6 +F@~CFTj|ؘV\!rDZgL˲I$ m?oZXKMLW46VLm+}=pJ, 
2E3t}DHm|>phht'Zμ F׻ ;׊q<0a'PG%M\*TVu7^ x A|()WKhuEV V@Pk+ =*.vZի %R{U X{zz{@UwKV.ƿ}> |lYj zRk # j,(S㖤'b*4 2)ms ]P> 1RzU2dY q?a; gG&)1k-Эu$~["u `ٲuO&˳pѢE :wÇ[:$-b6ڡ:UYrUJeldSPSrZBs3Hʩ-s2upUn#% Bk =lnBw [vaR/X|^. )a{N=U~mb"퇑&6)=l:ܰabbb0{l?~m۶E>}p,EE V8h60V XΨ2^hTjL)CRMU.L0~NZvCwz/h?'n TXg=U\hxUf`{c_L,k[;y1'ig'$j~"N{n+X6VdjJڤ0Z![Pp7nƌ0,Y...X|CUeU2Jy"ru&˷CV+x *SʒN蟷jP=@.U5 husOJaxyRE {&$ᕇMHhYz{Ov#fmGs;)ۨBRS~+_?Y}pQQ;3fevvvի8`ȴU.{/e+ kbyZtPP\5)y1v&{ybszu~;~l|*neS5/W+0S/kzJ2MEI?S7tL2&ws>kw M/"|ͳ++SluԪR/*&(e\݆#)tqF7ܜgJٯGwrRrǮg7%ߏ~b[8p`:qVft~ZDž ΣRRe;wRYmqBGQ~ؕC/ʾv2^lt˝S\P|R\CJ*ہy9\jU>6쿤wzIayJ V/(༾RQc=(ZM.nr5ۤlP*u/E..(Er:E@Q>F.1V&7T>kq:鎞e--̃Z|iiaJh]vK V%*j +6N[Y\r;Il6Eqgt*Gu!Tb)rcYGu!דZvŦWqm *}va PYgUX2Ww~;|vT./'*\y^ܫ0JnP /)DqJ ^GuTeǫg5UuHSm"lto߾ bҥX[oa8tִmA@NN-%l興ڵ  Pڵ 'N;BB*X5-mmᶭ>ܶZC(M &&FBЩS',Xyyy3fC#"""666 iii5kܹvaǎ:&9s̱tԩS'L:ƍgAr9"##aoo9{ත>ܶՇ۶pVn[aW*~-bHDDDdc&DDDD6 aHDVmʕd8zC9s 11<`LB &DTa֬Y\]]ѽ{w$$$uL&L&C!]pC СCOZҜ믿bHKKӉmѣ;bݺuٳgA|Ǐ' ͱt0$MToAdd$>#̙3iiiӧN8ŋƚ5kfCyK/6lК6m?'''o/""@DdVX!9wzIIR233 /X&fϞƓO>)nZ(,,jеkWiӦ:KPbԩS\.dee YYYйsg@kYҥйsg?@HHH0E{Fpttjddd:tMΟ?t޽{ӧ._[ni3~xd2uQZZׯ`:x狎ơCpU,>>ѣG7DL[jڴi'''x{{ M{wy>>>Zgܽ{Wkƍk[. 
33Ą.<<貇 Bx@vv6~g9R+Q$"27{K@D0֮]ѣGcРA6m|}}!ճfZӧ:M4z-J׭[D||#޽DHj˗P(22?# )) K,AXXrssz Æ ЬY3xyy!<<XhqnƍCHHRSSqܼy'OR+cǎxPn]Xf ,وZu xѢE :wÇrJd2?>xd`˖-L葪5 ٳgh۶-?xf&?>ƍ1c ,, K, /_npL???~0b""""˨رc1cXfgg^z< \BV_>`RTPTkZ x{{DDD5 Ɂ?jM_XԊ0==:=xDž Ӽys,_mڴAvv6>StgϞEFXODDD^JJc~mW+ѥKtE|ݵkWlK.ܹs3c ĈѸqcݽc&""T*`_Պ^zHMM*OMM6о}{\r`BBS[ķ#"""k.LVc׮]Z|Ɣ|'zbbb0j(t:u‚ 1c({zÆ xwcI&'|ׯW^jUZ6 iii5kܹvaǎ!7nк'33ƍÝ;wPn]DDD` *=|CP*@vv6<~ג1DDDD$@""""0$"""1Ll @""""0$"""1Ll @""""0$"""1Ll @""""0$"""1Ll @""""0$"""1Ll @""""SE!((NNNܹ3>,i2  ,$6l@LL fϞǏm۶ӧ޽ktd޽#ȲjM8|7cƌAXX,Y,_<9r$bcc%"""ZرcիXfgg^z{w닱c>00t~ZDž γo>|8qT*T*R|,VVUNN^z%|רW!TcDDDDգV֫WrZ婩өU$''駟j5/^Dhh|3f@LLZT2 $""V$][jڵ 'NԩߢE >}ZrrrL  W `ԨQС:u  //cƌDGGaÆõr"""ڦ$Æ CZZf͚;w]vرcxaȍ7`ggCA,DMT{E}lH, I\ f-)zZ+JZpPxiP`6n S ސ$Ptsӄ~lM 6HIf߯s<;avfJ6M.KaaaFOg@?C3@?C3@?C3@?C3@?C3@?C3@?C3@?C3/RuuuǎSnnz-S[o͛%I_|RRRrJzZnX\\뮻NꫯnرcڼyVZep:1թ[nzK&LP@@;vtcxׯmۦ2ѣ%I 38^ϟٳg+66V)))JMMtha N`>I&TP~~o|ԨQկ~uQZvbcc^pkDu]]vU||o>:@HRdd"##%''_2l"á+%%EJOOב#Gqp=#0`}veee)""B~3J'L칯Z楤())Ik֬$y<hn2nI .l*l6\.W`mG6 ԁ|?/**fkTTT4X@@TPP?_zt:u]W__FƐ7m9stkZ$ۭ٭ԩSrݲv>|׹\.EGG^VUO=~^p~NN{feh dƍ={IjƍݺuSII߯Eph׮]?w\\.ߣU/Ç>,ӬeKVU+**λԯ_?IR||>nhr~pp 2fee{'|w߾}Zd %$$tj^t:5mڴfgx<7Ё^WXH\R'N$[>xf/p(33SJNNVnnjkk}%rʔ)VNN%&&*..Nz7kݺu-& `@@z!=Cj%:yϟr+??waHiiscmm>PΝ5` h7Nr@>Bt>(~cbbiml}gFG :ũK.ԩS>}ڠddx5:_1fff^%vk۶m$IW]un /ƎǏ~OoLLx \ xƌSYYU\\RW3f0:~pڻw}c={Ԓ%K4bGU]]}xMM H`noY?ϴo>y^y^ݻW?u-t /VR\\RSS1BӓO>it<1ݻ_ կ_?~QڀO8QK.=o|ٲe H`n~[cǎ=o|̘1z H`n SN2 ^ -[7A d޼y0a>?$9N+bp:17N۶mŋꫯsκ?I#G4: VeWX:0]V˗/WyyիW+99ɹO?6oެCIx oJUUl6\.Z=|]z+Xo[nZ{~$}o~~XO$^Ɩ-[p8`kСJOOWeeewڥ;C;wTAAbbb4zZgk?tBBq'I宯/+ #dgȑ#_Bڼys$YFxӧ+;;vGZfLҬuֿ 畿s,"m!3| E`;8p8tw룏>RHHo|ر;`CC PZZ :9sFS__FPx˟$y%p}£[e /׽{xtt˛SNvn7^Ɯ9sըDl6#&&Y˾X.f^<;NdXduFG!ܹsr|V<3r-/3gH,JKK5gM8YիV***WTT(22_b -YDooF֐7\m!}8? ʕ+USS}9rE5kAAAJHHy<9N^u˖-… K~/-`тqgK ~#hͦ;vhϞ=zTSS5ꢖp8D%''+77Wʒ$M2Eɑ$-]TK/X߹ m7-8xy>V K7|${>-[Luuu?~V^f-/##C'OU^^x. 
)--U@x[N 4iR,X@>h˼KtH (8fp 3g$JHHPff˗{m7e)G47//Ozp8j*F0- }w֘1c|ϓZ6+̰huQIgɣX\sԩQL˰8vXeggwܹsեK]wu{3*iv… 5a9Rzƍ5zhaWr*h mT "aNnݔp 5Uph[@?C3@?C3@?C3@?C3@?C3@?C3@?C3*k׮UllBBB '*66VEm8)[lЂ T\\C*==]Mί_%K(224'OSeeeiРAZ~t颍769?))I˗/ɓicР"?Fh Nno4nu[O}}}ϫZlmGJNNl6ct$fثW/YVUTT4h <Ν+{زڊ) `PPt:}cGNS-`5zt48P233d檶VYYY)S(::Z999^8|q(44T3}6      4:.MWUV7([~vC1 `i=3}'4HC;._wAIhDv@k.X9YӠcOh򆿶<_(Peu5w/kEBC[t/j6^s5UӪJB7\K,%Zso/LN.+5VAsۣN6(<4XpE>e~[hFn;.u;.롢c:Y^UUUl6\.ll}O/+kh]<Ϟo")[bl*Gq% E\Vl]:z'+Fcz@ ?ʟy%n}zW%e. [FG2-Sk*66V!!!JIIQaa7W4`hȐ!z7(饉cJLSl"á XCUzz*++׿Uwq?~ƏCq7\m!Fחr՝1:%%%i͚5$ǣM>]Pmmok7kFC~Bq#\b@\Р"͝;7449FcڶmS__zs%/R[OWy9`~յܲLr [1E={bi{UUU)&&Feee~{m[۶m[۶mzU]](CsW^Zh4^QQ&_yQ%)88Xƺw-S_0Ҙ۶m[۶m[O{ٶzS\9NߘTjjjIMMm4_vqfa#p8D%''+77Wʒ$M2Eɑ$͜9S#Gʕ+uM7)//OІ |裏>jt0x`u]-Ҋ+$I//Iz'K:{|r-YDzg4b7Zhnm[۶m[۶mcs|@?C3@?Cl֮]X(%%EFGrrrnݺ)""BǏב#GeJK,bѬYb Ǐ׏cS;w֐!Ctcuxn[S߾}չsgi…~=ָq%Ţm۶5ջwouYiii裏 J(ؖ-[p8`kСJOOWee:ݻwkԩڻwvء3ghѪ5:߿_uW>s1B:uZrzatoҥZn֬Y>@K.ղe˴zjuH:t֮]ϗ-[UViڷovt}WmԿqv,%%EIIIZfnӧ+;;tqIEDDhݺ덎c 555>|z)=㊏Wnnѱ:l/;ctӹe3&NΝ;^00YgXuV=x^hْ$%ݮg}V'O62_`;Р"$lMޠ orrw޽FG$5شiLiȑޙ3g~{ 0a#BUUw̙>}xCBB_~GCڹsgz^Λ7k۽QFy9blh?}     k.Y,}FG6E`J>Z8qB6Ц&t;%i˖-?9 Uhhp`Jfbi4zG>w۷ҥ&M:=sU=4c nߺ5{lEGGk׮JIIѮ] zړ:ZJyyyք HݻwכoO?T'NԈ#!I6m})**J[nՍ7ި+0(5gΜѺu'I4iyUTT(44T }ܹS*--զMTZZ(IٳM6iFhK.'Iv]$S'_ݯW~K_<]t{ڭ-:`^~>|~$^. /~gI  㙮 X?)ޢ]D=2= ^mSat%gii^mJm}:KA.Sm,| V6 .NI;~x^NeT\?ɓ_c}-.<_G-??MP7fO'WLpsv197O;ǖ/0g^kn2>j6o?+$3?UM>2<ξF@66˃hiO 3Hm;£"ku NFRM{6 L(RY`4~_ }0·yi.>I-}7//}򓟜oN23࢑Ix&\mb Y+g&hި "Y*7/Մ4l ^x$)G*YyJdϠbS7g8t,m]J!`.*쎄ٱg(w;"mDI$/{|w&I/4gϞ׽nZ֕47K2bPe۫*`$?AvBOV2H'(VAľ(%šJ7NS-f_SSaXmBye53NLKCğXm+G *؋dce˺x8[ni:iH @o,C'=O^dgXEeJu]qF5)Py^M$-N Fs#V\@1Ȁ!VXf )HJE<gIC9~f͘w$ ǥ!`$d/{qr|#<~K/5M<3gLyѺ0DB<8\4Mʦ5dt! }ٕ]sNq_C8cRy.!Eh}uzAls@IQւx eo\Ds^(üMݤ%%&k*&?҄9WZ-a? tNS'Ntj7pL%+Ns%'3@y-4=4lR) :YsƈSI1$*_K^sJ!zFbEjٺ&-c#"2v,Mh<!ViǛo~~*Q:q}j+^M9UFyVn7[w)Eȣ*Y~vw=݂k;ѓf |b\G-rLTKHZiUb*-̲.@FRAr\P># ] rs22 :#Jʇ? |a@c/d@.8J쫛. ۣYdLw@u]zx'hTYSpC-`& PX1`!az@Eq^>G!! 
N8&P\S*K3N2x$(;ݱQE4-+׆eo|Ctj^J @9$Ci=%ZQ EPTb!BZs-D+xR@Z&1QVATn*dEQAĹp%Vpsa.]QL#DU[gⓋ}f%Yz1^pWܟccc`a!U<_kO|{ .(n; &„.Q2LwcI HR*B^ZJ Y!Snjm*~K] 0mbك@c6hKyr ѵC)^N΃[9>&In(Chu?)i?8b @m1reH[L5p>`J5՘3J EhIVSz)cŸ6xHAYXzꎚ[|Jl51FkVR6Rt`ŤfA_Вu r[Av; s @%I<""nC^1"F'Y+LC%;foBLH$#WJsO+ }[nXǵl"Z]i~C1'X_Js.MI2e rX0h$FE`u7,/bNG2ܰ6;pNר}5'v "vgE\MJų)<<yQ ,؍ʨI#\lIr=gXb-52ӏhxt RZĸC٨A^.g^J0:t7a':*h!%#k\ȥ ?S[bV 7~ׯYUZH1 ՐDK_E> p0HnVmNJ3d 4OEmۄT.CA.zdp ;XgJ_ⲁtS'`Ճs{\"9N  :MgՄd,Rqɂf!~RfCt>*>We"ILOzVDPa"BF Xbabu2 3kI0!>AE%FsR bɼcBQ c%|i}M ?s"8] B"14 i5uR`YoGSankM9+ɦ:v0ie]t#=Lqg颌ٳgO )~3@ y r$n=4M]. r#1"g*(3D+K.Q,ŇI֑{$X| @S`f{QL n:HTn'A%LVNljPU6]jPK ^ Cyx^ `c42,(ɺI+3P:%rQTtJS|OL`KYF7)J{z<{l p:z1%[cBPhZ WRĩ,uq ?__I.%= hؼ!c-!"F!S(@vm*(޽r@=,Vw|)&c\W[bhe/a+ b"ߗcR4R2.YH@s^egOEtlN!/# = 893_%\ -t:izSz!YgKd:00\ Aa*v͆ p=\x9`Rq23e" fm7jfPN"ZYh'q[-g>T[p-?Zh"=9t)& . C'?%}WH,*/ZO`0 RךPffp]V$2  5Ipk:!^Sg8B<-m!=6^}9*^Ubr?`"bJtK|L¸dT-$%>;WaӴt#a6u6~t#bW mmF 7y@~m=) ԤCAkvx@#X$aPMB4e. zc,_7&tmsN3QXQDs/u'!ssBOy(#s%gHufR<0y!8#JTٕ;q*!ҪR2-&:k3a>_ߡc۸_ /DPg|pt~!E)Ȥ7M1=U4Ҝa{pɑq~$rLB%j9gs\ y Yo9Ƌ7bd/(6 X),05yh eAv)e2\2 gӈ٦hg%K &Ek~t}K/H{>3n{wB z?'I\h>E6Y%pTeဘ[DW@ڍ9KB.[x&ӄ#4k.PnQp-HR\GTmErUZNVPcd-x,0 !+F+wWM'Ҡ<$J˯n8 Rll>^*{B@G {9J @ X$wŪ&_2dF-@a2:&]+ TmU) #Qn .=ی-?N:`, ( $eR7C*G" xQQtd܊BA2s6iH]fYΧ${ʊVVܽOi΄2Gxj}di:^DA%%H# 3I%hy,ti-"wIa2FUs% Ԓm&L($ fVºdվšz Tb MZR f$"D͓>Yl!rA-ɀgl&Vsܻ~u3fB'i'hĠ{ =tAZM:z(U%Jvr4 d%ËOw Qb8)>"`KЇ`kd1f"/x:) >$Ӭ.#km1Х]넡A~}T^ iQĊJ\*:V tW*C!XGQJ) ٫.a϶K`#t W+Ql&xlQE%̐g8#ll$6 cR+{M82[_4h\"9] 0)(Dkp`x*rԎ2_U#w},%DY7k:G)6.z]ε5oKTr9v4)nI W9Z @ӢL}('D]_(PXƣ{vzy[8b2*,rR T|ɬGdwFw2rm0v[\jI@i颯ȹسqWNa:b+iяe Io5NKٛ Dqe+ce2 Y6.WnǩUgE>N=۵kj7{YӐ#qg@@*6H\E~Eo88/q90l68&^fH3'$%Ѱ| Pvb'x|@TT ʎ= b_ ސݬc%^o. SSGvo*&{Z9zWY]\l(3]U^?VN>}sswE^vW]zIw%_u/xeN_~x-]se\}٥Oߺ.pȅ\p|۷/p5 灁ۜ+DSݹ#c{}@sk1fD"wcc*0(K׀+aYZav[~G%4ʧ Bs\ Ibꀧ?Moo:m߼ʫ6`4w4Ǻ+Җ a"lc H lhc^nDg6aO(y_se7]sy#?pwn7?{|w=ߓ;<~l~|O]oo~у/?t '@v ={9b$ەqԌ9. 
~XJ`}C;<9o3'qyEnrRhp}&[jX+ ZSR.ᥲe M1KJ1g+Uq)bb !z\v_5F :lGƙ7"1&?h"__l޻!tiS{0GR3H:VHJRM_4 n~}m_p蟜gzώ>$/IF}(f!e #D]IO2h6uz!t% &Ĺ#o:vo36M BlʱE֘ "]Qn>c gxhSDXO zfԇwM)'OO Ͻ֛zJ;\xn[2AрQ87{iMakZ8'f4N8L m"-elÂ1pSaw:,nh j7D q詘cyUVyjf/73L ggpS lY$ xVԱ%w*} O&s”;n'雟9#9黠`Ɯ,$ A] iVC׳m @IN9I5otN_']niJN>&kΆ"&dxcU2L#)[a M`F.%TJВB%^w5=hʾP(.C-_A6")lOwVw RI6yG>M}N}]"=FJ\ $1, /Gם,JW"1Ƨ^rGO]~vfy|ޞtT1y|-}2sAYl?e%bkD<{A`R4>}b/<ċIj.XuMu > b&`#GIeiB;vYxP_(H v"T!*ۤ, OO(~SO,@WKTqT3غ0"vȭ5M4Ղ~}*cs2قKgQ'VoZf$dxY~aMNrT*F{,jVq𐂘T>>q✟?}w=y詗|ݷvõ2U 4Gg m{zVp%+zV ;pCJ"aJ) B w@QXъ:@=(˯mп˿ ˜MO=-,fER߲(L&:81A ))L9"zUWНȑ:}&9UcGQ3F]ѐ~lz:^ha~(XI!SwĹmġnSJ()G/iX&Zߐo *ev&* PIaa^BS2y^13^|5 2H?JuYOE?C_wbT"؈#S0b)+D-O4j l`꿜G_z'o$Tf#iV(Aw/hKb"{uz;E8nWE]zSTPʺ% o I>;ܷO_UG^+/W}Ձ"tm=*:&{oFG{s+an[|P`!jVO5<:??;tࠦخ"0Z1")/k\$gtG1JQ~o[8}N;AnQ3&b!r޿3\{AFzj4(_[bGvy`'N:,*!HbQ2bV{ͤεRFtDWu@U[^\5ӃH\-gOV)o"GOo$Y1m>qVld `C'r5 K|.Ox2h {nyWypӊD-˘at 5^[=&hjMlvdt˹WkgB3$Ncq |=6XS0Wu=hФygyލi]R_$Z%M դ52jF Ih\kLMJT؊8t3gHwcAXI<&g6gjdvI )}xK{oGL)<8oJ@㣀$I2H  הE(R0֬bd(UlAy+@u&`?yb?|$)-yT2%|On/Э 0@GZ5+nue1+*Y2lP\ɚO8C+cøCdFlbee\u~LrW G?Ӛ^'-q?|t]}vR^L!)CGߠ6](Dݝ;B V2J@*).\.(6c㡭9!^yg!7j{=C~JUgNh˥]V'O a,XaR8Ϩ"=y)C%8U&G Ѳ: 5 I.У Itς"otT mF |ofk?i]-;c!8ˌ`G2)ECjM)a:"7lj*C=qs3Gy )]%&'^2H5+M\FUɳ |ڄ.r4(r9R*56EbXYlLMa% oyqY:kWdL!xq|w z#+

#" = Yy5vW-6ͱL*Es)5l5uP_b.\ RG͜B9ocr?q5 UMlF== v׆t*@ZfF'c|v:cPtpA:a|&`GHq)>sРv (GYW.ҽ tpF@0m7D(#bA\V`qvl!JH2B),02I2]a4s2q*fVJrE@\zVtߓތ sgu4+( ~@l^vMʉ:{?-~<B8vR񧋻Eo|=!:P 7*й֨C>/[QrHd 18R3[-xWP6 '(h8Ay.OEmʺQIÅp0φvͨ#j(ȂKK6h؇PJ{R_P "z5W)y^,!AzwI-WFqUP=0tr O'^YCmk| rR*R3hcZ_zS^ d&8rH8q<0UumC<+ּ%#}jw'FFxUjDs&|@ggq \FsQh7ȫwϿ>~FuN;٧gхuvS0bCRxEL9hC;Nj pI>X2C{tnW F31Kש4a3 \") s2d=ԾFAgNBpLBEݻ鲙XL#͐`7%)ov|9fl[iܢ&4s]ǯ|!C,f fK5k<6۽ѱ#TH8LBrB{ +4.pzA|iexsRbTPJ89NY䔫I gZ"Cc_xॸwS'L5sx#Ao <O0P-fKH@̐JP@[r>[స@ohBx VF^D̐a; hAӣ>6AjO:L'W^ J - Uc+׀~7G|MYcc=뤚587tnN 3jK}sЉSPC+v PTI@`rHJ Rl"!(9 # +}:йYvMJ0"{Bb*EgU)5zTEʨ]%M((~lpe1CqqMl']ރ*|vZd L!G"b y?\0wz h YFE"}1 F2T`$c74Y nK a};CaH™tf^hT'm }ΎgO߱yƱ́ ,WˊOv,FỄP;(M7m!TNMor{١C/ ޿d?y/W9=Om[>|3kCiB'ޟ_D}!vY@Lm4.,YK" Āy ig *qM$ņrq"aWIѢ?ZHI;;>cN IK (e{voT \\gYT(fc=ٷ ؗݧټٍƢr-ؐ%Vrk5`MH_x]־y&wzz92t:7MsV)l&sΙsˍrtϻK^yk<=O^ o?ݛc&ǎPÙ"z"1=&4SC+?)!+4ʥmz4wu, e=aj8bo52'BKs3; s5lj8[qÍQ(1dc2n9 jAewn/yO8'tοÿ zRŦt nq\o^ OS[n(d鄛S&SwMܿ.8g ,&rK?^7)/yoЏ7~36%&0ݴ(,),Z5-XJb[e^E2abNX\RQj/i޲l" S-rz>6`I0>'$Ei,24Gh'Ma()3Fh97b4,cۃtdSiB!`iwSl8V폌Ԯ馝N+.&P$^R%% EEn$L0>kw㖇_xtOszҩ 5l@$ \LJ%t V^dUj5^9ʈ($Ưin\~#aFgÉ8ht733чVg?O_~-4m$EC}cqRFJ‹U,6*NhQ:Pzy*={)MJq;Pwu - ֧ٯ̱%eJ_|2VD)*`?;.w9w)EwC8qtߦ?#]TnH h4ve q@IN;?/IhCrƘz)Dz0cT4@ 8@';TpVc53v}>Pb-kC!z @{ a,B0O%>jff}Oh&cn:n%lwڻ$7ՕS:59tSF+!teWaz_-T @?"ڕ*l]]yT^woZkTR!B:Smx Zlۂl ~&nzA(7ఘ/d 6ŗ>IW@ O3e i_ zxc6ڕhVjY܏Io8QKZ t4%1 G=60hZ ٷ0:Aɉse*P᩵ʸ ;ɜ~֜3/G_;vݹ/JO#Ӂ˹VHQ 7*ի΅!Y*0 +MW5AڲVA+CNR0P"B|eSaZ:+zn p WK3}E@)Z|Oـ !L$xaGM3|,$nChc߳bw{..yw6:-yH mbz޾}_yT{cG6Aoi#_';BgR-ZwM+<_MW)jhP4m ݠ$STh1c =sjrm BA}F(JkΡ E#5U) ?Ȑw*/~ 9jє (ՎnSN^4=}/π9c8,Kf)f0Mrc$_x\{zǴkvv:-'&5ګ 8Æި[KUy" XNvd,n$Kݐ`6_E@V$U1mM`|e+6PvS/mQxf6 THHn8}<ʖ Ng^pFj~4$<5UCw%Q!Ո>Irb4I;7E/*ݷk$Ox]fFR΁$S J$FHQ]wqB= enK{մU~jVsMhmSJ^*z,'3/-a+pk L-^J OidS[΢c΄;ΛziGO،u}82)HG1YJI{[蹊9q63F ]<АT=c[tDġ8o473'4g?HݠSt]=<*qRAo Йc:.…XUn\aʐutXYW5*_|Dwj˷%֡}&9iU7YIq%`WߏUq83-Lћ:ntKd:)Y9`i>L_}3vnƳt8Oby<}?aʩ0ZE]L Di8y1T~1\!9;&2ū;pQtNÆb`1eYmjh(qlOYfPҴai5{>d+iٺ0uC)Pz'q7GpjƩ1}|8ۄ=}eOIn a{S=HXOo$=;CSdڤJe 
9w~DlfFm0^$1a4RD0[tc#> ;施 iI "Gл2.Q@Cj!|)Fz MDxzaj'#![ᆪʀ gWs scW ;{hV~ mb~vڗrHa;l.Cp%tq&%NϢB~]8utܨyB̶ 䯸56ujIx -n7XzW+g3eJfcD{NPItLAn*F!ˊt&G2lÄ{?+*?Φd̹VС1_PNA-6Ϋ^Ar[>ZWBX)LHss9 VA zט췌}=Ĝ$w*O P|!Ѧ1k +e8NGHD3\vceCle`s֧*&0t{].\@ '0R n2U>2:q}2N|֢%/"'I2TwK >+GBxbzͶ)a1B|n0⿞tsN]/Z-N u{>;7̋hC h{<Q!(5fñ$weTv`Yd5pܢCF ӸV$p⿭L.dDM&Hj70I؏Mnas̓, FRhEZ.leIҋUӶې*[IL 1ԅРqPDCKMWIŐPZK'G͜&MduD; bi@NrBӿ/*UXyD'Q2&Eh?Z3v2O<^x{-ON.7'VLg75(ĕ"+Y^96@>|Y [kP_/eeI؂'Wɢe$d ( LT旻룜Iʺ3P E38.Pp\k$kV$p4J T$RqژW~zM$^?\J 30uW^S՝ےB?SŦLLq让W K-P[ICSC)^%\7"#ZhS[rްp_8aj,&%6Egƅ,j z cӀz,.D"2ᵈy[wT/ CZY|u;U#e\OEӿ2OM]4 :flJ3*І?#.WMhJ]ѕ#U…)GbOE~-~ t`LvU*q6f0Jp1V9@jA s, (Cٕ^ d'l⬂i&ǢDpa"Q$qPI}Zqь#fˆ_džcxE~eӑqt2y7 =K̎ӋV/f OSú"mhdŒk!QAR}U>ɋx/Xh~;UЛDHJJ!d:%AQ."򻐎&$d4`.WJASq/ 5`rFMpfY1AY(r 0s$s=k .>;y~!8׃$!r[<ʩowoݟx|PIetѲ( c,2} f4[YxΠI4"1B t*[,j D* [ `*l,BӮ):30g5KxL)|Á8rɑsP BAOO.; St6 e)`f=}5빨:mn, b!/`ĩX%Je1$U~l> lcϨ1G"f(^@Mb?lqϢz4ȗm0EޑaMpŰsƔlዑsM_k\!On@RmSiZ[ `-ݥ֝wczUFhq$٣BS|||r{ܣcUYw3spOD@ h>hk 5+0IG2kR:cb2謵-#"wVBDޑ >z*]LG(_>.h J[4ݩY>ᢍ'eS,|$}ц64X~?=l4ۆθ(Ԏs>[̓雧7bH0ckֶ&; e2Gr4BTdx²E~1o.B|: ijgyhS< u֪$rxb pcJsk Tquy[ÿy{|n߻G6nߛM}׾|돾^pR:D[Ln<OӤ 92$?0c(ԭr** KLh٘NSjFD"!|0B?'D! k<'"T,+"1(tùHZMhfI;#gopQ֒ ]qs)y"KMi[zb%aOWUIp-.5̷Δ۴0X.x}jߛ;g4\9ΟG~oǛw)[Sl ǡ8Y`hbh}rJ3{-*n kE5,TQj<={4}r,9uSxT X2<@!]1/hф) 8nCxˆa;i˸X뜟 rXuJjmj(v n>ZyM}婿s hvM H߶Im{ck1ű!`.d*2XF3ܤ%1#>~F b?%B$C(A$q9LgD 1@.) 
[Le bjT PR`;CȣZf s >Khp_'^Iig6Vvu%Uw^:e8ձcLY01#lQ|Kn4wW OP(G޿Mq5RAU%ȷ.D|HV~I(1#X#caLC| R,J7D49z P2gl *}*]0TS0y>["BD}|Z9 v?uꮥ3g0j{/[:@=\SyZ+(IHāh;nӋH5S˷ǟrSu"tS1(S w:F++wm¦P!6pg- D,:켢F?*>R @and#,i,ޢz4,)Ʉ#̔$R^9i$ANOǎO?샶Gp"mդ'vy$Q$!r#="66*f+ S}T^7[b 9:_ҦS]w|?w" >CrRn_ԑمhߔ\cr6 1i^7䈟 Ee$S RTL'լj%vLtEM4(CX`3tB0Α|K.8ڇ^ yLDLXcY˸IO='#` Q "GodԼ?{(C bݞ-)﹔{I&@)_JW/yԇϞBnVɯFR W^T/'IXe >65Mtނ^4)[➞^t=®FUH<7ۊǟ]lDp| К`1nՒ!i==%g-;y"X8vО医-^'/%*TF#l]Zf|m^2v, jGD{[ng{dÔǠbRz_x,>$j~=݆ FrkR3Y 2*0J8SFDŘ};ٖi|:*'1z>=볿(ҷE^޴{𰥮X?utϏ0w; pfVLEÍBD;_g҆wݲLElc#_oϾP@@(%ddRpPsv8ĕ~9U̚NHl}o DU&ՑK'QΌM@6B%;ܻ1ߎjʐZgމ $٣ΏaZ@zн4|kf}޿ ]:u ͽ9Mzr\YJL#[K^F>M#˩+~ ) CA(zRF!y21 /ڡt=rd9x.NHA@Pș4[6VXPS.+qۀo }U9^W=o~^`p1VϲE n2erUj<JuUFq%q ?ڹz@h{I)kOeTT˽iPɜ%OTD| Mθz1:Kq [NS3 >bzt}kɴ~XF|ߝ[T-iVe)@'=D(B.͞is5ߛW/drU>]&1._*C !ȯ -Y#`JلE,Ry:Qم<>[ť&Cs&C`(^^PVDv(: .A؄")GH1yjyo| gG ߷{CK3S)xOq VmX rtҒ2­G}9ءdـm\7ҝv\ mDxS5ķV]{l>k7]Bc8Y0 KFVyJNZ@%9sTe7Q,0v)>2iV3짯Wzн|eY4OPN2L$?#?yc_͸k䪷y>~~ȱ=yVo37d$HICf3r߽ L8%Hct(3l;y䚷onMA+ &s;IfˏU]kfDDqݒzBrIZT'4fs%e@U6F 2kH#]ʁ>rvB) سOԡ3Og/Y9( nV3@K=dE~]|yhZPPmr8U/J_.ewzĩO]gJ^Cy=VD!.UO1,")j>:.L_T $0)l ttb5p"o#0&:U>5%Fė1LA6a{Jɤh~S wO8wS ^N5zo%O{''9 "\ȥʼRn+^{'vԣvp]qNًP+qC[@ϕѱA J1ה~#*{蟪Ѻ:zGʒIc7r6| 3CQ3wG9:c#0'4ﰭ?ݶT&LO>1î@1 e_[>ʹdjsr`: GUsQTBEj>iXD)!r@\UG홑xbS<]ozat5$!ƕfZFv9fYt- J?) 
LbX"$(A涺\(J9&5+x k-zǤ`r&@qIئ@?>Ͻg{#}:ajRL%v!3U79loCȋ?cɎ5ޔ>}|߿:uw7CE{IO>x&B @SSo=uE !̓jl~)}OD^d.@}UaP&E2?zP Wix-2 @q@z$4jk >_PN|i5l[8;%\=FUĪx)53q% oẟ>uG1~r%84[G28}=CG2JKr%}7*nQ/ҼM'O_-TDB:a$ z[<0*sgL , DMKc GKY.(RONpơfm,!YY&~ˊxTRyJ62 gM4I.ZiE$αUETc,:]>}tCd6!dO7L-_xР5⦫%ZzB`]3.%hZ=xNR9ީRA2t˿n-Y*Lە0lQ=~4.Mqc,@T"g'7 @m1s*}%l&]<U8#Կ92&4ZHv|>7[r''C{ɻ8sYԡoG!eZc+&9 }ȶ!a() xPSKVHߴo5>-tPKԵ f `0yi^6MXC1MpV C3DQr.~ee&K IrE.豑['A$",.8{5!RƛQ`qX zT/K\}O:P?@cԴg|ջ bG)L;yWw$S:}xUZ 3LHW1T2xJC탟v" `9n IIK(P+O?y{_ukJؘ%E5: H s*IryFy)S߽C"A"jCo)| C̚"o&mi^ʇGru(eD{p)&򤁩^wxݿ9}ǜ) O2=y-]RoWAtXMjٌ*}뾫"QNC3O"wzS݇^c4$>!rmh)%70XdTOm(4#B4W aF)hU,(%s4o",Вb*2B2׾?*R y2&e7¾dWKS>ޟBeK;@xw 8K}uWj)-xbP3@.&^*<{ XUHr,* ~;ӘX~Vza1Kbqh0dJ ^(ud&@A4ѣE Z2V|v!'Y+S%v@-Hs觰Hj-!iXbϜh.[svx١D-43@ͷsBۉ_!X x>IV mF\ l #E,MqTS-νGEB5ll:N/!Hao:oyϮfĉXՑ4Jʣ[z,| )6Ҭ0x5pU/sXyB55AjgT`䢍sb) [˷h%kRY; 4; "8zoWD_ LzȰ7#^ϞHfđ*tĠVbKERWfl+'QTO?߻z"ψY4Tm\|NKsb ٌg)eϤfP/yyOHiG*@Fk[=6Jo-Tm <>fcuX\nV_, GNt:4evi km y~PG_H.$jA@"YVp24"wUks2M[|4'ՎRgC!}81Vӻ ,Fx O U@~*СB1i]}-⊟[FyrAV'×_ ǯ3tg;Oǣ}$=i\U0ՑNl %Tt^׿CьEhn '6AJtozC__q5nOm©Rܺd"TkHTmuܾdP1hRkG=$Qh42-\Zhf٢ "h@l]4\Nj@"-(=*.'9I1̀c;rOZP+1"3$'O/ UHڱFaX38gnKsMoޏtͿZ x~`ߋOyUyvY:k $ISsN qn.{1B@LVhs{K[9Q"n .LF/M)ߢhzqJ+&3)"˫}#+|%f??%oʹ@} ؉nfqrDmJQ=. ޝclc[`A0P<7v=&Eî]ow]qF{6&i+0+AV'#EmYփ9>x6$8"aHW^'=H|v /?=zFWvRy,\tE_)<`r9ߺ=jm~淼?+/ZӒC;Hy3Jwn_M*m.K A@^YsN) 5a\0%1zI:-8Q몜A(ГxaERph/zSia`FHߝu>Dy~A] fdϞ>:6֣^*pHi+d̃ Ң/z~oN5M|l4;tNbsM9޴yoGZВL,(;4x|zCy.Ϗ0~(R\dG_4,0 {" -lpd=it%O>Uзozc7=+3,e"TKeא&&߶9i%h׉/$p.~TC&A21e#"^!IK-l}fOu]1~Ó_<{ܷbk-:x GiJ>7j5ϋH`ѓIt (DU6g܃9`>RY!&VU"(Sl9ɗ3)pHˠɨ$@oωq-ѶI+C…,\g~_i *ZYhRFD">Pc@Tr6D>:|@A:oo8;PʄZQ'Ι#u\譋 "bVB҈ l>> XA)IlIn#3>]!nTCb )$hwՔfU'6v.%|5 2?eX#Iq$AVG+(:AG$°Eclם{fE6<]qڣI]M].kJt󛏜p3<Пx ɾX3>;ZېC %(O![̳ 1,o]!8g`V.lJ0?O,c@Lg$G. 3[cZѱa88A莩ԓ |(Ϣ%ثdYB&bǵ*<{(L0 `':vcs ˦@r2:g 5AljNӷaH#/5ⰝUTcUo0eRϘbgg6`OqG=r"=vdKlupX\`hl /Pt[ W$NjD F~jpeRlUQAJ86j#?XЇ! 
kH~v3#5Zi_uOc.zjE 2-'7tE'˓&ir筷'SH6\Fz:9l;C7ZD%i}G`مY%.|R4-*}N!,t6k4KU; ;[uV?"p5Bx\k&3hƄ @X7&"A٨ , kctOcjE m+Q$Pԡݮg|oX{/#j7MMOx*MZ rΙ%QfY7J"Ht„UIdZQOԬާF蓖b6جߖGevEɣEOy X M7KswaABO3`fbIk%S~5_m7Usfz8n]bxZhoo{|>FdYTzHGdzuN^(X9馇dQPU1dJd (M$-.M_B 12 u`\X YU38>,-L^յ6~GB SW2SDe!YX_ȧ 7R &WleOzbM)R"9Ѥ>9,@\;b*_KsY/ԕgݭZ% ie?}&ZJ oW*i! G9'+?rI#97Tas' ]eHoL당f?O` qZm-@3}`^v6$?ӷEYN~eOtPI.HKz$#GrniG"^#u0t`>ŋ'N^kNfKt]E=|mӞò6JO IUPm*Pplǹ*We ؅ş֏0 >tN$]@c7+2hZ&z|2*Sqo=L_?sboUQBO} tR/Kd:<}` Y*%R2XD`RaG=$̣2XȜTԒفRFf˳D52q#i]E>;҄WB+@w&$¦gOgVNq)_P)ɐ}IAvȤ:ufk$AAJ<οOj)iՌ*ۍl(!@& Iv:) ''d3R}j|`KvKq- DL9mbծ*l>P33$;/d)%3~ V<>VZg=2 OJ*̴Rv.镶H&=jAOG;@%?S)v /:$ }l4^S4l. \qx.k1X"芫2fA`FV1t^P>]INo<3=چN$_'\hqDd_TKsk=rm~٩XzhGRGվ1i ,n2FZ[*ۜKsjc:1hX.p}3Ԩ"EF$\4n[A g։7 V,+5, e+޸d`-Lnc:F:Dw+jba-܄$ñ)”>xē:XX%9nӓ >>wFzR@J-9Ҥd&lucwt:N &*mN v|]%RIKt`$jU~QkHjT73yt`Vgr2k< Z̍׫٠5Y\&Sl[S:&s8A₸&OHI\&Xuf/^J6Jcz0WL$)<+f%7Z?S0Z_}[lbYԍt(P~=:qqޥԗ|>a2얕:pt|Z z2\!27(j 9X(5>c48(FE2Q0QR2~.N'-)3O%I尟g",FXDLG!{}Y'=KVUjm 燷y%(PN1y*4Wcpfo2A#cXOrVA87nI @y~qΙ! 䕸*CYc&y-02Coĩ^ț9Zj3`U]S,i5$)ru2_J nQ*gKrxp~H_yp$HBϿtR-Aw#0p!*=oUx)sM7K0Prpx/vnk.+.]o>?>9x |q/].o״1/ݿ{wXe葳)GA̟42v"> zJGSh}fb2U* Ú^TabCKfiqy_m5D_͡>JF>lhyICMr3 9"6+ l~~mSO7.9~ױ?-mWjיG~oØ?t:u]7PJWT[jϓb5&Lu0PHrdQ q=Yt4N49Raä1J {!tIpY2z X1>*\D@bzD;1)hMDXawëo ƴA1gV{S?]g=wѝg~'?/ obK|E2vm_prl`Sa1.1iP)-+zU2P$ ^\ ޱ‹n(Râ.!ĥ 13ճiLR/^1Pc."y\NmH@Ok,,:T1ў ;pg6^kf7I7a.(W7XK|}Wuk'gg.z3 6Iv=CD$ȵȐ%*'P]-f'@L }ƚh-T*efu_XH)us$y,ՋC{ }ʩ8QӪܛ07>9jKL-|]|GswWm 50A`\%g'iv'^_joͣOt-; fYiBJJ`a1b$lh"Ps]Kbfx9.mRtO}h:ήP7w*`);2f0%ۗvR36g&xI_܅ $,JR6Pg|HLnhX¨\2IY3;Uv챏-: ;Sяa2x&]'QG+. 
g;4'Ek8Ւ ɲ؉dR{^-DZ܋`o3F;C1bwڽSM*wnd.֕he4#8mҏ 0q|G46X:%r;s7~k-WslNZ 0% 8[HaHjvA_:v͆e1 bXXJs:`]NI?JDgKW0JJ !?4l=D"RLP7 RH6Z+%8{n|{ ScO[;&27ղbb@H\,`YB+lCH8p[)1-eP=zsAڌTTn|k“Aڮ?ز#2V `z+}/- bk:ȉ?|pr]UmE$E!*;`%/wJՠycYbn@(FMEh@gQK"UYm*fxK/V(Ɔ"K699y(%yj|d{ 0L/hTZʇ<[>z켳:bN-bO-Lja{~ a %FvJUN掞0_V foژ6@'<&-z$ւkLC}U:B{bhTDZtqeF-(Pԭɫ%"Cb.8e42;@ȗG6:G Fm6Y{p̰u7;=^eErf%s^w^HwIPҤ0% ҬBy{z9}^]b8 1^.s]l|NRfrlR>GY*+6(,Fzhҽ^s^kjie ijiJ{be ,B 3!9p"PNgu*zc +Nb*ƤDՀ5h0BӔ+KYQZ?K#|x%/Guo:ϡɱo)TBo[5ŕPjM}+8 $m\Hzږ>PDg Bf v濇B fZhJF(r US꾑W.ݍ! `9;4zJ Y=I[dL'菌5w(t"<esmhYw_i2;#B N S)V|~M<d-!b9۷rJ_r^ m0qK,n'a1EkSYJ @+ž&BF7^}O%oS#А9.~㻼Rɀo.XbpӘqv !wa ٖiPK4taP|u_dk]ȋ!2u%|܆n[H8 jcVr[tSm9R]21,h5V:o40s:L2}3Χ UX3ꊟ8qA^q87ĺGNu:5̭iޒdz9AC,hn9Pޔzcu+tᇐ$m -yR(ڶrNU$y&s`Ҋ~r3s^q'@-=ul"O|]!+(m 6 t|>c^^!6 &qvLHPL㭅VovOe75ңۣ)b az)_t$?^P_4:/`'5ݽgNd12KBL:rZ%Q 岉SgH/R.a|A2LqRGpԎ"/KF&T[vE.#CL@Ro;x1[ yJ&&`%HY$ ]] lݥ60: e3O+X5F: ft"5r=sjJSP(V*.?>q"[h^GWA( Ny^jZm)&L::qn֤^MGjkm/X6M4)0!H\0#tcP(ᵃ{" L!,t~54 =WB4t7鍌~t4|f_ܣt(EJ>Lti3gwM;{{q7=M1J˕?~WMG Ɂ-D6rxhDn1Qpn|n8i'3vȆzR8^[#>θ* }qvz$DTWR fӃ>jG%lQ(Ptt.cG5cY|$1$8^n[#}%s$\x{rIImqZz]tK eŝHvd邟r/W˫(r!7˟Y KyiFpT_LY9?yO'7PeEfQlXɫ$KZƹj*:=~ԡО~S? 
Kz$f;gfb"e(EnQrdSI\yܝmZ?"VfןLE'}>uJnI&߁B:}[A3C\>&D65lQvsQ =$y d ,xd"ɴiLhXz#u@ h{=Bs'SgdApԐ=F NKc~_T=YByQya`NSWhC*/uhHAmJ&!㖟UF:enߒipcmO􃻦W ?_ľ)u: 2:bӉ=:X!b\.U\)J-`YBĔ1y(l;J{Qp"#CM 䕈cu ]5b U$ɋ e=RHibn]U,Rz+{!>U05chSrWOߴg'FlfQ!]H 9q NпŷQϫZ^;[fVoNȌazr`QM82:XELʙ[S·CWs[S-M$M;4YSz5qTb/njmg|T^6!Xv˗Waix*u(u`Ya"]DJ1`XMc ҡ춧$UG9i\jGD7oS̻(I˖}^j_N'Sٰ4kd0)w}+9C~c!= q) UMEX}C''yj!ra2 g(e_9[YB{= o(3OTԘ   H>~_O}WIIa)F!k|-j:]xnxÎ-}EptU4jGyLވ`)DI O Em;.<ء3nҠϸ"2Ѽ0=>NñLR5tHimY&H ^%!bmQ$_S$zϼj@30yI[{ %$|m}5ۀ3Ak'6gOan@V6 M7cPuHN mK'ڥd'I3E-Bkjv"}aJ YL"zZ[DIɇ~Eи]ut ^O蘛.B RaH-ͤf}4XS*]WzRms^C#BYډhCTg>i+I(xqY5͢5L¸ 7iq $:r\&JVu9d{Cl8)tʖ%D1KlRt2;@J2MFgx %AĔrgnԻP+qk W{9{HtX4WZ=jљ4ڔ벦Mcd#E$[:[ ؅yP|xSk&[FǙNV^*KY^sOUӭ?膸440rڿg" saO)>vFBABi@|-z% :ClQKq9/]JaLۖA@8غ ]Sүq߮Q ="GHQ G6R bHKmxE?}GNwڙ@Wr2=߲h%Vge16vo؉QTKp0H*E7sUjDp1Ǡ-XK#d@r4 TF$Ä́Cm ʙ(EvﰨuCӚ_|yN$+3ii(0a7Ap150 -X0סDg*@je 8Fs 7ZȔ4OGYy>˓nI=3 Zβtg.ė" ˆTW8%s%%c3ΦnD(3R7[1 KGM &>, +yxIn4<1* ܽ Ez|'"]r㲓Ǚdg,O88$N CXrdlVMrk;z@kcK2&Rɥ5 ޙgVZ A҆\ψ 1%DFAEBG¯ ƻt:4KuP3"E;) $g1Rh2+E!6m+"/:pN_  P0l*cN|oAv,6RK$*s+VYj[M [lqcVeZc'#'hPŚ]k.<%}+4=ԩl4 %(bz`#%D (1N#c(dT]ǭ]S:N B'Ӯ=1!C>gWsU o' } 7PڲWi4jdh~V;.fFs_?r,nZ{)R[Ng)4\ߠʎrB(:ǃ_lؐDNi\VO3k5Zqvx?,afXZl1dl\tyt}O]+D/]3=D-m.64 v]M`*ٺdd܁gk$G@S {"ےKS2ۭMxWR7h;--* 2"CR8vB $iF+)f#tC ,8 e4q&utόܯgc1}{>rM3Lڀ`_y[{}4S?J;,pPKM#?~EcWID3J50_})PM* wS;tχ ߞ 1䬀LonI= bXt#CH?LObb3|$I wx;OѢx:H cpj$N0x]ykv 1y 8 "eh98@ Cўh suˇz&E'a.GrX-@ !@8,(-ԗl841Ӈw`~yĝm9@#W 2N0Ҷi$3c@aX:a",?{Jr!Ȣ{<.Zu G{(҄[QRt gN_wu׈05HQvVamb)c FJO/ BAx&[_ Xv d?ۖme!i)4pN*D`ywL=^}fd!lRH` oL؏@k۲n1&idF}IP457vt%$=MN]K=#.Fu@m!( #287@' zi<V[(: dl^SToa#l3Q (ÀX.p_~DBĀ3ֿUD'eL}Ɵř\ێ߻93xvϸgoTczbN_K.2g=6Yd,eS8Ф\XTvΤPP[f/H^Eeؗ\/CTH ˏI1$T!Ȯk@Xl<,4 Ȟf5z=`hC\!FI8魜y!,me{-jQ']0/M~#?.⣵.y8]u)!`X}r8}&\}utٸ7B2qIc"oJ) y{ew;l#!>XVFHK[x^ou)r"<0AA(<Qrqx8wC%U!mqؘ[/-~a|0T*V)v"-N&N)mv.ݚux%?о5.aDIūD5d 7vἵc{>~k.Q1GC"asi-*sOod@}8˴6 XKqvxe[2hu yI$Qu+þUŇ }X*(=K~^ c Po|4 ֶ1Әwqh|}=V*e19yTvz͠f,}PUQ1-c{>؞8vpNH'ױ԰F.P#::;D?}O}8MtM^(HdՑSQJ4EpF#  d#>^X\⩼d |۔ @} Ӊ]VNۉTw e JRZAzB`*/J`rs{eJ=p"cOS{㽠A˅]!BUN 
P\}8˚EC7ϰ3hzŶid{V*bjAX l9Cf3fVS:I]\= T#9HCP)~c,ț6&?k4,U\b9R L"gf -~͉ሠXnA}PRd7?cOǏ4v#HE 0!O]+Y~2V/k>~:Nt~ dBOiz Bm)O'BܖFKIP˿\|SƭIPfKHRf*smM+ U *KF!i8@) iRdO+c췃p9;3Cg9mLw jO+ rsªeQOOc˓W7Sq2y5HgAs)nc7S^X<?L]iFJE0.>iǕ8g8'}ҩmv*UVdiٺM'=F8JPv%hâu ";1 $q8"^P^ugF 跄S4f[n7}t4-th6yִ%L 1=?:yqSdvmeEKŸ'gn f~VE2FK:-GuCc5AW_+xXƲ TĦMT'ZW3RLׯ>CJ?L)'Hh.~RT/4c*%1Lx0Ⱦ8+-FtoU/qXNV3," rhv?~2(v,iEFVUr䁋p~ӱI/~jPFB&o15Ք=J fGk9_B^M4D~h—da/kRX>5wa+f9ɘITfS<56J9-+Ov]Dט]"aaߋAp yQfR/T@R% 6ډ5Nls'UНnpD&wǟ9o/t"cY#Hp^6Iݟ}bw=y.a'ۭf1d:{Jl -JˤvZ{Do jɓͬyB=^ pk2c4/(MQ<~Z1 " ζEċx[G?)psD &MFm/ީ/uz> K) %,>*} o\%^t o`ͩr_hW'UQ1LASa􄤷x9Thd QR5Dd]%&c:4GkM6MR,>T=QMqVC$>[2kH+L6' \acWo%F7-,Z-~BpZ?8u=x \34q+&tk^\׽CxyJmr@Gf+hlY>y|!eAR4} 4[;f1:YWD$mw%;׈GVr'#rsPry>L2*?+8O3ueO95I4LEsj;Qe^:?}/:pk2 }Љ\h](Qm8;+F9;ξq:ͧgGM lr ;%`("*Ň2 #X 举ztMLAH/˞U =<eDci%IF[h &RTՁgR.Z.DZVrar鮢{o闞mOlfPwӋ=Z<{'SNMGM/>xǕsbT7Q1"[z}E~qvQRCBAܗ2|G*1 Wq`+TEsQEH,GW"A@h(&n'* H*MTfAaLp uڄ׍P@ף|a aRBBU螲K@Nv0HN~s+WcUKx6 dB5)u۟= GW\r Z%á@h U;ΎJ[JOc9x~`L[ťZ㹑^+)m*oabyBbh'wdq'K22ӯSJVՏzl8ϰQM+13.Q) [$Qΐ%BQ,!~8Lc,(gNBcT0#>C9uc+< zk=I&B)uyvd^O}Ozőn³91HQ( H)0<@?XӃ, tjcj;y,cs@ZRgHeY|e5>fU?r[|-_C݄0^^B *W*U b…+\;- h,؇VV}DyTDRX:MR<5 @3(?8i4/?SJ[>dl_1vyUH#wa5PBR&K Pig̟Y$;w"E >g= 4҈%:;'l;- sJd>P9бy ;ҙK|-#wm:\`7gjvFf!<-xΰΊT֊fPI@V(`KHS1 b7.Rn@[r&넆YJ;}"k#,ߴd6TVfp ٓڬ́2.C8 S0e6GMῌa~cG>v)qeB{尙uDiA (H{6N_c#;o~Obny|+WR@E;?Q(a :U䖯!PYY РfZ~QdF8`4]kLūK y7J R GW^yVX Aid'hՕQm9ЀxYړXMCG3elOy =ݼS>`ݤ:{;`ҕW׹=] _,w9F>:'+Mz?y[Oxӿ9qw>y=mߟ80ϟ?L_y#S3\p.>ez-"\# aUpD]afjA455eR(~"$tHHdWPM|zѽ@D=*OseV 8Sݩ-̩S)'1 !S E4Hb#$;BNV2Xca@ "fŽw[>6A/zMs_1LJЧ#!3.%-"_&L7R۹7a\j|`^~s]ӿǦQ˿-;twz@GttY iއQ`,O|?[IEJpRA)֧[+5* +{(X&N OTt5 Ґ*J)5|/|AjL^|<'h Y])ٛX"M }k6Kv?;~szH:vƐx { z1r֒6LG3 B`zY ߧH@?m"Ҵi΁a3 T5INuDQSrhQH7ć ]+j.hF69!%V‚)+%Pɝu$L96 FE12lq!W&.ےw윒- jRRAϢ2-$!M8L& lpY ӓ0tT]|t~;Zmm~Ҿjxz\f*a77]jh cн.{md~(a6Ƙ`oA 0% ،јC`.EOgTWh,eZnR t(zQQ̔₾c3cHߕ:fN0yt^<1lgɇ87$:|#֩[A{ٱ`Q΄gjawO.x~(ucZkOjz΁e # Yj_Bri&^m* ;g Q$8OȧňV1ߚhٹ:5ƟȝMSP1E!% \,U}KքC0fQ! 
W>Sxd"ԛP;QB 5MӚK֗NHZLʈ1fgаX8M1 yH;_X$aYb{Ics~_J-1~%d~iB:lw<>Y-Z2+zL/htZX_B pC#%S7\"Ĩh#Y#1)Ȝ^T8Yf@RP)PBi@雑¤r޵եF n DMn9\|t"@,ˤɑoW6\_߇jK]"/uXeuSfa!e[|EHB5iOk/R2pqDQu&i O2BV43;ҲCZ =`$(\P:ϧ1Yg,( +6ͮakD*5~"fsQ xj p BA v<$U³m;NJ1%^Pq8:r6<Ri@EzpX@a#G)YlEBK[5'/9@vU:6%3Z8ۛ@ wxjF.}[1C5tԁ.<5 %Z bTBMw/{r4Hh2*ɌGY%^0qwwJ1P=?D2_=| 'BM R ?$z^/0TX"fwaprX !)/"bg:bm:P+vȌ9;}')zԅ̴EQEM9 xmt| S@7JVF ͘p [ҷIFvLreBaw@3eϓ,WhWzTfLS 9^tA+ 'SwK8HφxK+[G/Riz+ބSMF:Q "" fFrER_I䪧.2sqHFV;^#rfYI<2᱁(.9RIBQiCWW4lϙ(9ن 21>W%h9 yrP=cJًctt |,#OD0g?>Ͻgk+6`o:_~ͺ <5BPbJM8h~E<AYW^pO3f)Ƿ^*gc-{Wa2y`l!ik`O%x@x=x<]8iLD,z1uǣ~1HZ<2*SI \#}`:u$8?W(ՖSb8)rpЬ(X O'Ke "k/ԄoFwO'YKo?>tD&FrAB}$1@-Gu3DTsrIX+7 F,ޭO<5̙JNIPu@Xbj f8B=E B:vY> %pl 4UG%4y=cTf E6*M@ϛ "+Q[hドKL.KE=Z'aQYc /d?\~髞w;=o摝snx+H TJF l&[W;)CFR@B/t2Yo!046Y>똘#x|*m~醱YK9dF>p^QVB`E i.H`@%q1+y=YŁrg/JNS\OEӋ3~w>q'/<<,$DBc }6T`/TB<+3 SCy`e  vTbym`g &qgԖ2]PIy`'~AX NG @,t>x&Ԇ}#߯#qE4꺋5A,O*X6 7mDhg沅D񳈶[B@f?qct k&HG?utϧ􉽟7akoN9jVF=35:ّtm{ (?}|'OSWwу_J6ex7ߵkZJ,p!DjA.Ir ]m~'uh.P) *{~Fg̦Tgqꆰ09`&퀠:²Dy?\ k40\ni֊H22B07LiW`h Z֜â&CV`c/; Nۧ}r >qbNډ}o7߂Ùc{cӷ+L>yb'~9rgN''}#x9ɂ28rKXTن TW=l$x=|Б=z=?}߿z򂏞<?N0wN__W+cPuz͓?mɅoTUR4 CH>f?2͑]=: ]3JlJ_3Ž+4AZS2`Ӧt{>|`S~gQʋ 1;1(_(si ԝMv8m^iҗEUP}&]l @6)IVGVҴ'Ra.h$KiAʚph[~m %4oZ % Οǧs4]Yx֬@ǧ֢'$ϯe+0):Z+dU 'ܕ>Ĉk|le$@ .6}6cbR@|VްG^O{H@%ؽ+эB Zp9(\OzF"YO ZG.LfCo,wg\ iZ {7F%Mcb* (15;}a c!rA  A!Ő}SYJX.s}`u` X1+Qsi>@3NJS$'䳆 ,)uf="{1& qh s.<>73hFq z@+ɞ$0`>AWaTmYG<SGԤ*llQ(UX,#bs7c*uMZ)M8+AL凒/#t"j\@(܏]W"5#Q 8>f4ܴ+dcF@M(M2X>ash(kemEȐ.|m VJQ(D>]EL-J6 E)6g (B 1Cw?Ǫ>]|xR% Te 鋵l%#)K1qaGַ{'3A|6Bf*ߠaTJ}FcŨCB}<CaYxGJ+CKHcMـe#:!W+2p'kqna6Reœ *"paI:1>Vc*ZE;fS2y.i5NG)}eƄ]qM}lig?E(Y<]TO i :z$"lmCj1C_TRB2Xa8LzVF?F&jҏ&v$4]lU 5z~VD6D n9~*"m)ͭؿFV$TsY(ّ$N "z&٫pdl' v{)="XִBi(Z(-04Ɔ+t:9 ZK" (Q$ ]7T~<vI͏1:՝ad3eZ;HQTSMahu sv*µ`BOQ[I(vU6REdtc qf038 .̲iWޝBwr8`Z ?zbRiʁѐ:f~yV-?jA g rprUg翮& x",_Թ-Ehh=٦Tr,S1oWhyV앉\={7({P)$4m b$өZBz{8VsDEg|!xo=yeyrwtsbdq NȷQ .sdJ0,UB5[^@{'܄z7}mp9u(')jX$]Dj șis/LFj@ #{ݱ-8Kx}l:ϕqȁf#gS"6Bezr3Ԣh6& 5񎣤ïpu+EVߋ'qLI7 [ˬIT:Z%ljPNA+, lM徲zI(Yau.hO1D!Ti*kP @*u+uwi!ވu& 
f-+GdB$f3l c 3SV4 jpV-Bk,sbQ ƘAwm m4ZwWiB7pέ\_!73Fq+t}T +[`$ q.Ϻx$ {&7 kHUdLn(!i{' 8eM[U%"q฀@7$ la-S2A C}CYPIlx p~?Fe"ϱl|@q\h/36k7uN> N)LKmՃ1ye5]U3bZ~Kڏah ?U[OQֳgIqxQ"#BlƔk<&!bNG QiKh-Ozĝ<|}KwOSo1c[=l5c`74AF%bLsjzP_]qgLeOfdaES.Ri#˓gaV+VfFi9%pZSG'ÌE C/`N&Uu}^6Kb_+P7wRրVKEhӫsiZ->E*TX'ff[48ʇ΁n)*ܬB }ıBNcբ :j4vwϏӋrf[ypr0pZ"v2 2ȥ"=z1v2 d.|a ֫҂sOc婴&],$%JQá)X]p@??  pKpz T_Pr NoWuw{-DL8 C+8\CbXxvT+/loF m%cFԃZD˽LuQ6qqY “<4TF]ql8eǾfVP%hUz&|G &CLA{cXrMt(Givռe6fci)~lֵEC.=~@ [Q(T":$DBpd8==f3[i9B).Y-qgE*h wq)1Ce05_4$]҃c`Hj4Mʐ1"gҼ6v҉ i~TK-<8q2pbHMBrp#׺_c<%u_•ejO:MR <Ö 44aĸ9!38; )B2?Y#;4bs3ێzЗ+h_ ifs)ys8_tSC#Ň voe%%X *~O“܁p>hxM`~wkp1.jgӘ'! ,ƒ4o\@K6]~|"*Q[[x©gi4K!ceS0 ;t-7X8>a~WbxG_DZW3h Q5$!З^J4MRs!|ɜ$yۉMENv*LKh̔DW6˦XT nB7)[^& jWX HٵB _B+#Z`lHբҌmȏyCIE*_+ssOW G #7|$"/EH\KeZAP"JJQf18?e*p@LBdWw*tîS|E7.ۘЀtPӾ CDJKX'y|&za,H Z0Pd 3dfcӎ۬Ѥ񭴸nC{,<`Bk(xr $I1`OTfҨyTNQ-H% .PWHd TMhVyץ:}xY(gv6h0PqA݁ Iw0 #4rҪ!;E'rlȸdpchjߧ"Pm1[a=DKͅ Cth65>_YImma #"݄S.^J U)WQQ("+Tf`4?څ2崩P~ŚlH1V*` _\)AX LX4A+p@̿_e$fAS]hJi WiAKI!PoW4K+˨`W8rF`jჀ'D9|~H49F urhdQII$"yo>3$̗F 6? wWX2MK$wL֛kT#$jS]V2 *5i+';M , Ld @CJygVVy.~lIDAT勔r5{mڑ% ~79U>H3 D !3l }j (XgOXef.LY ɺ jOTWE--Z1 $95Y F ,DgO4g qXG@5+[ UU0(Jz=Y5Ōpe b%DBO 4">#kf4#i@-bY_Ĭ!: rW\ֱtn*%WuxKL(jy6*Ǥy<aUϺDJcjtp5hg\h)~Ek Av`Ql8A[zXբ/<*P̤Mȵj³/ `."=hOdm!VbBSWȜOmU~kE tqv%k4kO牲HP)X,5 ԓQ`.xEe7\npU9w_@gRrPQ|`H(R(_xݯ:Xv]R2p& #tVLB١?Y K!!$ @dX`S++ TU\aqX!Ջua^%^(` %0f$Jh+ gSh@L$'oN\;Ͽ}~: R )qf/݈3O1!O;-*qy*Wr ɝC-|4ҼrH!~`+8ÖF_hj!jkg[?t' gXD' =TJ\0^#=oHpZyJjh# *:Pf*a,!7gOaS\6%fqƘћzPhgTw"`â'Gl01_ԏu+ SR聫\YvE*}<:j$ɣR&Vqіbm/ o^_ݫrBlב5QX]rՀ{@V]y6<&Һ$URgadN$/k>f?vn%UB&.dbw -&9dM PT=u{NBVdǩLСv5u@pW<]. LWYULڤmfCjhSV;#קJOq&}Qs[ߢmJ6"#f#R⼸J-VZsPm Q$X,H<. 
-]Ŷ;IePbf#vVT F} fm(˔ڷTܑ!5" *oS0‡q{9 (ܢ49@mȞ= M„b2mhu2ҳApU倩?o UӜAcSp.cͪQiۃ yoQ]{)c2`uHx}S:.Tg ,Ƀ|F"˲ \;#yz4i1Š"dVCnkZQϨf|T&sڅ15,דN.iL h j&DU Yl`4cJ9=jDzi"i$ eg@ ^CX[K|`f9H`eh4L%!uNk5nIZN }]}VghP:Cxo;d?_M$r5";ciKשۘמ ȭy19x]CMM2d:bQ5NLcJ'cq65r6bi(FZyTL*0aД.o.ߕS }^&lF' 5 eO8ĸ= `˿&-cH%sjb5Jٛ##}E_.ׄ$&[p^#59nulh|nJǩXȥCN")weHT7]s;8tAƌD<ϴIGyZ.#ef H{@"j@;ÖkԛgCXCFgp"c9o%=P.&'/jw";?x9AXxŇC0{2 @D|}TQ6JH`%ޫLp`WY.`3CYW(]\Kl9ir \p$3 Ȯ U@p\@Nll2F„%f=pք9DَV!d8bvǖb~I i[dے'kZ\B~6bҡ68cr M9IS| d)IArU,[1^&ʉS%Z11 դÈXqCP@0d8l,@٥/|,Hщ-3@7'}s{4429 a* ++ @JJ)bIᵇf]un'ŕ_> &Egc%k%Kj a[q`~Y|6ԶpBufIJāawKi@G$83l3;R IѴ~G> W Bat)7i bA!g!8"Za$g2  \Har:03HDžYP4:O^#:+>l$Vgg4ǜl(9 ٔ*3w<.fVlfJlnWTV3>uVā|xts%hɨ'!](Ho~Y,bTJKU,*[Myt㬓DW} I%Y >"@3%Q&UH5aaX> H8`41^g)%DdJ$_/J _<1U1&`>3y|RY~e`SdjEmLVe7. 8%%JZ2+Xar&8̢|e/V^[ ZԬnr꒏+I7X2t\yK&x;pİ  ='\6[@z.*k*RJQiԭ07ds(oow%@ڟqU, | ;LA=&{y]RKcʊ9 vZ/:dèf¼Ķ uqYNݖ [+C"NaNjvi5D;P&'F =\S.yKiF< 8tc&f ًJ|++`;*n"TzGNY[(x`eEN^ Z9XuV6EWךd>T?e p>fJ7t?:(=f_xOO@iȧ:9oުRAG&<񂫵)) /d4 cpBL|B¨dC5DPH$ACiYUl n$1_pIp黔кjV0Le6`9,1cxDY2JרK9',6G5J(\=n^\^j.E469aG|t*:fϞr`+ W^i9E ylb-o1lGrL@HJ1~)[V=DIe4xWa 'AXmӵq;6drXע/ 5zYV_t%h5G#nrP򎜵}/}$Xea5:JO*Kgf #|ϋKXrQ7C;ITu _. WuA<`;OٸgV w]q*(\]C45up}(4)"JoW4Aw\tBp 6\x"0XDSr[mlH@[Qka]nu|ARh3L(* 'I.=c CPyb?939:B&+[%ev SFWKhYrŦUym2:ϵ\?Y$AIzZ1JW<m>[ѓaJbGSF p_ϒ!=VvQ/F"f/ZTYF_U1d`٣H,#PY=bWqvž`\HE-k\0jS)]Su 5O-BŹaYlPr 1p$IA]^@^״:ᷰ6 JӤ5(]z^(Z1UWfJŊjWpkaz!vo(;ZfUEX!U@V CCbr9\1 +ّ9wfI0<. 
hH0tK 4a#*xJ8-$`FM%5+t^kdsT@{e]J~9*e'uD2 7B~ )ܿ)bhIRc!aD9B^eQ:'p?.5?nٔH̍kp}Nvg Bט=l Gʦ8Զgؑq}T/j]Dm~̜@~JﯲCa0N)P[?dv0eh wTK_Ǜ[&]>Tؙks'Q#kD b8&MDԆ`~:doQ!EPB.FV˜q Clfzqԅy(˽EZY41|$yΚj?J%:QZWYKqS-Q`L?l !ky nyOD˱֚SM$6DRICYkzqT{:}B3^9kUp L[>XQMĘ#Pb᯹*dMb1rS/&+f_`jصX{H#ANqh`  9AUBjؤ XO9,ķw!*sݖtm6ĄH &Zh&eY>Xb(K_jѬ:T|,3x5+ɯ1]+YG6vȫ#yeK]XgeD3:2^lxQ gL@+KϞZȭ NRI!E?I+ Bj s17,S`غZErcv}(Hln2kkLZ͐"hcP Y㲹HhiOŪ18E3R!XV{J܄i>,ȏ0Lgo¦rXͺVi`="bތ7sVlpGWIb."W(.ŹKDgQmƀM 4.`^D~X .<[HSUT2^!_eϝ :ɾYh9+8:H[MUgaR{pHjB  xVM"|Zq+=)<(;Fc,nU3L' cƛ톆I uG/P+̸>к\K-dpc<=$HoY t!zA z1@#HGV7LYԖeOJnzK5̭ԭ8cs:q]|Q$* NM1Żs50lr"hx;}"cTnؠP[Azf;u0ԳbҋiDol-M+l0xN"AU9"AJ&4lX|֠!n]79:AbS00Vy; r]/lȮŴCBt6 ݘ;yhL;X&KRFUTfA=eJ7N&%?*9 p>}Y3r-F#9ą`yo2'Mő9]yfznI-0|zBJi0'\&RN{^ NݜA(ר)=~dc7ϠMPe}b+J5$nbxÜDJ ]J2o"{ଛQ{G&-ۻ.AU6 R9.WV >=42P1B9i uIP͇Z#Yf Vh`rYw9qe6A0 H~nIKU 2| 'q/{.(tsAINw-CXH.faS"&[M7Y=w T_I*I łŸ'D=+FxI7蚌:pqVa'u[̧"qۗ"\B AT$K FLh6Y#V<|.?QN㶅WoȮBNkq&2C8d{1_=x4-5S?R;XC[ڂv[ )MXO]^5$X?nkՈI)<m.gH:lUn! oEyBp4Ks~Kj 9BŅ]Gר#|V JE.˜`6*^ H#sһ*IpMN+/^*2Ix8d+bCItMCB3!O-cDk RI4 ?E=؈Cj+ &1aD,*k۴ @Xkа2Sy#j5nlJ3Og7s56ʙ; ԈEu7D^b"+* Fq΄:y[μM-&&LU[ 1Dk $-W>GX_w%BܱfQ,{^#&Z~2ā*>`ZY"5;9'f>)\ºZ!rF#'׉bؔɕVR* )*1rpLٛ+8y͟jFCd7ʪKCOOZ,BiNjwv\Ịg31k:;eMZٵhρ/+pftg&S4jixR /6ZH9~=[Aza y9`.?dj }cE8{\ +ivؙ#Q@~qW1r/ U􁮟1ǹZ XYpV _t[ILՙT#wbH8+!-O"١(CE2*3pMѓ#@' (ji;v#tD!t%d*ai qӯW$c3o08r*C;!+\s*0JN,4!!R,U2̶4 2E,-=C,q: {dp*zb](}16Ҕ:$wm.A<gy)oFwH aMzk:nso~P:ko01y@N!t^ .@Jr$K B ~L:#@/= RXB]Lax0u*#W3@e1.@{\YY`9B۵f`3'n"VwOVRz/<ddN8806RbtB<2}LSs&Mڌˉrq℉pugdxO2C}!ztTxTlą gqSxK\}"(ch 20>MEWgP"+T`u&1T0K}0z岈:x >E =h7*:ԤO<|B +7#^y W6#b 黯 ScuKSc3N@cOL;w i@6> W @Di\>]rqO%c( MB;`t4Z%Ei,aEM"i~}qa> nَ-0w8 JnT(D{ztz[ޙ!L;: #eUQX :_ތ"AIA:Hl$q 34oea\tRͷĪt6DأK]q񘠦l_.CHB[gj#(_S}>5lg@jFɥT' RIfZTIuE9kݕ͌r4!(zFQ Cm~Z!Ƴ MAyA)YjXLlBB*,h'# hAzUv,ry7xg@tlkx1Mځ*ydgIk@+UNU!xAGK)ow8(jT֓l^L 5iؘ+xG>6 ?Й*D wP"`[Q ;D8Fh+ KL}7-&IEwch,-&yshY`Xڈ4'J2\q|_nkgp W ӖcZB+5.I2!K'#k!GiyNCjx L"$gϻiL cMҰ2n3uzBg`6D]P4);̳+K4MmbJjޔ)+ߩNT PF 
T{2#r0t\94+يGÛ&KAx<.73mѻy}(w\*/,^^&Tcw%bek-,uDt#ۡi?MAp:JubξJ$Su6Sv8=f|OW+K娆c}\]y4Xև <5%2 rMUtEL9ݎ+.=g:dPy#6YJx,EI֟򲇽L |5PXȷٷ,؉jd5+.ʭ.Z?{6ZWD^@نi40&wƒ<)hkj~:(,_2KQxya\lUsN쑇GQTî{W=KA4jէN]jpDlJ8I6~>²CΐlG]S@֗捨GL+{.5[qeuz,?W|;q?nw81-aβ*[TˁpDJK{d&'M6V#17WǶ|dAQQ5k҃_ιu 95d2^[, 8yǼ1\L+g%Jq>\tS{bWu׭LuPy;9 fIREÂ}Y-].AY&&˜H*@:Wk;ZWqFu!IhKqrťr:b},CtRXyD9ah`TvպQlJj" a fUKZ 'Fuu"+8iз߬N|ABmdJۏr;Bo`~f׋Gv]y蘢ʊ%P8X͞ %P^VC]k+z)+|B|dKn] ;s2Y {Ye&8+bHZ$DsſE b9Mq: rgUs ,Vq˧u/RxJ1zX$REUGJ)F rITbá7Xf촙z3v.s#'hȦͅ|x:x< &-ЊTsLX2dYB jcDmCL"cb"{8(GDKLH;0VjC|-^NO9Ć8jx[RGJM*#U%֐^DJ KmW< 1X?Ӌf8PG W!ϥyi$ǢҬMXZy30$;_ȫy E6K0-2Es&`cVipDk]!-TB =c'&#8ˋ ͓6.؃jͱعa hndl}F2NkI3TP`I E>a~¹V%#}yS ވ\> RlTfM, 䡉5>کyAUcベa!m-Re%21HDC MX!- bfe-Ւ#'s4*GpMc[;EF޲DU%>n2⊵L҄YY׬l[W9YRtu>/b+a&j $@[e9p؁nz 1sȍG&(C OJ+; ofd`ijm"y]Re-Jo^dyC<&1`@oami._ێ]xM2)?K 9%Fr™>˓PyZ0B{~Mҙg $ H*['GNYv^C* W:`GZ;G6Wɢu[Je߈ qH|=ӆڕ'Y:)1hGELD&HF.xu(D͐2 c(F:>b&Ich*_,1D`;z6n7bO siƈIKj'%^')E\ mZCz0wTWZD69|]{ư&|Y;X:bHWz3 SjSHLU dJw)P1Ȝ\lΣꀘ^R)=18HFOo"թE##LofuWR neIY2B9 6,RS&>|NPPgLxSL41fWA_e굠^,+Z95,Z\)>&UP! Du L|LT1>b!&;=C"6eNv֧ ^&ЗtU@} +,*īOEbnHFHXtDaQhV5 0Hm,ཁuiؘEpź^o+`)2S4A\ ܰq=k\T*lKR% W[KeY30Xø5EK#a7OɛA'nw@*ؼ|AY):L"V6zeIVzĩ4 & 璞&UgpsKRr>CK"ŸV:&I=lm`uPD|^+Ez҄9SGi Z؅_zLk@[Ȥi84QFΫ+6|@k4m;XiZ\ J(/o ~MLOtBMta^Q^x0f#ZBW|\41$5er>5SvgygZVZg2wR]6^qB$ g'Y SD%uSo ;JkÕVTBQ*j3D.EH"LKƕVelAWJSj*)?lTɇNg/wokPjاeɦBخ`{OQذ!)@\ftU ؽ+q\otńA p5;(Tsٯ6 |C9">g&F6*kXXN]Q6=fi+h]b3fqNc \PK28O>?IHNsz/tɀm"A^AtJKyMS< ?2aGv ݃ԋɚ<#+suQR*m2[qrU&0U*~ƛs{[ߺh,"\o DmL)"&?'P|V`aFC#7K(T #s"꿘P_>JNNL|SO)*+*y^'s 8B;خڄPQ/g4&[k4r[R?ݎ. G㛩[UȎ-!pQG|TR{]aO{>NX$oBҀ١cʥ*$$qg\Xb-Q4b+"e@Ajwx :$zAk*v*!5`667dZp:ћb!p!,#gi2Yo ХnO߱ɱ\Э I>#?jcAo{>)rP)`"`RM΁P¤=@B!rshb@X ff.wp _ad+?T873 fN=C RB=@ Φ$Q$:K❭ÕغU$!D.nQW0ϒU4}kn=1 } i]a/6sCsFWF'}Q!;``RKNOHCm[Y[Jʓpj2\(~5MzȧJb{ͼJ0+fw`f7`EB'/$Ř8:( J S5OM,t ͋ (`5x+V520W= ;7" Ա~%XH]ü>#?@ʯ q,Jۈ$&֏Gomb')i^Y)S4/}%CcEv؍i$T~'5! E_mV_ZV202%䟃\+QLҕ4ãVRYGB$ #IU?t@~DC F~DE ˇXI&[CdL(s3k^&@v jgG &jyZ%?_ST-#7WCHown8R? 
ɴ'd b.`1IIXִŀ(iD"47㕃ZW`t'tS*9X5Yg`ŏx<>i)}Ti<&WXLܥ-O)ДE<mHJ=}NUhEBU3tFBoe E Fwݾm^׼ՙQ bSl,Eڨh1~uYxo D,N$IiiDن YnU-_z8|;oTp3rޘcaqJ(tTk}l)CcXy"ITmE"T4j TjvSa܇&$DIE"mױ[0tr::4`;mQJ.F*H[sGDi1Xɵ69KSoeN/SAr3_}Lg?~'^%,l.Nn(#=)>GV W2ky<5 s쐤 H>)6nJ\hk.+5,vA,17;kC/ϕ,ƪy"wv.ARATUKKi/uA8raGF3JJ4^L'< ox߿{w Pq!`bYrdWΖ;:mAV`-pML72@;f x-QēXjU2Rwa]re,.g4]N>v){ohE .Α#K~a۸7 {sDߕE|fNO0{QDl.GSU.JO.dj45cYFJM䦊# M5/ }u}<~&5BB+DѶT1m%i]@Ev N.},Bg>vvɹȝKGJ$;rD~ Sa)?Qw=~~8OvHR"u*UfuC=\6yK(MG1ubD2 2`Fi9u }J}@ɨNeMDn[%qJYbϪ(SMO-s,D'@Λeu䳳2)cPY0r$930K*!ay\З芒P_Tx O[F&Jma.Vd-/~ֳ=St3?'/VY܌a -Jp9@vgjk9wz 5} QN?V8$*[u3UMͱDjWiYApC ޼̋'f'U705o+Z{%b! Ia0Q%Hr ?-K={t<1zKk%\VbT 1znG.c8QzM93ֲ)g4MTɦ  )r=TTLD4U3Z7hъ'٨bWt<47ED7WKz}N0<Ǭ*ދW96~;O: ` 6JA8[8}+OzϏxӉ=ف{}Wwm9e $sY ,?W K u^zp/ӓwһ2n슷 !}#;#W^u@o7K͓svBͧBpQFBQ'D*hiYfaSijTnV;JaW}ۿsݮx+^1 qd7gO8'%߇pm@hiU)qrZƟ.cL0K3RmMzy:w=Wy,x]Hj$iʍP?ŕeb8!@VCS`D2pdD]-gYq3x;w]a,1KD!@(&<7͗-)[3Gy_YEt1Ƨʓ@^exϊUի,F$ѸJ"5aNVL貅,6$:U~^hLɃe*jS{@ ҙQأb*Hg)=vMF4/pIQ~K,9f >=M }^_93[&9wNdUظ*j-@١y˩+1nvː,ܯ@$$F㝁} NuUy ب=1uYfBT0BBI;:krdeLC#C1Q ӦA!nY`w.ATf ֹ'9U}3q4ђ;C`8xjWa;&P29N8Vez=Q&`ӺRf,qAs ?#w۫ۏ|{8ëBsJ$|J3i֝d$ɞ]lF) &{+F^Vx+)f4^ݗ\+z :Kq5@C V-ZOr[PbRxS M02Ti!YfW@/f:eD}ϫ_*k?K=[c?cBBX-Mv(-!|T&m~P^ B_uWO'jA=pG&:r#+fX]=U^̅ ^!_-ZnD78]Y|.&,U$ՁCꃃ|sBX#'ah55#oe/{nwPr9i ymHk,GTKWi5a0 A&*e>?ف׉6r$$߳nٜf]&s9= 5ڒp]X%7yB۟@c&)p"*gDmœa79Ig%8+qn9VqP&C+\ޡ:Rұi"c.˪$jY\pܵG|.l,q0q҆\>kQ!j"1;VC!)6rB[i^Pkطf?znB{ض-ݾzS  8je&g壂use2ceWj 9VB# ix=\vJ"cte9pc) 54d^zCNg>b\OӤTZws攌 J3S%3;c,͡0G"hRݤ፠ZZzծ'/ . AV﻾;zC˒rijyHV)..w@ b 72D_T/} /@|_Uϙ  WI|BAA"м2H̊^*k蹃ꂈS.iGBr. 
[ 0$2h{6R0ݰjKډ#` Tȕ#ed$l)}dܙNRZU:ILgdφZrRg4';ۿ {^%7GL7@Ӥ%#J6z7'>r-vnM$\l+㓽&$A?er+"!.WR%2balR5]OifDZ](=1c]J9]^!wT%wJZd;_ӳ=iNu-B#u a!e ^pW܄hpvyyHuKfdڮ: 8>oEխ ^+LˉC>EC«S xQ;Kyϗev]wkkrëܨ([B!%؈*'jqLFCIM2\ D-Odם#Y &1B6+^ٱWU!nzUڗ6F0"5̠xR>\Z)^qو?زBM˔}"iFu4h[ t{_g;5JdQdʔk, ȤgҎrdODU#i,OekWB(ƈ)8ʻa@7Ct_N67ak8\C8i`HM{Q/٬Pkv+=o~}ofu5XLaӠw|1'TۦV*¿En Fn,pU1s؃[dzt޻eC"4k 5Es, jXːzg!OA[ԧ'YW a`<.{ŶoSh+ "fl6yM2 M E8d3Kvf4&'r1LU وa@IC70@$?3N?Yq_dJ:Rga2rZH5"Oi .;/Cajf@(toYf`튺VA&F;'cbv(}B׿ZTGAIdxjøU!9#RԉjwNCuʳHY{J_;;'~~'4#\R dN3v@ ] ZK$!X9l4j6A}UI!׽mY ,r]i| o:0|ʪd3̎>A)"5a|NL~-xVBzF"\_T`PIQH- #NiYݝ睗Ao|K"lA,5@'–e:,yM񠑖=Oz%e+Sn:C&Ju"Q[ l;$*tR>o4A}7om.*0gur3>xI+U)G(gR;C?_ݱ4eʳ@ ,ժ.[ܪBXۈ b(tab̸ $OzF]c:#n}.r:3g].AVo:B Ǥ5`l檚v$6|WYAń֢*@n#a& 1xKoG!?؋EJ}WϞn7z[rBRԛL,iOA>i>yikE8ӧ_ a:ND)59pEyΰ.OPX΍))#^!cY˵ EQIJQFUn{;DJ#dBk5~38g;-fdM{wwCž%nz޶}s e V*_A5ħJ״qζ7=Q5NW_a(S29꣊ބTٳF6g5nK-&qS3.UYUQ̧3eiٕP[X=\NjuKsaΉ֟,=Gaԗ؏|!&v!/x3[pPa-FV)餘U7,$\5_Uas 9j\+DvD|z+ 5'npg31_{*cUlhOkCJq!%=VKS&¡cӌ\56U(IiM#+㚯}kE}6I>7qb@1i0N2 ;dW3\Dyb:V-o gۂz@l@YN "Yj_ޮ<<^b%"ڗ@ ;Θxx:fipYH["ZjD˭!aR f,\!)3p#-Od^[]!mx=iPnG)\%o]'Է̿'Uy̹IBᨍc j4Ƚ$c AV i`d\ɿỹ ElL{>1O^ DW, aЈJں7mP++NBH&FtHU'~'??-^=Ey[Ͼ6X4Jp"eedRoftd3#ǂY+LC=/cHI6A6^DŽWv > dkMJC!AF\2 ֆPPc~\᳈m4lo3+ŵc0w0y-P"V.δ3{o6"N=HPO4: _RڵY.Ti j~{@+9=5](U3)^lU$j7L p{Mwv| )Q5aX.3gS[0FR+w8-W^jTĄma;6ȡwS9uEr"]/7o[2%^'Êno_\z劈O7$ rCNLNSa}R3'5:ҾFJx Avdu~Yk[Vksc!fLTMBQDztB`t;\ޒzBpeOeOcGpf~+v}kws:^DeBlT$\g|쏾%|D==mz?|M$bb^1p7@eJbvܒĸsd0"1ez.il;q&U INf )-OH*]ˑkDNBDФz3P#lZ*PY0y яx|g؈I ˿޻I36CzJ3+EQY1fK"nc >y؝HwY vNWt, ISMҚ)ab^zV%̓|޼B2~DŤi-(1BL2g"%BDa17P{+aG> O_u6.ߗzLcXy&&?ʮ >۠4.j| =l2X4 D wՓө֧`0PTqzo776S㐥ay}y138Fuիab,jo[R j:D#zE#49e~#m=!٭޾}< p ۟%^8ފI)ѐ>"eug(Xša5M7_2V>1>o,o{]}d[O[{oѼDǔl]K%a@E[3ٗ x%! 9 bKMwH^lG[eag]퍮g .YX)gBi`X#cdWd;WLX2%tvƘ=+56#O? 
J8׼5/~ы ??oS>>OzxO߿{W{v\pt=0:ËaĘ=Ln^#'k`#o`4\}_m0J1tC3=}ZӀDZ1v39,,b`1q֬ ~:jT< -xȏʓ_g}w}f^W_5 T}0^߶{ ?y;7^ޕk]xy GC \O#?~_$z^;~_Jپ#<x?>|G?&턴xIENDB`python-diskcache-5.4.0/docs/_static/no-caching.png000066400000000000000000000715301416346170000220570ustar00rootroot00000000000000PNG  IHDR5sBIT|d pHYsaa?i9tEXtSoftwarematplotlib version 3.0.3, http://matplotlib.org/ IDATx{\Uϲ"x(V%MS3H+},͞JViZOݵL5}4ҟ]DMadv~ ;33̜339$I6" 0$"""DDDD6f @""""aHDDDd3 l iiip\hР+R􈈬" ux<L>?iKۑ#FଳBqq1֭[[o&Ny>袋p)DGG"`j:w "''M6,o~;z?{ؼy3vܩYrDDD &&<&`"RĉQYYӧ8oEEMt\.aĉx<5. Á˗gt 7|_ի6l:uk׮XjUu/[ ݻwGll,ׯ._{A֭tRx)|׮]Dll,5k~<+qqqHJJ„ G_! UV9r$.\_5cƌɓqygE߾}C\֭E]-ZٳѥKL:O>$"##qu>/77#F@TTN\O?o޽kq%`ƌ_>nf|w5ĉKѩS'̘1m۶C=5k)))A'ƤIW_ᡇDD5+HM6I"##}J۷m6 4fO? %=#;~K:t?JUW]%UVVz}lR }iG\.t}_^ _7o߾iҥiGJIIߴ3fH~7ԩSR۶m6XHDjݺ5F ~ 8χ~~}@n7l N:O8"[nMz1ydDD wƍq9jKݺu1|p޽߲k׮EfpW`ر2f Hu<***8΀/e4q𩧞-[^PPBl2zcccѿ|8tPx7>r .2deeU/==^v풱ujٲ%W-(ܻwA9""aHDHOO/~f>sLAB{ʔ)$ #Fɓ'˖-Xd ?jߗA8~o!C ""SN޵s/w}7 .5Dd]43i$سgڷoީS'5 ,@aa!틍7bɒ%2d233CW^;w.mK xw#9s&.RtM8rΝ6momڴI0m4W_}5\.6mڄM"//Oq0gx㍸{ФI,_7tmΉbHDiӦ ꥗^B֭xb^))))Sd{ܸq81c ,]GEݺuqyW^iۿ˘>}:^j O=8ԩSѪU+<4ibccѱcG1"@ݺu駟b={6֭#GW^k" Cbc""S5k&L͚53:;Ddb t) ХKTVV~00gDdl&"W_-Zs(**²e˰{n,_0$"K/a娬DFF^up Fg,MDDDD6ql 0$"""DDDD6f @""""aHDطoƍ֭[#&& ݻ7fϞSN=""[@Hu|:\.9:t@yy9 <`IDd[jر#7oO?M4޽{{1()--Elll$ u1 WDDʰ TOɓx嗫ЦM_WQQiӦ!==. iii8q"<2iiitС6YYfVZU?B$uYhܸ#G߼yjY:sz#G/7xOƈ#dm+!"$$$iӦعseL9΀%K2Po5knݺ~]0tPL0˗/ĉl2t su1$"U < ,_={e˖zѮ];e˖NOO1_>T{5IЪU+}٪sÆ 8}4נA 4˗/ǰa_b֬Y䁈MD|qqq3f ߾}={6.r̜90h i_s5ؾ};V^]gj L}Wt"77Z-$Ikc0gΜj{#F]t|5DtX7pڵk% onfs=5j,XB7nĒ%K0dVzj*\wu[еkW?~.ϟN:} ǏAxQQQ?0dcXz5n69rH,]ظq#郒|';4h6l7x]vT."D+3wyrбcG̘1cǎKhݺ5/^իW#%%9992eJҭ[.bʔ)Xz5,Y$\|h޼o˗cܸq>}:ի[oKT~g}6}YRSS1`\qt:'+oaÆ qn/_ )8""0a^~e>|8g눈Bʰl2\s5 ((6Y#G'`ժU}[&"  ڵ Æ CRR{9wa@""""a@""""aHDDDd3 l/_E||$IBqq16m{օ1 ïTADDDp! 0JHH087DDD$Fjj>nG p7!! ع=l 0$"""DDDD6f @""""l6m Á~$ahҤ ԩ,喈H? 
KJJЩS'̝;7~i6l؀8 8eee:甈H_e].,$I¬Y#+,]x1tP=JDDD+߿FVVoZbb"z聯ڔ'=< HNN_ & =rƂlK2keX[yyy5:;z^ $։ 8OqY^l>]^EOcuv\W՚ƆqсT'H$WeWv%>F;Q'WZ^{ʏwE":zz^ EEr:(ݟuQUM/wӨa~l" UU|L$O$D9#ܫ}Tx}5Z _mUcS^ZDE .:𱑓މr_lYv"&;_"#}9+A͖G/ 8;bOeczczxW.2ӕ^5i y:ӻsV|M斟q<7nrݾg?aٍͳX 2ݦp]?`=CsRͳId pq$5ຮkl~h]'<a]qYSXZS? HM W~?!e?c?CŨ^iu$DWUυ7ø-/> 2:QΠMZ<׃~ۧ5pNu¦'♏rk66K|M0e_'?A2Ou]"AeA0Gˀ:RbċR/3p]{ &e _7ܾl+`?2p-B潮+x!` СYby͹M0śn'}ѰMbȐ!:1WTP3KֺtWfYip`ԩ:uƙ UV-P*XjO`R3(OO 4d'^[nJZ^-#ouWǩ_X ͖}FZ`#T}W/ J4lBj[{@UvV@Maz $Jd7Tݦj@oޥIHԂj'{j)d> ٿOB]iͥjȅ2}p,;ȖI - 6d3U߃nᥭwޝULOZN­&ns5Ml9@ P.p#oԢRڄ9S"Mpץh=aUY(%%>yard' x@!_P5=-"(强DѻPhDJWL#MBBό\,ods 'S|.ȮRg]k86"F =-j8CwQVVF>dCSZm•[sw5_ҽXyB.o`4V KO۰(?yO -DԤP1ӗo54ot8FM6Vi9u\a6N1 +\otnrF~$ %+޴Cah"Xl lun UJW1mS:d 0hM7UNɭEӃ! m=Zo@A}EuMx>olF{*4 oMC!7c BZ'XO.I]M5.w>n"CѢJ$d0i cODPSWlҡ#CqV'a1Ozd\L O,`h} D^g/I5n } 9ZTkڿպwvF,_>~zy.;chgp\z? O^)4ȉr9WVT|A|_Yw:|Ѧg)1Dlmn"@Uxhc$A7#p۟U/(gTaHWmlkSK\71>).ͨa3*-?{.!m1MA+LW\ 4K˫Ef ^?A51=SbF1(P鷼{Z@; 1^ozo]N9~VDqfU\҃_"1$Y*y陬֚fp]~ V4ZWb9fWllVVVGEVPNcڴieҋK rPUNM^J;IJkkuu{U~סZyq_Ӹf@- 婧¼ydo7oѣ)8!G,=]ppTԻit&]V$[mW_+ĠAiiixװqFspI_"T%J |(PKVY/XP[DҐ׎PEٶ W^Xn~Hj M IDAT_. 7y_@S.5'*)T]jkҗ?4w0 #23>}uT>r5dm ? 
ۍmtO< tLM5ftV[ZWvH]lI- U\WoUd&.Mmժ\U[/78w֠fZ.T~DM"O 7=i bQVC\~U%\ `tt4JKK|  hРf:CU @LZ]V J|BMayDw})ũݻ76n܈+W~4oܙב゙7k>_j4)8^"r{oQ>hF;w.j*̛7͚5Y^z(NOd1O@գɛP/ߛ:@ J,!T `EEpBg5(W Z YcEzao*¥8"P5x ^M>h'56nɊkK*ݻo5:E*ӻsR,p;_>]">pƬ߿PMpw?]"..;v4(g Y+C=;1݇FW>dV'y@D MSAoDϟ :pw9HÁJF*S$Q[T\tϟ;Bz HA/葜8{j"\~@2zX*KʇepU$VK6/sl-U[n-[4: q;9Dzhp04u"ͼ['.-<4;^W_}{FӦMqAY;3 Z3hE9Do ux".7oq嗣^z5ki/>|86l:us͛UMLW_^ύ%~ 4I/TǪ„6-,[Cbc$Uߧ=v \c…4iNoznݰc9qz(YvŒ3P~}03]F<'@Va1FV!Q.a-uo!d\~RQ>\'$\K.զ\.SO=T+iZRm"3k C#f @Mfi#b=1?*=lժmVmڵkѮ];ywѭ[7\wuHJJB.]pBo"U4g`ѿEyޢM>QIJ#R5~d&6jqw $aƍxא^zItpĉشinDGGcԨQx<~sݪGkZ/j˓дz!n3ƫZ{4IB3?o 41cƠN:xGPZZn M6ٳ}D[nx']tΝ;1`^^rssU˃htҨ]f18]mr$J3%W/-ȯBIxYѽlZZ|A [  'OÇ?[oU5&M ##oZvO?]&''EEEC'0r;tɒM$^{{ll,|?ӻwoٳo?r j˅ )P` Ez{Tq(>h^X: Ě5kM0a-[Z:&L7|'|{Ŋ+`y睪!;;uSbRN2˗o_|6~xUKիkC6mf͚aÆوjX ZS3S#AkwA~׌>U[4̋T}3J%A^W\?/2y_g}i <Vud ;=D}w+}lA30uӾ¼z[4"\7t ѻwo4n}ڴictES5ɭHNM:izhu`Ļ\kSԀSle`vvv7y睇^x7m̙ze#BC>~^ V+bMѼ$jL)xYC~zȄۀ۴iOUV!7JJ/f Jfm L1ýZ <=)fG:9QƜ{BP͗;Hz*k!red0Y0:`D|/i\6jETB¨KʊpbA7jIO>Hܹ謘(407Qi-ˇ-O3<0P`TTZhJbJ_fS>d:dVF)zrk,5 n@4i&NǏK+Z-YU)"[H=53C5\MTaR0BjΜ9ػw/6m-["..[n5(gdlR6q)8^*? oO(QUC 1: 2]n4U BrSxDS(Wg 3  هp)S*kqZ>|(0Ҋpo?pq?~;wwmtL{_/^Tjo.c漓}WvZ|'h׮oZFFΝ3GսӵH.@4jau0rDyӬˇ+mb p5^QQQզGEE#PZn}TJ},T: 0t粩y=Lz ;< {o/ &/60g&EiNBv&L7y,R yD.3gn7Ґtj n?#lf5_Ts d~ء<~G>GV[O>ݻڵCVV9Mf$7:at ը-FR >"gr {HGhٲ%233ѿ\r" Y b}ӻyQ޸|Wsޟc0i~)󑟟^{ hݺ5Ldff"99lY@YMY FIba%0#NC>c=|8q1nF|ѴiSo^OÁ{W4H$:Ė/-A~lAcMBbbbп\xĚ5k/mӦMxѱcGMo "HPNZ}6Of/esOgOyԬ}6߱1]ME@(//\dff^zq ̙3W=͓'ObذaXp!ׯ,da( +6)e½.JV!3"^[W^(QT-/aj 6UV۷/ƍ+VI&{wbРA?iZ*+2mS`L/zgKKĻ3$3'T0_4ip߾}ѰaCM|ױuVlڴI*kuMZXJCA[(Ze2-0MX`bccSOiӦ8sq]waժU8z:ts/_Y!11j'.˅lUǪtq\/}S|o* kVuYE&RYih[ #LK/ӧOdž p1<ӈO?͛Ce9rw"##> ="##QYYYm~:Z~F^AcH؀El{@MskWSXqM#jEcgli84h 4@^_|رcߴѣGm۶x衇t:-rrT˃ىt0Ik ®9p'3ujQT ^/6oތ|__~%JJJЬY3dffbܹT-j5qqqhذ52?weAֽɘl?(ZB$b+>0`zPRRdffgE~ntlCiACrzP<x 3=Pwy YKl00!VڟW)XҌIgAff&>l򐟟oXvd8]! 
Ln E#ETB 7,Bb5-hƭP9=uE T0o~ԺGG:ENaUU"rO)oQF dClh6!ILZUoKO\#kP{JZ|̪dRqTkK,hVqt hJ{A/Vaߣ- 6&4V<'*FŕcH>f.dR1=C%ރ)o׆(y7viXC&!#cH&{gTsH~VK!/hLQDmr&du xidOR:nᑖÿKD2kke/ >qۄ/>{*XYoh$1.I ɧPě]7)YCӥF*C=)`7 [z$(umw IszfA^ZZ9זZ^ŀ7SqQ/چ%MBU|R*VAuHD[fp; 7" @"W7nɲ! ZJUb35zk5%bpǁv; ~~yd "#aȻEj֔:/@ )0=:]+> jDÀkUy҃ןőcHOHt mB`D~QJ -&k3DjaG^%3SH0F$b+HMPJxloY_P1mQ}U;`hp% 6i6 He< xz&bޅ̓(PDaH0[WL⧯H>+O=B4Z^jRYNNCNI6!5ٌt .,/h#BI^hp 5dfFY[zfl`^^?|#)) C ={Ζ%0;kEmV'TKj0 ^#f"6wl l~g;7?ӧ1`5CX@A%k\0Gz/GФ*D(AdFgHk׮{HJJ–-[pE+f 0. I&|U3 ci4h%_fśnv $kCHy9CinG|wD 5ɕN Zl\Žދ޽{CCbb'55U\֞H}7ʊt(͝Ƣ8 twbΝx׃Γ"ϡCt̡|+WHszr?(`2Ԋ >Ӗ1t۰;0 u9h2^rKa0 >>s4o<|. .Kǜ-dW5ƽT½iȽ/ɹL,W޺49McP$?WF~~>ZjetH'zF6ڹ1^1OUY+o`e} k%16e-xV|`*[wy'VXwy8|0 11u18wb,#NԼ|ÔS!sfӕy]˹~Ba,tH }XňXlp޼y(**B~ФIʕ+Κ)ظIUr&`Uv5kͨ2#!kMX!m0|CFR0i)85hZl3d~ -'Td򉸫%zNq8 oFQ`\}d̏4bW-f>Gk.Cȗ: jsL w-м0$UǶP Tg-u? uQ }7V)`8![vtiM^@z3sKoF Ud>\W姨Do^;[hG=x]8Qz:֦zS>K]E`(~55΍?43 p?%㥅O!o:"VS<P>iQp*}F@ja( /Kzޣey{c*ii(T}dͣ4L|‰8޻Sw #?*t@" wIeX\ӵyy bKxүz3`DgPɘG&`"#%)TRmMxWK6lTuvzRzDej׺_Y!``)pc #u&L>c t_~u̮:WpjU;ܷ~m&"""+MDDDdW l 0$"""DDDD6xb8l޼9C~~:#"21Dd eHDDDDDL'OF׮]8ׯs4n ÁBڽ{7Z4h111֭}]4GFƍ G5k֠o߾GBB?|X0eDEE\nCzPVV."" K/~ᩧc=GbضmqƘ7oફ«W_}W_}5\?0f̘8 2W}vL2?{ᮻg4h?L>q1b***rJ˱j*\s5Q}$""+iӦM_QQ!y<i'N[n7ѣiʔ)qK{TVVz^zIguVdeeI^7}„ %IB)>>^ѣt).׳gOG~뭷$"Lt"::zqqTTT[nغuk?~~)zرc8v~w 8?#~en68}Aee%<Q\\~Z-^F 6`߾}i˗/Gjj*|gLoɒ%ر#bbbаaC4n|j\v޽$ >(7n3e#GiѢ8q|]B} 7ra"6l_HDH3@De˖oƐ!C )) Nyyy~5kx^ߏM6~;΀I(|rL<VH)DdjVB֭[o՚;#XZ֭QQQR%O;wV nȑ+i&,_]tAUQ0l&"S;SWmÆ 拍MOJJB~/~@ôdG^^^\^KxeQFxꩧgt@"2EP_~x뭷pUWaРAؿ?ϟ DDDD6f @""""aHDDDd3 l 0$"""DDDD6f @""""aHDDDd3 l 0$"""DDDD6f @""""aHDDDd3 Ν4ĠGظq^u8 2D2ʕ+)S`֭ԩ#G\ѧOrJDDDd,3gرc1zhddd`ŢE.SYYaÆ!77[1DDDDƱDX^^-[ ++7-""YYY믃.7uT$%%[o#DDDDB4:j8v*++7=99w_|_~۶mvka""""YPb1 .DFd/DOjj$"""҆%j5jӉHII6}pMzHٳՖAvvo L`tt4vurzXnjm;v#gԹ\.\.7HG ;;FBnн{w̚5 %%%=z4`ȑh֬:-_^=6j,p 8z(&OÇsXvŐ~ HDDD!Idt&v#11EEEHHH0:;DDD$6}  @""""aHDDDd3 l 0$"""DDDD6f @""""aHDDDd3 l 0$"""DDDD6f O:RĬY\YW^K ѣG̘1W^y%͛gpp֭ӧ`ժUHNNtR€GABB#"""ɓ'GZZz聞={6K.玈z ZOؼy3֮]~gUs"-- 111ѣ6nt޷z ݺuCzΝ;W_vEM޽u\٘?>zYfaسgߠAL4 
m۶Ett4}=III8p`XCDDD$2$Iމ^}ղ}뭷dףG3g"55Ǐ?,kw iӦɚv#11EEEHDDdIHHuyflقu!11Q˱edeeEDD ++ _uKuaϞ=袋xv~Ɛ&W^yC=믿χTVV;;v HNNݻw]͚5 /K.$yyy͕'""""QȢEp?p:ƢE4M;>>۶mæMO ;;AAQQСC揈H RQQݻws{nx^YhԨN' T{iйsg|C~rr{."` B %wY l/%+ؚ~O]\X%S35\W@3&|g_3~;93T/3=r233$iNNN EAARPP3gJEף;ADDDdC+WVիW={믿yINrr2dee"닌 w 444`Ϟ=㏱vZ$J%x Q3$;>>׮]Cjj*|2(wXWWW_}₾}O>A||yvJ ^p!"""/888@Rȼ<wFPP QgFPP._b ={򈈈;8|0<<<<==h'wQSSlymm-TDDDDd߄qW^#G`0`0paDŽ DGDDDdwl!** jjݻ7}]֭v܉ׯz-2""""$<6ݻ7CxҤIXlY˗/s='"""""&<8pcƌi|8pt/P}"//&sss,*p T܂WW5"=R*DeQ?5ꐾ4.POW9&.NωnY.x/Fn!~sbõY{UҦO-hpZ4RNo~@х <﫿HiOF|_Ybg̵Ɵw￈twuĮYO»ђl 7 `Yg}'O? (((gl۶ qqq"kSuu54 n^[ժ[e=5jƨ=uұIN:lVï!!4{;+Ob;D~s7~7 ƟcV|zx;T{[~ܼJOƌ{7 R{~U,ITd#Mء"=SJ{R1׶Kbίq졮X_M9kEQJ{ת0au!*koA>X7W?Ŝ}Fm#&wR^ș 7l\s|%.J|dgk{oupO]8zߝ߶@xݻw#==%%%pqqAѥ3;P~U'{k_Qzc铜uZ M~^=zOC[lm4@)[9~r @'ں͜uH:m/^mX5'sn,A_?̱R^{qu=쎿|¬k[/*p7Z{Kgx{Шk=T [|L@g`h.k֬+P^^cՈhq 6࣏>Bii) 44魮ߒ@:Zɝ{)xkԸVs,')$'eSG_j~+g ;z*yA 8&M!OɕڭQm}ocP(>t`Sx}V1=}o:9: T>IeZ$t77oNo1ʜK%<X%ՁO!X*2ZIy&… ?><<DDDV^ ̜9A ˗/ҥK?-%X`Wv#>L*e"hsؑyګ9;Zc}Od떘,9`KNH,e@sgmIy5_GLbɡCpI"44#F!11>"""[̙3jHHH/222˖-Cjj*lقMRҁI ':pƏ( ر RSSѳgO 66~j `jyyyHNNFZZ1x`9Ki1c>{xgPWW'4q14hR?h8::?ӧj*t]ti6mٲeXv-rrr7`ٲeX|9V^-4SWWc͚5-ˑuȑ#ҥ F[nYRji`Xdd$ÑM0k,~\v ^^^(,,ēO>)W[[Codee.˦<(2n8hZ|e&M >6B۷5 y0|@UUZ-6mڄ)S,WxJ566ĉ5.S*EQQOUUCp%aƌ;vIߥ aaax!C`Æ ˲yÆ CAAΝ;׿Caт+//^Dyy9A 2238-~:t:ZrV3gz̝;0`rl^nn.q1ѥؕ .`ڵHNNƛocǎaprrBbblVJJ ѷo_T*t:,]SN]])//gMFH6c СCKy/_Ɯ9sw^j^0 R[lݺ~)lقs΅+=~lzJ T}9s&vڅ}];q*++1tP888Άt:mVϞ=l_~(++T}xב)S`xk!##CtivuaRNNN EAAq^GAAVf fΜ۷/D``ˆ#p)_aaa:u*JJJRDhMUt9<#*P*M/* z^PE)00&׳j9r3KNNFbb",!))Iti6mƌزe v܉]h":յkfߣҥ <==kaذaHOOo~=zׯEfƏKW_}wy/lNmm-Ο?oŋ())1w\xGE8R0U[zd0>|XtI6@7.̙# g `pvv6װ~z%ټjÜ9s ZmիAti6g߾}-W  -2hZaĈgϊ-Z8 ;DDDD2HDDD$3 DDDD2HDDD$3 DDDD2HDDD$3 DDDD2HD~( ܼySt)DDHDvIPZx1 WBш.Ȣ$"KMx<ٳennnpssQpHDvh4P(&ܚ}i&t vB>}ɓ'7oF@@wٳgCjhh.] 
22DDs]5Gvv6rssQSS'gEnݰg\p&MBtt43gӧ l߾F©S裏 #"qm]AAAɓ'?FEEz C||<ʰqF0|cƍHOO;DD-b$"1VE@@Z-*++NNc=fxzzZh"b$" ezP[[ J'N@RA&Dd!CPYY'xBt9DDp0x0uT$$$+.^G"##w]QƍyO>ñc/4"q"h""""@""""a$"""@""""a$"""@""""a$"""@""""a$"""@""""a$"""@""""a$"""3YIENDB`python-diskcache-5.4.0/docs/_static/synchronized-locking.png000066400000000000000000000634771416346170000242270ustar00rootroot00000000000000PNG  IHDR5sBIT|d pHYsaa?i9tEXtSoftwarematplotlib version 3.0.3, http://matplotlib.org/ IDATxy\ײ‚!" 7jf(աIefj*W T4M45LJ,DEyJr,,;?uW]c^c3μ3 DQbHDDD$2LD 0$"""&DDDD"Hd @""""aHDlِH$yCѲ~zH$dddz0f̘]>ƌ:u<^hh(BCC? "2LLɓ'WzO>ءcH$8q "B-c@D804hcǎ'233bҥ4iC4I#Gİa ɌY駟0&D&h޼y8|0egg)*P(++-J!J \Kgccc0 tZh!!!hݺuhڴ)"""H$㏱zj@&C8|μΝË/777١iӦxtc̘1prr\.Gdd$JJJꔟLHH@- ɐ(..o d24m1Ap;wDPPd2ZhYNUu=Zxhq㐗|A0w\ԯ_kFXXN>]~f[y{{c„ שw!ΰGVt*}q!44EEEt@"oyP~}ڢ{x2WX ;vqD&=D&ĩSTi#Gbر:> V͛7ƍD"… 1d߰v kkkåK{n̛7Oky/"4|pwwǂ o0qDԭ[~~~ @rr2^}Ui{qU,YDko˖-s=+Wյ3d4jHѣJǍ#22oӱ|r;v f̚5 sE>}ЧOgϞ(++ofϞXc8 \|YkjZ+hUsN0w\^H$ŋaccUv 'hʾ .T$''GhРвeKͶBBBVDlF۷ֶ9s@=zt|p[L0ٳR4˗/֭A޽+ B^^2mт ~Q۷PZZ5OHHy,7o.(JMҥKɓ'AR) :tܹ~z2ȴ0 ѣ<ĉXp!"""P^=|wzrW_}9mRe 4Z:t(5v 999+AZJ$8x ]vŭ[P(CBBU?B*⭷*!٣UVZQèT*K(,,Ď;4f֭ѣn޼youArr2_~AYY&M-LQ)Sرc~p1cʔ):*;%''#""ݻwH&22Rk|#G֭[;v,jõ>gDdz:`Cjj*QXXgΜ5j\$"++ #GYI]A|[Ώr:^|ppp*o޼fzU*_߃ᅬ_7oJ$/\MUTT<ƍk-`N:6mUncc j_t ~Rm۶7|H|<[σkժ???CD5cL :t:I&֭[6mڄg}6m'uU]K=l!U;wb3gz5MV L>}`׮]HLLD~П"2$2#ׯkʤR)^~el۶ yyyعs'^zǺJÆ N2LUŵkPXXU~9tC믿0zh 43gԙ[nK.y_e]υ ѻaqy2k`wH$HHH@ / %% W߽{ƟBD JNN{pȑøqPTT#F{,=u޽{c߾}z fϞI&[nx# „L>&*ٳGxWf͚ ulllF &M*g…?ԙV~>Hg!&&FԩS'''Vhڴh&''GknÂ*n}RXX(L:Uƍ }֭LZW\ѣmի vvvвeKaƌµk4uT*+xyy vvvBhhp)8*SY,9sh-_\h֬`mm-xxxǏ׹݋ ܻK=^hժmq L7o pA* ֭[-}Z˖-|}}L&tQ? zz6!"GYKjn[D5EV C 5kUc, Xv-BBBQ*--wqFQpD&cXqq1;$''ɓ:܈۟S^+ҰvZ^0vxDT &Df,''/20sL 0!˖-Cnn.\\\0j(̟?n:MD5cDcD 0$"""&DDDD"Hd @""""aHDҥK7n6l[[[8::K.Xt)n߾mDO!" 
/@&aԨQ BYYߏӧXz$"-> *==ZB믿Kkŋ?`Fɔv: vvvF0… QTTk$ШQ#Mw]̙3d̙3T*C~~thذ!6nܨ|L:~~~d_>F7o֯_D RRR H) EPP=g}k̙3bڻw/agg>L3Mо}{Æ Cff:˗vڨW.\ӮR̞=M4-0d\t p>\qL#"bHD{n4lO?Ck5kڵk%K $$qqq6lN݋/G=h"8;;c̘18}NQQvO>={ҥKoܹs=nB޽ѦM#,,L3x饗УG,]mڴ̛7FBƍxbL2IIIxg< [ƢEЬY3;سgJB~cѢEDg+d2DFFV8Ze۷oZƋ/7oj^hܸ1שSGw;viWݺu1iҤJդIt iسg'" PXXк/_5jU '''\|YA:pvvF^^K8W^zpNم  7n 777ٳgU~:YEjڴ)jժ Fv[nŝ;w0rHJD8::N{}{Ri#ȠT ˫ij{0:uh7T`ذa:u*0sLlڴ hڴ#/,@"2~a8x :w\i=___j\p͛7הgee!??&s1ƃ=+ M412:;wںz...۷/0|p7H DdYx jƌk,.]ҥKѧOIP/^ ۷#É'ci=j~M3MRC T*EllN/ u#/͛7|ricȑ8s OTZDD$" ͛1tP4o\I ֭[1fL<GիbÆ 4hV5}tl۶ /^yoj*n-ZSO=h_5޽kϝ;Atر:MH5j6n܈(k׮(../7|S}+n݊޽{ "" n?|GصkV\ LVZaѢE;v,?GÆ ~zرFLLcN:;v`Æ pwwGQ~}M7χ^}UGiヒ&M`ɒ%gϞ0`#/O*ļyyf|puu3<-[jյСC駟"GDdaNkƍ>c,Hii)6mڄ{U,@vv6~l۶ ny$&&X! 0W r<LDDd&x@""""1cHDDD$2LD 0$"""&DDDD"Hd @""""aHDDD$2LD 0$"""&DDDD"Hd @""""aHDDD$2LD Ȉ6TT;;;`Μ9ءUZX,X+WbÆ hѢ9Hr[ڈ6ҥ ХKb˖- 7rtDDDDzWXkkkl۶ +WDz{A^3g߽{7oFϞ=ip$""2?<~X`Zo@T;""""eR tǎ3vDDDD.y7oAaoo5UVF2@픔H$*QUc&n,%ȢE _K.˗صk##"""2&\QQQӧ5cou]z#F+вeK9rĠ """25&~'Xf {=HRMypp0NSMŋk*,""""d c*,oԨ fD"ɰ,I$fRW߹sj©S 2 4J2v(DDDDˤ@x0sL;""""dc|r\x4#EFDDDdL.4hC """h&$s;MѣGqY@-жm[#GDDDDdL.ưaÐ'''@~~>_7 xҤI(,,ӧ\:u o#"""2{&7P._~ASSSѳgO)2]C@DDd~x6@Z kkkrkkkj#DDDDDdYL.֭&Ok׮iʮ^S{F2\|r( ! B'|b̞\ -- Ν;h޼9Í!e00  Cn=z;,""""c2 ௿૯BYY6lnݺ!,, aaa0vDDDDfdbHIIA^^~gK8{,ƌooohѢ?|H$L2ADDDd L~֭yaϞ=4c ЪUjY>)1@(++oX o|r|EEE>|8֬Yggg/ԘL`np!#$$ƍ͛U0ap̝;ZEDDDd L&她8$$պίiii8|^J%JBЈɜիQvm,Xhٲ%&Nm۶!''Ǡɓ[[[手\.׼||| QM ;bHNNFJJ N8ƍԩSYΝ;1x`HRMJD"J4@Q?L( rQM8;;VZ8{߽{wL0'~gܹs={DqqC#"""6Aca*rrr}g}h}B\8::@DDDxj;SRPPpqqpRRԼW(5!j5L.] 
((:qqq嚗O GIDDDx _ǏǞ={~ԯ_: 0O&N[d2dD I&aǎHIIC""""vN'L͛7c׮]ppp7rvvvFzz D"/1c:?E(ܗD!"""&DDDD"Hd @""""aHDDD$2LDF49J JTYQY[IgP?6-1-i6oTYG"mPTO㟼Uswz5K].ϣX9}~>͇]VsS!>{)'`Sk^C=>Z fqVx3,8Pdv OzAiv֘٧9<5= É\y^u5sG jr'<޾rrv2 ˪N,{I9$G/!6tn9'͑Lx$|zheEOm|rIy%efV2+BM?{lFZ*sk~O76Y ѽUX߷?f&֨:[g"5#W/!ĕ?ץ6k_/좚 뉨mSH7<P:;jOowGg)̦=;֕~N^-7zT+mӎcWq166qC:/f⣽5L߽8eV5޿v*mێc6'-o6EjFĈ/D"տe'_|YowuͣQ}Cu$]cmg\>|"X<%<%} Su̩= ៻5\,dnIouh1%1ӣ%5=&&N2K.kܟx~gWC7a;U<['K&A5q=fڳTϝ%Mf(bhj-Puv|h>sg.;xMhwZPU7䥊JfWthTI6qI8}vvW~sh;mI%(s\9sJ_^MMqhnm2\bծIt)EK)5w޿7qy4bsK,U5y:L8}zUN5˓g);me^Ʌ!KM4aKR+sXXQ76=w&"=li5~D뚲wpIbf;C}n?dN~ߥզz̭gIϝyI [,df+VԩRSS8cN߭mVZcE5KĘ[{7]wLJy:ܲe 4n6vhz yҲ}O:zX,C*Zv>yz`#M[ۈŋcرD`` VZڵkcݺu?z"ZJV`gj8gS NSA,oњ Hܮ]V.k5Pc ۷ҙgو0dff~(DXn]HRdeeigeeӳyyV WW*ŸB233Xm#nm[}mo1m+ mM ڷo$ 4.)) 'NpLLUTsttΨpVnm[})l[\nh@ѣ;">>ň4vhDDDDF СCYfƍhӦ u. !"""$ٳg6vԱcGL:|ƎkAR)BCCQsjm[}mm>ܶAWBDDD$FLD 0$"""&DDDD"L!Hpȑ'ZNII fϞFDdƘ( 66 @"2keee5kڷo\{{{tɚ:pssB"@"!;w?<\\\`kk`|wZ*? ** nnn۞={8::Cؼy3 && DDD:YS( `ADD?pssʕ+Ɨ_~/C >}O=Ξ=w}-= ;vsҤI8qbbb0~x޽'NԪ~z5#Gݻزe|eeeضm{9|{""_ÇW8ݻR*<<+&&5O ;;;@UCB&!!!PPPÇJ  "z6m˜1c0h L>JYZL6 iԨ{TZa=A)vggg 5kmR#FsL4 SNEYYbbb@9rX~=nݺwwwmfzx_}Uc3gѬY3L:UQ|rt^^^>"G!u2ԉ'ЦMlܸ#G4v8D$HDddk֬A:u4O&!"n<LDd$wƙ3gzjL8DYYY_~ cDD"aQWX???ڢSNHMM!H^|:դ ܾ};wdGD5b-[ ** 111HKKC֭9::Ws<,$/رc@Z kƺu*G"S".)++ѣG)BxxWTT___jk~a`R*P*jpuuۉ̄ (,,7,/XDxMT*<;wy6mu֡UV(((~OF+'..j^fff|Kg ܹ3:wyOy0gΜ 牎FTT}AA4hL8::V{DDD |||D}E$uօT*EVVVyVV<==Z5ڶm/VZG&A&;::2$""23be'mllо}{$%%ij5zRpI>,E@TTF`t(..Fdd${[WSOQFG}˗/^3f3$CENNf͚7nM6HLL\r+}0vXܸqh߾=8@c5FQpO@P@.cHJ- 5=مpwEGH;TmA=DDD$u>2/-bWǛqYE DDD$uߔRߔS׍=L H *_U^ Tj"aHDDd@:=\/(EjznE&DDD]Xy8@"""rw5h=Ȁ:Knn";dXDZJ*`X> HFzyavkbv oMDDT zyG'B& Q5ZI9a)`""""aHDDD$2LD 0$"""&DDDD"Hd @""""aHDDD$2LDƢ+VԩRSS믿D"A9B""""㳘p˖-BLL кukDDD ;;2220m4tڵ"%"""2.I/^c"22Xjj׮uU:J `DDDDc `YY=pMqJwww5&Ie ͛PT*s*gXv-?zJ%JBx"zUaa!F5k֠nݺz\yTcDDDD"z֭ T,,xzzԿt222пMZԪU ϟG@@|шҼW(LXDhcc#))Is+Z$L8Q~fpIXtiIL&L&3|jE$ѣG#88;vD||< 5jի8"((Hk~''')'zZ@jz. 
K`.ZIQ,&:t(rrr0k,ܸqmڴAbb+WJC%gpTS%EL@ 2bdDDD ;sP( QPPGGGcC5,uߔ@}+GcHDdxUDOJN@S Tj"3ZKUt2s &j}zA)Rs9 Y8MKm2eLCvaԣcaKmlȍRߔ!"dt<Ll ZGApFXj,9`H:KnNIp7KMEԃ˒<"caHV$}L@2"K=[j, 9`Hzyavkr| ԃ˒p^BzyG'4AzvY!"7 J+>?""""\FO7ѩS',Zʕ+1z]m\|7nIJeˌ1zXRRO?!C O=._l興,FaΝ޽{ѳgO@vvhMDDDTΚ5 ӦM:uΝ;ضm[#GGDDDdy>r 9DMyݱdɒGZ֊+[[r IDAT[t ݾ};{{{i_~c\ij===UֱcGZƖ-[UVSNGDDΟ?www...xЬY3Gdd$D!"""2eA^!C}vu :tj>>>4i}]Ѯ];sѫB\.GAA+ t X.k^HJJ‘#G4ӏ=$rWVVG"<<\SfeeplJ% ֋_|;#_|VT*T*zg7oބJVΝ;W|WJ%R)>Sѣqqq+&""""Se@֭[iӦi?Jºuu8~8>y!** )))֏FAA教YZKUt *u$#0E wŹsдiSsAV뵌uB*"++K<++KYYYQF6mٳChhhe2d2^1YS jʼ䶈^A^FH꫸tCak666h߾=0h .IJJĉEVCT>z#HTj.,-:@j%1vXDDx:oJÃ}7 J1~SVh$Ќ=E///L>oˉѣ;">>Ś$rԨQW(J/rJ7, bwI@  zz0zhee3f`ƌjC"''f7ЦM$&&j. r X\\7|ЬY3lڴ C5Lװ`/-ѓIM RskF(xYk;D O-, KJKk>,sWRڴULC-ԫ{ˀ#` `WBAAYM /ê=YMGhˮdֳ]4KGQQ[HJ !Gn8s|ޯ׼^=3󻝮bDYYGzHMWr*J+r:LOU#^>s(..ҥK BzWF$\˪.%Zȩ`%qg"}QIoM9Aĉ8~8BBBNYFUWݯE +jȭ`s+.v'0n@I5mKQp/b{^|7%v5o %UZ=ɛJjXn\[iMҽ(>u?&}z9@6d) $55tةP7Hl1IjJ;O T|i] ˭`c+{/eSbFU2<wIh K9$ 3gѣ@޽n ;y:*ۍQ׈PF1!?쏻*K_djmW@c:^rA%> sOwd]b@,'a]O5o7gc9WXQ/bq#T(nmS+`ORm׽@(@{#8wC}\$Y w{/GT6WQ(㎬;vBs^+y,&yc3#[X9u/^hK@tvM熸RR9 aժUXjd$Aa'mq֠30Rb*4]>^䖭0ïócxvt~e$,'HENQE^*%u~x4KPT ZGL4 W\4ݧG@@RW;%m V;Mƣ^_^$@!v/ihNcX$M 7{!uDY9LCD:T Z,X\|yyyCqq1`# P)2mX1)ӆI ) Cdo|o317!@8uAy;'6WE㬁ZiZc: ,[lƯRRR%bfdI.gu46`vcv/BmCedr~Wig㬂ZiID/P]]b{MM \5cֈRZ :w:>Ϳ쟮Krif_jw7)NJLŇлJ.e5Z_Ys5D [%zǩS`4a4qI⋘:ut4ci8IFUfa<$㩝'񛵇%W\Qg8kS`jp`o\|ȱ B`a'4l9C㸥:͖^n޼JJBTT ^{MFH'Sfkˤت X4Z F}^ucYwD'65։^O?Ņ ?gh4# ( ;!rlՔ&CG`v8)տ8kqhr*P)p Tƞ,c2` 0@4ȱ;3_RY_nwp8XXfp:SĈfLY4NlB^JɌ=%+h>}:֮]buOYܺCyjʭ(k7N2-&r[b$X`$4{gƉMh&ǎäIZl8q";&BFdir٪)Bn]d`Y4^vL ~KdD/Z:]W$ GJ7F٪i ƉMp\8,ģ< gGeeu.s'?~&R! 
+( (4Q:ZlWjʭ N]6^hJ7'X) 9{TEࣁ9{Tg5C{[4NlHqgDtRL6 ?y@ff&>|G"g'>({xu vcj8y:_Ǎt_7gc58rvzg r*PQlLZԁ"fk:yIj3Zʫ]$9;IeL{b͚5ÇСC;=Qe\_?+4Qa@I-2|kR^X7ܡK8inQ`8}eqws6q8:+ʸ.r,3qgDɓ'㫯Bmm-q{*n ???T*DDD ''؝;wbرӧ郘v{=CE㬁mY [u^h5 Aqu /&ݮOhSm#; o&^}UTT4III_gӦM;w.̙@l߾...صkW?"$$C o LWWtfA^)JqPa]B@Ū8}Y^c4E&yc3#[׼5*l{fZXYk$+(s~J3i$644ӈifggdgg z:466ݽ&zt:GwۂOtB};\Q11]X'6wm7fPD -Xgn['%jB7N,yfqb#*uӇw'nm077/BCII(//^V5ۮjƒ%KcVD-99+;Kn3#Mh5P)vc6=,.x6g f8~W?3G{;L%(V* Ni@d#"<$uNӤxa6.nmɩՖ .zfaϞ=fwKJJBUUUea ;4#RҒۀh AHh{h{;bĺx|:3?+bPO Gz`E?Y#ѫg)~%M ?s{//nme`No Xd O.5 RR9JKK Ca_n0m* tu6]Whj8u\0: ;㥵p[v l|2X'~ 6[<['z ƍQSSOOOܼyZի#BCC&p&tDFF{֭ʕ+.%{r%+-9 \xv Lk i$r"Y?lh%ko8qgΜAMM BCC1~N~zz:o <<q9hZ̞=Crr2`ڵXl}DEE5ZZ-lNFAUU\]l~p';m(*V%_gz:aԇXXX~MO|[;Ѻqu<7駟n:!...י3gڵkXlJJJ!ŰcmЀ3f˱b Bn#Gr֖;~^8qD7K,={СC~z VUݍWDDDou󦥥!<<;wDBB6o<1,GƍfeeeaĉGmˬ2 @V"Mwѣ^]] ~hIǏ#)) ...;ׅYϜ9#"""-f\ӦMCtt4j5y8:zG]v+=""""}*j(6Z6+ gIVp%QwVpDDDDԳXDDDD6 aHDDDdcXDDDD6 aHDDDdcXDDDD6 aHDDDdcXDDDD6FV֭[JwaB@jjjfJDDD$HHHˑ`Ƣ:}111l3#""">b'` jf۵Z-Ν;gS__:bMDDDSdShbDDDDi( RR˗-DDDD=E#BCCټ`0 33;NNNpuu5{I,@BBps̞=Crr2#r V1`ɦ9s&]e˖!!!hR\\ ;_<1 aDGGѣ=>QQFIHNFAUU$2HDDDD±$"""1,l @""""ư$"""1,l @""""ư$"""1,l @""""؋u]U]#~v~ A초JϯVBx8#OY+vKߝ~~pFDDD$,%7QUCT9oX4l @t֢qDDDd;XJR8"""$*~W20UuK/(@5%l\ZyU]#~8 J۟b-Ԏvr8swn {f!3wvCih[8vRn`_|bՎƯD|nϰ<ؿ Rͭx9߸ ^Jk^ՆVGL_./7Uq z<6cvTJdا[è=z9_ng7ST_n!;{]}1 !~nHZ0Y)NF*lKwrqUe3+bͶE%gJ-2GVO2ۖ|o+)#HLn~(E'bF]sylې|[ cZ߯b{ͭxtC&Xc9޾#W!kwA0Tt/LkMok!.[* iG}!C@Raذa8p@eub`1[1|ŗK$^|7WC~!4`_~%]-?K%[F.l-KI@4jA@se)Xm/ɦLOOGBB/_<#66eeSO=?o8p&~)Xm\ޠ7JM46Ȣ3Llvdr5 X)>K[}7 (l 8/<} 4Qיys' -;idQFa˖-___KHLLl?sL?o6zh`MȣWji^GaKESv(}ΒߵגI=EX^^^Vk]ܹsNIII%%%mdm&"".@7zAj׬F K, djh0PQQ(-ȰN___\|f ^|F-ɢ۷/J%JKKͶ˫T<899|-7[Օ'n>|oc ﭭb#BCCټ`0 33NddY<>aaaGjj*jkk1gٳѯ_?$''.\hlܸ'OFZZkرC """v+V; K V^ 6{= <kqqq&n :ׯGJJ JKK[o!**J}hRĸq`o/j>|oY@"""" aHDDDdcXVn֭JBDDrrrNI򒓓1j(NKRRRP(h"S+Wg1l0|b%yzK.?+Wbرc2e |||P(w^F,[ pvvFLL ~M,Xzz:|r!88(+{PVV͛'Ohllc=ZS\>|ةƍ7|(,,ƍѧOSkb۶mزe {]֭믿.vjT[[`lݺ՟[7oq) uVgj QeMw7K/Dk׮YYYxNGjjj0rHUnILLW_}ǏxwEL Ӽh^y,^PUUV~f3]@+ЀӧO#&&ybb 
xIDATb-bfSUUpww97o&OlvR۷aaax'#F`Νb% cƌAff&.\oq L8Q䧨%%%fF~ .mˡjͶkZ;wN`0`ѢEBPPBZZ򐛛+v*sEl۶ xW b'it2dJ%z=V^~ZdZ~3 @iCAAN8!v*pe,\J;1  Ú5k#F@AAo>C{xC!??-[%v[}BTl{ii)DJ^ϟ?G/v:piaȑ=yfC׋y{{#00lСCQ\\,RF? 5k g}/2NMvLa~ @+Pdff6o3 DddIhg>|b$Ǐٳg O?|(JSK]phl;5I:rHxh4 ҥKZd?~&mc @""""ư$"""1,l @""""ư$"""1,f=z bBDԣX,)v+V1cpUh4%"Qɒe˖j5j-D$K^^^FBaMV~? f̘:;C>}`U__ŋ_~ի"""pQcb'@DdMyfӦM~;x"O(̜90|"-- >>>سg&Lgb"QK,؈m۶! 0c Dii)j5G̙3Q\\ݻw>>>ŋ###wƚ5k"V$"KsZ~~~fZ-gϞ^ǠA^=4Q'$"sB6RӧT*8Ʉ @".1bz=0vX!"`РAx1{l|'(**BNN~#"j @".ڽ{7fϞW^yF\\rssѿS#"j&"""1l$"""1,l @""""ư$"""1,l @""""ư$"""1,l @""""?|,DΐIENDB`python-diskcache-5.4.0/docs/_static/traditional-caching.png000066400000000000000000000702661416346170000237620ustar00rootroot00000000000000PNG  IHDR5sBIT|d pHYsaa?i9tEXtSoftwarematplotlib version 3.0.3, http://matplotlib.org/ IDATxyxo/t.-PQ@*"轠ثTPJQ,qAPم+ zYdU~,EPJ Ķ&3L&{239sAQDDDD^Z@DDDDHg @""""aHDDD3Lt 0$"""&DDDD:lݺu0  ƍzqrA ,]TP*4`ԨQa00e7DED"aH$0oZ ш'b˖-ZTO?[nVNdffh4jQ|n… m/Xׯ/I& njh4"##h׮ͺ222bm۶ zd Apwvk۷cժUK.W31$c=f,ȥKI:|||#Vu~!((;wD =x0| #S.g%`"[b^z%!88& Νѣq뭷"88aaaի~2sqرcq2S --zi7>/_Ƅ ???$&&"==gƦM4hРL>Gʨ̙3wy&CZZ=zf͚GÆ 1e2ݺu+wpy5kVN<^zZj/l~7n N<{ aaaSO=b/,,İaǏgB" uMDU6~x_Daa!q!Y?0_aر#~7DGG ХKdggcԨQ̸8;xп ТE _3h ,[  @vuVL4 ;>Su8GyO=q̙3Drr59S[FVZPG11faHKKCaa!^}Uz_}|Aԭ[GFLL ~W|Wxg땔{AǎobݺuDÆ Ri<}&;wĜ9sPfMK>(VZ$''cÆ ۷y,1|pڵkJƍb.]feR|}}SZM2EZʺ@W(۷o.߿r-Xgee)2K?PFaވ#ʶm۬bbbرúԩS+8\(O?L7 TZnmwY6d%44T|(RRRԪUKiذo_`lVmZ]twK/)aÆټ=ZjYoݺU7fTHL3)))Yvc?+W󈌌Dbb"k]o͚5HHH@޽˪U'x©1Yj^|6[hVZYתU a]j\r.]BHHZ_PPoFGر'RSSji>6ϷkΦ<36۷oӧOd2e fȑ#U?y6&D:XfYii)Nz5j@TT~w[;q6lXrSc#G/3X/}-Z!Cѣ6m={Ĕ)Sdlܸњbp"r&D'd$''om M0b̘1!!!0a111Xz5ƌ_~xq]wQ <6n܈I&7pƋߵK.Ř1cl2,_ݻwǢEдiS0B$8DDҏ?6m`x衇nQ.]Tfo oR!%`""*׫C0 믱~z>?8ƍW؇D٘  @""""aHDDD3Lt 0$"""&DDDD:\ѣxQ^= 44m۶o]cDD> 믿?chڴ)JJJe;+>C$"-BDNu1~]6;ԬY#G믿ƨQ4jTf((..F``Q9ȩN/?.@ ߕ+Wꫯ~GBB^~eL&$$$W^زe ZjԫW ,(yyy=z4ڵkc̛7Ǐy͛a0yfN:iӦسg:t耠 61}7HNNF`` >k-Z-["000`l>:w ԪU SN-SbL85B@@j֬~ѣP ӧO O?]9"/&DTWFzpwWO> &;[o;"33 (#G{iu/}xwq3<ԩS7UϣGh޼9f̘Ν;[;tys=xѼysɓ1x`4lӧO?7C˳y 
.Cf0m44n/֮]k]zBFFZliӦaԨQa0caڵ͵yիWh4""'W(}tݟY<6njP;x?X={VW^x &(+V<٬(|' رc6oڴIlڴɺcǎ eeӺul?~\V&Ol_~Q|||l[e&IUz!벹s*ӧWXC)Yf<( EQHDNc4!!!f@jj^xIn[GEE[na]矣Yfx|`PY [HII)Dtfي+`6999ֿX4l6mYZj6s~~~hժUrըQ#G\5B֭xbsXv-x'"91$" T' ,Exx8N8anݺe#"".\>>z(6mz3WVZ+2> EQаaCDEE8pgϞYveu- ֭[g}˗/cРAJDa`iBCC~ږ)oor+dP畖]=g6a0vrcVcg  ѣGcx嗱h"$''[nqHnLȩz?۷oG6m*\/>>fF&M˳x?~&Pf8ެCQ$&&QFN{;vpH/uV̘1)1\x _Dpp0|Idggyѣxq@e={:=ۇ+WyҢV~}?`}iS(ӊ( Ο?{>C{W湿ƠAoaر.j""Sկ_K,AѤI@mۆ> ?8F!C?D^^:v숝;wb۷p+j;˗/?_B˖-UVah֬nVu]HKKCnn."##tR\riאǏo߾ cǰrJ3N)Ʌ 9<Ûo/f͂?nvL6 C|GW͛+W"66iiiHOOϭV!==+W]v/^~SLAxx8x tsS?n84jo222uދxƚ5k0yd,Y9WvnYϛ?B H2G3gΔ;mI-C=䏈*KDD8{,6l؀˗Hh;y>DDDD:>DDDD:Hg oٌӧO#$$DDDD-EQPPP8xy- `>}u: """ YYY6 * pu 8"""Rh4N:1eP&DDDs-}^&"""1&DDDD:Hg @""""aHDDD3LtF~@޽/yEQ0aԬY֭>QDDDD#mXXXf͚a̙>?uT;={6v؁`tnȽ GѣG)3f7X`bbb_` ȭM9vΜ9nݺYuؾ};@78uE%vשQl:A pSDUw E%v׉揈`?7ETuj);q+fą8U~Vn.]x;y{PF0Sy]Fv+L>ި[=MU:/&4aDΜ9Yc}<& &h4&@}^l_xh] QU͙btxsJ*9{ϞinÃoY#eqc=968截mҪ0,4f: /('ݭQ-GMӿE(9c\sXٵagW;>AUY|`lөԶzF1!X|rkw8uRWHlObb"bccqF2ш;vM668E6p*aI,dddvU~>78ױt4˸$ m ŋqcǎFdd$֭6lD?qqq۷Qy%kĸo@5[R\Q6(IZrF=k(H޽;w>\2d͛_|xꩧvaݺu>?Pq޸%:1V_2Q*=YZqjJJj iN: &MI&1*I\E˘*9DTU\SXy 2ċZrr/kO}2^[k.SCU`uVZQv뭴o K> iFwɒ`ŕʮ,;p(-G-P #U(Fe}[WJl'V\K*vWp@z !vzMDƢ$K drCٯ7Uvj5nR? 
Dꫯ s Dgj~qK$%x"V"r/oYfa[{n ,, =I͑I{vyp[@Т)'2(LBb6g?nm۶O>ٳ' !!~)vܩqd!Zg;d*߈V˔,cNˋ-&UɒH|t{ ƍۇ-[Gd2h4/t3PXU˸?h\Û@w=I'Qpܸq0hܸ1QZZɓ'c&33nDPzRWӥ8KPMW 0QF$ݶŋdݻ̟?פ!??ƈ#X]~i;YT&WߙY:2m?Q]TCTm;v,ƍn68q2dH;Ô#ËFԸoʓx@H7I|}e;p!Qn[e[|ooo ^AZސHr)SI9N-{ɓQn]z駟0}t_:4ݐ?~X0#*9y4Ѱ nw}ǏǰapY駟Ƅ Mz2V2BM.BbA^Ѧ#6 30c C-Cn IDATIVC3eu7U*GƗ@YS;] DYt Jq2WIBezK.&Ɥ (6]%eʈV\n0෧n<].Ygk& ߒǐr#m1$ Or%NHwʔY8R"Q~v)Ht y4akK\F⊶?!aTvy {ۉʣy\n:lٲx̙h޼9}Q\pAdM4_2î1"@iY87RԊQ(~T%cǎh/xpرcHMM8:r*mGCLIˤgy&IPHcKrlH˸$"!ڱ$D)c'JUV駟@Գx{d,zEC-ڷe&<0 6 /N:-["88o]YD:%DPDVݠG-JD{92EQ`0PZZUhDRs`b^EnvqHIcǎiD!Z7We`v6 t Dzq `||!96$X5AQyX{:Bմ R5D2RL<&Xp!ڶm88q0c |GF$Ycwĉ-1b{h;T͠.KLLtSTM$a2KHvBԤNDKdP<011?s֭C&M9VBrr2 ::-Z9sT1QoLaUbR$:ѶZ2xtq->|8( v܉O?裏9񇵿/]v瞃 RkL&4uFidQ2D)C7 R&@'SLzq O>@FQQ}Q߶ fx-Z1{ Lddd8-QB@b՜n&)rTËZi]fgK0p@>|/^ę3gp)<N5k"))fY&Mp _|_VVSc .-Roԩ!$~*Zfz+h<.O Bttرc9m۶šClvGhh< _ 2mFu&!7mV%>,֮][fѣh"}ѣ?_Ǒ#Gd|>|>á+n"XԸQu@Q(Qx\xb<#زeuȑ#6mryXr%>S4m*f̘:3|2WmYaĂl"˼d<"qyM ={ᅬxׯ/6mBFYzB^ysX cu5ɒX%c'ɔH4ՒDr}Qm۶ hQd>0hbwKc,' HSSS];un+CA(G" mDTuO.oРFy$6GĔ)oR{M7Ak쫧rxD̛;ɜ,6d3<,[2n$;DBs|2|||~C!7H J"II oI`Iw<*EݺuQZZu(D7EƳaUS='=GZi( qG%+_Fnn֡prER73QS=#[yd#yD{8r`ݫQdl;×%Q,Xiy$K$LaR<.۷! ,`~/kDgXjjd;q `zz!ITN˼ F9mky\hg8pp뭷EGD&S%hu䲍iM5(ھ8~bm$1N&KϞ=`sXt)4baIToʚXJ]OCOmt#y2񸻀GEnn.ш{NM}XqhOLEu x\ ua4iĺ,)) 3gĽޫadDS5dp;)$c,H%^r;k4-fY!Ϡɢ8B$"ޝڢG.]`ԨQ8}uٟѣGk׮FFT1$G /[yndFkV&%F#P~}ԯ_0xw D eBw0CCZ\?<cǐ:u`޽ذa<hҤ uql2UZR:(e;C P@oIƓ)$Iׯxt]tANp=hCV|T x$ߚ.^yzL~wؼy36oތO?%%%WtΝ;sΈ:LrZdIfJ.\#<`N0qDl޼.\#x[oe?e <. 
rhU> _#@Z^ъ$xz1-7 @.]Ю];tk׮|`lv|o%Ocb=+m)ˤٲqLM;5DNz1-PRR~ܹ33… xp1ŋ1p@̙3NK;fz#T}OɣyL `.]c$&&cǎx駱dԬYӥ;|pݺuk" \r2)Pբ<&5kZر#W\t)݋]vZd2d2YFW&5qY%J"ce,HdL/'<\Ç~ m݆#F`8wS?/++ Fŋ5 թSǩ1ĬeW;M3&j#DhѼ7ÖZ1 `pp0>L2;v@NNN L:kFӦMy{ٳgqw>>>;e^|_VV S;(Er(ɡ8)liL1(x% Fdd$"##8piߵkW/6RRRиqcK.;-gm vɹ 1"فXܭ-I"f޽7oƦMuVVZܹ3fΜΝ;;BBBʴ(zNmie.`uS=[v)N a=1tLw˺u$(,,Dll,:wz :uB4 XnҔ(ށ3|Ժ1"7|;wFF4a͚}6OdIdL<kKM+ $LkxLOk#Ù"V81 髮m/V1wG㕌gâ${4eFtA~K[VLHR`HiMbE]u2W$KDEfV @lE kAOP-Q5A0\B2ɀ y$~Vpm'GMn I(ɯ$!s &D=‘h8لeT4ۋ1$ruc a xI=n'yuv:(H*L#Y%*CQ"&l"5kJ2a#&v2V .y[W"9=XS?rlLI3j:vN S29B"ˑE(ύD4m'`HD7MÙIoeZNOGVFŭ +\VjLeѱ]gbHf$K7EԂm%U*!a<2F5(ɱ0)S+B7G})0$rQsCUCde)Y]Nݠui$ЁHB9@" \Z.>wse@"'$XԸo-j"%߯K(vTֹiPS\ Tu>l$i;0$r&I⸑LC@$RraD20#I܂`HX%'v8ɒ a΋tfff;DHHѷo_:tHH@ ebQwY hDm\-ULN~q;D Çqe{(,,:4u@5DipLE%vkYem$>Zu<7ogtAHD-iKHY&rTM"%&@ 7iZIv:|@dddϛL&L&c薸d8U3m' @ =WtKY}*"uٿ72xѶm[4mڴu233fS$Oo2m'rZkkujZ2ȥHۈÇcXti뤥!?? La6^ɲ @:BjXS$0#F૯?ڵkW?~viQBԸoA#emkkz%Ax'D7I (9r$V\͛7#11QoDUr}NkRkDJId N*FAK\oPu8|p,Y_~%BBBp@XX+y5^Ф==n%Y,fv2 1 otp֬YGNPfM߲e˴nW5iqLe2HBzp_Ylt(c/uN;t4첬}fyZ@k[wD\*jxQꍌwcz AԺnm#=bHWqZ#a=qoO>ԍ*PHhLɣRZ*muw Rk_W2UDu;s.`y$;0$V"ud;**Z(!X9"yy@{dLuś@Mbķ"oj< [ÙU\Fxr碝UD&U,GB#D(!: D=>>9s&֭[cΝZdKM l(g2c+k:t.[ HOO޽{ѬY3tgϞ:4+k ulG2] DPU˒IfPK>jU>+q.UR5}9Ngu8}t :)))HJJٳsj!Tꑄ!f<ӂݻ@ȓv.ٳiiie^^^֭o߮adWTJױ$?eꐪ짓yQ2˪Lr )+u,砠#3ŕtLێrٵUWS%\YpaUR_tsօ"ԢnZG\pn"&&fyLL h4,Ka6TTXx__77}999& IDATѽ{w>|kz) ۣ'N_7n\V_7x`رG.[x1ԩ;:e7|~@ȑ#PǏGTT_zz:ٳ6[㈈ 5mgXx1 ??_}h(9UŢE㏣o߾;v,Lf1cн{riРcoorSš#""ЫW/,^&La2c9>DDbHDB[|9ի+VشYZ,*jQWݺusJL߿LwF>}k.,^-Z8*KD$4Kk܍o;vm ,FN௿* R{!!!,3[ {5j7?"r ΝkBF:uŠ+gϞ8vfϞ$\xѺ^`` l24jhڴ)6m3g]v0tPԫWؾ};N:}9khh(z-<䓸;裏"""CQQϟo] {ooo<#7%`N</>7|$,Z}Yi>#9GFII ѴiS$%%aypyDGGE0aMO ::SL ___4nG.{k׮YM}# ȩۇ͛c4h ̙jժYg&!"r5^&"իo?Ĉ#uHDLDdgg{Xp!BBBtBK3gDBBкukܹu͛`lj׉ȝ?K./`GDn%Ml2"=={Efн{2S8(442'̤IOC"%%III={60w _c0kqcDDDDڐ&ٳiiie^^^֭[ntEl6;믿nw &d}l6իsv"""A(K0H䠴L ^LL cϪ?d2d2Yƛ HCR: œ9sPF կDXXN:.5hQm<;;e?z(?޽{[f:y]ZZRSSF#@""" Zl7Zr1ظq#FQfƍ_~Yx+L bȐ!HNNFV0c "%%0x`ԪU @ӦMm^eF8w&L3gΠyXnƐ'OK]lEQBTFaaaGhh <~.`""""=cHDDD3Lt 0$"""&DDDD:Hg @""""aHDDD3Lb(5+y,g V2h0$Y/d [ 
@z$״ /[]&3xv^F@rRտA)9˲տ\DDDlLv-w#_y,}A@r'7U @rGDDDUyx%Y8q3f~aTL#Q3, bջ[%F3,"""<ӧ,XC֭1m4f8:ro/{'@$8w$""r{}˗#&&'N ;h9}Mkbcw 62olXf=v$""r#.**BHHoE~养 'N8:r=I t3 '<lР CEJJ `1w\7:uyjժooo{*\?33bdɛM*YXf@:_\nkWA HΝ;cƌ&T̝;ץvɓ͛7W~ZZYYY.ONǾOdi^rVl6z5j6˳[뼼РA4o/~adffV?BCCmrVMZJF<'ѣGѪU+;0e C˖-qF`6qF1Bu,f٦9"{_XRirB좽06-"P꾦5ѥq n?E  6 ѼtN?bcc1m4ա j֬c^P>2dѪU+̘1$rU/33ɨ_>L&֬Y b֬Y/$"8Ҋ'T*&v"izyy_ċ/hf.ΝÄ p4o֭rIq 1l0:u hܸ1-Z;`Tl!;yd;o#OfP=ohDXXUTVr_\8g+5+hw nyEo ۻa 9BoxNQXSQzP;^@4y9ַS3MHQ E!R3/$ᢀ3/$!Y~g,6}qkOjR>Gu3> >ο(|78>5rSOIIIҥ 4\Au1qe2Ee|2:H:tDXXPZWņjVBKT' 1^|Exp@@HNƊk[2#/d"|2wk0-- pPX @c$ldr` AѱcG)!P dc={[l-[pl۶ :NtxdF;m^V3K2j.kU{n1ك:&KʐSp9B(mj&<ӽ{w?ǏGEC6d͈Y=$҇CL.]26PIƮz^Ex8yd,Y婩xDD&cE(@ ҶKsɒ㓱^Ƌy58pcƌcѣqX`m=e8U/㹧fƦ{qqq`;GB4)  vvǩkӯlUq:Je;" Ih4H2rvG3: Wg'<}P4͗s:Od}=kfhG읰ݼ.|ɸMZ' Vu`eVubk[9u O'L󟨭LFAaa!^yL<٪ut Zˋ.[ )))ѿ&˺N;!}P}PTuDŽHw#ilo]8Zq_Gv'Eqx83sT4O 3Õu6M@91sxǩ7o9TWG >1HQyյKM:y8eyj1>YVV)Sرc(//?._H޽Yp\`23g"!!IMMŢEgkq:eeem 2&3u~f-tkcC}W:in'JM˪jwssMܱpEkH[x!^W@E%k$Äuj<\0a2qھV `C㨨C322z HKK͛qizƢk׮HNN,YsŇ~xyy˺qTq2 j'#u珈چ @o/q~?ĉrJY\rsEQQ°gϞ!prZf jjj0e̛7ϷFڀIWNy%98zh<xW'N޽{cҥxg*=^A)o@-y#>>+V͛EGDDD$-a k,ѣ:ͦY!"""R3a ^GAAo˳[^^e#"""rD1c !!Dbb"<==1|<?~!!!#"""Q ,I0rHxyyރk7l؀GyDTxDDDD>`YYZN{UxyyY$;ut:];vT{n @""""aHDDD2LT 0$"""R&DDDD*He @""""aHDDD2LT 0$"""R&DDDD*#Uzjiw}ɓ'#((iiivHi c޼yÀ0j(4X sRRRghđ&\|9yL>Xv-<==aÆ:K.ԩSfhđ"Ann.999!::##"""r<΢RFzzOߩFuuuw`uًw%99: :$""""ԩZ--tGbb"?/^ٺEFfff2ɄLDFF︹CDDD4R<񈋋Ð!C4TVVbXtn9uT/]AIIDAT|xyy{¶Iʕ+;w.={ ),,ӯ7<g 8e˰l29wDDDDv1fA(`NCYY$ @""""aHDDD2LT 0$"""R&DDDD*He @""""aHDDD2΢֫y /d|k7/wZ"""j PLQ9 yc##"""G.`}[kU)@yPq"""""`PslZԃ B}{̦刈H=*ֺCgm9"""Rf u]6-GDDDPVٳTfe.EP®64q[1z(iZf{?7y"fo=a4&Ö"F2^Z|6g$vh7jsɫvkŞٍoՊ<%5v  Z'Me*ny/ =Qh9ݼy-d_@ՈbEu;O7:Q-dſsd ;goQcONpuxsǟZOc6! t:۾'iSXU`׿Vݝq|(eEo⾔LA_0bG.-EzZ .cQ5Ma7jE:`մAxo+ +EzRZ|}w1@C$IPDz9Jl[~; W^ #""999M裏ЫW/_~Oi Yt6cy VwnE'pքI?1_%Rt4MJMx<9y~YP.E',z1.ܫs?StXL8d/ I322y!// QPRI_O?o'NĉqI;G O: 7oኡvFIt86qք71d0H2!wьKWoHU!˸M},6q+pB!ͨ/ 7oʦ_SͼZN. 
:VL&GBBcbbPYY;w/kZ7EB3\59z]rPj9R_iE})**j|QQ-߱< DMDDDҥ~t_HKbb]CɄWMS/؂`@@@.^[mp߶۶}۶ef_X Iv Zˋ<r#?uy{{Bj#ܷmp߶۶WwH1 FfSL&dff"22߉({m<,񈋋Ð!C4TVVbXtٳgcȑx70vXرcXn """js->>>Xh-[شiz x7쌉'=pwXt)RRRP\\wyQQQ¶)Z<mm;ܷmmq:i$""""H Y 0$"""R&DDDD*^AAApwwGDDrrrDx:t(ڷoΝ;cĉ8s谤F9sE.]O<___xxx_~8vh4")) @HH,XwQF۷[l6cܹҥ <<<s V: c޼yÀ0j(MѲ0c >|{Emm-yTVVM*G[oE׮]CTT\\\{n:u o:t :4[d ֬YUVǒ%K+WMq*++1`^bŊXv-9vaԨQy#U7N"""0tPZ GBBqtYYY1bpPQQA7.\0U&!!_}<(:7zNɓ'|ȔM`۶msf_K/(++^ǻヒS WUxA 77˜lɧ бcGcƌ;vKc 2=:wbÒ°aÐg[:tG\ PTTdQ7t:DDDm3NJKKa4-z>}ZPT1L3gзo_H!==yyy8zP?b͚5ǫGb֬YpuuE\\-!!zVhĢE0m4ѡIl~FTmƌ8y$:$:)\xg޽{.:L& 2/ 8'Oڵk͛i&|ӧ1gsߒ:uVb\fΜ;wb߾}֭p 4pvvFVVVXgggF!*Z.]jw(,,<^~e$$$`ԩׯ|I HNNT/m1tPco߾N)[x]v36 /`ذaXx1 ''֭úuDxǏǢE>}oMthSQQ~{AAѱcGbΜ9Xp!^#)) #NV\i 4͇hqFѡIiȑٳgC|o߾f777s^֭ y@{1kjѡ)ξ}cfl2IIIf^ovss3?C3gΈ Z8 @""""aHDDD2LT 0$"""R&DDDD*Hej߿ׯ_ ]1$")i4&?ǰapet:BDR{3ddd`ܹ8sL2///xyyH8$")t:42//;}]`Νٳ'<==1eTUUCPP:tYfh4jKڵ+ڵk߿_Ж5YtDD +V@zz:1i$>S㏘Give Support

If you or your organization uses DiskCache, consider financial support:

Give to Python DiskCache

python-diskcache-5.4.0/docs/api.rst000066400000000000000000000061371416346170000172210ustar00rootroot00000000000000.. automodule:: diskcache .. contents:: :local: Cache ----- Read the :ref:`Cache tutorial ` for example usage. .. autoclass:: diskcache.Cache :members: :special-members: :exclude-members: __weakref__ FanoutCache ----------- Read the :ref:`FanoutCache tutorial ` for example usage. .. autoclass:: diskcache.FanoutCache :members: :special-members: :exclude-members: __weakref__ DjangoCache ----------- Read the :ref:`DjangoCache tutorial ` for example usage. .. autoclass:: diskcache.DjangoCache :members: :special-members: Deque ----- .. autoclass:: diskcache.Deque :members: :special-members: :exclude-members: __weakref__ Index ----- .. autoclass:: diskcache.Index :members: :special-members: :exclude-members: __weakref__ Recipes ------- .. autoclass:: diskcache.Averager :members: .. autoclass:: diskcache.Lock :members: .. autoclass:: diskcache.RLock :members: .. autoclass:: diskcache.BoundedSemaphore :members: .. autodecorator:: diskcache.throttle .. autodecorator:: diskcache.barrier .. autodecorator:: diskcache.memoize_stampede .. _constants: Constants --------- Read the :ref:`Settings tutorial ` for details. .. data:: diskcache.DEFAULT_SETTINGS * `statistics` (int) default 0 - disabled when 0, enabled when 1. * `tag_index` (int) default 0 - disabled when 0, enabled when 1. * `eviction_policy` (str) default "least-recently-stored" - any of the keys in `EVICTION_POLICY` as described below. * `size_limit` (int, in bytes) default one gigabyte - approximate size limit of cache. * `cull_limit` (int) default ten - maximum number of items culled during `set` or `add` operations. * `sqlite_auto_vacuum` (int) default 1, "FULL" - SQLite auto vacuum pragma. * `sqlite_cache_size` (int, in pages) default 8,192 - SQLite cache size pragma. * `sqlite_journal_mode` (str) default "wal" - SQLite journal mode pragma. 
* `sqlite_mmap_size` (int, in bytes) default 64 megabytes - SQLite mmap size pragma. * `sqlite_synchronous` (int) default 1, "NORMAL" - SQLite synchronous pragma. * `disk_min_file_size` (int, in bytes) default 32 kilobytes - values with greater size are stored in files. * `disk_pickle_protocol` (int) default highest Pickle protocol - the Pickle protocol to use for data types that are not natively supported. .. data:: diskcache.EVICTION_POLICY * `least-recently-stored` (default) - evict least recently stored keys first. * `least-recently-used` - evict least recently used keys first. * `least-frequently-used` - evict least frequently used keys first. * `none` - never evict keys. Disk ---- Read the :ref:`Disk tutorial ` for details. .. autoclass:: diskcache.Disk :members: :special-members: :exclude-members: __weakref__ JSONDisk -------- Read the :ref:`Disk tutorial ` for details. .. autoclass:: diskcache.JSONDisk :members: :special-members: :exclude-members: __weakref__ Timeout ------- .. autoexception:: diskcache.Timeout python-diskcache-5.4.0/docs/cache-benchmarks.rst000066400000000000000000000307711416346170000216270ustar00rootroot00000000000000DiskCache Cache Benchmarks ========================== Accurately measuring performance is a difficult task. The benchmarks on this page are synthetic in the sense that they were designed to stress getting, setting, and deleting items repeatedly. Measurements in production systems are much harder to reproduce reliably. So take the following data with a `grain of salt`_. A stated feature of :doc:`DiskCache ` is performance so we would be remiss not to produce this page with comparisons. The source for all benchmarks can be found under the "tests" directory in the source code repository. Measurements are reported by percentile: median, 90th percentile, 99th percentile, and maximum along with total time and miss rate. The average is not reported as its less useful in response-time scenarios. 
Each process in the benchmark executes 100,000 operations with ten times as many sets as deletes and ten times as many gets as sets. Each comparison includes `Memcached`_ and `Redis`_ with default client and server settings. Note that these backends work differently as they communicate over the localhost network. They also require a server process running and maintained. All keys and values are short byte strings to reduce the network impact. .. _`grain of salt`: https://en.wikipedia.org/wiki/Grain_of_salt .. _`Memcached`: http://memcached.org/ .. _`Redis`: http://redis.io/ Single Access ------------- The single access workload starts one worker process which performs all operations. No concurrent cache access occurs. Get ... .. image:: _static/core-p1-get.png Above displays cache access latency at three percentiles. Notice the performance of :doc:`DiskCache ` is faster than highly optimized memory-backed server solutions. Set ... .. image:: _static/core-p1-set.png Above displays cache store latency at three percentiles. The cost of writing to disk is higher but still sub-millisecond. All data in :doc:`DiskCache ` is persistent. Delete ...... .. image:: _static/core-p1-delete.png Above displays cache delete latency at three percentiles. As above, deletes require disk writes but latency is still sub-millisecond. Timing Data ........... Not all data is easily displayed in the graphs above. Miss rate, maximum latency and total latency is recorded below.
========= ========= ========= ========= ========= ========= ========= ========= Timings for diskcache.Cache ------------------------------------------------------------------------------- Action Count Miss Median P90 P99 Max Total ========= ========= ========= ========= ========= ========= ========= ========= get 88966 9705 12.159us 17.166us 28.849us 174.999us 1.206s set 9021 0 68.903us 93.937us 188.112us 10.297ms 875.907ms delete 1012 104 47.207us 66.042us 128.031us 7.160ms 89.599ms Total 98999 2.171s ========= ========= ========= ========= ========= ========= ========= ========= The generated workload includes a ~1% cache miss rate. All items were stored with no expiry. The miss rate is due entirely to gets after deletes. ========= ========= ========= ========= ========= ========= ========= ========= Timings for diskcache.FanoutCache(shards=4, timeout=1.0) ------------------------------------------------------------------------------- Action Count Miss Median P90 P99 Max Total ========= ========= ========= ========= ========= ========= ========= ========= get 88966 9705 15.020us 20.027us 33.855us 437.021us 1.425s set 9021 0 71.049us 100.136us 203.133us 9.186ms 892.262ms delete 1012 104 48.161us 69.141us 129.952us 5.216ms 87.294ms Total 98999 2.405s ========= ========= ========= ========= ========= ========= ========= ========= The high maximum store latency is likely an artifact of disk/OS interactions. 
========= ========= ========= ========= ========= ========= ========= ========= Timings for diskcache.FanoutCache(shards=8, timeout=0.010) ------------------------------------------------------------------------------- Action Count Miss Median P90 P99 Max Total ========= ========= ========= ========= ========= ========= ========= ========= get 88966 9705 15.020us 20.027us 34.094us 627.995us 1.420s set 9021 0 72.956us 100.851us 203.133us 9.623ms 927.824ms delete 1012 104 50.783us 72.002us 132.084us 8.396ms 78.898ms Total 98999 2.426s ========= ========= ========= ========= ========= ========= ========= ========= Notice the low overhead of the :class:`FanoutCache `. Increasing the number of shards from four to eight has a negligible impact on performance. ========= ========= ========= ========= ========= ========= ========= ========= Timings for pylibmc.Client ------------------------------------------------------------------------------- Action Count Miss Median P90 P99 Max Total ========= ========= ========= ========= ========= ========= ========= ========= get 88966 9705 25.988us 29.802us 41.008us 139.952us 2.388s set 9021 0 27.895us 30.994us 40.054us 97.990us 254.248ms delete 1012 104 25.988us 29.087us 38.147us 89.169us 27.159ms Total 98999 2.669s ========= ========= ========= ========= ========= ========= ========= ========= Memcached performance is low latency and stable. 
========= ========= ========= ========= ========= ========= ========= ========= Timings for redis.StrictRedis ------------------------------------------------------------------------------- Action Count Miss Median P90 P99 Max Total ========= ========= ========= ========= ========= ========= ========= ========= get 88966 9705 44.107us 54.121us 73.910us 204.086us 4.125s set 9021 0 45.061us 56.028us 75.102us 237.942us 427.197ms delete 1012 104 44.107us 54.836us 72.002us 126.839us 46.771ms Total 98999 4.599s ========= ========= ========= ========= ========= ========= ========= ========= Redis performance is roughly half that of Memcached. :doc:`DiskCache ` performs better than Redis for get operations through the Max percentile. Concurrent Access ----------------- The concurrent access workload starts eight worker processes each with different and interleaved operations. None of these benchmarks saturated all the processors. Get ... .. image:: _static/core-p8-get.png Under heavy load, :doc:`DiskCache ` gets are low latency. At the 90th percentile, they are less than half the latency of Memcached. Set ... .. image:: _static/core-p8-set.png Stores are much slower under load and benefit greatly from sharding. Not displayed are latencies in excess of five milliseconds. With one shard allocated per worker, latency is within a magnitude of memory-backed server solutions. Delete ...... .. image:: _static/core-p8-delete.png Again deletes require writes to disk. Only the :class:`FanoutCache ` performs well with one shard allocated per worker. Timing Data ........... Not all data is easily displayed in the graphs above. Miss rate, maximum latency and total latency is recorded below. 
========= ========= ========= ========= ========= ========= ========= ========= Timings for diskcache.Cache ------------------------------------------------------------------------------- Action Count Miss Median P90 P99 Max Total ========= ========= ========= ========= ========= ========= ========= ========= get 712546 71214 15.974us 23.127us 40.054us 4.953ms 12.349s set 71530 0 94.891us 1.328ms 21.307ms 1.846s 131.728s delete 7916 807 65.088us 1.278ms 19.610ms 1.244s 13.811s Total 791992 157.888s ========= ========= ========= ========= ========= ========= ========= ========= Notice the unacceptably high maximum store and delete latency. Without sharding, cache writers block each other. By default :class:`Cache ` objects raise a timeout error after sixty seconds. ========= ========= ========= ========= ========= ========= ========= ========= Timings for diskcache.FanoutCache(shards=4, timeout=1.0) ------------------------------------------------------------------------------- Action Count Miss Median P90 P99 Max Total ========= ========= ========= ========= ========= ========= ========= ========= get 712546 71623 19.073us 35.048us 59.843us 12.980ms 16.849s set 71530 0 108.004us 1.313ms 9.176ms 333.361ms 50.821s delete 7916 767 73.195us 1.264ms 9.033ms 108.232ms 4.964s Total 791992 72.634s ========= ========= ========= ========= ========= ========= ========= ========= Here :class:`FanoutCache ` uses four shards to distribute writes. That reduces the maximum latency by a factor of ten. Note the miss rate is variable due to the interleaved operations of concurrent workers. 
========= ========= ========= ========= ========= ========= ========= ========= Timings for diskcache.FanoutCache(shards=8, timeout=0.010) ------------------------------------------------------------------------------- Action Count Miss Median P90 P99 Max Total ========= ========= ========= ========= ========= ========= ========= ========= get 712546 71106 25.034us 47.922us 101.089us 9.015ms 22.336s set 71530 39 134.945us 1.324ms 5.763ms 16.027ms 33.347s delete 7916 775 88.930us 1.267ms 5.017ms 13.732ms 3.308s Total 791992 58.991s ========= ========= ========= ========= ========= ========= ========= ========= With one shard allocated per worker and a low timeout, the maximum latency is more reasonable and corresponds to the specified 10 millisecond timeout. Some set and delete operations were therefore canceled and recorded as cache misses. The miss rate due to timeout is about 0.01% so our success rate is four-nines or 99.99%. ========= ========= ========= ========= ========= ========= ========= ========= Timings for pylibmc.Client ------------------------------------------------------------------------------- Action Count Miss Median P90 P99 Max Total ========= ========= ========= ========= ========= ========= ========= ========= get 712546 72043 83.923us 107.050us 123.978us 617.027us 61.824s set 71530 0 84.877us 108.004us 124.931us 312.090us 6.283s delete 7916 796 82.970us 105.858us 123.024us 288.963us 680.970ms Total 791992 68.788s ========= ========= ========= ========= ========= ========= ========= ========= Memcached performance is low latency and stable even under heavy load. Notice that cache gets are three times slower in total as compared with :class:`FanoutCache `. The superior performance of get operations put the overall performance of :doc:`DiskCache ` ahead of Memcached. 
========= ========= ========= ========= ========= ========= ========= ========= Timings for redis.StrictRedis ------------------------------------------------------------------------------- Action Count Miss Median P90 P99 Max Total ========= ========= ========= ========= ========= ========= ========= ========= get 712546 72093 138.044us 169.039us 212.908us 151.121ms 101.197s set 71530 0 138.998us 169.992us 216.007us 1.200ms 10.173s delete 7916 752 136.137us 167.847us 211.954us 1.059ms 1.106s Total 791992 112.476s ========= ========= ========= ========= ========= ========= ========= ========= Redis performance is roughly half that of Memcached. Beware the impact of persistence settings on your Redis performance. Depending on your use of logging and snapshotting, maximum latency may increase significantly. python-diskcache-5.4.0/docs/case-study-landing-page-caching.rst000066400000000000000000000131431416346170000244420ustar00rootroot00000000000000Case Study: Landing Page Caching ================================ :doc:`DiskCache ` version 4 added recipes for cache stampede mitigation. Cache stampedes are a type of system overload that can occur when parallel computing systems using memoization come under heavy load. This behaviour is sometimes also called dog-piling, cache miss storm, cache choking, or the thundering herd problem. Let's look at how that applies to landing page caching. .. code-block:: python import time def generate_landing_page(): time.sleep(0.2) # Work really hard. # Return HTML response. Imagine a website under heavy load with a function used to generate the landing page. There are five processes each with two threads for a total of ten concurrent workers. The landing page is loaded constantly and takes about two hundred milliseconds to generate. .. image:: _static/no-caching.png When we look at the number of concurrent workers and the latency with no caching at all, the graph looks as above. 
Notice each worker constantly regenerates the page with a consistently slow latency. .. code-block:: python :emphasize-lines: 5 import diskcache as dc cache = dc.Cache() @cache.memoize(expire=1) def generate_landing_page(): time.sleep(0.2) Assume the result of generating the landing page can be memoized for one second. Memoization supports a traditional caching strategy. After each second, the cached HTML expires and all ten workers rush to regenerate the result. .. image:: _static/traditional-caching.png There is a huge improvement in average latency now but some requests experience worse latency than before due to the added overhead of caching. The cache stampede is visible too as the spikes in the concurrency graph. If generating the landing page requires significant resources then the spikes may be prohibitive. To reduce the number of concurrent workers, a barrier can be used to synchronize generating the landing page. .. code-block:: python :emphasize-lines: 1,2,3 @cache.memoize(expire=0) @dc.barrier(cache, dc.Lock) @cache.memoize(expire=1) def generate_landing_page(): time.sleep(0.2) The double-checked locking uses two memoization decorators to optimistically look up the cached result before locking. With `expire` set to zero, the cache's get-operation is performed but the set-operation is skipped. Only the inner-nested memoize decorator will update the cache. .. image:: _static/synchronized-locking.png The number of concurrent workers is now greatly improved. Rather than having ten workers all attempt to generate the same result, a single worker generates the result and the other ten benefit. The maximum latency has increased however as three layers of caching and locking wrap the function. Ideally, the system would anticipate the pending expiration of the cached item and would recompute the result in a separate thread of execution. Coordinating recomputation would be a function of the number of workers, the expiration time, and the duration of computation. 
Fortunately, Vattani, et al. published the solution in "Optimal Probabilistic Cache Stampede Prevention" in 2015. .. code-block:: python :emphasize-lines: 1 @dc.memoize_stampede(cache, expire=1) def generate_landing_page(): time.sleep(0.2) Early probabilistic recomputation uses a random number generator to simulate a cache miss prior to expiration. The new result is then computed in a separate thread while the cached result is returned to the caller. When the cache item is missing, the result is computed and cached synchronously. .. image:: _static/early-recomputation.png The latency is now its theoretical best. An initial warmup execution takes two hundred milliseconds and the remaining calls all return immediately from the cache. Behind the scenes, separate threads of execution are recomputing the result of workers and updating the cache. The concurrency graph shows a nearly constant stream of workers recomputing the function's result. .. code-block:: python :emphasize-lines: 1 @dc.memoize_stampede(cache, expire=1, beta=0.5) def generate_landing_page(): time.sleep(0.2) Vattani described an additional parameter, :math:`\beta`, which could be used to tune the eagerness of recomputation. As the number and frequency of concurrent worker calls increases, eagerness can be lessened by decreasing the :math:`\beta` parameter. The default value of :math:`\beta` is one, and above it is set to half. .. image:: _static/early-recomputation-05.png Latency is now still its theoretical best while the worker load has decreased significantly. The likelihood of simulated cache misses is now half what it was before. The value was determined through experimentation. .. code-block:: python :emphasize-lines: 1 @dc.memoize_stampede(cache, expire=1, beta=0.3) def generate_landing_page(): time.sleep(0.2) Let's see what happens when :math:`\beta` is set too low. .. image:: _static/early-recomputation-03.png When set too low, the cache item expires before a new value is recomputed.
The real cache miss then causes the workers to synchronously recompute the landing page and cache the result. With no barrier in place, eleven workers cause a cache stampede. The eleven workers are composed of ten synchronous workers and one in a background thread. The best way to customize :math:`\beta` is through experimentation, otherwise the default is reasonable. :doc:`DiskCache ` provides data types and recipes for memoization and mitigation of cache stampedes. The decorators provided are composable for a variety of scenarios. The best way to get started is with the :doc:`tutorial`. python-diskcache-5.4.0/docs/case-study-web-crawler.rst000066400000000000000000000117331416346170000227370ustar00rootroot00000000000000Case Study: Web Crawler ======================= :doc:`DiskCache ` version 2.7 added a couple persistent data structures. Let's see how they're useful with a case study in crawling the web. Easy enough, right? We'll start with code to retrieve urls: >>> from time import sleep >>> def get(url): ... "Get data for url." ... sleep(url / 1000.0) ... return str(url) No, we're not actually crawling the web. Our urls are numbers and we'll simply go to sleep to simulate downloading a web page. >>> get(20) '20' Once we download some data, we'll need to parse it and extract the links. >>> from random import randrange, seed >>> def parse(data): ... "Parse data and return list of links." ... seed(int(data)) ... count = randrange(1, 10) ... return [randrange(100) for _ in range(count)] Again, we're not really parsing data. We're just returning a list of one to ten integers between zero and one hundred. In our imaginary web, urls are just integers. >>> parse('20') [68, 76, 90, 25, 63, 90, 87, 57, 16] Alright, this is a pretty basic pattern. The ``get`` function returns data and the ``parse`` function returns a list of more data to go get. We can use the deque data type from the standard library's collections module to crawl our web.
>>> from collections import deque >>> def crawl(): ... urls = deque([0]) ... results = dict() ... ... while True: ... try: ... url = urls.popleft() ... except IndexError: ... break ... ... if url in results: ... continue ... ... data = get(url) ... ... for link in parse(data): ... urls.append(link) ... ... results[url] = data ... ... print('Results: %s' % len(results)) We're doing a breadth-first search crawl of the web. Our initial seed is zero and we use that to initialize our queue. All the results are stored in a dictionary mapping url to data. We then iterate by repeatedly popping the first url from our queue. If we've already visited the url then we continue, otherwise we get the corresponding data and parse it. The parsed results are appended to our queue. Finally we store the data in our results dictionary. >>> crawl() Results: 99 The results of our current code are ephemeral. All results are lost once the program terminates. To make the results persistent, we can use :doc:`DiskCache ` data structures and store the results in the local file system. :doc:`DiskCache ` provides both :class:`Deque ` and :class:`Index ` data structures which can replace our urls and results variables. >>> from diskcache import Deque, Index >>> def crawl(): ... urls = Deque([0], 'data/urls') ... results = Index('data/results') ... ... while True: ... try: ... url = urls.popleft() ... except IndexError: ... break ... ... if url in results: ... continue ... ... data = get(url) ... ... for link in parse(data): ... urls.append(link) ... ... results[url] = data ... ... print('Results: %s' % len(results)) Look familiar? Only three lines changed. The import at the top changed so now we're using ``diskcache`` rather than the ``collections`` module. Then, when we initialize the urls and results objects, we pass relative paths to directories where we want the data stored. Again, let's try it out: >>> crawl() Results: 99 Our results are now persistent. 
We can initialize our results index outside of the crawl function and query it. >>> results = Index('data/results') >>> len(results) 99 As an added benefit, our code also now works in parallel. >>> results.clear() >>> from multiprocessing import Process >>> processes = [Process(target=crawl) for _ in range(4)] >>> for process in processes: ... process.start() >>> for process in processes: ... process.join() >>> len(results) 99 Each of the processes uses the same deque and index to crawl our web. Work is automatically divided among the processes as they pop urls from the queue. If this were run as a script then multiple Python processes could be started and stopped as desired. Interesting, no? Three simple changes and our code goes from ephemeral and single-process to persistent and multi-process. Nothing truly new has happened here but the API is convenient and that makes a huge difference. We're also no longer constrained by memory. :doc:`DiskCache ` makes efficient use of your disk and you can customize how much memory is used. By default the maximum memory consumption of deque and index objects is only a few dozen megabytes. Now our simple script can efficiently process terabytes of data. Go forth and build and share! python-diskcache-5.4.0/docs/conf.py000066400000000000000000000052671416346170000172200ustar00rootroot00000000000000# Configuration file for the Sphinx documentation builder. # # This file only contains a selection of the most common options. For a full # list see the documentation: # https://www.sphinx-doc.org/en/master/usage/configuration.html # -- Path setup -------------------------------------------------------------- # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. 
import os import sys sys.path.insert(0, os.path.abspath('..')) import diskcache # -- Project information ----------------------------------------------------- project = 'DiskCache' copyright = '2022, Grant Jenks' author = 'Grant Jenks' # The full version, including alpha/beta/rc tags release = diskcache.__version__ # -- General configuration --------------------------------------------------- # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ 'sphinx.ext.autodoc', 'sphinx.ext.todo', ] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. # This pattern also affects html_static_path and html_extra_path. exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] # -- Options for HTML output ------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. html_theme = 'alabaster' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. html_theme_options = { 'logo': 'gj-logo.png', 'logo_name': True, 'logo_text_align': 'center', 'analytics_id': 'UA-19364636-2', 'show_powered_by': False, 'show_related': True, 'github_user': 'grantjenks', 'github_repo': 'python-diskcache', 'github_type': 'star', } # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # Custom sidebar templates, maps document names to template names. 
html_sidebars = { '**': [ 'about.html', 'gumroad.html', 'localtoc.html', 'relations.html', 'searchbox.html', ] } def setup(app): app.add_css_file('custom.css') python-diskcache-5.4.0/docs/development.rst000066400000000000000000000175071416346170000207750ustar00rootroot00000000000000DiskCache Development ===================== :doc:`DiskCache ` development is lead by Grant Jenks . Collaborators Welcome --------------------- #. Search issues or open a new issue to start a discussion around a bug. #. Fork the `GitHub repository`_ and make your changes in a new branch. #. Write a test which shows the bug was fixed. #. Send a pull request and message the development lead until its merged and published. .. _`GitHub repository`: https://github.com/grantjenks/python-diskcache Requests for Contributions -------------------------- #. Command-line interface. Operations to support: get, set, store, delete, expire, evict, clear, path, check, stats. #. Django admin interface for cache stats and interaction. #. Cache stampede barrier (source prototype in repo). #. API Compatibility #. `Shelf interface `_ #. `DBM interface `_ #. Backend Compatibility #. `Flask-Caching `_ #. `Beaker `_ #. `dogpile.cache `_ Get the Code ------------ :doc:`DiskCache ` is actively developed in a `GitHub repository`_. You can either clone the public repository:: $ git clone https://github.com/grantjenks/python-diskcache.git Download the `tarball `_:: $ curl -OL https://github.com/grantjenks/python-diskcache/tarball/master Or, download the `zipball `_:: $ curl -OL https://github.com/grantjenks/python-diskcache/zipball/master Installing Dependencies ----------------------- Install development dependencies with `pip `_:: $ pip install -r requirements.txt All packages for running tests will be installed. Additional packages like ``pylibmc`` and ``redis`` along with their server counterparts are necessary for some benchmarks. 
Testing ------- :doc:`DiskCache ` currently tests against five versions of Python: * CPython 3.5 * CPython 3.6 * CPython 3.7 * CPython 3.8 Testing uses `tox `_. If you don't want to install all the development requirements, then, after downloading, you can simply run:: $ python setup.py test The test argument to setup.py will download a minimal testing infrastructure and run the tests. :: $ tox GLOB sdist-make: python-diskcache/setup.py py27 inst-nodeps: python-diskcache/.tox/dist/diskcache-0.9.0.zip py27 runtests: PYTHONHASHSEED='3527394681' py27 runtests: commands[0] | nosetests ......................................................................... ---------------------------------------------------------------------- Ran 98 tests in 29.404s OK py34 inst-nodeps: python-diskcache/.tox/dist/diskcache-0.9.0.zip py34 runtests: PYTHONHASHSEED='3527394681' py34 runtests: commands[0] | nosetests ......................................................................... ---------------------------------------------------------------------- Ran 98 tests in 22.841s OK py35 inst-nodeps: python-diskcache/.tox/dist/diskcache-0.9.0.zip py35 runtests: PYTHONHASHSEED='3527394681' py35 runtests: commands[0] | nosetests ......................................................................... ---------------------------------------------------------------------- Ran 98 tests in 23.803s OK ____________________ summary ____________________ py27: commands succeeded py34: commands succeeded py35: commands succeeded congratulations :) Coverage testing uses `nose `_: :: $ nosetests --cover-erase --with-coverage --cover-package diskcache ......................................................................... 
Name Stmts Miss Cover Missing -------------------------------------------------------- diskcache.py 13 2 85% 9-11 diskcache/core.py 442 4 99% 22-25 diskcache/djangocache.py 43 0 100% diskcache/fanout.py 66 0 100% -------------------------------------------------------- TOTAL 564 6 99% ---------------------------------------------------------------------- Ran 98 tests in 28.766s OK It's normal to not see 100% coverage. Some code is specific to the Python runtime. Stress testing is also based on nose but can be run independently as a module. Stress tests are kept in the tests directory and prefixed with ``stress_test_``. Stress tests accept many arguments. Read the help for details. :: $ python -m tests.stress_test_core --help usage: stress_test_core.py [-h] [-n OPERATIONS] [-g GET_AVERAGE] [-k KEY_COUNT] [-d DEL_CHANCE] [-w WARMUP] [-e EXPIRE] [-t THREADS] [-p PROCESSES] [-s SEED] [--no-create] [--no-delete] [-v EVICTION_POLICY] optional arguments: -h, --help show this help message and exit -n OPERATIONS, --operations OPERATIONS Number of operations to perform (default: 10000) -g GET_AVERAGE, --get-average GET_AVERAGE Expected value of exponential variate used for GET count (default: 100) -k KEY_COUNT, --key-count KEY_COUNT Number of unique keys (default: 10) -d DEL_CHANCE, --del-chance DEL_CHANCE Likelihood of a key deletion (default: 0.1) -w WARMUP, --warmup WARMUP Number of warmup operations before timings (default: 10) -e EXPIRE, --expire EXPIRE Number of seconds before key expires (default: None) -t THREADS, --threads THREADS Number of threads to start in each process (default: 1) -p PROCESSES, --processes PROCESSES Number of processes to start (default: 1) -s SEED, --seed SEED Random seed (default: 0) --no-create Do not create operations data (default: True) --no-delete Do not delete operations data (default: True) -v EVICTION_POLICY, --eviction-policy EVICTION_POLICY If stress exits normally then it worked successfully. 
Some stress is run by tox and nose but the iteration count is limited. More rigorous testing requires increasing the iteration count to millions. At that level, it's best to just let it run overnight. Stress testing will stop at the first failure. Running Benchmarks ------------------ Running and plotting benchmarks is a two step process. Each is a Python script in the tests directory. Benchmark scripts are prefixed with ``benchmark_``. For example: :: $ python tests/benchmark_core.py --help usage: benchmark_core.py [-h] [-p PROCESSES] [-n OPERATIONS] [-r RANGE] [-w WARMUP] optional arguments: -h, --help show this help message and exit -p PROCESSES, --processes PROCESSES Number of processes to start (default: 8) -n OPERATIONS, --operations OPERATIONS Number of operations to perform (default: 100000) -r RANGE, --range RANGE Range of keys (default: 100) -w WARMUP, --warmup WARMUP Number of warmup operations before timings (default: 1000) Benchmark output is stored in text files prefixed with ``timings_`` in the `tests` directory. Plotting the benchmarks is done by passing the timings file as an argument to ``plot.py``. python-diskcache-5.4.0/docs/djangocache-benchmarks.rst000066400000000000000000000225631416346170000230120ustar00rootroot00000000000000DiskCache DjangoCache Benchmarks ================================ :doc:`DiskCache ` provides a Django-compatible cache API in :class:`diskcache.DjangoCache`. A discussion of its options and abilities are described in the :doc:`tutorial `. Here we try to assess its performance compared to other Django cache backends. Keys and Values --------------- A survey of repositories on Github showed a diversity of cached values. Among those observed values were: 1. Processed text, most commonly HTML. The average HTML page size in 2014 was 59KB. Javascript assets totalled an average of 295KB and images range dramatically but averaged 1.2MB. 2. QuerySets, the building blocks of the Django ORM. 3. Numbers, settings, and labels. 
Generally small values that vary in how often they change. The diversity of cached values presents unique challenges. Below, keys and values, are constrained simply to short byte strings. This is done to filter out overhead from pickling, etc. from the benchmarks. Backends -------- Django ships with four cache backends: Memcached, Database, Filesystem, and Local-memory. The Memcached backend uses the `PyLibMC`_ client backend. Included in the results below is also Redis provided by the `django-redis`_ project built atop `redis-py`_. Not included were four projects which were difficult to setup and so impractical for testing. 1. | uWSGI cache backend. | https://pypi.python.org/pypi/django-uwsgi-cache 2. | Amazon S3 backend. | https://pypi.python.org/pypi/django-s3-cache 3. | MongoDB cache backend. | https://pypi.python.org/pypi/django-mongodb-cash-backend 4. | Cacheops - incompatible filebased caching. | https://pypi.python.org/pypi/django-cacheops Other caching related projects worth mentioning: 5. | Request-specific in-memory cache. | http://pythonhosted.org/johnny-cache/localstore_cache.html 6. | Cacheback moves all cache store operations to background Celery tasks. | https://pypi.python.org/pypi/django-cacheback 7. | Newcache claims to improve Django's Memcached backend. | https://pypi.python.org/pypi/django-newcache 8. | Supports tagging cache entries. | https://pypi.python.org/pypi/cache-tagging There are also Django packages which automatically cache database queries by patching the ORM. `Cachalot`_ has a good comparison and discussion in its introduction. .. _`PyLibMC`: https://pypi.python.org/pypi/pylibmc .. _`django-redis`: https://pypi.python.org/pypi/django-redis .. _`redis-py`: https://pypi.python.org/pypi/redis .. _`Cachalot`: http://django-cachalot.readthedocs.org/en/latest/introduction.html Filebased --------- Django's filesystem cache backend has a severe drawback. Every `set` operation checks whether a cull operation is necessary. 
This check requires listing all the files in the directory. To do so a call to ``glob.glob1`` is made. As the directory size increases, the call slows linearly. ============ ============ Timings for glob.glob1 ------------------------- Count Time ============ ============ 1 1.602ms 10 2.213ms 100 8.946ms 1000 65.869ms 10000 604.972ms 100000 6.450s ============ ============ Above, the count regards the number of files in the directory and the time is the duration of the function call. At only a hundred files, it takes more than five milliseconds to construct the list of files. Concurrent Access ----------------- The concurrent access workload starts eight worker processes each with different and interleaved operations. None of these benchmarks saturated all the processors. Operations used 1,100 unique keys and, where applicable, caches were limited to 1,000 keys. This was done to illustrate the impact of the culling strategy in ``locmem`` and ``filebased`` caches. Get ... .. image:: _static/djangocache-get.png Under heavy load, :class:`DjangoCache ` gets are low latency. At the 99th percentile they are on par with the Memcached cache backend. Set ... .. image:: _static/djangocache-set.png Not displayed above is the filebased cache backend. At all percentiles, the latency exceeded five milliseconds. Timing data is available below. Though :doc:`DiskCache ` is the slowest, its latency remains competitive. Delete ...... .. image:: _static/djangocache-delete.png Like sets, deletes require writes to disk. Though :class:`DjangoCache ` is the slowest, it remains competitive with latency less than five milliseconds. Remember that unlike Local-memory, Memached, and Redis, it persists all cached data. Timing Data ........... Not all data is easily displayed in the graphs above. Miss rate, maximum latency and total latency is recorded below. 
========= ========= ========= ========= ========= ========= ========= ========= Timings for locmem ------------------------------------------------------------------------------- Action Count Miss Median P90 P99 Max Total ========= ========= ========= ========= ========= ========= ========= ========= get 712546 140750 36.001us 57.936us 60.081us 10.202ms 28.962s set 71530 0 36.955us 39.101us 45.061us 2.784ms 2.709s delete 7916 0 32.902us 35.048us 37.193us 1.524ms 265.399ms Total 791992 31.936s ========= ========= ========= ========= ========= ========= ========= ========= Notice the high cache miss rate. This reflects the isolation of local memory caches from each other. Also the culling strategy of local memory caches is random. ========= ========= ========= ========= ========= ========= ========= ========= Timings for memcached ------------------------------------------------------------------------------- Action Count Miss Median P90 P99 Max Total ========= ========= ========= ========= ========= ========= ========= ========= get 712546 69185 87.023us 99.182us 110.865us 576.973us 61.758s set 71530 0 89.169us 102.043us 114.202us 259.876us 6.395s delete 7916 0 85.115us 97.990us 108.957us 201.941us 672.212ms Total 791992 68.825s ========= ========= ========= ========= ========= ========= ========= ========= Memcached performance is low latency and stable. 
========= ========= ========= ========= ========= ========= ========= ========= Timings for redis ------------------------------------------------------------------------------- Action Count Miss Median P90 P99 Max Total ========= ========= ========= ========= ========= ========= ========= ========= get 712546 69526 160.933us 195.980us 239.134us 1.365ms 116.816s set 71530 0 166.178us 200.987us 242.949us 587.940us 12.143s delete 7916 791 143.051us 177.860us 217.915us 330.925us 1.165s Total 791992 130.124s ========= ========= ========= ========= ========= ========= ========= ========= Redis performance is roughly half that of Memcached. Beware the impact of persistence settings on your Redis performance. Depending on your use of logging and snapshotting, maximum latency may increase significantly. ========= ========= ========= ========= ========= ========= ========= ========= Timings for diskcache ------------------------------------------------------------------------------- Action Count Miss Median P90 P99 Max Total ========= ========= ========= ========= ========= ========= ========= ========= get 712546 69509 33.855us 56.982us 79.155us 11.908ms 30.078s set 71530 0 178.814us 1.355ms 5.032ms 26.620ms 34.461s delete 7916 0 107.050us 1.280ms 4.738ms 17.217ms 3.303s Total 791992 67.842s ========= ========= ========= ========= ========= ========= ========= ========= :class:`DjangoCache ` defaults to using eight shards with a 10 millisecond timeout. Notice that cache get operations are in aggregate more than twice as fast as Memcached. And total cache time for all operations is comparable. The higher set and delete latencies are due to the retry behavior of :class:`DjangoCache ` objects. If lower latency is required then the retry behavior can be disabled. 
========= ========= ========= ========= ========= ========= ========= ========= Timings for filebased ------------------------------------------------------------------------------- Action Count Miss Median P90 P99 Max Total ========= ========= ========= ========= ========= ========= ========= ========= get 712749 103843 112.772us 193.119us 423.908us 18.428ms 92.428s set 71431 0 8.893ms 11.742ms 14.790ms 44.201ms 646.879s delete 7812 0 223.875us 389.099us 679.016us 15.058ms 1.940s Total 791992 741.247s ========= ========= ========= ========= ========= ========= ========= ========= Notice the higher cache miss rate. That's a result of the cache's random culling strategy. Get and set operations also take three to twenty times longer in aggregate as compared with :class:`DjangoCache `. python-diskcache-5.4.0/docs/index.rst000066400000000000000000000003411416346170000175460ustar00rootroot00000000000000.. include:: ../README.rst .. toctree:: :hidden: tutorial cache-benchmarks djangocache-benchmarks case-study-web-crawler case-study-landing-page-caching sf-python-2017-meetup-talk api development python-diskcache-5.4.0/docs/make.bat000066400000000000000000000014331416346170000173150ustar00rootroot00000000000000@ECHO OFF pushd %~dp0 REM Command file for Sphinx documentation if "%SPHINXBUILD%" == "" ( set SPHINXBUILD=sphinx-build ) set SOURCEDIR=. set BUILDDIR=_build if "%1" == "" goto help %SPHINXBUILD% >NUL 2>NUL if errorlevel 9009 ( echo. echo.The 'sphinx-build' command was not found. Make sure you have Sphinx echo.installed, then set the SPHINXBUILD environment variable to point echo.to the full path of the 'sphinx-build' executable. Alternatively you echo.may add the Sphinx directory to PATH. echo. 
echo.If you don't have Sphinx installed, grab it from echo.http://sphinx-doc.org/ exit /b 1 ) %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% goto end :help %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% :end popd python-diskcache-5.4.0/docs/sf-python-2017-meetup-talk.rst000066400000000000000000000116001416346170000232030ustar00rootroot00000000000000Talk: All Things Cached - SF Python 2017 Meetup =============================================== * `Python All Things Cached Slides`_ * Can we have some fun together in this talk? * Can I show you some code that I would not run in production? * Great talk by David Beazley at PyCon Israel this year. * Encourages us to scratch our itch under the code phrase: "It's just a prototype." Not a bad place to start. Often how it ends :) Landscape --------- * At face value, caches seem simple: get/set/delete. * But zoom in a little and you find just more and more detail. Backends -------- * Backends have different designs and tradeoffs. Frameworks ---------- * Caches have broad applications. * Web and scientific communities reach for them first. I can haz mor memory? --------------------- * Redis is great technology: free, open source, fast. * But another process to manage and more memory required. :: $ emacs talk/settings.py $ emacs talk/urls.py $ emacs talk/views.py :: $ gunicorn --reload talk.wsgi :: $ emacs benchmark.py :: $ python benchmark.py * I dislike benchmarks in general so don't copy this code. I kind of stole it from Beazley in another great talk he did on concurrency in Python. He said not to copy it so I'm telling you not to copy it. :: $ python manage.py shell .. code-block:: pycon >>> import time >>> from django.conf import settings >>> from django.core.cache import caches .. code-block:: pycon >>> for key in settings.CACHES.keys(): ... caches[key].clear() :: >>> while True: ... !ls /tmp/filebased | wc -l ... time.sleep(1) Fool me once, strike one. Feel me twice? Strike three. 
------------------------------------------------------ * Filebased cache has two severe drawbacks. 1. Culling is random. 2. set() uses glob.glob1() which slows linearly with directory size. DiskCache --------- * Wanted to solve Django-filebased cache problems. * Felt like something was missing in the landscape. * Found an unlikely hero in SQLite. I'd rather drive a slow car fast than a fast car slow ----------------------------------------------------- * Story: driving down the Grapevine in SoCal in friend's 1960s VW Bug. Features -------- * Lot's of features. Maybe a few too many. Ex: never used the tag metadata and eviction feature. Use Case: Static file serving with read() ----------------------------------------- * Some fun features. Data is stored in files and web servers are good at serving files. Use Case: Analytics with incr()/pop() ------------------------------------- * Tried to create really functional APIs. * All write operations are atomic. Case Study: Baby Web Crawler ---------------------------- * Convert from ephemeral, single-process to persistent, multi-process. "get" Time vs Percentile ------------------------ * Tradeoff cache latency and miss-rate using timeout. "set" Time vs Percentile ------------------------ * Django-filebased cache so slow, can't plot. Design ------ * Cache is a single shard. FanoutCache uses multiple shards. Trick is cross-platform hash. * Pickle can actually be fast if you use a higher protocol. Default 0. Up to 4 now. * Don't choose higher than 2 if you want to be portable between Python 2 and 3. * Size limit really indicates when to start culling. Limit number of items deleted. SQLite ------ * Tradeoff cache latency and miss-rate using timeout. * SQLite supports 64-bit integers and floats, UTF-8 text and binary blobs. * Use a context manager for isolation level management. * Pragmas tune the behavior and performance of SQLite. * Default is robust and slow. * Use write-ahead-log so writers don't block readers. 
* Memory-map pages for fast lookups. Best way to make money in photography? Sell all your gear. ---------------------------------------------------------- * Who saw eclipse? Awesome, right? * Hard to really photograph the experience. * This is me, staring up at the sun, blinding myself as I hold my glasses and my phone to take a photo. Clearly lousy. * Software talks are hard to get right and I can't cover everything related to caching in 20 minutes. I hope you've learned something tonight or at least seen something interesting. Conclusion ---------- * Windows support mostly "just worked". * SQLite is truly cross-platform. * Filesystems are a little different. * AppVeyor was about half as fast as Travis. * check() to fix inconsistencies. * Caveats: * NFS and SQLite do not play nice. * Not well suited to queues (want read:write at 10:1 or higher). * Alternative databases: BerkeleyDB, LMDB, RocksDB, LevelDB, etc. * Engage with me on Github, find bugs, complain about performance. * If you like the project, star-it on Github and share it with friends. * Thanks for letting me share tonight. Questions? .. _`Python All Things Cached Slides`: http://bit.ly/dc-2017-slides python-diskcache-5.4.0/docs/tutorial.rst000066400000000000000000001067411416346170000203150ustar00rootroot00000000000000DiskCache Tutorial ================== .. contents:: :depth: 1 :local: Installation ------------ This part of the documentation covers the installation of :doc:`DiskCache `. The first step to using any software package is getting it properly installed. Pip & PyPI .......... Installing :doc:`DiskCache ` is simple with `pip `_:: $ pip install --upgrade diskcache The versioning scheme uses `major.minor.micro` with `micro` intended for bug fixes, `minor` intended for small features or improvements, and `major` intended for significant new features and breaking changes. While it is intended that only `major` version changes are backwards incompatible, it is not always guaranteed. 
When running in production, it is recommended to pin at least the `major` version. Get the Code ............ :doc:`DiskCache ` is actively developed on GitHub, where the code is always available. You can either clone the `DiskCache repository `_:: $ git clone https://github.com/grantjenks/python-diskcache.git Download the `tarball `_:: $ curl -OL https://github.com/grantjenks/python-diskcache/tarball/master Or, download the `zipball `_:: $ curl -OL https://github.com/grantjenks/python-diskcache/zipball/master Once you have a copy of the source, you can embed it in your Python package, or install it into your site-packages easily:: $ python setup.py install :doc:`DiskCache ` is looking for a Debian package maintainer. If you can help, please open an issue in the `DiskCache Issue Tracker`_. :doc:`DiskCache ` is looking for a CentOS/RPM package maintainer. If you can help, please open an issue in the `DiskCache Issue Tracker`_. .. _`DiskCache Issue Tracker`: https://github.com/grantjenks/python-diskcache/issues/ .. _tutorial-cache: Cache ----- The core of :doc:`DiskCache ` is :class:`diskcache.Cache` which represents a disk and file backed cache. As a Cache, it supports a familiar Python mapping interface with additional cache and performance parameters. >>> from diskcache import Cache >>> cache = Cache() Initialization expects a directory path reference. If the directory path does not exist, it will be created. When not specified, a temporary directory is automatically created. Additional keyword parameters are discussed below. Cache objects are thread-safe and may be shared between threads. Two Cache objects may also reference the same directory from separate threads or processes. In this way, they are also process-safe and support cross-process communication. Cache objects open and maintain one or more file handles. But unlike files, all Cache operations are atomic and Cache objects support process-forking and may be serialized using Pickle. 
Each thread that accesses a cache should also call :meth:`close <.Cache.close>` on the cache. Cache objects can be used in a `with` statement to safeguard calling :meth:`close `. >>> cache.close() >>> with Cache(cache.directory) as reference: ... reference.set('key', 'value') True Closed Cache objects will automatically re-open when accessed. But opening Cache objects is relatively slow, and since all operations are atomic, may be safely left open. >>> cache.close() >>> cache.get('key') # Automatically opens, but slower. 'value' Set an item, get a value, and delete a key using the usual operators: >>> cache['key'] = 'value' >>> cache['key'] 'value' >>> 'key' in cache True >>> del cache['key'] There's also a :meth:`set ` method with additional keyword parameters: `expire`, `read`, and `tag`. >>> from io import BytesIO >>> cache.set('key', BytesIO(b'value'), expire=5, read=True, tag='data') True In the example above: the key expires in 5 seconds, the value is read as a file-like object, and tag metadata is stored with the key. Another method, :meth:`get ` supports querying extra information with `default`, `read`, `expire_time`, and `tag` keyword parameters. >>> result = cache.get('key', read=True, expire_time=True, tag=True) >>> reader, timestamp, tag = result >>> print(reader.read().decode()) value >>> type(timestamp).__name__ 'float' >>> print(tag) data The return value is a tuple containing the value, expire time (seconds from epoch), and tag. Because we passed ``read=True`` the value is returned as a file-like object. Use :meth:`touch <.Cache.touch>` to update the expiration time of an item in the cache. >>> cache.touch('key', expire=None) True >>> cache.touch('does-not-exist', expire=1) False Like :meth:`set `, the method :meth:`add ` can be used to insert an item in the cache. The item is inserted only if the key is not already present. 
>>> cache.add(b'test', 123) True >>> cache[b'test'] 123 >>> cache.add(b'test', 456) False >>> cache[b'test'] 123 Item values can also be incremented and decremented using :meth:`incr ` and :meth:`decr ` methods. >>> cache.incr(b'test') 124 >>> cache.decr(b'test', 24) 100 Increment and decrement methods also support a keyword parameter, `default`, which will be used for missing keys. When ``None``, incrementing or decrementing a missing key will raise a :exc:`KeyError`. >>> cache.incr('alice') 1 >>> cache.decr('bob', default=-9) -10 >>> cache.incr('carol', default=None) Traceback (most recent call last): ... KeyError: 'carol' Increment and decrement operations are atomic and assume the value may be stored in a SQLite integer column. SQLite supports 64-bit signed integers. Like :meth:`delete ` and :meth:`get `, the method :meth:`pop ` can be used to delete an item in the cache and return its value. >>> cache.pop('alice') 1 >>> cache.pop('dave', default='does not exist') 'does not exist' >>> cache.set('dave', 0, expire=None, tag='admin') True >>> result = cache.pop('dave', expire_time=True, tag=True) >>> value, timestamp, tag = result >>> value 0 >>> print(timestamp) None >>> print(tag) admin The :meth:`pop ` operation is atomic and using :meth:`incr ` together is an accurate method for counting and dumping statistics in long-running systems. Unlike :meth:`get ` the `read` argument is not supported. .. _tutorial-culling: Another four methods remove items from the cache:: >>> cache.clear() 3 >>> cache.reset('cull_limit', 0) # Disable automatic evictions. 0 >>> for num in range(10): ... _ = cache.set(num, num, expire=1e-9) # Expire immediately. >>> len(cache) 10 >>> list(cache) [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] >>> import time >>> time.sleep(1) >>> cache.expire() 10 :meth:`Expire ` removes all expired keys from the cache. Resetting the :ref:`cull_limit ` to zero will disable culling during :meth:`set ` and :meth:`add ` operations. 
Because culling is performed lazily, the reported length of the cache includes expired items. Iteration likewise includes expired items because it is a read-only operation. To exclude expired items you must explicitly call :meth:`expire ` which works regardless of the :ref:`cull_limit `. >>> for num in range(100): ... _ = cache.set(num, num, tag='odd' if num % 2 else 'even') >>> cache.evict('even') 50 .. _tutorial-tag-index: :meth:`Evict ` removes all the keys with a matching tag. The default tag is ``None``. Tag values may be any of integer, float, string, bytes and None. To accelerate the eviction of items by tag, an index can be created. To do so, initialize the cache with ``tag_index=True``. >>> cache.clear() 50 >>> for num in range(100): ... _ = cache.set(num, num, tag=(num % 2)) >>> cache.evict(0) 50 Likewise, the tag index may be created or dropped using methods:: >>> cache.drop_tag_index() >>> cache.tag_index 0 >>> cache.create_tag_index() >>> cache.tag_index 1 But prefer initializing the cache with a tag index rather than explicitly creating or dropping the tag index. To manually enforce the cache's size limit, use the :meth:`cull ` method. :meth:`Cull ` begins by removing expired items from the cache and then uses the eviction policy to remove items until the cache volume is less than the size limit. >>> cache.clear() 50 >>> cache.reset('size_limit', int(1e6)) 1000000 >>> cache.reset('cull_limit', 0) 0 >>> for count in range(1000): ... cache[count] = b'A' * 1000 >>> cache.volume() > int(1e6) True >>> cache.cull() > 0 True >>> cache.volume() < int(1e6) True Some users may defer all culling to a cron-like process by setting the :ref:`cull_limit ` to zero and manually calling :meth:`cull ` to remove items. Like :meth:`evict ` and :meth:`expire `, calls to :meth:`cull ` will work regardless of the :ref:`cull_limit `. :meth:`Clear ` simply removes all items from the cache. 
>>> cache.clear() > 0 True Each of these methods is designed to work concurrent to others. None of them block readers or writers in other threads or processes. Caches may be iterated by either insertion order or sorted order. The default ordering uses insertion order. To iterate by sorted order, use :meth:`iterkeys <.Cache.iterkeys>`. The sort order is determined by the database which makes it valid only for `str`, `bytes`, `int`, and `float` data types. Other types of keys will be serialized which is likely to have a meaningless sorted order. >>> for key in 'cab': ... cache[key] = None >>> list(cache) ['c', 'a', 'b'] >>> list(cache.iterkeys()) ['a', 'b', 'c'] >>> cache.peekitem() ('b', None) >>> cache.peekitem(last=False) ('c', None) If only the first or last item in insertion order is desired then :meth:`peekitem <.Cache.peekitem>` is more efficient than using iteration. Three additional methods use the sorted ordering of keys to maintain a queue-like data structure within the cache. The :meth:`push <.Cache.push>`, :meth:`pull <.Cache.pull>`, and :meth:`peek <.Cache.peek>` methods automatically assign the key within the cache. >>> key = cache.push('first') >>> print(key) 500000000000000 >>> cache[key] 'first' >>> _ = cache.push('second') >>> _ = cache.push('zeroth', side='front') >>> _, value = cache.peek() >>> value 'zeroth' >>> key, value = cache.pull() >>> print(key) 499999999999999 >>> value 'zeroth' The `side` parameter supports access to either the ``'front'`` or ``'back'`` of the cache. In addition, the `prefix` parameter can be used to maintain multiple queue-like data structures within a single cache. When prefix is ``None``, integer keys are used. Otherwise, string keys are used in the format “prefix-integer”. Integer starts at 500 trillion. Like :meth:`set <.Cache.set>` and :meth:`get <.Cache.get>`, methods :meth:`push <.Cache.push>`, :meth:`pull <.Cache.pull>`, and :meth:`peek <.Cache.peek>` support cache metadata like the expiration time and tag. 
Lastly, three methods support metadata about the cache. The first is :meth:`volume ` which returns the estimated total size in bytes of the cache directory on disk. >>> cache.volume() < int(1e5) True .. _tutorial-statistics: The second is :meth:`stats ` which returns cache hits and misses. Cache statistics must first be enabled. >>> cache.stats(enable=True) (0, 0) >>> for num in range(100): ... _ = cache.set(num, num) >>> for num in range(150): ... _ = cache.get(num) >>> hits, misses = cache.stats(enable=False, reset=True) >>> (hits, misses) (100, 50) Cache statistics are useful when evaluating different :ref:`eviction policies `. By default, statistics are disabled as they incur an extra overhead on cache lookups. Increment and decrement operations are not counted in cache statistics. The third is :meth:`check ` which verifies cache consistency. It can also fix inconsistencies and reclaim unused space. The return value is a list of warnings. >>> warnings = cache.check() Caches do not automatically remove the underlying directory where keys and values are stored. The cache is intended to be persistent and so must be deleted manually. >>> cache.close() >>> import shutil >>> try: ... shutil.rmtree(cache.directory) ... except OSError: # Windows wonkiness ... pass To permanently delete the cache, recursively remove the cache's directory. .. _tutorial-fanoutcache: FanoutCache ----------- Built atop :class:`Cache ` is :class:`diskcache.FanoutCache` which automatically `shards` the underlying database. `Sharding`_ is the practice of horizontally partitioning data. Here it is used to decrease blocking writes. While readers and writers do not block each other, writers block other writers. Therefore a shard for every concurrent writer is suggested. This will depend on your scenario. The default value is 8. Another parameter, `timeout`, sets a limit on how long to wait for database transactions. Transactions are used for every operation that writes to the database. 
When the timeout expires, a :exc:`diskcache.Timeout` error is raised internally. This `timeout` parameter is also present on :class:`diskcache.Cache`. When a :exc:`Timeout ` error occurs in :class:`Cache ` methods, the exception may be raised to the caller. In contrast, :class:`FanoutCache ` catches all timeout errors and aborts the operation. As a result, :meth:`set ` and :meth:`delete ` methods may silently fail. Most methods that handle :exc:`Timeout ` exceptions also include a `retry` keyword parameter (default ``False``) to automatically repeat attempts that timeout. The mapping interface operators: :meth:`cache[key] `, :meth:`cache[key] = value `, and :meth:`del cache[key] ` automatically retry operations when :exc:`Timeout ` errors occur. :class:`FanoutCache ` will never raise a :exc:`Timeout ` exception. The default `timeout` is 0.010 (10 milliseconds). >>> from diskcache import FanoutCache >>> cache = FanoutCache(shards=4, timeout=1) The example above creates a cache in a temporary directory with four shards and a one second timeout. Operations will attempt to abort if they take longer than one second. The remaining API of :class:`FanoutCache ` matches :class:`Cache ` as described above. The :class:`.FanoutCache` :ref:`size_limit ` is used as the total size of the cache. The size limit of individual cache shards is the total size divided by the number of shards. In the example above, the default total size is one gigabyte and there are four shards so each cache shard has a size limit of 256 megabytes. Items that are larger than the size limit are immediately culled. Caches have an additional feature: :meth:`memoizing ` decorator. The decorator wraps a callable and caches arguments and return values. >>> from diskcache import FanoutCache >>> cache = FanoutCache() >>> @cache.memoize(typed=True, expire=1, tag='fib') ... def fibonacci(number): ... if number == 0: ... return 0 ... elif number == 1: ... return 1 ... else: ... 
return fibonacci(number - 1) + fibonacci(number - 2) >>> print(sum(fibonacci(value) for value in range(100))) 573147844013817084100 The arguments to memoize are like those for `functools.lru_cache `_ and :meth:`Cache.set <.Cache.set>`. Remember to call :meth:`memoize <.FanoutCache.memoize>` when decorating a callable. If you forget, then a TypeError will occur:: >>> @cache.memoize ... def test(): ... pass Traceback (most recent call last): ... TypeError: name cannot be callable Observe the lack of parenthenses after :meth:`memoize ` above. .. _`Sharding`: https://en.wikipedia.org/wiki/Shard_(database_architecture) .. _tutorial-djangocache: DjangoCache ----------- :class:`diskcache.DjangoCache` uses :class:`FanoutCache ` to provide a Django-compatible cache interface. With :doc:`DiskCache ` installed, you can use :class:`DjangoCache ` in your settings file. .. code-block:: python CACHES = { 'default': { 'BACKEND': 'diskcache.DjangoCache', 'LOCATION': '/path/to/cache/directory', 'TIMEOUT': 300, # ^-- Django setting for default timeout of each key. 'SHARDS': 8, 'DATABASE_TIMEOUT': 0.010, # 10 milliseconds # ^-- Timeout for each DjangoCache database transaction. 'OPTIONS': { 'size_limit': 2 ** 30 # 1 gigabyte }, }, } As with :class:`FanoutCache ` above, these settings create a Django-compatible cache with eight shards and a 10ms timeout. You can pass further settings via the ``OPTIONS`` mapping as shown in the Django documentation. Only the ``BACKEND`` and ``LOCATION`` keys are necessary in the above example. The other keys simply display their default value. :class:`DjangoCache ` will never raise a :exc:`Timeout ` exception. But unlike :class:`FanoutCache `, the keyword parameter `retry` defaults to ``True`` for :class:`DjangoCache ` methods. The API of :class:`DjangoCache ` is a superset of the functionality described in the `Django documentation on caching`_ and includes many :class:`FanoutCache ` features. 
:class:`DjangoCache ` also works well with `X-Sendfile` and `X-Accel-Redirect` headers. .. code-block:: python from django.core.cache import cache def media(request, path): try: with cache.read(path) as reader: response = HttpResponse() response['X-Accel-Redirect'] = reader.name return response except KeyError: # Handle cache miss. When values are :meth:`set ` using ``read=True`` they are guaranteed to be stored in files. The full path is available on the file handle in the `name` attribute. Remember to also include the `Content-Type` header if known. .. _`Django documentation on caching`: https://docs.djangoproject.com/en/3.2/topics/cache/#the-low-level-cache-api Deque ----- :class:`diskcache.Deque` (pronounced "deck") uses a :class:`Cache ` to provide a `collections.deque `_-compatible double-ended queue. Deques are a generalization of stacks and queues with fast access and editing at both front and back sides. :class:`Deque ` objects use the :meth:`push <.Cache.push>`, :meth:`pull <.Cache.pull>`, and :meth:`peek <.Cache.peek>` methods of :class:`Cache <.Cache>` objects but never evict or expire items. >>> from diskcache import Deque >>> deque = Deque(range(5, 10)) >>> deque.pop() 9 >>> deque.popleft() 5 >>> deque.appendleft('foo') >>> len(deque) 4 >>> type(deque.directory).__name__ 'str' >>> other = Deque(directory=deque.directory) >>> len(other) 4 >>> other.popleft() 'foo' :class:`Deque ` objects provide an efficient and safe means of cross-thread and cross-process communication. :class:`Deque ` objects are also useful in scenarios where contents should remain persistent or limitations prohibit holding all items in memory at the same time. The deque uses a fixed amount of memory regardless of the size or number of items stored inside it. Index ----- :class:`diskcache.Index` uses a :class:`Cache ` to provide a `mutable mapping `_ and `ordered dictionary `_ interface. 
:class:`Index ` objects inherit all the benefits of :class:`Cache ` objects but never evict or expire items. >>> from diskcache import Index >>> index = Index([('a', 1), ('b', 2), ('c', 3)]) >>> 'b' in index True >>> index['c'] 3 >>> del index['a'] >>> len(index) 2 >>> other = Index(index.directory) >>> len(other) 2 >>> other.popitem(last=False) ('b', 2) :class:`Index ` objects provide an efficient and safe means of cross-thread and cross-process communication. :class:`Index ` objects are also useful in scenarios where contents should remain persistent or limitations prohibit holding all items in memory at the same time. The index uses a fixed amount of memory regardless of the size or number of items stored inside it. .. _tutorial-transactions: Transactions ------------ Transactions are implemented by the :class:`.Cache`, :class:`.Deque`, and :class:`.Index` data types and support consistency and improved performance. Use transactions to guarantee a group of operations occur atomically. For example, to calculate a running average, the total and count could be incremented together:: >>> with cache.transact(): ... total = cache.incr('total', 123.45) ... count = cache.incr('count') >>> total 123.45 >>> count 1 And to calculate the average, the values could be retrieved together: >>> with cache.transact(): ... total = cache.get('total') ... count = cache.get('count') >>> average = None if count == 0 else total / count >>> average 123.45 Keep transactions as short as possible because within a transaction, no other writes may occur to the cache. Every write operation uses a transaction and transactions may be nested to improve performance. For example, a possible implementation to set many items within the cache:: >>> def set_many(cache, mapping): ... with cache.transact(): ... for key, value in mapping.items(): ... cache[key] = value By grouping all operations in a single transaction, performance may improve two to five times. 
But be careful, a large mapping will block other concurrent writers. Transactions are not implemented by :class:`.FanoutCache` and :class:`.DjangoCache` due to key sharding. Instead, a cache shard with transaction support may be requested. >>> fanout_cache = FanoutCache() >>> tutorial_cache = fanout_cache.cache('tutorial') >>> username_queue = fanout_cache.deque('usernames') >>> url_to_response = fanout_cache.index('responses') The cache shard exists in a subdirectory of the fanout-cache with the given name. .. _tutorial-recipes: Recipes ------- :doc:`DiskCache ` includes a few synchronization recipes for cross-thread and cross-process communication: * :class:`.Averager` -- maintains a running average like that shown above. * :class:`.Lock`, :class:`.RLock`, and :class:`.BoundedSemaphore` -- recipes for synchronization around critical sections like those found in Python's `threading`_ and `multiprocessing`_ modules. * :func:`throttle <.throttle>` -- function decorator to rate-limit calls to a function. * :func:`barrier <.barrier>` -- function decorator to synchronize calls to a function. * :func:`memoize_stampede <.memoize_stampede>` -- memoizing function decorator with cache stampede protection. Read :doc:`case-study-landing-page-caching` for a comparison of memoization strategies. .. _threading: https://docs.python.org/3/library/threading.html .. _multiprocessing: https://docs.python.org/3/library/multiprocessing.html .. _tutorial-settings: Settings -------- A variety of settings are available to improve performance. These values are stored in the database for durability and to communicate between processes. Each value is cached in an attribute with matching name. Attributes are updated using :meth:`reset `. Attributes are set during initialization when passed as keyword arguments. * `size_limit`, default one gigabyte. The maximum on-disk size of the cache. * `cull_limit`, default ten. The maximum number of keys to cull when adding a new item. 
Set to zero to disable automatic culling. Some systems may disable automatic culling in exchange for a cron-like job that regularly calls :meth:`cull ` in a separate process. * `statistics`, default False, disabled. The setting to collect :ref:`cache statistics `. * `tag_index`, default False, disabled. The setting to create a database :ref:`tag index ` for :meth:`evict `. * `eviction_policy`, default "least-recently-stored". The setting to determine :ref:`eviction policy `. The :meth:`reset ` method accepts an optional second argument that updates the corresponding value in the database. The return value is the latest retrieved from the database. Notice that attributes are updated lazily. Prefer idioms like :meth:`len `, :meth:`volume `, and :meth:`keyword arguments ` rather than using :meth:`reset ` directly. >>> cache = Cache(size_limit=int(4e9)) >>> print(cache.size_limit) 4000000000 >>> cache.disk_min_file_size 32768 >>> cache.reset('cull_limit', 0) # Disable automatic evictions. 0 >>> cache.set(b'key', 1.234) True >>> cache.count # Stale attribute. 0 >>> cache.reset('count') # Prefer: len(cache) 1 More settings correspond to :ref:`Disk ` attributes. Each of these may be specified when initializing the :ref:`Cache `. Changing these values will update the unprefixed attribute on the :class:`Disk ` object. * `disk_min_file_size`, default 32 kilobytes. The minimum size to store a value in a file. * `disk_pickle_protocol`, default highest Pickle protocol. The Pickle protocol to use for data types that are not natively supported. An additional set of attributes correspond to SQLite pragmas. Changing these values will also execute the appropriate ``PRAGMA`` statement. See the `SQLite pragma documentation`_ for more details. * `sqlite_auto_vacuum`, default 1, "FULL". * `sqlite_cache_size`, default 8,192 pages. * `sqlite_journal_mode`, default "wal". * `sqlite_mmap_size`, default 64 megabytes. * `sqlite_synchronous`, default 1, "NORMAL". 
Each of these settings can passed to :class:`DjangoCache ` via the ``OPTIONS`` key mapping. Always measure before and after changing the default values. Default settings are programmatically accessible at :data:`diskcache.DEFAULT_SETTINGS`. .. _`SQLite pragma documentation`: https://www.sqlite.org/pragma.html .. _tutorial-eviction-policies: Eviction Policies ----------------- :doc:`DiskCache ` supports four eviction policies each with different tradeoffs for accessing and storing items. * ``"least-recently-stored"`` is the default. Every cache item records the time it was stored in the cache. This policy adds an index to that field. On access, no update is required. Keys are evicted starting with the oldest stored keys. As :doc:`DiskCache ` was intended for large caches (gigabytes) this policy usually works well enough in practice. * ``"least-recently-used"`` is the most commonly used policy. An index is added to the access time field stored in the cache database. On every access, the field is updated. This makes every access into a read and write which slows accesses. * ``"least-frequently-used"`` works well in some cases. An index is added to the access count field stored in the cache database. On every access, the field is incremented. Every access therefore requires writing the database which slows accesses. * ``"none"`` disables cache evictions. Caches will grow without bound. Cache items will still be lazily removed if they expire. The persistent data types, :class:`.Deque` and :class:`.Index`, use the ``"none"`` eviction policy. For :ref:`lazy culling ` use the :ref:`cull_limit ` setting instead. All clients accessing the cache are expected to use the same eviction policy. The policy can be set during initialization using a keyword argument. 
>>> cache = Cache() >>> print(cache.eviction_policy) least-recently-stored >>> cache = Cache(eviction_policy='least-frequently-used') >>> print(cache.eviction_policy) least-frequently-used >>> print(cache.reset('eviction_policy', 'least-recently-used')) least-recently-used Though the eviction policy is changed, the previously created indexes will not be dropped. Prefer to always specify the eviction policy as a keyword argument to initialize the cache. .. _tutorial-disk: Disk ---- :class:`diskcache.Disk` objects are responsible for serializing and deserializing data stored in the cache. Serialization behavior differs between keys and values. In particular, keys are always stored in the cache metadata database while values are sometimes stored separately in files. To customize serialization, you may pass in a :class:`Disk ` subclass to initialize the cache. All clients accessing the cache are expected to use the same serialization. The default implementation uses Pickle and the example below uses compressed JSON, available for convenience as :class:`JSONDisk `. .. 
code-block:: python import json, zlib class JSONDisk(diskcache.Disk): def __init__(self, directory, compress_level=1, **kwargs): self.compress_level = compress_level super(JSONDisk, self).__init__(directory, **kwargs) def put(self, key): json_bytes = json.dumps(key).encode('utf-8') data = zlib.compress(json_bytes, self.compress_level) return super(JSONDisk, self).put(data) def get(self, key, raw): data = super(JSONDisk, self).get(key, raw) return json.loads(zlib.decompress(data).decode('utf-8')) def store(self, value, read): if not read: json_bytes = json.dumps(value).encode('utf-8') value = zlib.compress(json_bytes, self.compress_level) return super(JSONDisk, self).store(value, read) def fetch(self, mode, filename, value, read): data = super(JSONDisk, self).fetch(mode, filename, value, read) if not read: data = json.loads(zlib.decompress(data).decode('utf-8')) return data with Cache(disk=JSONDisk, disk_compress_level=6) as cache: pass Four data types can be stored natively in the cache metadata database: integers, floats, strings, and bytes. Other datatypes are converted to bytes via the Pickle protocol. Beware that integers and floats like ``1`` and ``1.0`` will compare equal as keys just as in Python. All other equality comparisons will require identical types. Caveats ------- Though :doc:`DiskCache ` has a dictionary-like interface, Python's `hash protocol`_ is not used. Neither the `__hash__` nor `__eq__` methods are used for lookups. Instead lookups depend on the serialization method defined by :class:`Disk ` objects. For strings, bytes, integers, and floats, equality matches Python's definition. But large integers and all other types will be converted to bytes and the bytes representation will define equality. The default :class:`diskcache.Disk` serialization uses pickling for both keys and values. Unfortunately, pickling produces inconsistencies sometimes when applied to container data types like tuples. 
Two equal tuples may serialize to different bytes objects using pickle. The likelihood of differences is reduced by using `pickletools.optimize` but still inconsistencies occur (`#54`_). The inconsistent serialized pickle values is particularly problematic when applied to the key in the cache. Consider using an alternative Disk type, like :class:`JSONDisk `, for consistent serialization of keys. SQLite is used to synchronize database access between threads and processes and as such inherits all SQLite caveats. Most notably SQLite is `not recommended`_ for use with Network File System (NFS) mounts. For this reason, :doc:`DiskCache ` currently `performs poorly`_ on `Python Anywhere`_. Users have also reported issues running inside of `Parallels`_ shared folders. When the disk or database is full, a :exc:`sqlite3.OperationalError` will be raised from any method that attempts to write data. Read operations will still succeed so long as they do not cause any write (as might occur if cache statistics are being recorded). Asynchronous support using Python's ``async`` and ``await`` keywords and `asyncio`_ module is blocked by a lack of support in the underlying SQLite module. But it is possible to run :doc:`DiskCache ` methods in a thread-pool executor asynchronously. For example:: import asyncio async def set_async(key, val): loop = asyncio.get_running_loop() future = loop.run_in_executor(None, cache.set, key, val) result = await future return result asyncio.run(set_async('test-key', 'test-value')) The cache :meth:`volume ` is based on the size of the database that stores metadata and the size of the values stored in files. It does not account the size of directories themselves or other filesystem metadata. If directory count or size is a concern then consider implementing an alternative :class:`Disk `. .. _`#54`: https://github.com/grantjenks/python-diskcache/issues/54 .. _`hash protocol`: https://docs.python.org/library/functions.html#hash .. 
_`not recommended`: https://www.sqlite.org/faq.html#q5 .. _`performs poorly`: https://www.pythonanywhere.com/forums/topic/1847/ .. _`Python Anywhere`: https://www.pythonanywhere.com/ .. _`Parallels`: https://www.parallels.com/ .. _`asyncio`: https://docs.python.org/3/library/asyncio.html Implementation -------------- :doc:`DiskCache ` is mostly built on SQLite and the filesystem. Some techniques used to improve performance: * Shard database to distribute writes. * Leverage SQLite native types: integers, floats, unicode, and bytes. * Use SQLite write-ahead-log so reads and writes don't block each other. * Use SQLite memory-mapped pages to accelerate reads. * Store small values in SQLite database and large values in files. * Always use a SQLite index for queries. * Use SQLite triggers to maintain key count and database size. python-diskcache-5.4.0/mypy.ini000066400000000000000000000000661416346170000164600ustar00rootroot00000000000000[mypy] [mypy-django.*] ignore_missing_imports = True python-diskcache-5.4.0/requirements.txt000066400000000000000000000003551416346170000202460ustar00rootroot00000000000000-e . blue coverage django==3.2.* django_redis doc8 flake8 ipython jedi==0.17.* # Remove after IPython bug fixed. 
pickleDB pylibmc pylint pytest pytest-cov pytest-django pytest-env pytest-xdist rstcheck sphinx sqlitedict tox twine wheel python-diskcache-5.4.0/setup.py000066400000000000000000000034731416346170000165000ustar00rootroot00000000000000from io import open from setuptools import setup from setuptools.command.test import test as TestCommand import diskcache class Tox(TestCommand): def finalize_options(self): TestCommand.finalize_options(self) self.test_args = [] self.test_suite = True def run_tests(self): import tox errno = tox.cmdline(self.test_args) exit(errno) with open('README.rst', encoding='utf-8') as reader: readme = reader.read() setup( name=diskcache.__title__, version=diskcache.__version__, description='Disk Cache -- Disk and file backed persistent cache.', long_description=readme, author='Grant Jenks', author_email='contact@grantjenks.com', url='http://www.grantjenks.com/docs/diskcache/', project_urls={ 'Documentation': 'http://www.grantjenks.com/docs/diskcache/', 'Funding': 'https://gum.co/diskcache', 'Source': 'https://github.com/grantjenks/python-diskcache', 'Tracker': 'https://github.com/grantjenks/python-diskcache/issues', }, license='Apache 2.0', packages=['diskcache'], tests_require=['tox'], cmdclass={'test': Tox}, python_requires='>=3', install_requires=[], classifiers=( 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'License :: OSI Approved :: Apache Software License', 'Natural Language :: English', 'Programming Language :: Python', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', 'Programming Language :: Python :: Implementation :: CPython', ), ) 
python-diskcache-5.4.0/tests/000077500000000000000000000000001416346170000161215ustar00rootroot00000000000000python-diskcache-5.4.0/tests/__init__.py000066400000000000000000000000001416346170000202200ustar00rootroot00000000000000python-diskcache-5.4.0/tests/benchmark_core.py000066400000000000000000000121731416346170000214410ustar00rootroot00000000000000"""Benchmark diskcache.Cache $ export PYTHONPATH=/Users/grantj/repos/python-diskcache $ python tests/benchmark_core.py -p 1 > tests/timings_core_p1.txt $ python tests/benchmark_core.py -p 8 > tests/timings_core_p8.txt """ import collections as co import multiprocessing as mp import os import pickle import random import shutil import time import warnings from utils import display PROCS = 8 OPS = int(1e5) RANGE = 100 WARMUP = int(1e3) caches = [] ############################################################################### # Disk Cache Benchmarks ############################################################################### import diskcache # noqa caches.append( ( 'diskcache.Cache', diskcache.Cache, ('tmp',), {}, ) ) caches.append( ( 'diskcache.FanoutCache(shards=4, timeout=1.0)', diskcache.FanoutCache, ('tmp',), {'shards': 4, 'timeout': 1.0}, ) ) caches.append( ( 'diskcache.FanoutCache(shards=8, timeout=0.010)', diskcache.FanoutCache, ('tmp',), {'shards': 8, 'timeout': 0.010}, ) ) ############################################################################### # PyLibMC Benchmarks ############################################################################### try: import pylibmc caches.append( ( 'pylibmc.Client', pylibmc.Client, (['127.0.0.1'],), { 'binary': True, 'behaviors': {'tcp_nodelay': True, 'ketama': True}, }, ) ) except ImportError: warnings.warn('skipping pylibmc') ############################################################################### # Redis Benchmarks ############################################################################### try: import redis caches.append( ( 'redis.StrictRedis', 
redis.StrictRedis, (), {'host': 'localhost', 'port': 6379, 'db': 0}, ) ) except ImportError: warnings.warn('skipping redis') def worker(num, kind, args, kwargs): random.seed(num) time.sleep(0.01) # Let other processes start. obj = kind(*args, **kwargs) timings = co.defaultdict(list) for count in range(OPS): key = str(random.randrange(RANGE)).encode('utf-8') value = str(count).encode('utf-8') * random.randrange(1, 100) choice = random.random() if choice < 0.900: start = time.time() result = obj.get(key) end = time.time() miss = result is None action = 'get' elif choice < 0.990: start = time.time() result = obj.set(key, value) end = time.time() miss = result is False action = 'set' else: start = time.time() result = obj.delete(key) end = time.time() miss = result is False action = 'delete' if count > WARMUP: delta = end - start timings[action].append(delta) if miss: timings[action + '-miss'].append(delta) with open('output-%d.pkl' % num, 'wb') as writer: pickle.dump(timings, writer, protocol=pickle.HIGHEST_PROTOCOL) def dispatch(): for name, kind, args, kwargs in caches: shutil.rmtree('tmp', ignore_errors=True) obj = kind(*args, **kwargs) for key in range(RANGE): key = str(key).encode('utf-8') obj.set(key, key) try: obj.close() except Exception: pass processes = [ mp.Process(target=worker, args=(value, kind, args, kwargs)) for value in range(PROCS) ] for process in processes: process.start() for process in processes: process.join() timings = co.defaultdict(list) for num in range(PROCS): filename = 'output-%d.pkl' % num with open(filename, 'rb') as reader: output = pickle.load(reader) for key in output: timings[key].extend(output[key]) os.remove(filename) display(name, timings) if __name__ == '__main__': import argparse parser = argparse.ArgumentParser( formatter_class=argparse.ArgumentDefaultsHelpFormatter, ) parser.add_argument( '-p', '--processes', type=int, default=PROCS, help='Number of processes to start', ) parser.add_argument( '-n', '--operations', type=float, 
default=OPS, help='Number of operations to perform', ) parser.add_argument( '-r', '--range', type=int, default=RANGE, help='Range of keys', ) parser.add_argument( '-w', '--warmup', type=float, default=WARMUP, help='Number of warmup operations before timings', ) args = parser.parse_args() PROCS = int(args.processes) OPS = int(args.operations) RANGE = int(args.range) WARMUP = int(args.warmup) dispatch() python-diskcache-5.4.0/tests/benchmark_djangocache.py000066400000000000000000000075061416346170000227430ustar00rootroot00000000000000"""Benchmark diskcache.DjangoCache $ export PYTHONPATH=/Users/grantj/repos/python-diskcache $ python tests/benchmark_djangocache.py > tests/timings_djangocache.txt """ import collections as co import multiprocessing as mp import os import pickle import random import shutil import time from utils import display PROCS = 8 OPS = int(1e5) RANGE = int(1.1e3) WARMUP = int(1e3) def setup(): os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'tests.settings_benchmark') import django django.setup() def worker(num, name): setup() from django.core.cache import caches obj = caches[name] random.seed(num) timings = co.defaultdict(list) time.sleep(0.01) # Let other processes start. 
for count in range(OPS): key = str(random.randrange(RANGE)).encode('utf-8') value = str(count).encode('utf-8') * random.randrange(1, 100) choice = random.random() if choice < 0.900: start = time.time() result = obj.get(key) end = time.time() miss = result is None action = 'get' elif choice < 0.990: start = time.time() result = obj.set(key, value) end = time.time() miss = result is False action = 'set' else: start = time.time() result = obj.delete(key) end = time.time() miss = result is False action = 'delete' if count > WARMUP: delta = end - start timings[action].append(delta) if miss: timings[action + '-miss'].append(delta) with open('output-%d.pkl' % num, 'wb') as writer: pickle.dump(timings, writer, protocol=pickle.HIGHEST_PROTOCOL) def prepare(name): setup() from django.core.cache import caches obj = caches[name] for key in range(RANGE): key = str(key).encode('utf-8') obj.set(key, key) try: obj.close() except Exception: pass def dispatch(): setup() from django.core.cache import caches # noqa for name in ['locmem', 'memcached', 'redis', 'diskcache', 'filebased']: shutil.rmtree('tmp', ignore_errors=True) preparer = mp.Process(target=prepare, args=(name,)) preparer.start() preparer.join() processes = [ mp.Process(target=worker, args=(value, name)) for value in range(PROCS) ] for process in processes: process.start() for process in processes: process.join() timings = co.defaultdict(list) for num in range(PROCS): filename = 'output-%d.pkl' % num with open(filename, 'rb') as reader: output = pickle.load(reader) for key in output: timings[key].extend(output[key]) os.remove(filename) display(name, timings) if __name__ == '__main__': import argparse parser = argparse.ArgumentParser( formatter_class=argparse.ArgumentDefaultsHelpFormatter, ) parser.add_argument( '-p', '--processes', type=int, default=PROCS, help='Number of processes to start', ) parser.add_argument( '-n', '--operations', type=float, default=OPS, help='Number of operations to perform', ) 
parser.add_argument( '-r', '--range', type=int, default=RANGE, help='Range of keys', ) parser.add_argument( '-w', '--warmup', type=float, default=WARMUP, help='Number of warmup operations before timings', ) args = parser.parse_args() PROCS = int(args.processes) OPS = int(args.operations) RANGE = int(args.range) WARMUP = int(args.warmup) dispatch() python-diskcache-5.4.0/tests/benchmark_glob.py000066400000000000000000000016251416346170000214340ustar00rootroot00000000000000"""Benchmark glob.glob1 as used by django.core.cache.backends.filebased.""" import os import os.path as op import shutil import timeit from utils import secs shutil.rmtree('tmp', ignore_errors=True) os.mkdir('tmp') size = 12 cols = ('Count', 'Time') template = ' '.join(['%' + str(size) + 's'] * len(cols)) print() print(' '.join(['=' * size] * len(cols))) print('Timings for glob.glob1') print('-'.join(['-' * size] * len(cols))) print(template % ('Count', 'Time')) print(' '.join(['=' * size] * len(cols))) for count in [10 ** exp for exp in range(6)]: for value in range(count): with open(op.join('tmp', '%s.tmp' % value), 'wb') as writer: pass delta = timeit.timeit( stmt="glob.glob1('tmp', '*.tmp')", setup='import glob', number=100 ) print(template % (count, secs(delta))) print(' '.join(['=' * size] * len(cols))) shutil.rmtree('tmp', ignore_errors=True) python-diskcache-5.4.0/tests/benchmark_incr.py000066400000000000000000000026601416346170000214440ustar00rootroot00000000000000"""Benchmark cache.incr method. """ import json import multiprocessing as mp import shutil import time import diskcache as dc from .utils import secs COUNT = int(1e3) PROCS = 8 def worker(num): """Rapidly increment key and time operation.""" time.sleep(0.1) # Let other workers start. 
cache = dc.Cache('tmp') values = [] for _ in range(COUNT): start = time.time() cache.incr(b'key') end = time.time() values.append(end - start) with open('output-%s.json' % num, 'w') as writer: json.dump(values, writer) def main(): """Run workers and print percentile results.""" shutil.rmtree('tmp', ignore_errors=True) processes = [ mp.Process(target=worker, args=(num,)) for num in range(PROCS) ] for process in processes: process.start() for process in processes: process.join() with dc.Cache('tmp') as cache: assert cache.get(b'key') == COUNT * PROCS for num in range(PROCS): values = [] with open('output-%s.json' % num) as reader: values += json.load(reader) values.sort() p50 = int(len(values) * 0.50) - 1 p90 = int(len(values) * 0.90) - 1 p99 = int(len(values) * 0.99) - 1 p00 = len(values) - 1 print(['{0:9s}'.format(val) for val in 'p50 p90 p99 max'.split()]) print([secs(values[pos]) for pos in [p50, p90, p99, p00]]) if __name__ == '__main__': main() python-diskcache-5.4.0/tests/benchmark_kv_store.py000066400000000000000000000043411416346170000223430ustar00rootroot00000000000000"""Benchmarking Key-Value Stores $ python -m IPython tests/benchmark_kv_store.py """ from IPython import get_ipython import diskcache ipython = get_ipython() assert ipython is not None, 'No IPython! Run with $ ipython ...' 
value = 'value' print('diskcache set') dc = diskcache.FanoutCache('/tmp/diskcache') ipython.magic("timeit -n 100 -r 7 dc['key'] = value") print('diskcache get') ipython.magic("timeit -n 100 -r 7 dc['key']") print('diskcache set/delete') ipython.magic("timeit -n 100 -r 7 dc['key'] = value; del dc['key']") try: import dbm.gnu # Only trust GNU DBM except ImportError: print('Error: Cannot import dbm.gnu') print('Error: Skipping import shelve') else: print('dbm set') d = dbm.gnu.open('/tmp/dbm', 'c') ipython.magic("timeit -n 100 -r 7 d['key'] = value; d.sync()") print('dbm get') ipython.magic("timeit -n 100 -r 7 d['key']") print('dbm set/delete') ipython.magic( "timeit -n 100 -r 7 d['key'] = value; d.sync(); del d['key']; d.sync()" ) import shelve print('shelve set') s = shelve.open('/tmp/shelve') ipython.magic("timeit -n 100 -r 7 s['key'] = value; s.sync()") print('shelve get') ipython.magic("timeit -n 100 -r 7 s['key']") print('shelve set/delete') ipython.magic( "timeit -n 100 -r 7 s['key'] = value; s.sync(); del s['key']; s.sync()" ) try: import sqlitedict except ImportError: print('Error: Cannot import sqlitedict') else: print('sqlitedict set') sd = sqlitedict.SqliteDict('/tmp/sqlitedict', autocommit=True) ipython.magic("timeit -n 100 -r 7 sd['key'] = value") print('sqlitedict get') ipython.magic("timeit -n 100 -r 7 sd['key']") print('sqlitedict set/delete') ipython.magic("timeit -n 100 -r 7 sd['key'] = value; del sd['key']") try: import pickledb except ImportError: print('Error: Cannot import pickledb') else: print('pickledb set') p = pickledb.load('/tmp/pickledb', True) ipython.magic("timeit -n 100 -r 7 p['key'] = value") print('pickledb get') ipython.magic( "timeit -n 100 -r 7 p = pickledb.load('/tmp/pickledb', True); p['key']" ) print('pickledb set/delete') ipython.magic("timeit -n 100 -r 7 p['key'] = value; del p['key']") python-diskcache-5.4.0/tests/db.sqlite3000066400000000000000000000040001416346170000200060ustar00rootroot00000000000000SQLite format 3@ - 
X!!}tabletests_polltests_pollCREATE TABLE tests_poll (id, question, answer, pub_date) python-diskcache-5.4.0/tests/issue_109.py000066400000000000000000000027411416346170000202200ustar00rootroot00000000000000"""Benchmark for Issue #109 """ import time import diskcache as dc def main(): import argparse parser = argparse.ArgumentParser() parser.add_argument('--cache-dir', default='/tmp/test') parser.add_argument('--iterations', type=int, default=100) parser.add_argument('--sleep', type=float, default=0.1) parser.add_argument('--size', type=int, default=25) args = parser.parse_args() data = dc.FanoutCache(args.cache_dir) delays = [] values = {str(num): num for num in range(args.size)} iterations = args.iterations for i in range(args.iterations): print(f'Iteration {i + 1}/{iterations}', end='\r') time.sleep(args.sleep) for key, value in values.items(): start = time.monotonic() data[key] = value stop = time.monotonic() diff = stop - start delays.append(diff) # Discard warmup delays, first two iterations. del delays[: (len(values) * 2)] # Convert seconds to microseconds. delays = sorted(delay * 1e6 for delay in delays) # Display performance. 
print() print(f'Total #: {len(delays)}') print(f'Min delay (us): {delays[0]:>8.3f}') print(f'50th %ile (us): {delays[int(len(delays) * 0.50)]:>8.3f}') print(f'90th %ile (us): {delays[int(len(delays) * 0.90)]:>8.3f}') print(f'99th %ile (us): {delays[int(len(delays) * 0.99)]:>8.3f}') print(f'Max delay (us): {delays[-1]:>8.3f}') if __name__ == '__main__': main() python-diskcache-5.4.0/tests/issue_85.py000066400000000000000000000072651416346170000201510ustar00rootroot00000000000000"""Test Script for Issue #85 $ export PYTHONPATH=`pwd` $ python tests/issue_85.py """ import collections import os import random import shutil import sqlite3 import threading import time import django def remove_cache_dir(): print('REMOVING CACHE DIRECTORY') shutil.rmtree('.cache', ignore_errors=True) def init_django(): global shard print('INITIALIZING DJANGO') os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'tests.settings') django.setup() from django.core.cache import cache shard = cache._cache._shards[0] def multi_threading_init_test(): print('RUNNING MULTI-THREADING INIT TEST') from django.core.cache import cache def run(): cache.get('key') threads = [threading.Thread(target=run) for _ in range(50)] _ = [thread.start() for thread in threads] _ = [thread.join() for thread in threads] def show_sqlite_compile_options(): print('SQLITE COMPILE OPTIONS') options = shard._sql('pragma compile_options').fetchall() print('\n'.join(val for val, in options)) def create_data_table(): print('CREATING DATA TABLE') shard._con.execute('create table data (x)') nums = [(num,) for num in range(1000)] shard._con.executemany('insert into data values (?)', nums) commands = { 'begin/read/write': [ 'BEGIN', 'SELECT MAX(x) FROM data', 'UPDATE data SET x = x + 1', 'COMMIT', ], 'begin/write/read': [ 'BEGIN', 'UPDATE data SET x = x + 1', 'SELECT MAX(x) FROM data', 'COMMIT', ], 'begin immediate/read/write': [ 'BEGIN IMMEDIATE', 'SELECT MAX(x) FROM data', 'UPDATE data SET x = x + 1', 'COMMIT', ], 'begin 
immediate/write/read': [ 'BEGIN IMMEDIATE', 'UPDATE data SET x = x + 1', 'SELECT MAX(x) FROM data', 'COMMIT', ], 'begin exclusive/read/write': [ 'BEGIN EXCLUSIVE', 'SELECT MAX(x) FROM data', 'UPDATE data SET x = x + 1', 'COMMIT', ], 'begin exclusive/write/read': [ 'BEGIN EXCLUSIVE', 'UPDATE data SET x = x + 1', 'SELECT MAX(x) FROM data', 'COMMIT', ], } values = collections.deque() def run(statements): ident = threading.get_ident() try: for index, statement in enumerate(statements): if index == (len(statements) - 1): values.append(('COMMIT', ident)) time.sleep(random.random() / 10.0) shard._sql(statement) if index == 0: values.append(('BEGIN', ident)) except sqlite3.OperationalError: values.append(('ERROR', ident)) def test_transaction_errors(): for key, statements in commands.items(): print(f'RUNNING {key}') values.clear() threads = [] for _ in range(100): thread = threading.Thread(target=run, args=(statements,)) threads.append(thread) _ = [thread.start() for thread in threads] _ = [thread.join() for thread in threads] errors = [pair for pair in values if pair[0] == 'ERROR'] begins = [pair for pair in values if pair[0] == 'BEGIN'] commits = [pair for pair in values if pair[0] == 'COMMIT'] print('Error count:', len(errors)) print('Begin count:', len(begins)) print('Commit count:', len(commits)) begin_idents = [ident for _, ident in begins] commit_idents = [ident for _, ident in commits] print('Serialized:', begin_idents == commit_idents) if __name__ == '__main__': remove_cache_dir() init_django() multi_threading_init_test() show_sqlite_compile_options() create_data_table() test_transaction_errors() python-diskcache-5.4.0/tests/models.py000066400000000000000000000005751416346170000177650ustar00rootroot00000000000000from django.db import models from django.utils import timezone def expensive_calculation(): expensive_calculation.num_runs += 1 return timezone.now() class Poll(models.Model): question = models.CharField(max_length=200) answer = 
models.CharField(max_length=200) pub_date = models.DateTimeField( 'date published', default=expensive_calculation ) python-diskcache-5.4.0/tests/plot.py000066400000000000000000000074641416346170000174640ustar00rootroot00000000000000"""Plot Benchmarks for docs $ export PYTHONPATH=/Users/grantj/repos/python-diskcache $ python tests/plot.py --show tests/timings_core_p1.txt """ import argparse import collections as co import re import sys import matplotlib.pyplot as plt def parse_timing(timing, limit): """Parse timing.""" if timing.endswith('ms'): value = float(timing[:-2]) * 1e-3 elif timing.endswith('us'): value = float(timing[:-2]) * 1e-6 else: assert timing.endswith('s') value = float(timing[:-1]) return 0.0 if value > limit else value * 1e6 def parse_row(row, line): """Parse row.""" return [val.strip() for val in row.match(line).groups()] def parse_data(infile): """Parse data from `infile`.""" blocks = re.compile(' '.join(['=' * 9] * 8)) dashes = re.compile('^-{79}$') title = re.compile('^Timings for (.*)$') row = re.compile(' '.join(['(.{9})'] * 7) + ' (.{8,9})') lines = infile.readlines() data = co.OrderedDict() index = 0 while index < len(lines): line = lines[index] if blocks.match(line): try: name = title.match(lines[index + 1]).group(1) except Exception: index += 1 continue data[name] = {} assert dashes.match(lines[index + 2]) cols = parse_row(row, lines[index + 3]) assert blocks.match(lines[index + 4]) get_row = parse_row(row, lines[index + 5]) assert get_row[0] == 'get' set_row = parse_row(row, lines[index + 6]) assert set_row[0] == 'set' delete_row = parse_row(row, lines[index + 7]) assert delete_row[0] == 'delete' assert blocks.match(lines[index + 9]) data[name]['get'] = dict(zip(cols, get_row)) data[name]['set'] = dict(zip(cols, set_row)) data[name]['delete'] = dict(zip(cols, delete_row)) index += 10 else: index += 1 return data def make_plot(data, action, save=False, show=False, limit=0.005): """Make plot.""" fig, ax = plt.subplots(figsize=(8, 10)) 
colors = ['#ff7f00', '#377eb8', '#4daf4a', '#984ea3', '#e41a1c'] width = 0.15 ticks = ('Median', 'P90', 'P99') index = (0, 1, 2) names = list(data) bars = [] for pos, (name, color) in enumerate(zip(names, colors)): bars.append( ax.bar( [val + pos * width for val in index], [ parse_timing(data[name][action][tick], limit) for tick in ticks ], width, color=color, ) ) ax.set_ylabel('Time (microseconds)') ax.set_title('"%s" Time vs Percentile' % action) ax.set_xticks([val + width * (len(data) / 2) for val in index]) ax.set_xticklabels(ticks) box = ax.get_position() ax.set_position( [box.x0, box.y0 + box.height * 0.2, box.width, box.height * 0.8] ) ax.legend( [bar[0] for bar in bars], names, loc='lower center', bbox_to_anchor=(0.5, -0.25), ) if show: plt.show() if save: plt.savefig('%s-%s.png' % (save, action), dpi=120, bbox_inches='tight') plt.close() def main(): parser = argparse.ArgumentParser() parser.add_argument( 'infile', type=argparse.FileType('r'), default=sys.stdin, ) parser.add_argument('-l', '--limit', type=float, default=0.005) parser.add_argument('-s', '--save') parser.add_argument('--show', action='store_true') args = parser.parse_args() data = parse_data(args.infile) for action in ['get', 'set', 'delete']: make_plot(data, action, args.save, args.show, args.limit) if __name__ == '__main__': main() python-diskcache-5.4.0/tests/plot_early_recompute.py000066400000000000000000000106211416346170000227300ustar00rootroot00000000000000"""Early Recomputation Measurements """ import functools as ft import multiprocessing.pool import shutil import threading import time import diskcache as dc def make_timer(times): """Make a decorator which accumulates (start, end) in `times` for function calls. 
""" lock = threading.Lock() def timer(func): @ft.wraps(func) def wrapper(*args, **kwargs): start = time.time() func(*args, **kwargs) pair = start, time.time() with lock: times.append(pair) return wrapper return timer def make_worker(times, delay=0.2): """Make a worker which accumulates (start, end) in `times` and sleeps for `delay` seconds. """ @make_timer(times) def worker(): time.sleep(delay) return worker def make_repeater(func, total=10, delay=0.01): """Make a repeater which calls `func` and sleeps for `delay` seconds repeatedly until `total` seconds have elapsed. """ def repeat(num): start = time.time() while time.time() - start < total: func() time.sleep(delay) return repeat def frange(start, stop, step=1e-3): """Generator for floating point values from `start` to `stop` by `step`.""" while start < stop: yield start start += step def plot(option, filename, cache_times, worker_times): """Plot concurrent workers and latency.""" import matplotlib.pyplot as plt fig, (workers, latency) = plt.subplots(2, sharex=True) fig.suptitle(option) changes = [(start, 1) for start, _ in worker_times] changes.extend((stop, -1) for _, stop in worker_times) changes.sort() start = (changes[0][0] - 1e-6, 0) counts = [start] for mark, diff in changes: # Re-sample between previous and current data point for a nicer-looking # line plot. 
for step in frange(counts[-1][0], mark): pair = (step, counts[-1][1]) counts.append(pair) pair = (mark, counts[-1][1] + diff) counts.append(pair) min_x = min(start for start, _ in cache_times) max_x = max(start for start, _ in cache_times) for step in frange(counts[-1][0], max_x): pair = (step, counts[-1][1]) counts.append(pair) x_counts = [x - min_x for x, y in counts] y_counts = [y for x, y in counts] workers.set_title('Concurrency') workers.set_ylabel('Workers') workers.set_ylim(0, 11) workers.plot(x_counts, y_counts) latency.set_title('Latency') latency.set_ylabel('Seconds') latency.set_ylim(0, 0.5) latency.set_xlabel('Time') x_latency = [start - min_x for start, _ in cache_times] y_latency = [stop - start for start, stop in cache_times] latency.scatter(x_latency, y_latency) plt.savefig(filename) def main(): shutil.rmtree('/tmp/cache') cache = dc.Cache('/tmp/cache') count = 10 cache_times = [] timer = make_timer(cache_times) options = { ('No Caching', 'no-caching.png'): [ timer, ], ('Traditional Caching', 'traditional-caching.png'): [ timer, cache.memoize(expire=1), ], ('Synchronized Locking', 'synchronized-locking.png'): [ timer, cache.memoize(expire=0), dc.barrier(cache, dc.Lock), cache.memoize(expire=1), ], ('Early Recomputation', 'early-recomputation.png'): [ timer, dc.memoize_stampede(cache, expire=1), ], ('Early Recomputation (beta=0.5)', 'early-recomputation-05.png'): [ timer, dc.memoize_stampede(cache, expire=1, beta=0.5), ], ('Early Recomputation (beta=0.3)', 'early-recomputation-03.png'): [ timer, dc.memoize_stampede(cache, expire=1, beta=0.3), ], } for (option, filename), decorators in options.items(): print('Simulating:', option) worker_times = [] worker = make_worker(worker_times) for decorator in reversed(decorators): worker = decorator(worker) worker() repeater = make_repeater(worker) with multiprocessing.pool.ThreadPool(count) as pool: pool.map(repeater, [worker] * count) plot(option, filename, cache_times, worker_times) cache.clear() 
cache_times.clear() if __name__ == '__main__': main() python-diskcache-5.4.0/tests/settings.py000066400000000000000000000064541416346170000203440ustar00rootroot00000000000000""" Django settings for tests project. Generated by 'django-admin startproject' using Django 1.9.1. For more information on this file, see https://docs.djangoproject.com/en/1.9/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/1.9/ref/settings/ """ import os # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = '5bg%^f37a=%mh8(qkq1#)a$e*d-pt*dzox0_39-ywqh=@m(_ii' # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True ALLOWED_HOSTS = [u'testserver'] # Application definition INSTALLED_APPS = [ 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'tests', ] MIDDLEWARE_CLASSES = [ 'django.middleware.security.SecurityMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.auth.middleware.SessionAuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ] ROOT_URLCONF = 'project.urls' TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 
'django.contrib.messages.context_processors.messages', ], }, }, ] WSGI_APPLICATION = 'project.wsgi.application' # Database # https://docs.djangoproject.com/en/1.9/ref/settings/#databases DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': os.path.join(BASE_DIR, 'tests', 'db.sqlite3'), } } # Password validation # https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ { 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', }, { 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', }, { 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', }, { 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', }, ] # Internationalization # https://docs.djangoproject.com/en/1.9/topics/i18n/ LANGUAGE_CODE = 'en' TIME_ZONE = 'UTC' USE_I18N = True USE_L10N = False USE_TZ = False # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/1.9/howto/static-files/ STATIC_URL = '/static/' # Caching CACHE_DIR = os.path.join(BASE_DIR, '.cache') CACHES = { 'default': { 'BACKEND': 'diskcache.DjangoCache', 'LOCATION': CACHE_DIR, }, } python-diskcache-5.4.0/tests/settings_benchmark.py000066400000000000000000000020551416346170000223470ustar00rootroot00000000000000from .settings import * # noqa CACHES = { 'default': { 'BACKEND': 'diskcache.DjangoCache', 'LOCATION': CACHE_DIR, # noqa }, 'memcached': { 'BACKEND': 'django.core.cache.backends.memcached.PyLibMCCache', 'LOCATION': '127.0.0.1:11211', }, 'redis': { 'BACKEND': 'django_redis.cache.RedisCache', 'LOCATION': 'redis://127.0.0.1:6379/1', 'OPTIONS': { 'CLIENT_CLASS': 'django_redis.client.DefaultClient', }, }, 'filebased': { 'BACKEND': 'django.core.cache.backends.filebased.FileBasedCache', 'LOCATION': '/tmp/django_cache', 'OPTIONS': { 'CULL_FREQUENCY': 10, 'MAX_ENTRIES': 1000, }, }, 'locmem': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', 'LOCATION': 
'diskcache', 'OPTIONS': { 'CULL_FREQUENCY': 10, 'MAX_ENTRIES': 1000, }, }, 'diskcache': { 'BACKEND': 'diskcache.DjangoCache', 'LOCATION': 'tmp', }, } python-diskcache-5.4.0/tests/stress_test_core.py000066400000000000000000000234631416346170000220750ustar00rootroot00000000000000"""Stress test diskcache.core.Cache.""" import collections as co import multiprocessing as mp import os import pickle import queue import random import shutil import threading import time import warnings from diskcache import Cache, EmptyDirWarning, Timeout, UnknownFileWarning from .utils import display OPERATIONS = int(1e4) GET_AVERAGE = 100 KEY_COUNT = 10 DEL_CHANCE = 0.1 WARMUP = 10 EXPIRE = None def make_keys(): def make_int(): return random.randrange(int(1e9)) def make_long(): value = random.randrange(int(1e9)) return value << 64 def make_unicode(): word_size = random.randint(1, 26) word = u''.join( random.sample(u'abcdefghijklmnopqrstuvwxyz', word_size) ) size = random.randint(1, int(200 / 13)) return word * size def make_bytes(): word_size = random.randint(1, 26) word = u''.join( random.sample(u'abcdefghijklmnopqrstuvwxyz', word_size) ).encode('utf-8') size = random.randint(1, int(200 / 13)) return word * size def make_float(): return random.random() def make_object(): return (make_float(),) * random.randint(1, 20) funcs = [ make_int, make_long, make_unicode, make_bytes, make_float, make_object, ] while True: func = random.choice(funcs) yield func() def make_vals(): def make_int(): return random.randrange(int(1e9)) def make_long(): value = random.randrange(int(1e9)) return value << 64 def make_unicode(): word_size = random.randint(1, 26) word = u''.join( random.sample(u'abcdefghijklmnopqrstuvwxyz', word_size) ) size = random.randint(1, int(2 ** 16 / 13)) return word * size def make_bytes(): word_size = random.randint(1, 26) word = u''.join( random.sample(u'abcdefghijklmnopqrstuvwxyz', word_size) ).encode('utf-8') size = random.randint(1, int(2 ** 16 / 13)) return word * size def 
make_float(): return random.random() def make_object(): return [make_float()] * random.randint(1, int(2e3)) funcs = [ make_int, make_long, make_unicode, make_bytes, make_float, make_object, ] while True: func = random.choice(funcs) yield func() def key_ops(): keys = make_keys() vals = make_vals() key = next(keys) while True: value = next(vals) yield 'set', key, value for _ in range(int(random.expovariate(1.0 / GET_AVERAGE))): yield 'get', key, value if random.random() < DEL_CHANCE: yield 'delete', key, None def all_ops(): keys = [key_ops() for _ in range(KEY_COUNT)] for _ in range(OPERATIONS): ops = random.choice(keys) yield next(ops) def worker(queue, eviction_policy, processes, threads): timings = co.defaultdict(list) cache = Cache('tmp', eviction_policy=eviction_policy) for index, (action, key, value) in enumerate(iter(queue.get, None)): start = time.time() try: if action == 'set': cache.set(key, value, expire=EXPIRE) elif action == 'get': result = cache.get(key) else: assert action == 'delete' cache.delete(key) except Timeout: miss = True else: miss = False stop = time.time() if ( action == 'get' and processes == 1 and threads == 1 and EXPIRE is None ): assert result == value if index > WARMUP: delta = stop - start timings[action].append(delta) if miss: timings[action + '-miss'].append(delta) queue.put(timings) cache.close() def dispatch(num, eviction_policy, processes, threads): with open('input-%s.pkl' % num, 'rb') as reader: process_queue = pickle.load(reader) thread_queues = [queue.Queue() for _ in range(threads)] subthreads = [ threading.Thread( target=worker, args=(thread_queue, eviction_policy, processes, threads), ) for thread_queue in thread_queues ] for index, triplet in enumerate(process_queue): thread_queue = thread_queues[index % threads] thread_queue.put(triplet) for thread_queue in thread_queues: thread_queue.put(None) # start = time.time() for thread in subthreads: thread.start() for thread in subthreads: thread.join() # stop = time.time() 
timings = co.defaultdict(list) for thread_queue in thread_queues: data = thread_queue.get() for key in data: timings[key].extend(data[key]) with open('output-%s.pkl' % num, 'wb') as writer: pickle.dump(timings, writer, protocol=2) def percentile(sequence, percent): if not sequence: return None values = sorted(sequence) if percent == 0: return values[0] pos = int(len(values) * percent) - 1 return values[pos] def stress_test( create=True, delete=True, eviction_policy=u'least-recently-stored', processes=1, threads=1, ): shutil.rmtree('tmp', ignore_errors=True) if processes == 1: # Use threads. func = threading.Thread else: func = mp.Process subprocs = [ func(target=dispatch, args=(num, eviction_policy, processes, threads)) for num in range(processes) ] if create: operations = list(all_ops()) process_queue = [[] for _ in range(processes)] for index, ops in enumerate(operations): process_queue[index % processes].append(ops) for num in range(processes): with open('input-%s.pkl' % num, 'wb') as writer: pickle.dump(process_queue[num], writer, protocol=2) for process in subprocs: process.start() for process in subprocs: process.join() with Cache('tmp') as cache: warnings.simplefilter('error') warnings.simplefilter('ignore', category=UnknownFileWarning) warnings.simplefilter('ignore', category=EmptyDirWarning) cache.check() timings = co.defaultdict(list) for num in range(processes): with open('output-%s.pkl' % num, 'rb') as reader: data = pickle.load(reader) for key in data: timings[key] += data[key] if delete: for num in range(processes): os.remove('input-%s.pkl' % num) os.remove('output-%s.pkl' % num) display(eviction_policy, timings) shutil.rmtree('tmp', ignore_errors=True) def stress_test_lru(): """Stress test least-recently-used eviction policy.""" stress_test(eviction_policy=u'least-recently-used') def stress_test_lfu(): """Stress test least-frequently-used eviction policy.""" stress_test(eviction_policy=u'least-frequently-used') def stress_test_none(): """Stress test 
'none' eviction policy.""" stress_test(eviction_policy=u'none') def stress_test_mp(): """Stress test multiple threads and processes.""" stress_test(processes=4, threads=4) if __name__ == '__main__': import argparse parser = argparse.ArgumentParser( formatter_class=argparse.ArgumentDefaultsHelpFormatter, ) parser.add_argument( '-n', '--operations', type=float, default=OPERATIONS, help='Number of operations to perform', ) parser.add_argument( '-g', '--get-average', type=float, default=GET_AVERAGE, help='Expected value of exponential variate used for GET count', ) parser.add_argument( '-k', '--key-count', type=float, default=KEY_COUNT, help='Number of unique keys', ) parser.add_argument( '-d', '--del-chance', type=float, default=DEL_CHANCE, help='Likelihood of a key deletion', ) parser.add_argument( '-w', '--warmup', type=float, default=WARMUP, help='Number of warmup operations before timings', ) parser.add_argument( '-e', '--expire', type=float, default=EXPIRE, help='Number of seconds before key expires', ) parser.add_argument( '-t', '--threads', type=int, default=1, help='Number of threads to start in each process', ) parser.add_argument( '-p', '--processes', type=int, default=1, help='Number of processes to start', ) parser.add_argument( '-s', '--seed', type=int, default=0, help='Random seed', ) parser.add_argument( '--no-create', action='store_false', dest='create', help='Do not create operations data', ) parser.add_argument( '--no-delete', action='store_false', dest='delete', help='Do not delete operations data', ) parser.add_argument( '-v', '--eviction-policy', type=str, default=u'least-recently-stored', ) args = parser.parse_args() OPERATIONS = int(args.operations) GET_AVERAGE = int(args.get_average) KEY_COUNT = int(args.key_count) DEL_CHANCE = args.del_chance WARMUP = int(args.warmup) EXPIRE = args.expire random.seed(args.seed) start = time.time() stress_test( create=args.create, delete=args.delete, eviction_policy=args.eviction_policy, 
processes=args.processes, threads=args.threads, ) end = time.time() print('Total wall clock time: %.3f seconds' % (end - start)) python-diskcache-5.4.0/tests/stress_test_deque.py000066400000000000000000000055501416346170000222450ustar00rootroot00000000000000"""Stress test diskcache.persistent.Deque.""" import collections as co import functools as ft import random import diskcache as dc OPERATIONS = 1000 SEED = 0 SIZE = 10 functions = [] def register(function): functions.append(function) return function def lencheck(function): @ft.wraps(function) def wrapper(sequence, deque): assert len(sequence) == len(deque) if not deque: return function(sequence, deque) return wrapper @register @lencheck def stress_get(sequence, deque): index = random.randrange(len(sequence)) assert sequence[index] == deque[index] @register @lencheck def stress_set(sequence, deque): index = random.randrange(len(sequence)) value = random.random() sequence[index] = value deque[index] = value @register @lencheck def stress_del(sequence, deque): index = random.randrange(len(sequence)) del sequence[index] del deque[index] @register def stress_iadd(sequence, deque): values = [random.random() for _ in range(5)] sequence += values deque += values @register def stress_iter(sequence, deque): assert all(alpha == beta for alpha, beta in zip(sequence, deque)) @register def stress_reversed(sequence, deque): reversed_sequence = reversed(sequence) reversed_deque = reversed(deque) pairs = zip(reversed_sequence, reversed_deque) assert all(alpha == beta for alpha, beta in pairs) @register def stress_append(sequence, deque): value = random.random() sequence.append(value) deque.append(value) @register def stress_appendleft(sequence, deque): value = random.random() sequence.appendleft(value) deque.appendleft(value) @register @lencheck def stress_pop(sequence, deque): assert sequence.pop() == deque.pop() register(stress_pop) register(stress_pop) @register @lencheck def stress_popleft(sequence, deque): assert 
sequence.popleft() == deque.popleft() register(stress_popleft) register(stress_popleft) @register def stress_reverse(sequence, deque): sequence.reverse() deque.reverse() assert all(alpha == beta for alpha, beta in zip(sequence, deque)) @register @lencheck def stress_rotate(sequence, deque): assert len(sequence) == len(deque) steps = random.randrange(len(deque)) sequence.rotate(steps) deque.rotate(steps) assert all(alpha == beta for alpha, beta in zip(sequence, deque)) def stress(sequence, deque): for count in range(OPERATIONS): function = random.choice(functions) function(sequence, deque) if count % 100 == 0: print('\r', len(sequence), ' ' * 7, end='') print() def test(): random.seed(SEED) sequence = co.deque(range(SIZE)) deque = dc.Deque(range(SIZE)) stress(sequence, deque) assert all(alpha == beta for alpha, beta in zip(sequence, deque)) if __name__ == '__main__': test() python-diskcache-5.4.0/tests/stress_test_deque_mp.py000066400000000000000000000044561416346170000227450ustar00rootroot00000000000000"""Stress test diskcache.persistent.Deque.""" import itertools as it import multiprocessing as mp import random import time import diskcache as dc OPERATIONS = 1000 SEED = 0 SIZE = 10 functions = [] def register(function): functions.append(function) return function @register def stress_get(deque): index = random.randrange(max(1, len(deque))) try: deque[index] except IndexError: pass @register def stress_set(deque): index = random.randrange(max(1, len(deque))) value = random.random() try: deque[index] = value except IndexError: pass @register def stress_del(deque): index = random.randrange(max(1, len(deque))) try: del deque[index] except IndexError: pass @register def stress_iadd(deque): values = [random.random() for _ in range(5)] deque += values @register def stress_append(deque): value = random.random() deque.append(value) @register def stress_appendleft(deque): value = random.random() deque.appendleft(value) @register def stress_pop(deque): try: deque.pop() except 
IndexError: pass @register def stress_popleft(deque): try: deque.popleft() except IndexError: pass @register def stress_reverse(deque): deque.reverse() @register def stress_rotate(deque): steps = random.randrange(max(1, len(deque))) deque.rotate(steps) def stress(seed, deque): random.seed(seed) for count in range(OPERATIONS): if len(deque) > 100: function = random.choice([stress_pop, stress_popleft]) else: function = random.choice(functions) function(deque) def test(status=False): random.seed(SEED) deque = dc.Deque(range(SIZE)) processes = [] for count in range(8): process = mp.Process(target=stress, args=(SEED + count, deque)) process.start() processes.append(process) for value in it.count(): time.sleep(1) if status: print('\r', value, 's', len(deque), 'items', ' ' * 20, end='') if all(not process.is_alive() for process in processes): break if status: print('') assert all(process.exitcode == 0 for process in processes) if __name__ == '__main__': test(status=True) python-diskcache-5.4.0/tests/stress_test_fanout.py000066400000000000000000000232151416346170000224340ustar00rootroot00000000000000"""Stress test diskcache.core.Cache.""" import multiprocessing as mp import os import pickle import queue import random import shutil import threading import time import warnings from diskcache import EmptyDirWarning, FanoutCache, UnknownFileWarning from .utils import display OPERATIONS = int(1e4) GET_AVERAGE = 100 KEY_COUNT = 10 DEL_CHANCE = 0.1 WARMUP = 10 EXPIRE = None def make_keys(): def make_int(): return random.randrange(int(1e9)) def make_long(): value = random.randrange(int(1e9)) return value << 64 def make_unicode(): word_size = random.randint(1, 26) word = u''.join( random.sample(u'abcdefghijklmnopqrstuvwxyz', word_size) ) size = random.randint(1, int(200 / 13)) return word * size def make_bytes(): word_size = random.randint(1, 26) word = u''.join( random.sample(u'abcdefghijklmnopqrstuvwxyz', word_size) ).encode('utf-8') size = random.randint(1, int(200 / 13)) return 
word * size def make_float(): return random.random() def make_object(): return (make_float(),) * random.randint(1, 20) funcs = [ make_int, make_long, make_unicode, make_bytes, make_float, make_object, ] while True: func = random.choice(funcs) yield func() def make_vals(): def make_int(): return random.randrange(int(1e9)) def make_long(): value = random.randrange(int(1e9)) return value << 64 def make_unicode(): word_size = random.randint(1, 26) word = u''.join( random.sample(u'abcdefghijklmnopqrstuvwxyz', word_size) ) size = random.randint(1, int(2 ** 16 / 13)) return word * size def make_bytes(): word_size = random.randint(1, 26) word = u''.join( random.sample(u'abcdefghijklmnopqrstuvwxyz', word_size) ).encode('utf-8') size = random.randint(1, int(2 ** 16 / 13)) return word * size def make_float(): return random.random() def make_object(): return [make_float()] * random.randint(1, int(2e3)) funcs = [ make_int, make_long, make_unicode, make_bytes, make_float, make_object, ] while True: func = random.choice(funcs) yield func() def key_ops(): keys = make_keys() vals = make_vals() key = next(keys) while True: value = next(vals) yield 'set', key, value for _ in range(int(random.expovariate(1.0 / GET_AVERAGE))): yield 'get', key, value if random.random() < DEL_CHANCE: yield 'delete', key, None def all_ops(): keys = [key_ops() for _ in range(KEY_COUNT)] for _ in range(OPERATIONS): ops = random.choice(keys) yield next(ops) def worker(queue, eviction_policy, processes, threads): timings = {'get': [], 'set': [], 'delete': []} cache = FanoutCache('tmp', eviction_policy=eviction_policy) for index, (action, key, value) in enumerate(iter(queue.get, None)): start = time.time() if action == 'set': cache.set(key, value, expire=EXPIRE) elif action == 'get': result = cache.get(key) else: assert action == 'delete' cache.delete(key) stop = time.time() if ( action == 'get' and processes == 1 and threads == 1 and EXPIRE is None ): assert result == value if index > WARMUP: 
timings[action].append(stop - start) queue.put(timings) cache.close() def dispatch(num, eviction_policy, processes, threads): with open('input-%s.pkl' % num, 'rb') as reader: process_queue = pickle.load(reader) thread_queues = [queue.Queue() for _ in range(threads)] subthreads = [ threading.Thread( target=worker, args=(thread_queue, eviction_policy, processes, threads), ) for thread_queue in thread_queues ] for index, triplet in enumerate(process_queue): thread_queue = thread_queues[index % threads] thread_queue.put(triplet) for thread_queue in thread_queues: thread_queue.put(None) start = time.time() for thread in subthreads: thread.start() for thread in subthreads: thread.join() stop = time.time() timings = {'get': [], 'set': [], 'delete': [], 'self': (stop - start)} for thread_queue in thread_queues: data = thread_queue.get() for key in data: timings[key].extend(data[key]) with open('output-%s.pkl' % num, 'wb') as writer: pickle.dump(timings, writer, protocol=2) def percentile(sequence, percent): if not sequence: return None values = sorted(sequence) if percent == 0: return values[0] pos = int(len(values) * percent) - 1 return values[pos] def stress_test( create=True, delete=True, eviction_policy=u'least-recently-stored', processes=1, threads=1, ): shutil.rmtree('tmp', ignore_errors=True) if processes == 1: # Use threads. 
func = threading.Thread else: func = mp.Process subprocs = [ func(target=dispatch, args=(num, eviction_policy, processes, threads)) for num in range(processes) ] if create: operations = list(all_ops()) process_queue = [[] for _ in range(processes)] for index, ops in enumerate(operations): process_queue[index % processes].append(ops) for num in range(processes): with open('input-%s.pkl' % num, 'wb') as writer: pickle.dump(process_queue[num], writer, protocol=2) for process in subprocs: process.start() for process in subprocs: process.join() with FanoutCache('tmp') as cache: warnings.simplefilter('error') warnings.simplefilter('ignore', category=UnknownFileWarning) warnings.simplefilter('ignore', category=EmptyDirWarning) cache.check() timings = {'get': [], 'set': [], 'delete': [], 'self': 0.0} for num in range(processes): with open('output-%s.pkl' % num, 'rb') as reader: data = pickle.load(reader) for key in data: timings[key] += data[key] if delete: for num in range(processes): os.remove('input-%s.pkl' % num) os.remove('output-%s.pkl' % num) display(eviction_policy, timings) shutil.rmtree('tmp', ignore_errors=True) def stress_test_lru(): """Stress test least-recently-used eviction policy.""" stress_test(eviction_policy=u'least-recently-used') def stress_test_lfu(): """Stress test least-frequently-used eviction policy.""" stress_test(eviction_policy=u'least-frequently-used') def stress_test_none(): """Stress test 'none' eviction policy.""" stress_test(eviction_policy=u'none') def stress_test_mp(): """Stress test multiple threads and processes.""" stress_test(processes=4, threads=4) if __name__ == '__main__': import argparse parser = argparse.ArgumentParser( formatter_class=argparse.ArgumentDefaultsHelpFormatter, ) parser.add_argument( '-n', '--operations', type=float, default=OPERATIONS, help='Number of operations to perform', ) parser.add_argument( '-g', '--get-average', type=float, default=GET_AVERAGE, help='Expected value of exponential variate used for GET 
count', ) parser.add_argument( '-k', '--key-count', type=float, default=KEY_COUNT, help='Number of unique keys', ) parser.add_argument( '-d', '--del-chance', type=float, default=DEL_CHANCE, help='Likelihood of a key deletion', ) parser.add_argument( '-w', '--warmup', type=float, default=WARMUP, help='Number of warmup operations before timings', ) parser.add_argument( '-e', '--expire', type=float, default=EXPIRE, help='Number of seconds before key expires', ) parser.add_argument( '-t', '--threads', type=int, default=1, help='Number of threads to start in each process', ) parser.add_argument( '-p', '--processes', type=int, default=1, help='Number of processes to start', ) parser.add_argument( '-s', '--seed', type=int, default=0, help='Random seed', ) parser.add_argument( '--no-create', action='store_false', dest='create', help='Do not create operations data', ) parser.add_argument( '--no-delete', action='store_false', dest='delete', help='Do not delete operations data', ) parser.add_argument( '-v', '--eviction-policy', type=str, default=u'least-recently-stored', ) args = parser.parse_args() OPERATIONS = int(args.operations) GET_AVERAGE = int(args.get_average) KEY_COUNT = int(args.key_count) DEL_CHANCE = args.del_chance WARMUP = int(args.warmup) EXPIRE = args.expire random.seed(args.seed) start = time.time() stress_test( create=args.create, delete=args.delete, eviction_policy=args.eviction_policy, processes=args.processes, threads=args.threads, ) end = time.time() print('Total wall clock time: %.3f seconds' % (end - start)) python-diskcache-5.4.0/tests/stress_test_index.py000066400000000000000000000036641416346170000222550ustar00rootroot00000000000000"""Stress test diskcache.persistent.Index.""" import collections as co import itertools as it import random import diskcache as dc KEYS = 100 OPERATIONS = 25000 SEED = 0 functions = [] def register(function): functions.append(function) return function @register def stress_get(mapping, index): key = random.randrange(KEYS) 
assert mapping.get(key, None) == index.get(key, None) @register def stress_set(mapping, index): key = random.randrange(KEYS) value = random.random() mapping[key] = value index[key] = value register(stress_set) register(stress_set) register(stress_set) @register def stress_pop(mapping, index): key = random.randrange(KEYS) assert mapping.pop(key, None) == index.pop(key, None) @register def stress_popitem(mapping, index): if len(mapping) == len(index) == 0: return elif random.randrange(2): assert mapping.popitem() == index.popitem() else: assert mapping.popitem(last=False) == index.popitem(last=False) @register def stress_iter(mapping, index): iterator = it.islice(zip(mapping, index), 5) assert all(alpha == beta for alpha, beta in iterator) @register def stress_reversed(mapping, index): reversed_mapping = reversed(mapping) reversed_index = reversed(index) pairs = it.islice(zip(reversed_mapping, reversed_index), 5) assert all(alpha == beta for alpha, beta in pairs) @register def stress_len(mapping, index): assert len(mapping) == len(index) def stress(mapping, index): for count in range(OPERATIONS): function = random.choice(functions) function(mapping, index) if count % 1000 == 0: print('\r', len(mapping), ' ' * 7, end='') print() def test(): random.seed(SEED) mapping = co.OrderedDict(enumerate(range(KEYS))) index = dc.Index(enumerate(range(KEYS))) stress(mapping, index) assert mapping == index if __name__ == '__main__': test() python-diskcache-5.4.0/tests/stress_test_index_mp.py000066400000000000000000000040231416346170000227370ustar00rootroot00000000000000"""Stress test diskcache.persistent.Index.""" import itertools as it import multiprocessing as mp import random import time import diskcache as dc KEYS = 100 OPERATIONS = 10000 SEED = 0 functions = [] def register(function): functions.append(function) return function @register def stress_get(index): key = random.randrange(KEYS) index.get(key, None) @register def stress_set(index): key = random.randrange(KEYS) value = 
random.random() index[key] = value register(stress_set) register(stress_set) register(stress_set) @register def stress_del(index): key = random.randrange(KEYS) try: del index[key] except KeyError: pass @register def stress_pop(index): key = random.randrange(KEYS) index.pop(key, None) @register def stress_popitem(index): try: if random.randrange(2): index.popitem() else: index.popitem(last=False) except KeyError: pass @register def stress_iter(index): iterator = it.islice(index, 5) for key in iterator: pass @register def stress_reversed(index): iterator = it.islice(reversed(index), 5) for key in iterator: pass @register def stress_len(index): len(index) def stress(seed, index): random.seed(seed) for count in range(OPERATIONS): function = random.choice(functions) function(index) def test(status=False): random.seed(SEED) index = dc.Index(enumerate(range(KEYS))) processes = [] for count in range(8): process = mp.Process(target=stress, args=(SEED + count, index)) process.start() processes.append(process) for value in it.count(): time.sleep(1) if status: print('\r', value, 's', len(index), 'keys', ' ' * 20, end='') if all(not process.is_alive() for process in processes): break if status: print('') assert all(process.exitcode == 0 for process in processes) if __name__ == '__main__': test(status=True) python-diskcache-5.4.0/tests/test_core.py000066400000000000000000001026741416346170000204740ustar00rootroot00000000000000"""Test diskcache.core.Cache.""" import errno import hashlib import io import os import os.path as op import pickle import shutil import sqlite3 import subprocess as sp import tempfile import threading import time import warnings from unittest import mock import pytest import diskcache as dc pytestmark = pytest.mark.filterwarnings('ignore', category=dc.EmptyDirWarning) @pytest.fixture def cache(): with dc.Cache() as cache: yield cache shutil.rmtree(cache.directory, ignore_errors=True) def test_init(cache): for key, value in dc.DEFAULT_SETTINGS.items(): 
assert getattr(cache, key) == value cache.check() cache.close() cache.close() def test_init_disk(): with dc.Cache(disk_pickle_protocol=1, disk_min_file_size=2 ** 20) as cache: key = (None, 0, 'abc') cache[key] = 0 cache.check() assert cache.disk_min_file_size == 2 ** 20 assert cache.disk_pickle_protocol == 1 shutil.rmtree(cache.directory, ignore_errors=True) def test_disk_reset(): with dc.Cache(disk_min_file_size=0, disk_pickle_protocol=0) as cache: value = (None, 0, 'abc') cache[0] = value cache.check() assert cache.disk_min_file_size == 0 assert cache.disk_pickle_protocol == 0 assert cache._disk.min_file_size == 0 assert cache._disk.pickle_protocol == 0 cache.reset('disk_min_file_size', 2 ** 10) cache.reset('disk_pickle_protocol', 2) cache[1] = value cache.check() assert cache.disk_min_file_size == 2 ** 10 assert cache.disk_pickle_protocol == 2 assert cache._disk.min_file_size == 2 ** 10 assert cache._disk.pickle_protocol == 2 shutil.rmtree(cache.directory, ignore_errors=True) def test_disk_valueerror(): with pytest.raises(ValueError): with dc.Cache(disk=dc.Disk('test')): pass def test_custom_disk(): with dc.Cache(disk=dc.JSONDisk, disk_compress_level=6) as cache: values = [None, True, 0, 1.23, {}, [None] * 10000] for value in values: cache[value] = value for value in values: assert cache[value] == value for key, value in zip(cache, values): assert key == value test_memoize_iter(cache) shutil.rmtree(cache.directory, ignore_errors=True) class SHA256FilenameDisk(dc.Disk): def filename(self, key=dc.UNKNOWN, value=dc.UNKNOWN): filename = hashlib.sha256(key).hexdigest()[:32] full_path = op.join(self._directory, filename) return filename, full_path def test_custom_filename_disk(): with dc.Cache(disk=SHA256FilenameDisk) as cache: for count in range(100, 200): key = str(count).encode('ascii') cache[key] = str(count) * int(1e5) for count in range(100, 200): key = str(count).encode('ascii') filename = hashlib.sha256(key).hexdigest()[:32] full_path = 
op.join(cache.directory, filename) with open(full_path) as reader: content = reader.read() assert content == str(count) * int(1e5) shutil.rmtree(cache.directory, ignore_errors=True) def test_init_makedirs(): cache_dir = tempfile.mkdtemp() shutil.rmtree(cache_dir) makedirs = mock.Mock(side_effect=OSError(errno.EACCES)) with pytest.raises(EnvironmentError): try: with mock.patch('os.makedirs', makedirs): dc.Cache(cache_dir) except EnvironmentError: shutil.rmtree(cache_dir, ignore_errors=True) raise def test_pragma_error(cache): local = mock.Mock() con = mock.Mock() execute = mock.Mock() cursor = mock.Mock() fetchall = mock.Mock() local.pid = os.getpid() local.con = con con.execute = execute execute.return_value = cursor cursor.fetchall = fetchall fetchall.side_effect = [sqlite3.OperationalError] * 60000 size = 2 ** 28 with mock.patch('time.sleep', lambda num: 0): with mock.patch.object(cache, '_local', local): with pytest.raises(sqlite3.OperationalError): cache.reset('sqlite_mmap_size', size) def test_close_error(cache): class LocalTest(object): def __init__(self): self._calls = 0 def __getattr__(self, name): if self._calls: raise AttributeError else: self._calls += 1 return mock.Mock() with mock.patch.object(cache, '_local', LocalTest()): cache.close() def test_getsetdel(cache): values = [ (None, False), ((None,) * 2 ** 20, False), (1234, False), (2 ** 512, False), (56.78, False), (u'hello', False), (u'hello' * 2 ** 20, False), (b'world', False), (b'world' * 2 ** 20, False), (io.BytesIO(b'world' * 2 ** 20), True), ] for key, (value, file_like) in enumerate(values): assert cache.set(key, value, read=file_like) assert len(cache) == len(values) for key, (value, file_like) in enumerate(values): if file_like: assert cache[key] == value.getvalue() else: assert cache[key] == value for key, _ in enumerate(values): del cache[key] assert len(cache) == 0 for value, (key, _) in enumerate(values): cache[key] = value assert len(cache) == len(values) for value, (key, _) in 
enumerate(values): assert cache[key] == value for _, (key, _) in enumerate(values): del cache[key] assert len(cache) == 0 cache.check() def test_get_keyerror1(cache): with pytest.raises(KeyError): cache[0] def test_get_keyerror4(cache): func = mock.Mock(side_effect=IOError(errno.ENOENT, '')) cache.reset('statistics', True) cache[0] = b'abcd' * 2 ** 20 with mock.patch('diskcache.core.open', func): with pytest.raises((IOError, KeyError, OSError)): cache[0] def test_read(cache): cache.set(0, b'abcd' * 2 ** 20) with cache.read(0) as reader: assert reader is not None def test_read_keyerror(cache): with pytest.raises(KeyError): with cache.read(0): pass def test_set_twice(cache): large_value = b'abcd' * 2 ** 20 cache[0] = 0 cache[0] = 1 assert cache[0] == 1 cache[0] = large_value assert cache[0] == large_value with cache.get(0, read=True) as reader: assert reader.name is not None cache[0] = 2 assert cache[0] == 2 assert cache.get(0, read=True) == 2 cache.check() def test_set_timeout(cache): local = mock.Mock() con = mock.Mock() execute = mock.Mock() local.pid = os.getpid() local.con = con con.execute = execute execute.side_effect = sqlite3.OperationalError with pytest.raises(dc.Timeout): try: with mock.patch.object(cache, '_local', local): cache.set('a', 'b' * 2 ** 20) finally: cache.check() def test_raw(cache): assert cache.set(0, io.BytesIO(b'abcd'), read=True) assert cache[0] == b'abcd' def test_get(cache): assert cache.get(0) is None assert cache.get(1, 'dne') == 'dne' assert cache.get(2, {}) == {} assert cache.get(0, expire_time=True, tag=True) == (None, None, None) assert cache.set(0, 0, expire=None, tag=u'number') assert cache.get(0, expire_time=True) == (0, None) assert cache.get(0, tag=True) == (0, u'number') assert cache.get(0, expire_time=True, tag=True) == (0, None, u'number') def test_get_expired_fast_path(cache): assert cache.set(0, 0, expire=0.001) time.sleep(0.01) assert cache.get(0) is None def test_get_ioerror_fast_path(cache): assert cache.set(0, 0) 
disk = mock.Mock() put = mock.Mock() fetch = mock.Mock() disk.put = put put.side_effect = [(0, True)] disk.fetch = fetch io_error = IOError() io_error.errno = errno.ENOENT fetch.side_effect = io_error with mock.patch.object(cache, '_disk', disk): assert cache.get(0) is None def test_get_expired_slow_path(cache): cache.stats(enable=True) cache.reset('eviction_policy', 'least-recently-used') assert cache.set(0, 0, expire=0.001) time.sleep(0.01) assert cache.get(0) is None def test_pop(cache): assert cache.incr('alpha') == 1 assert cache.pop('alpha') == 1 assert cache.get('alpha') is None assert cache.check() == [] assert cache.set('alpha', 123, expire=1, tag='blue') assert cache.pop('alpha', tag=True) == (123, 'blue') assert cache.set('beta', 456, expire=1e-9, tag='green') time.sleep(0.01) assert cache.pop('beta', 'dne') == 'dne' assert cache.set('gamma', 789, tag='red') assert cache.pop('gamma', expire_time=True, tag=True) == (789, None, 'red') assert cache.pop('dne') is None assert cache.set('delta', 210) assert cache.pop('delta', expire_time=True) == (210, None) assert cache.set('epsilon', '0' * 2 ** 20) assert cache.pop('epsilon') == '0' * 2 ** 20 def test_pop_ioerror(cache): assert cache.set(0, 0) disk = mock.Mock() put = mock.Mock() fetch = mock.Mock() disk.put = put put.side_effect = [(0, True)] disk.fetch = fetch io_error = IOError() io_error.errno = errno.ENOENT fetch.side_effect = io_error with mock.patch.object(cache, '_disk', disk): assert cache.pop(0) is None def test_delete(cache): cache[0] = 0 assert cache.delete(0) assert len(cache) == 0 assert not cache.delete(0) assert len(cache.check()) == 0 def test_del(cache): with pytest.raises(KeyError): del cache[0] def test_del_expired(cache): cache.set(0, 0, expire=0.001) time.sleep(0.01) with pytest.raises(KeyError): del cache[0] def test_stats(cache): cache[0] = 0 assert cache.stats(enable=True) == (0, 0) for _ in range(100): cache[0] for _ in range(10): cache.get(1) assert cache.stats(reset=True) == (100, 
10) assert cache.stats(enable=False) == (0, 0) for _ in range(100): cache[0] for _ in range(10): cache.get(1) assert cache.stats() == (0, 0) assert len(cache.check()) == 0 def test_path(cache): cache[0] = u'abc' large_value = b'abc' * 2 ** 20 cache[1] = large_value assert cache.get(0, read=True) == u'abc' with cache.get(1, read=True) as reader: assert reader.name is not None path = reader.name with open(path, 'rb') as reader: value = reader.read() assert value == large_value assert len(cache.check()) == 0 def test_expire_rows(cache): cache.reset('cull_limit', 0) for value in range(10): assert cache.set(value, value, expire=1e-9) for value in range(10, 15): assert cache.set(value, value) assert len(cache) == 15 time.sleep(0.01) cache.reset('cull_limit', 10) assert cache.set(15, 15) assert len(cache) == 6 assert len(cache.check()) == 0 def test_least_recently_stored(cache): cache.reset('eviction_policy', u'least-recently-stored') cache.reset('size_limit', int(10.1e6)) cache.reset('cull_limit', 2) million = b'x' * int(1e6) for value in range(10): cache[value] = million assert len(cache) == 10 for value in range(10): assert cache[value] == million for value in range(10, 20): cache[value] = million assert len(cache) == 10 for value in range(10): cache[value] = million count = len(cache) for index, length in enumerate([1, 2, 3, 4]): cache[10 + index] = million * length assert len(cache) == count - length assert cache[12] == million * 3 assert cache[13] == million * 4 assert len(cache.check()) == 0 def test_least_recently_used(cache): cache.reset('eviction_policy', u'least-recently-used') cache.reset('size_limit', int(10.1e6)) cache.reset('cull_limit', 5) million = b'x' * int(1e6) for value in range(10): cache[value] = million assert len(cache) == 10 time.sleep(0.01) cache[0] cache[1] cache[7] cache[8] cache[9] cache[10] = million assert len(cache) == 6 for value in [0, 1, 7, 8, 9, 10]: assert cache[value] == million assert len(cache.check()) == 0 def 
test_least_frequently_used(cache): cache.reset('eviction_policy', u'least-frequently-used') cache.reset('size_limit', int(10.1e6)) cache.reset('cull_limit', 5) million = b'x' * int(1e6) for value in range(10): cache[value] = million assert len(cache) == 10 cache[0], cache[0], cache[0], cache[0], cache[0] cache[1], cache[1], cache[1], cache[1] cache[7], cache[7], cache[7] cache[8], cache[8] cache[9] cache[10] = million assert len(cache) == 6 for value in [0, 1, 7, 8, 9, 10]: assert cache[value] == million assert len(cache.check()) == 0 def test_check(cache): blob = b'a' * 2 ** 20 keys = (0, 1, 1234, 56.78, u'hello', b'world', None) for key in keys: cache[key] = blob # Cause mayhem. with cache.get(0, read=True) as reader: full_path = reader.name os.rename(full_path, full_path + '_moved') with cache.get(1, read=True) as reader: full_path = reader.name os.remove(full_path) cache._sql('UPDATE Cache SET size = 0 WHERE rowid > 1') cache.reset('count', 0) cache.reset('size', 0) with warnings.catch_warnings(): warnings.filterwarnings('ignore') cache.check() cache.check(fix=True) assert len(cache.check()) == 0 # Should display no warnings. def test_integrity_check(cache): for value in range(1000): cache[value] = value cache.close() with io.open(op.join(cache.directory, 'cache.db'), 'r+b') as writer: writer.seek(52) writer.write(b'\x00\x01') # Should be 0, change it. cache = dc.Cache(cache.directory) with warnings.catch_warnings(): warnings.filterwarnings('ignore') cache.check() cache.check(fix=True) assert len(cache.check()) == 0 def test_expire(cache): cache.reset('cull_limit', 0) # Disable expiring keys on `set`. 
now = time.time() time_time = mock.Mock(return_value=now) with mock.patch('time.time', time_time): for value in range(1, 101): assert cache.set(value, value, expire=value) assert len(cache) == 100 time_time = mock.Mock(return_value=now + 11) cache.reset('cull_limit', 10) with mock.patch('time.time', time_time): assert cache.expire() == 10 assert len(cache) == 90 assert len(cache.check()) == 0 def test_tag_index(): with dc.Cache(tag_index=True) as cache: assert cache.tag_index == 1 shutil.rmtree(cache.directory, ignore_errors=True) def test_evict(cache): colors = ('red', 'blue', 'yellow') for value in range(90): assert cache.set(value, value, tag=colors[value % len(colors)]) assert len(cache) == 90 assert cache.evict('red') == 30 assert len(cache) == 60 assert len(cache.check()) == 0 def test_clear(cache): for value in range(100): cache[value] = value assert len(cache) == 100 assert cache.clear() == 100 assert len(cache) == 0 assert len(cache.check()) == 0 def test_clear_timeout(cache): transact = mock.Mock() transact.side_effect = dc.Timeout with mock.patch.object(cache, '_transact', transact): with pytest.raises(dc.Timeout): cache.clear() def test_tag(cache): assert cache.set(0, None, tag=u'zero') assert cache.set(1, None, tag=1234) assert cache.set(2, None, tag=5.67) assert cache.set(3, None, tag=b'three') assert cache.get(0, tag=True) == (None, u'zero') assert cache.get(1, tag=True) == (None, 1234) assert cache.get(2, tag=True) == (None, 5.67) assert cache.get(3, tag=True) == (None, b'three') def test_with(cache): with dc.Cache(cache.directory) as tmp: tmp[u'a'] = 0 tmp[u'b'] = 1 assert cache[u'a'] == 0 assert cache[u'b'] == 1 def test_contains(cache): assert 0 not in cache cache[0] = 0 assert 0 in cache def test_touch(cache): assert cache.set(0, None, expire=60) assert cache.touch(0, expire=None) assert cache.touch(0, expire=0) assert not cache.touch(0) def test_add(cache): assert cache.add(1, 1) assert cache.get(1) == 1 assert not cache.add(1, 2) assert 
cache.get(1) == 1 assert cache.delete(1) assert cache.add(1, 1, expire=0.001) time.sleep(0.01) assert cache.add(1, 1) cache.check() def test_add_large_value(cache): value = b'abcd' * 2 ** 20 assert cache.add(b'test-key', value) assert cache.get(b'test-key') == value assert not cache.add(b'test-key', value * 2) assert cache.get(b'test-key') == value cache.check() def test_add_timeout(cache): local = mock.Mock() con = mock.Mock() execute = mock.Mock() local.pid = os.getpid() local.con = con con.execute = execute execute.side_effect = sqlite3.OperationalError with pytest.raises(dc.Timeout): try: with mock.patch.object(cache, '_local', local): cache.add(0, 0) finally: cache.check() def test_incr(cache): assert cache.incr('key', default=5) == 6 assert cache.incr('key', 2) == 8 assert cache.get('key', expire_time=True, tag=True) == (8, None, None) assert cache.delete('key') assert cache.set('key', 100, expire=0.100) assert cache.get('key') == 100 time.sleep(0.120) assert cache.incr('key') == 1 def test_incr_insert_keyerror(cache): with pytest.raises(KeyError): cache.incr('key', default=None) def test_incr_update_keyerror(cache): assert cache.set('key', 100, expire=0.100) assert cache.get('key') == 100 time.sleep(0.120) with pytest.raises(KeyError): cache.incr('key', default=None) def test_decr(cache): assert cache.decr('key', default=5) == 4 assert cache.decr('key', 2) == 2 assert cache.get('key', expire_time=True, tag=True) == (2, None, None) assert cache.delete('key') assert cache.set('key', 100, expire=0.100) assert cache.get('key') == 100 time.sleep(0.120) assert cache.decr('key') == -1 def test_iter(cache): sequence = list('abcdef') + [('g',)] for index, value in enumerate(sequence): cache[value] = index iterator = iter(cache) assert all(one == two for one, two in zip(sequence, iterator)) cache['h'] = 7 with pytest.raises(StopIteration): next(iterator) def test_iter_expire(cache): cache.reset('cull_limit', 0) for num in range(100): cache.set(num, num, expire=1e-9) 
assert len(cache) == 100 assert list(cache) == list(range(100)) def test_iter_error(cache): with pytest.raises(StopIteration): next(iter(cache)) def test_reversed(cache): sequence = 'abcdef' for index, value in enumerate(sequence): cache[value] = index iterator = reversed(cache) pairs = zip(reversed(sequence), iterator) assert all(one == two for one, two in pairs) try: next(iterator) except StopIteration: pass else: assert False, 'StopIteration expected' def test_reversed_error(cache): with pytest.raises(StopIteration): next(reversed(cache)) def test_push_pull(cache): for value in range(10): cache.push(value) for value in range(10): _, pull_value = cache.pull() assert pull_value == value assert len(cache) == 0 def test_push_pull_prefix(cache): for value in range(10): cache.push(value, prefix='key') for value in range(10): key, peek_value = cache.peek(prefix='key') key, pull_value = cache.pull(prefix='key') assert key.startswith('key') assert peek_value == value assert pull_value == value assert len(cache) == 0 assert len(cache.check()) == 0 def test_push_pull_extras(cache): cache.push('test') assert cache.pull() == (500000000000000, 'test') assert len(cache) == 0 cache.push('test', expire=10) (key, value), expire_time = cache.peek(expire_time=True) assert key == 500000000000000 assert value == 'test' assert expire_time > time.time() assert len(cache) == 1 (key, value), expire_time = cache.pull(expire_time=True) assert key == 500000000000000 assert value == 'test' assert expire_time > time.time() assert len(cache) == 0 cache.push('test', tag='foo') (key, value), tag = cache.peek(tag=True) assert key == 500000000000000 assert value == 'test' assert tag == 'foo' assert len(cache) == 1 (key, value), tag = cache.pull(tag=True) assert key == 500000000000000 assert value == 'test' assert tag == 'foo' assert len(cache) == 0 cache.push('test') (key, value), expire_time, tag = cache.peek(expire_time=True, tag=True) assert key == 500000000000000 assert value == 'test' assert 
expire_time is None assert tag is None assert len(cache) == 1 (key, value), expire_time, tag = cache.pull(expire_time=True, tag=True) assert key == 500000000000000 assert value == 'test' assert expire_time is None assert tag is None assert len(cache) == 0 assert cache.pull(default=(0, 1)) == (0, 1) assert len(cache.check()) == 0 def test_push_pull_expire(cache): cache.push(0, expire=0.1) cache.push(0, expire=0.1) cache.push(0, expire=0.1) cache.push(1) time.sleep(0.2) assert cache.pull() == (500000000000003, 1) assert len(cache) == 0 assert len(cache.check()) == 0 def test_push_peek_expire(cache): cache.push(0, expire=0.1) cache.push(0, expire=0.1) cache.push(0, expire=0.1) cache.push(1) time.sleep(0.2) assert cache.peek() == (500000000000003, 1) assert len(cache) == 1 assert len(cache.check()) == 0 def test_push_pull_large_value(cache): value = b'test' * (2 ** 20) cache.push(value) assert cache.pull() == (500000000000000, value) assert len(cache) == 0 assert len(cache.check()) == 0 def test_push_peek_large_value(cache): value = b'test' * (2 ** 20) cache.push(value) assert cache.peek() == (500000000000000, value) assert len(cache) == 1 assert len(cache.check()) == 0 def test_pull_ioerror(cache): assert cache.push(0) == 500000000000000 disk = mock.Mock() put = mock.Mock() fetch = mock.Mock() disk.put = put put.side_effect = [(0, True)] disk.fetch = fetch io_error = IOError() io_error.errno = errno.ENOENT fetch.side_effect = io_error with mock.patch.object(cache, '_disk', disk): assert cache.pull() == (None, None) def test_peek_ioerror(cache): assert cache.push(0) == 500000000000000 disk = mock.Mock() put = mock.Mock() fetch = mock.Mock() disk.put = put put.side_effect = [(0, True)] disk.fetch = fetch io_error = IOError() io_error.errno = errno.ENOENT fetch.side_effect = [io_error, 0] with mock.patch.object(cache, '_disk', disk): _, value = cache.peek() assert value == 0 def test_peekitem_extras(cache): with pytest.raises(KeyError): cache.peekitem() assert 
cache.set('a', 0) assert cache.set('b', 1) assert cache.set('c', 2, expire=10, tag='foo') assert cache.set('d', 3, expire=0.1) assert cache.set('e', 4, expire=0.1) time.sleep(0.2) (key, value), expire_time, tag = cache.peekitem(expire_time=True, tag=True) assert key == 'c' assert value == 2 assert expire_time > 0 assert tag == 'foo' (key, value), expire_time = cache.peekitem(expire_time=True) assert key == 'c' assert value == 2 assert expire_time > 0 (key, value), tag = cache.peekitem(tag=True) assert key == 'c' assert value == 2 assert expire_time > 0 assert tag == 'foo' def test_peekitem_ioerror(cache): assert cache.set('a', 0) assert cache.set('b', 1) assert cache.set('c', 2) disk = mock.Mock() put = mock.Mock() fetch = mock.Mock() disk.put = put put.side_effect = [(0, True)] disk.fetch = fetch io_error = IOError() io_error.errno = errno.ENOENT fetch.side_effect = [io_error, 2] with mock.patch.object(cache, '_disk', disk): _, value = cache.peekitem() assert value == 2 def test_iterkeys(cache): assert list(cache.iterkeys()) == [] def test_pickle(cache): for num, val in enumerate('abcde'): cache[val] = num data = pickle.dumps(cache) other = pickle.loads(data) for key in other: assert other[key] == cache[key] def test_pragmas(cache): results = [] def compare_pragmas(): valid = True for key, value in dc.DEFAULT_SETTINGS.items(): if not key.startswith('sqlite_'): continue pragma = key[7:] result = cache._sql('PRAGMA %s' % pragma).fetchall() if result == [(value,)]: continue args = pragma, result, [(value,)] print('pragma %s mismatch: %r != %r' % args) valid = False results.append(valid) threads = [] for count in range(8): thread = threading.Thread(target=compare_pragmas) thread.start() threads.append(thread) for thread in threads: thread.join() assert all(results) def test_size_limit_with_files(cache): cache.reset('cull_limit', 0) size_limit = 30 * cache.disk_min_file_size cache.reset('size_limit', size_limit) value = b'foo' * cache.disk_min_file_size for key in 
range(40): cache.set(key, value) assert cache.volume() > size_limit cache.cull() assert cache.volume() <= size_limit def test_size_limit_with_database(cache): cache.reset('cull_limit', 0) size_limit = 2 * cache.disk_min_file_size cache.reset('size_limit', size_limit) value = b'0123456789' * 10 count = size_limit // (8 + len(value)) for key in range(count): cache.set(key, value) assert cache.volume() > size_limit cache.cull() assert cache.volume() <= size_limit def test_cull_eviction_policy_none(cache): cache.reset('eviction_policy', 'none') size_limit = 2 * cache.disk_min_file_size cache.reset('size_limit', size_limit) value = b'0123456789' * 10 count = size_limit // (8 + len(value)) for key in range(count): cache.set(key, value) assert cache.volume() > size_limit cache.cull() assert cache.volume() > size_limit def test_cull_size_limit_0(cache): cache.reset('cull_limit', 0) size_limit = 2 * cache.disk_min_file_size cache.reset('size_limit', 0) value = b'0123456789' * 10 count = size_limit // (8 + len(value)) for key in range(count): cache.set(key, value) assert cache.volume() > size_limit cache.cull() assert cache.volume() <= size_limit def test_cull_timeout(cache): transact = mock.Mock() transact.side_effect = [dc.Timeout] with mock.patch.object(cache, 'expire', lambda now: 0): with mock.patch.object(cache, 'volume', lambda: int(1e12)): with mock.patch.object(cache, '_transact', transact): with pytest.raises(dc.Timeout): cache.cull() def test_key_roundtrip(cache): key_part_0 = u'part0' key_part_1 = u'part1' to_test = [ (key_part_0, key_part_1), [key_part_0, key_part_1], ] for key in to_test: cache.clear() cache[key] = {'example0': ['value0']} keys = list(cache) assert len(keys) == 1 cache_key = keys[0] assert cache[key] == {'example0': ['value0']} assert cache[cache_key] == {'example0': ['value0']} def test_constant(): import diskcache.core assert repr(diskcache.core.ENOVAL) == 'ENOVAL' def test_copy(): cache_dir1 = tempfile.mkdtemp() with dc.Cache(cache_dir1) as 
cache1: for count in range(10): cache1[count] = str(count) for count in range(10, 20): cache1[count] = str(count) * int(1e5) cache_dir2 = tempfile.mkdtemp() shutil.rmtree(cache_dir2) shutil.copytree(cache_dir1, cache_dir2) with dc.Cache(cache_dir2) as cache2: for count in range(10): assert cache2[count] == str(count) for count in range(10, 20): assert cache2[count] == str(count) * int(1e5) shutil.rmtree(cache_dir1, ignore_errors=True) shutil.rmtree(cache_dir2, ignore_errors=True) def run(command): print('run$ %r' % command) try: result = sp.check_output(command, stderr=sp.STDOUT) print(result) except sp.CalledProcessError as exc: print(exc.output) raise def test_rsync(): try: run(['rsync', '--version']) except OSError: return # No rsync installed. Skip test. rsync_args = ['rsync', '-a', '--checksum', '--delete', '--stats'] cache_dir1 = tempfile.mkdtemp() + os.sep cache_dir2 = tempfile.mkdtemp() + os.sep # Store some items in cache_dir1. with dc.Cache(cache_dir1) as cache1: for count in range(100): cache1[count] = str(count) for count in range(100, 200): cache1[count] = str(count) * int(1e5) # Rsync cache_dir1 to cache_dir2. run(rsync_args + [cache_dir1, cache_dir2]) # Validate items in cache_dir2. with dc.Cache(cache_dir2) as cache2: for count in range(100): assert cache2[count] == str(count) for count in range(100, 200): assert cache2[count] == str(count) * int(1e5) # Store more items in cache_dir2. with dc.Cache(cache_dir2) as cache2: for count in range(200, 300): cache2[count] = str(count) for count in range(300, 400): cache2[count] = str(count) * int(1e5) # Rsync cache_dir2 to cache_dir1. run(rsync_args + [cache_dir2, cache_dir1]) # Validate items in cache_dir1. 
with dc.Cache(cache_dir1) as cache1: for count in range(100): assert cache1[count] == str(count) for count in range(100, 200): assert cache1[count] == str(count) * int(1e5) for count in range(200, 300): assert cache1[count] == str(count) for count in range(300, 400): assert cache1[count] == str(count) * int(1e5) shutil.rmtree(cache_dir1, ignore_errors=True) shutil.rmtree(cache_dir2, ignore_errors=True) def test_custom_eviction_policy(cache): dc.EVICTION_POLICY['lru-gt-1s'] = { 'init': ( 'CREATE INDEX IF NOT EXISTS Cache_access_time ON' ' Cache (access_time)' ), 'get': 'access_time = {now}', 'cull': ( 'SELECT {fields} FROM Cache' ' WHERE access_time < ({now} - 1)' ' ORDER BY access_time LIMIT ?' ), } size_limit = int(1e5) cache.reset('eviction_policy', 'lru-gt-1s') cache.reset('size_limit', size_limit) for count in range(100, 150): cache[count] = str(count) * 500 size = cache.volume() assert size > size_limit assert cache.cull() == 0 assert size == cache.volume() for count in range(100, 150): assert cache[count] == str(count) * 500 time.sleep(1.1) assert cache.cull() > 0 assert cache.volume() < size_limit def test_lru_incr(cache): cache.reset('eviction_policy', 'least-recently-used') cache.incr(0) cache.decr(0) assert cache[0] == 0 def test_memoize(cache): count = 1000 def fibiter(num): alpha, beta = 0, 1 for _ in range(num): alpha, beta = beta, alpha + beta return alpha @cache.memoize() def fibrec(num): if num == 0: return 0 elif num == 1: return 1 else: return fibrec(num - 1) + fibrec(num - 2) cache.stats(enable=True) for value in range(count): assert fibrec(value) == fibiter(value) hits1, misses1 = cache.stats() for value in range(count): assert fibrec(value) == fibiter(value) hits2, misses2 = cache.stats() assert hits2 == (hits1 + count) assert misses2 == misses1 def test_memoize_kwargs(cache): @cache.memoize(typed=True) def foo(*args, **kwargs): return args, kwargs assert foo(1, 2, 3, a=4, b=5) == ((1, 2, 3), {'a': 4, 'b': 5}) def test_cleanup_dirs(cache): 
value = b'\0' * 2 ** 20 start_count = len(os.listdir(cache.directory)) for i in range(10): cache[i] = value set_count = len(os.listdir(cache.directory)) assert set_count > start_count for i in range(10): del cache[i] del_count = len(os.listdir(cache.directory)) assert start_count == del_count def test_disk_write_os_error(cache): func = mock.Mock(side_effect=[OSError] * 10) with mock.patch('diskcache.core.open', func): with pytest.raises(OSError): cache[0] = '\0' * 2 ** 20 def test_memoize_ignore(cache): @cache.memoize(ignore={1, 'arg1'}) def test(*args, **kwargs): return args, kwargs cache.stats(enable=True) assert test('a', 'b', 'c', arg0='d', arg1='e', arg2='f') assert test('a', 'w', 'c', arg0='d', arg1='x', arg2='f') assert test('a', 'y', 'c', arg0='d', arg1='z', arg2='f') assert cache.stats() == (2, 1) def test_memoize_iter(cache): @cache.memoize() def test(*args, **kwargs): return sum(args) + sum(kwargs.values()) cache.clear() assert test(1, 2, 3) assert test(a=1, b=2, c=3) assert test(-1, 0, 1, a=1, b=2, c=3) assert len(cache) == 3 for key in cache: assert cache[key] == 6 python-diskcache-5.4.0/tests/test_deque.py000066400000000000000000000137631416346170000206470ustar00rootroot00000000000000"""Test diskcache.persistent.Deque.""" import pickle import shutil import tempfile from unittest import mock import pytest import diskcache as dc from diskcache.core import ENOVAL def rmdir(directory): try: shutil.rmtree(directory) except OSError: pass @pytest.fixture def deque(): deque = dc.Deque() yield deque rmdir(deque.directory) def test_init(): directory = tempfile.mkdtemp() sequence = list('abcde') deque = dc.Deque(sequence, None) assert deque == sequence rmdir(deque.directory) del deque rmdir(directory) deque = dc.Deque(sequence, directory) assert deque.directory == directory assert deque == sequence other = dc.Deque(directory=directory) assert other == deque del deque del other rmdir(directory) def test_getsetdel(deque): sequence = list('abcde') assert len(deque) 
== 0 for key in sequence: deque.append(key) assert len(deque) == len(sequence) for index in range(len(sequence)): assert deque[index] == sequence[index] for index in range(len(sequence)): deque[index] = index for index in range(len(sequence)): assert deque[index] == index for index in range(len(sequence)): if index % 2: del deque[-1] else: del deque[0] assert len(deque) == 0 def test_index_positive(deque): cache = mock.MagicMock() cache.__len__.return_value = 3 cache.iterkeys.return_value = ['a', 'b', 'c'] cache.__getitem__.side_effect = [KeyError, 101, 102] with mock.patch.object(deque, '_cache', cache): assert deque[0] == 101 def test_index_negative(deque): cache = mock.MagicMock() cache.__len__.return_value = 3 cache.iterkeys.return_value = ['c', 'b', 'a'] cache.__getitem__.side_effect = [KeyError, 101, 100] with mock.patch.object(deque, '_cache', cache): assert deque[-1] == 101 def test_index_out_of_range(deque): cache = mock.MagicMock() cache.__len__.return_value = 3 cache.iterkeys.return_value = ['a', 'b', 'c'] cache.__getitem__.side_effect = [KeyError] * 3 with mock.patch.object(deque, '_cache', cache): with pytest.raises(IndexError): deque[0] def test_iter_keyerror(deque): cache = mock.MagicMock() cache.iterkeys.return_value = ['a', 'b', 'c'] cache.__getitem__.side_effect = [KeyError, 101, 102] with mock.patch.object(deque, '_cache', cache): assert list(iter(deque)) == [101, 102] def test_reversed(deque): sequence = list('abcde') deque += sequence assert list(reversed(deque)) == list(reversed(sequence)) def test_reversed_keyerror(deque): cache = mock.MagicMock() cache.iterkeys.return_value = ['c', 'b', 'a'] cache.__getitem__.side_effect = [KeyError, 101, 100] with mock.patch.object(deque, '_cache', cache): assert list(reversed(deque)) == [101, 100] def test_state(deque): sequence = list('abcde') deque.extend(sequence) assert deque == sequence state = pickle.dumps(deque) values = pickle.loads(state) assert values == sequence def test_compare(deque): assert 
not (deque == {}) assert not (deque == [0]) assert deque != [1] deque.append(0) assert deque <= [0] assert deque <= [1] def test_indexerror_negative(deque): with pytest.raises(IndexError): deque[-1] def test_indexerror(deque): with pytest.raises(IndexError): deque[0] def test_repr(): directory = tempfile.mkdtemp() deque = dc.Deque(directory=directory) assert repr(deque) == 'Deque(directory=%r)' % directory def test_count(deque): deque += 'abbcccddddeeeee' for index, value in enumerate('abcde', 1): assert deque.count(value) == index def test_extend(deque): sequence = list('abcde') deque.extend(sequence) assert deque == sequence def test_extendleft(deque): sequence = list('abcde') deque.extendleft(sequence) assert deque == list(reversed(sequence)) def test_pop(deque): sequence = list('abcde') deque.extend(sequence) while sequence: assert deque.pop() == sequence.pop() def test_pop_indexerror(deque): with pytest.raises(IndexError): deque.pop() def test_popleft(deque): sequence = list('abcde') deque.extend(sequence) while sequence: value = sequence[0] assert deque.popleft() == value del sequence[0] def test_popleft_indexerror(deque): with pytest.raises(IndexError): deque.popleft() def test_remove(deque): deque.extend('abaca') deque.remove('a') assert deque == 'baca' deque.remove('a') assert deque == 'bca' deque.remove('a') assert deque == 'bc' def test_remove_valueerror(deque): with pytest.raises(ValueError): deque.remove(0) def test_remove_keyerror(deque): cache = mock.MagicMock() cache.iterkeys.return_value = ['a', 'b', 'c'] cache.__getitem__.side_effect = [KeyError, 100, 100] cache.__delitem__.side_effect = [KeyError, None] with mock.patch.object(deque, '_cache', cache): deque.remove(100) def test_reverse(deque): deque += 'abcde' deque.reverse() assert deque == 'edcba' def test_rotate_typeerror(deque): with pytest.raises(TypeError): deque.rotate(0.5) def test_rotate(deque): deque.rotate(1) deque.rotate(-1) deque += 'abcde' deque.rotate(3) assert deque == 'cdeab' def 
test_rotate_negative(deque): deque += 'abcde' deque.rotate(-2) assert deque == 'cdeab' def test_rotate_indexerror(deque): deque += 'abc' cache = mock.MagicMock() cache.__len__.return_value = 3 cache.pull.side_effect = [(None, ENOVAL)] with mock.patch.object(deque, '_cache', cache): deque.rotate(1) def test_rotate_indexerror_negative(deque): deque += 'abc' cache = mock.MagicMock() cache.__len__.return_value = 3 cache.pull.side_effect = [(None, ENOVAL)] with mock.patch.object(deque, '_cache', cache): deque.rotate(-1) python-diskcache-5.4.0/tests/test_djangocache.py000066400000000000000000001256441416346170000217740ustar00rootroot00000000000000# Most of this file was copied from: # https://raw.githubusercontent.com/django/django/stable/3.2.x/tests/cache/tests.py # Unit tests for cache framework # Uses whatever cache backend is set in the test settings file. import os import pickle import shutil import tempfile import time from unittest import mock from django.conf import settings from django.core.cache import CacheKeyWarning, cache, caches from django.http import HttpResponse from django.middleware.cache import ( FetchFromCacheMiddleware, UpdateCacheMiddleware, ) from django.test import RequestFactory, TestCase, override_settings from django.test.signals import setting_changed ################################################################################ # Setup Django for models import. 
################################################################################ os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'tests.settings') import django django.setup() from .models import Poll, expensive_calculation # functions/classes for complex data type tests def f(): return 42 class C: def m(n): return 24 class Unpicklable: def __getstate__(self): raise pickle.PickleError() def empty_response(request): return HttpResponse() KEY_ERRORS_WITH_MEMCACHED_MSG = ( 'Cache key contains characters that will cause errors if used with ' 'memcached: %r' ) class UnpicklableType(object): # Unpicklable using the default pickling protocol on Python 2. __slots__ = ('a',) def custom_key_func(key, key_prefix, version): """A customized cache key function""" return 'CUSTOM-' + '-'.join([key_prefix, str(version), key]) _caches_setting_base = { 'default': {}, 'prefix': {'KEY_PREFIX': 'cacheprefix{}'.format(os.getpid())}, 'v2': {'VERSION': 2}, 'custom_key': {'KEY_FUNCTION': custom_key_func}, 'custom_key2': {'KEY_FUNCTION': 'tests.test_djangocache.custom_key_func'}, 'cull': {'OPTIONS': {'MAX_ENTRIES': 30}}, 'zero_cull': {'OPTIONS': {'CULL_FREQUENCY': 0, 'MAX_ENTRIES': 30}}, } def caches_setting_for_tests(base=None, exclude=None, **params): # `base` is used to pull in the memcached config from the original settings, # `exclude` is a set of cache names denoting which `_caches_setting_base` keys # should be omitted. # `params` are test specific overrides and `_caches_settings_base` is the # base config for the tests. 
# This results in the following search order: # params -> _caches_setting_base -> base base = base or {} exclude = exclude or set() setting = { k: base.copy() for k in _caches_setting_base if k not in exclude } for key, cache_params in setting.items(): cache_params.update(_caches_setting_base[key]) cache_params.update(params) return setting class BaseCacheTests: # A common set of tests to apply to all cache backends factory = RequestFactory() # RemovedInDjango41Warning: python-memcached doesn't support .get() with # default. supports_get_with_default = True # Some clients raise custom exceptions when .incr() or .decr() are called # with a non-integer value. incr_decr_type_error = TypeError def tearDown(self): cache.clear() def test_simple(self): # Simple cache set/get works cache.set('key', 'value') self.assertEqual(cache.get('key'), 'value') def test_default_used_when_none_is_set(self): """If None is cached, get() returns it instead of the default.""" cache.set('key_default_none', None) self.assertIsNone(cache.get('key_default_none', default='default')) def test_add(self): # A key can be added to a cache self.assertIs(cache.add('addkey1', 'value'), True) self.assertIs(cache.add('addkey1', 'newvalue'), False) self.assertEqual(cache.get('addkey1'), 'value') def test_prefix(self): # Test for same cache key conflicts between shared backend cache.set('somekey', 'value') # should not be set in the prefixed cache self.assertIs(caches['prefix'].has_key('somekey'), False) caches['prefix'].set('somekey', 'value2') self.assertEqual(cache.get('somekey'), 'value') self.assertEqual(caches['prefix'].get('somekey'), 'value2') def test_non_existent(self): """Nonexistent cache keys return as None/default.""" self.assertIsNone(cache.get('does_not_exist')) self.assertEqual(cache.get('does_not_exist', 'bang!'), 'bang!') def test_get_many(self): # Multiple cache keys can be returned using get_many cache.set_many({'a': 'a', 'b': 'b', 'c': 'c', 'd': 'd'}) self.assertEqual( 
cache.get_many(['a', 'c', 'd']), {'a': 'a', 'c': 'c', 'd': 'd'} ) self.assertEqual(cache.get_many(['a', 'b', 'e']), {'a': 'a', 'b': 'b'}) self.assertEqual( cache.get_many(iter(['a', 'b', 'e'])), {'a': 'a', 'b': 'b'} ) cache.set_many({'x': None, 'y': 1}) self.assertEqual(cache.get_many(['x', 'y']), {'x': None, 'y': 1}) def test_delete(self): # Cache keys can be deleted cache.set_many({'key1': 'spam', 'key2': 'eggs'}) self.assertEqual(cache.get('key1'), 'spam') self.assertIs(cache.delete('key1'), True) self.assertIsNone(cache.get('key1')) self.assertEqual(cache.get('key2'), 'eggs') def test_delete_nonexistent(self): self.assertIs(cache.delete('nonexistent_key'), False) def test_has_key(self): # The cache can be inspected for cache keys cache.set('hello1', 'goodbye1') self.assertIs(cache.has_key('hello1'), True) self.assertIs(cache.has_key('goodbye1'), False) cache.set('no_expiry', 'here', None) self.assertIs(cache.has_key('no_expiry'), True) cache.set('null', None) self.assertIs( cache.has_key('null'), True if self.supports_get_with_default else False, ) def test_in(self): # The in operator can be used to inspect cache contents cache.set('hello2', 'goodbye2') self.assertIn('hello2', cache) self.assertNotIn('goodbye2', cache) cache.set('null', None) if self.supports_get_with_default: self.assertIn('null', cache) else: self.assertNotIn('null', cache) def test_incr(self): # Cache values can be incremented cache.set('answer', 41) self.assertEqual(cache.incr('answer'), 42) self.assertEqual(cache.get('answer'), 42) self.assertEqual(cache.incr('answer', 10), 52) self.assertEqual(cache.get('answer'), 52) self.assertEqual(cache.incr('answer', -10), 42) with self.assertRaises(ValueError): cache.incr('does_not_exist') cache.set('null', None) with self.assertRaises(self.incr_decr_type_error): cache.incr('null') def test_decr(self): # Cache values can be decremented cache.set('answer', 43) self.assertEqual(cache.decr('answer'), 42) self.assertEqual(cache.get('answer'), 42) 
self.assertEqual(cache.decr('answer', 10), 32) self.assertEqual(cache.get('answer'), 32) self.assertEqual(cache.decr('answer', -10), 42) with self.assertRaises(ValueError): cache.decr('does_not_exist') cache.set('null', None) with self.assertRaises(self.incr_decr_type_error): cache.decr('null') def test_close(self): self.assertTrue(hasattr(cache, 'close')) cache.close() def test_data_types(self): # Many different data types can be cached stuff = { 'string': 'this is a string', 'int': 42, 'list': [1, 2, 3, 4], 'tuple': (1, 2, 3, 4), 'dict': {'A': 1, 'B': 2}, 'function': f, 'class': C, } cache.set('stuff', stuff) self.assertEqual(cache.get('stuff'), stuff) def test_cache_read_for_model_instance(self): # Don't want fields with callable as default to be called on cache read expensive_calculation.num_runs = 0 Poll.objects.all().delete() my_poll = Poll.objects.create(question='Well?') self.assertEqual(Poll.objects.count(), 1) pub_date = my_poll.pub_date cache.set('question', my_poll) cached_poll = cache.get('question') self.assertEqual(cached_poll.pub_date, pub_date) # We only want the default expensive calculation run once self.assertEqual(expensive_calculation.num_runs, 1) def test_cache_write_for_model_instance_with_deferred(self): # Don't want fields with callable as default to be called on cache write expensive_calculation.num_runs = 0 Poll.objects.all().delete() Poll.objects.create(question='What?') self.assertEqual(expensive_calculation.num_runs, 1) defer_qs = Poll.objects.all().defer('question') self.assertEqual(defer_qs.count(), 1) self.assertEqual(expensive_calculation.num_runs, 1) cache.set('deferred_queryset', defer_qs) # cache set should not re-evaluate default functions self.assertEqual(expensive_calculation.num_runs, 1) def test_cache_read_for_model_instance_with_deferred(self): # Don't want fields with callable as default to be called on cache read expensive_calculation.num_runs = 0 Poll.objects.all().delete() Poll.objects.create(question='What?') 
self.assertEqual(expensive_calculation.num_runs, 1) defer_qs = Poll.objects.all().defer('question') self.assertEqual(defer_qs.count(), 1) cache.set('deferred_queryset', defer_qs) self.assertEqual(expensive_calculation.num_runs, 1) runs_before_cache_read = expensive_calculation.num_runs cache.get('deferred_queryset') # We only want the default expensive calculation run on creation and set self.assertEqual( expensive_calculation.num_runs, runs_before_cache_read ) def test_expiration(self): # Cache values can be set to expire cache.set('expire1', 'very quickly', 1) cache.set('expire2', 'very quickly', 1) cache.set('expire3', 'very quickly', 1) time.sleep(2) self.assertIsNone(cache.get('expire1')) self.assertIs(cache.add('expire2', 'newvalue'), True) self.assertEqual(cache.get('expire2'), 'newvalue') self.assertIs(cache.has_key('expire3'), False) def test_touch(self): # cache.touch() updates the timeout. cache.set('expire1', 'very quickly', timeout=1) self.assertIs(cache.touch('expire1', timeout=4), True) time.sleep(2) self.assertIs(cache.has_key('expire1'), True) time.sleep(3) self.assertIs(cache.has_key('expire1'), False) # cache.touch() works without the timeout argument. 
cache.set('expire1', 'very quickly', timeout=1) self.assertIs(cache.touch('expire1'), True) time.sleep(2) self.assertIs(cache.has_key('expire1'), True) self.assertIs(cache.touch('nonexistent'), False) def test_unicode(self): # Unicode values can be cached stuff = { 'ascii': 'ascii_value', 'unicode_ascii': 'Iñtërnâtiônàlizætiøn1', 'Iñtërnâtiônàlizætiøn': 'Iñtërnâtiônàlizætiøn2', 'ascii2': {'x': 1}, } # Test `set` for (key, value) in stuff.items(): with self.subTest(key=key): cache.set(key, value) self.assertEqual(cache.get(key), value) # Test `add` for (key, value) in stuff.items(): with self.subTest(key=key): self.assertIs(cache.delete(key), True) self.assertIs(cache.add(key, value), True) self.assertEqual(cache.get(key), value) # Test `set_many` for (key, value) in stuff.items(): self.assertIs(cache.delete(key), True) cache.set_many(stuff) for (key, value) in stuff.items(): with self.subTest(key=key): self.assertEqual(cache.get(key), value) def test_binary_string(self): # Binary strings should be cacheable from zlib import compress, decompress value = 'value_to_be_compressed' compressed_value = compress(value.encode()) # Test set cache.set('binary1', compressed_value) compressed_result = cache.get('binary1') self.assertEqual(compressed_value, compressed_result) self.assertEqual(value, decompress(compressed_result).decode()) # Test add self.assertIs(cache.add('binary1-add', compressed_value), True) compressed_result = cache.get('binary1-add') self.assertEqual(compressed_value, compressed_result) self.assertEqual(value, decompress(compressed_result).decode()) # Test set_many cache.set_many({'binary1-set_many': compressed_value}) compressed_result = cache.get('binary1-set_many') self.assertEqual(compressed_value, compressed_result) self.assertEqual(value, decompress(compressed_result).decode()) def test_set_many(self): # Multiple keys can be set using set_many cache.set_many({'key1': 'spam', 'key2': 'eggs'}) self.assertEqual(cache.get('key1'), 'spam') 
self.assertEqual(cache.get('key2'), 'eggs') def test_set_many_returns_empty_list_on_success(self): """set_many() returns an empty list when all keys are inserted.""" failing_keys = cache.set_many({'key1': 'spam', 'key2': 'eggs'}) self.assertEqual(failing_keys, []) def test_set_many_expiration(self): # set_many takes a second ``timeout`` parameter cache.set_many({'key1': 'spam', 'key2': 'eggs'}, 1) time.sleep(2) self.assertIsNone(cache.get('key1')) self.assertIsNone(cache.get('key2')) def test_delete_many(self): # Multiple keys can be deleted using delete_many cache.set_many({'key1': 'spam', 'key2': 'eggs', 'key3': 'ham'}) cache.delete_many(['key1', 'key2']) self.assertIsNone(cache.get('key1')) self.assertIsNone(cache.get('key2')) self.assertEqual(cache.get('key3'), 'ham') def test_clear(self): # The cache can be emptied using clear cache.set_many({'key1': 'spam', 'key2': 'eggs'}) cache.clear() self.assertIsNone(cache.get('key1')) self.assertIsNone(cache.get('key2')) def test_long_timeout(self): """ Follow memcached's convention where a timeout greater than 30 days is treated as an absolute expiration timestamp instead of a relative offset (#12399). 
""" cache.set('key1', 'eggs', 60 * 60 * 24 * 30 + 1) # 30 days + 1 second self.assertEqual(cache.get('key1'), 'eggs') self.assertIs(cache.add('key2', 'ham', 60 * 60 * 24 * 30 + 1), True) self.assertEqual(cache.get('key2'), 'ham') cache.set_many( {'key3': 'sausage', 'key4': 'lobster bisque'}, 60 * 60 * 24 * 30 + 1, ) self.assertEqual(cache.get('key3'), 'sausage') self.assertEqual(cache.get('key4'), 'lobster bisque') def test_forever_timeout(self): """ Passing in None into timeout results in a value that is cached forever """ cache.set('key1', 'eggs', None) self.assertEqual(cache.get('key1'), 'eggs') self.assertIs(cache.add('key2', 'ham', None), True) self.assertEqual(cache.get('key2'), 'ham') self.assertIs(cache.add('key1', 'new eggs', None), False) self.assertEqual(cache.get('key1'), 'eggs') cache.set_many({'key3': 'sausage', 'key4': 'lobster bisque'}, None) self.assertEqual(cache.get('key3'), 'sausage') self.assertEqual(cache.get('key4'), 'lobster bisque') cache.set('key5', 'belgian fries', timeout=1) self.assertIs(cache.touch('key5', timeout=None), True) time.sleep(2) self.assertEqual(cache.get('key5'), 'belgian fries') def test_zero_timeout(self): """ Passing in zero into timeout results in a value that is not cached """ cache.set('key1', 'eggs', 0) self.assertIsNone(cache.get('key1')) self.assertIs(cache.add('key2', 'ham', 0), True) self.assertIsNone(cache.get('key2')) cache.set_many({'key3': 'sausage', 'key4': 'lobster bisque'}, 0) self.assertIsNone(cache.get('key3')) self.assertIsNone(cache.get('key4')) cache.set('key5', 'belgian fries', timeout=5) self.assertIs(cache.touch('key5', timeout=0), True) self.assertIsNone(cache.get('key5')) def test_float_timeout(self): # Make sure a timeout given as a float doesn't crash anything. 
cache.set('key1', 'spam', 100.2) self.assertEqual(cache.get('key1'), 'spam') def _perform_cull_test(self, cull_cache_name, initial_count, final_count): try: cull_cache = caches[cull_cache_name] except InvalidCacheBackendError: self.skipTest("Culling isn't implemented.") # Create initial cache key entries. This will overflow the cache, # causing a cull. for i in range(1, initial_count): cull_cache.set('cull%d' % i, 'value', 1000) count = 0 # Count how many keys are left in the cache. for i in range(1, initial_count): if cull_cache.has_key('cull%d' % i): count += 1 self.assertEqual(count, final_count) def test_cull(self): self._perform_cull_test('cull', 50, 29) def test_zero_cull(self): self._perform_cull_test('zero_cull', 50, 19) def test_cull_delete_when_store_empty(self): try: cull_cache = caches['cull'] except InvalidCacheBackendError: self.skipTest("Culling isn't implemented.") old_max_entries = cull_cache._max_entries # Force _cull to delete on first cached record. cull_cache._max_entries = -1 try: cull_cache.set('force_cull_delete', 'value', 1000) self.assertIs(cull_cache.has_key('force_cull_delete'), True) finally: cull_cache._max_entries = old_max_entries def _perform_invalid_key_test(self, key, expected_warning): """ All the builtin backends should warn (except memcached that should error) on keys that would be refused by memcached. This encourages portable caching code without making it too difficult to use production backends with more liberal key rules. Refs #6447. 
""" # mimic custom ``make_key`` method being defined since the default will # never show the below warnings def func(key, *args): return key old_func = cache.key_func cache.key_func = func tests = [ ('add', [key, 1]), ('get', [key]), ('set', [key, 1]), ('incr', [key]), ('decr', [key]), ('touch', [key]), ('delete', [key]), ('get_many', [[key, 'b']]), ('set_many', [{key: 1, 'b': 2}]), ('delete_many', [{key: 1, 'b': 2}]), ] try: for operation, args in tests: with self.subTest(operation=operation): with self.assertWarns(CacheKeyWarning) as cm: getattr(cache, operation)(*args) self.assertEqual(str(cm.warning), expected_warning) finally: cache.key_func = old_func def test_invalid_key_characters(self): # memcached doesn't allow whitespace or control characters in keys. key = 'key with spaces and 清' self._perform_invalid_key_test( key, KEY_ERRORS_WITH_MEMCACHED_MSG % key ) def test_invalid_key_length(self): # memcached limits key length to 250. key = ('a' * 250) + '清' expected_warning = ( 'Cache key will cause errors if used with memcached: ' '%r (longer than %s)' % (key, 250) ) self._perform_invalid_key_test(key, expected_warning) def test_cache_versioning_get_set(self): # set, using default version = 1 cache.set('answer1', 42) self.assertEqual(cache.get('answer1'), 42) self.assertEqual(cache.get('answer1', version=1), 42) self.assertIsNone(cache.get('answer1', version=2)) self.assertIsNone(caches['v2'].get('answer1')) self.assertEqual(caches['v2'].get('answer1', version=1), 42) self.assertIsNone(caches['v2'].get('answer1', version=2)) # set, default version = 1, but manually override version = 2 cache.set('answer2', 42, version=2) self.assertIsNone(cache.get('answer2')) self.assertIsNone(cache.get('answer2', version=1)) self.assertEqual(cache.get('answer2', version=2), 42) self.assertEqual(caches['v2'].get('answer2'), 42) self.assertIsNone(caches['v2'].get('answer2', version=1)) self.assertEqual(caches['v2'].get('answer2', version=2), 42) # v2 set, using default version 
= 2 caches['v2'].set('answer3', 42) self.assertIsNone(cache.get('answer3')) self.assertIsNone(cache.get('answer3', version=1)) self.assertEqual(cache.get('answer3', version=2), 42) self.assertEqual(caches['v2'].get('answer3'), 42) self.assertIsNone(caches['v2'].get('answer3', version=1)) self.assertEqual(caches['v2'].get('answer3', version=2), 42) # v2 set, default version = 2, but manually override version = 1 caches['v2'].set('answer4', 42, version=1) self.assertEqual(cache.get('answer4'), 42) self.assertEqual(cache.get('answer4', version=1), 42) self.assertIsNone(cache.get('answer4', version=2)) self.assertIsNone(caches['v2'].get('answer4')) self.assertEqual(caches['v2'].get('answer4', version=1), 42) self.assertIsNone(caches['v2'].get('answer4', version=2)) def test_cache_versioning_add(self): # add, default version = 1, but manually override version = 2 self.assertIs(cache.add('answer1', 42, version=2), True) self.assertIsNone(cache.get('answer1', version=1)) self.assertEqual(cache.get('answer1', version=2), 42) self.assertIs(cache.add('answer1', 37, version=2), False) self.assertIsNone(cache.get('answer1', version=1)) self.assertEqual(cache.get('answer1', version=2), 42) self.assertIs(cache.add('answer1', 37, version=1), True) self.assertEqual(cache.get('answer1', version=1), 37) self.assertEqual(cache.get('answer1', version=2), 42) # v2 add, using default version = 2 self.assertIs(caches['v2'].add('answer2', 42), True) self.assertIsNone(cache.get('answer2', version=1)) self.assertEqual(cache.get('answer2', version=2), 42) self.assertIs(caches['v2'].add('answer2', 37), False) self.assertIsNone(cache.get('answer2', version=1)) self.assertEqual(cache.get('answer2', version=2), 42) self.assertIs(caches['v2'].add('answer2', 37, version=1), True) self.assertEqual(cache.get('answer2', version=1), 37) self.assertEqual(cache.get('answer2', version=2), 42) # v2 add, default version = 2, but manually override version = 1 self.assertIs(caches['v2'].add('answer3', 42, 
version=1), True) self.assertEqual(cache.get('answer3', version=1), 42) self.assertIsNone(cache.get('answer3', version=2)) self.assertIs(caches['v2'].add('answer3', 37, version=1), False) self.assertEqual(cache.get('answer3', version=1), 42) self.assertIsNone(cache.get('answer3', version=2)) self.assertIs(caches['v2'].add('answer3', 37), True) self.assertEqual(cache.get('answer3', version=1), 42) self.assertEqual(cache.get('answer3', version=2), 37) def test_cache_versioning_has_key(self): cache.set('answer1', 42) # has_key self.assertIs(cache.has_key('answer1'), True) self.assertIs(cache.has_key('answer1', version=1), True) self.assertIs(cache.has_key('answer1', version=2), False) self.assertIs(caches['v2'].has_key('answer1'), False) self.assertIs(caches['v2'].has_key('answer1', version=1), True) self.assertIs(caches['v2'].has_key('answer1', version=2), False) def test_cache_versioning_delete(self): cache.set('answer1', 37, version=1) cache.set('answer1', 42, version=2) self.assertIs(cache.delete('answer1'), True) self.assertIsNone(cache.get('answer1', version=1)) self.assertEqual(cache.get('answer1', version=2), 42) cache.set('answer2', 37, version=1) cache.set('answer2', 42, version=2) self.assertIs(cache.delete('answer2', version=2), True) self.assertEqual(cache.get('answer2', version=1), 37) self.assertIsNone(cache.get('answer2', version=2)) cache.set('answer3', 37, version=1) cache.set('answer3', 42, version=2) self.assertIs(caches['v2'].delete('answer3'), True) self.assertEqual(cache.get('answer3', version=1), 37) self.assertIsNone(cache.get('answer3', version=2)) cache.set('answer4', 37, version=1) cache.set('answer4', 42, version=2) self.assertIs(caches['v2'].delete('answer4', version=1), True) self.assertIsNone(cache.get('answer4', version=1)) self.assertEqual(cache.get('answer4', version=2), 42) def test_cache_versioning_incr_decr(self): cache.set('answer1', 37, version=1) cache.set('answer1', 42, version=2) self.assertEqual(cache.incr('answer1'), 38) 
self.assertEqual(cache.get('answer1', version=1), 38) self.assertEqual(cache.get('answer1', version=2), 42) self.assertEqual(cache.decr('answer1'), 37) self.assertEqual(cache.get('answer1', version=1), 37) self.assertEqual(cache.get('answer1', version=2), 42) cache.set('answer2', 37, version=1) cache.set('answer2', 42, version=2) self.assertEqual(cache.incr('answer2', version=2), 43) self.assertEqual(cache.get('answer2', version=1), 37) self.assertEqual(cache.get('answer2', version=2), 43) self.assertEqual(cache.decr('answer2', version=2), 42) self.assertEqual(cache.get('answer2', version=1), 37) self.assertEqual(cache.get('answer2', version=2), 42) cache.set('answer3', 37, version=1) cache.set('answer3', 42, version=2) self.assertEqual(caches['v2'].incr('answer3'), 43) self.assertEqual(cache.get('answer3', version=1), 37) self.assertEqual(cache.get('answer3', version=2), 43) self.assertEqual(caches['v2'].decr('answer3'), 42) self.assertEqual(cache.get('answer3', version=1), 37) self.assertEqual(cache.get('answer3', version=2), 42) cache.set('answer4', 37, version=1) cache.set('answer4', 42, version=2) self.assertEqual(caches['v2'].incr('answer4', version=1), 38) self.assertEqual(cache.get('answer4', version=1), 38) self.assertEqual(cache.get('answer4', version=2), 42) self.assertEqual(caches['v2'].decr('answer4', version=1), 37) self.assertEqual(cache.get('answer4', version=1), 37) self.assertEqual(cache.get('answer4', version=2), 42) def test_cache_versioning_get_set_many(self): # set, using default version = 1 cache.set_many({'ford1': 37, 'arthur1': 42}) self.assertEqual( cache.get_many(['ford1', 'arthur1']), {'ford1': 37, 'arthur1': 42} ) self.assertEqual( cache.get_many(['ford1', 'arthur1'], version=1), {'ford1': 37, 'arthur1': 42}, ) self.assertEqual(cache.get_many(['ford1', 'arthur1'], version=2), {}) self.assertEqual(caches['v2'].get_many(['ford1', 'arthur1']), {}) self.assertEqual( caches['v2'].get_many(['ford1', 'arthur1'], version=1), {'ford1': 37, 
'arthur1': 42}, ) self.assertEqual( caches['v2'].get_many(['ford1', 'arthur1'], version=2), {} ) # set, default version = 1, but manually override version = 2 cache.set_many({'ford2': 37, 'arthur2': 42}, version=2) self.assertEqual(cache.get_many(['ford2', 'arthur2']), {}) self.assertEqual(cache.get_many(['ford2', 'arthur2'], version=1), {}) self.assertEqual( cache.get_many(['ford2', 'arthur2'], version=2), {'ford2': 37, 'arthur2': 42}, ) self.assertEqual( caches['v2'].get_many(['ford2', 'arthur2']), {'ford2': 37, 'arthur2': 42}, ) self.assertEqual( caches['v2'].get_many(['ford2', 'arthur2'], version=1), {} ) self.assertEqual( caches['v2'].get_many(['ford2', 'arthur2'], version=2), {'ford2': 37, 'arthur2': 42}, ) # v2 set, using default version = 2 caches['v2'].set_many({'ford3': 37, 'arthur3': 42}) self.assertEqual(cache.get_many(['ford3', 'arthur3']), {}) self.assertEqual(cache.get_many(['ford3', 'arthur3'], version=1), {}) self.assertEqual( cache.get_many(['ford3', 'arthur3'], version=2), {'ford3': 37, 'arthur3': 42}, ) self.assertEqual( caches['v2'].get_many(['ford3', 'arthur3']), {'ford3': 37, 'arthur3': 42}, ) self.assertEqual( caches['v2'].get_many(['ford3', 'arthur3'], version=1), {} ) self.assertEqual( caches['v2'].get_many(['ford3', 'arthur3'], version=2), {'ford3': 37, 'arthur3': 42}, ) # v2 set, default version = 2, but manually override version = 1 caches['v2'].set_many({'ford4': 37, 'arthur4': 42}, version=1) self.assertEqual( cache.get_many(['ford4', 'arthur4']), {'ford4': 37, 'arthur4': 42} ) self.assertEqual( cache.get_many(['ford4', 'arthur4'], version=1), {'ford4': 37, 'arthur4': 42}, ) self.assertEqual(cache.get_many(['ford4', 'arthur4'], version=2), {}) self.assertEqual(caches['v2'].get_many(['ford4', 'arthur4']), {}) self.assertEqual( caches['v2'].get_many(['ford4', 'arthur4'], version=1), {'ford4': 37, 'arthur4': 42}, ) self.assertEqual( caches['v2'].get_many(['ford4', 'arthur4'], version=2), {} ) def test_incr_version(self): 
cache.set('answer', 42, version=2) self.assertIsNone(cache.get('answer')) self.assertIsNone(cache.get('answer', version=1)) self.assertEqual(cache.get('answer', version=2), 42) self.assertIsNone(cache.get('answer', version=3)) self.assertEqual(cache.incr_version('answer', version=2), 3) self.assertIsNone(cache.get('answer')) self.assertIsNone(cache.get('answer', version=1)) self.assertIsNone(cache.get('answer', version=2)) self.assertEqual(cache.get('answer', version=3), 42) caches['v2'].set('answer2', 42) self.assertEqual(caches['v2'].get('answer2'), 42) self.assertIsNone(caches['v2'].get('answer2', version=1)) self.assertEqual(caches['v2'].get('answer2', version=2), 42) self.assertIsNone(caches['v2'].get('answer2', version=3)) self.assertEqual(caches['v2'].incr_version('answer2'), 3) self.assertIsNone(caches['v2'].get('answer2')) self.assertIsNone(caches['v2'].get('answer2', version=1)) self.assertIsNone(caches['v2'].get('answer2', version=2)) self.assertEqual(caches['v2'].get('answer2', version=3), 42) with self.assertRaises(ValueError): cache.incr_version('does_not_exist') cache.set('null', None) if self.supports_get_with_default: self.assertEqual(cache.incr_version('null'), 2) else: with self.assertRaises(self.incr_decr_type_error): cache.incr_version('null') def test_decr_version(self): cache.set('answer', 42, version=2) self.assertIsNone(cache.get('answer')) self.assertIsNone(cache.get('answer', version=1)) self.assertEqual(cache.get('answer', version=2), 42) self.assertEqual(cache.decr_version('answer', version=2), 1) self.assertEqual(cache.get('answer'), 42) self.assertEqual(cache.get('answer', version=1), 42) self.assertIsNone(cache.get('answer', version=2)) caches['v2'].set('answer2', 42) self.assertEqual(caches['v2'].get('answer2'), 42) self.assertIsNone(caches['v2'].get('answer2', version=1)) self.assertEqual(caches['v2'].get('answer2', version=2), 42) self.assertEqual(caches['v2'].decr_version('answer2'), 1) 
self.assertIsNone(caches['v2'].get('answer2')) self.assertEqual(caches['v2'].get('answer2', version=1), 42) self.assertIsNone(caches['v2'].get('answer2', version=2)) with self.assertRaises(ValueError): cache.decr_version('does_not_exist', version=2) cache.set('null', None, version=2) if self.supports_get_with_default: self.assertEqual(cache.decr_version('null', version=2), 1) else: with self.assertRaises(self.incr_decr_type_error): cache.decr_version('null', version=2) def test_custom_key_func(self): # Two caches with different key functions aren't visible to each other cache.set('answer1', 42) self.assertEqual(cache.get('answer1'), 42) self.assertIsNone(caches['custom_key'].get('answer1')) self.assertIsNone(caches['custom_key2'].get('answer1')) caches['custom_key'].set('answer2', 42) self.assertIsNone(cache.get('answer2')) self.assertEqual(caches['custom_key'].get('answer2'), 42) self.assertEqual(caches['custom_key2'].get('answer2'), 42) def test_cache_write_unpicklable_object(self): fetch_middleware = FetchFromCacheMiddleware(empty_response) fetch_middleware.cache = cache request = self.factory.get('/cache/test') request._cache_update_cache = True get_cache_data = FetchFromCacheMiddleware( empty_response ).process_request(request) self.assertIsNone(get_cache_data) content = 'Testing cookie serialization.' 
def get_response(req): response = HttpResponse(content) response.set_cookie('foo', 'bar') return response update_middleware = UpdateCacheMiddleware(get_response) update_middleware.cache = cache response = update_middleware(request) get_cache_data = fetch_middleware.process_request(request) self.assertIsNotNone(get_cache_data) self.assertEqual(get_cache_data.content, content.encode()) self.assertEqual(get_cache_data.cookies, response.cookies) UpdateCacheMiddleware(lambda req: get_cache_data)(request) get_cache_data = fetch_middleware.process_request(request) self.assertIsNotNone(get_cache_data) self.assertEqual(get_cache_data.content, content.encode()) self.assertEqual(get_cache_data.cookies, response.cookies) def test_add_fail_on_pickleerror(self): # Shouldn't fail silently if trying to cache an unpicklable type. with self.assertRaises(pickle.PickleError): cache.add('unpicklable', Unpicklable()) def test_set_fail_on_pickleerror(self): with self.assertRaises(pickle.PickleError): cache.set('unpicklable', Unpicklable()) def test_get_or_set(self): self.assertIsNone(cache.get('projector')) self.assertEqual(cache.get_or_set('projector', 42), 42) self.assertEqual(cache.get('projector'), 42) self.assertIsNone(cache.get_or_set('null', None)) if self.supports_get_with_default: # Previous get_or_set() stores None in the cache. self.assertIsNone(cache.get('null', 'default')) else: self.assertEqual(cache.get('null', 'default'), 'default') def test_get_or_set_callable(self): def my_callable(): return 'value' self.assertEqual(cache.get_or_set('mykey', my_callable), 'value') self.assertEqual(cache.get_or_set('mykey', my_callable()), 'value') self.assertIsNone(cache.get_or_set('null', lambda: None)) if self.supports_get_with_default: # Previous get_or_set() stores None in the cache. 
self.assertIsNone(cache.get('null', 'default')) else: self.assertEqual(cache.get('null', 'default'), 'default') def test_get_or_set_version(self): msg = "get_or_set() missing 1 required positional argument: 'default'" self.assertEqual(cache.get_or_set('brian', 1979, version=2), 1979) with self.assertRaisesMessage(TypeError, msg): cache.get_or_set('brian') with self.assertRaisesMessage(TypeError, msg): cache.get_or_set('brian', version=1) self.assertIsNone(cache.get('brian', version=1)) self.assertEqual(cache.get_or_set('brian', 42, version=1), 42) self.assertEqual(cache.get_or_set('brian', 1979, version=2), 1979) self.assertIsNone(cache.get('brian', version=3)) def test_get_or_set_racing(self): with mock.patch( '%s.%s' % (settings.CACHES['default']['BACKEND'], 'add') ) as cache_add: # Simulate cache.add() failing to add a value. In that case, the # default value should be returned. cache_add.return_value = False self.assertEqual(cache.get_or_set('key', 'default'), 'default') class PicklingSideEffect: def __init__(self, cache): self.cache = cache self.locked = False def __getstate__(self): self.locked = self.cache._lock.locked() return {} @override_settings( CACHES=caches_setting_for_tests( BACKEND='diskcache.DjangoCache', ) ) class DiskCacheTests(BaseCacheTests, TestCase): """Specific test cases for diskcache.DjangoCache.""" def setUp(self): super().setUp() self.dirname = tempfile.mkdtemp() # Caches location cannot be modified through override_settings / modify_settings, # hence settings are manipulated directly here and the setting_changed signal # is triggered manually. 
for cache_params in settings.CACHES.values(): cache_params.update({'LOCATION': self.dirname}) setting_changed.send(self.__class__, setting='CACHES', enter=False) def tearDown(self): super().tearDown() cache.close() shutil.rmtree(self.dirname, ignore_errors=True) def test_ignores_non_cache_files(self): fname = os.path.join(self.dirname, 'not-a-cache-file') with open(fname, 'w'): os.utime(fname, None) cache.clear() self.assertTrue( os.path.exists(fname), 'Expected cache.clear to ignore non cache files', ) os.remove(fname) def test_creates_cache_dir_if_nonexistent(self): os.rmdir(self.dirname) cache.set('foo', 'bar') self.assertTrue(os.path.exists(self.dirname)) def test_clear_does_not_remove_cache_dir(self): cache.clear() self.assertTrue( os.path.exists(self.dirname), 'Expected cache.clear to keep the cache dir', ) def test_cache_write_unpicklable_type(self): # This fails if not using the highest pickling protocol on Python 2. cache.set('unpicklable', UnpicklableType()) def test_cull(self): cache.cull() def test_zero_cull(self): pass # DiskCache has its own cull strategy. def test_invalid_key_characters(self): pass # DiskCache supports any Pickle-able value as a cache key. def test_invalid_key_length(self): pass # DiskCache supports any Pickle-able value as a cache key. 
def test_directory(self): self.assertTrue('tmp' in cache.directory) def test_read(self): value = b'abcd' * 2 ** 20 result = cache.set(b'test-key', value) self.assertTrue(result) with cache.read(b'test-key') as reader: self.assertEqual(reader.read(), value) try: with cache.read(b'dne') as reader: error = False except KeyError: error = True self.assertTrue(error) def test_expire(self): cache.clear() cache.set(b'expire-key', 0, timeout=0.05) time.sleep(0.1) self.assertEqual(cache.expire(), 1) self.assertEqual(cache.get(b'expire-key'), None) def test_evict(self): cache.clear() for num in range(100): cache.set(num, num, tag=(num % 4)) self.assertEqual(cache.evict(1), 25) cache.create_tag_index() self.assertEqual(cache.evict(2), 25) cache.drop_tag_index() self.assertEqual(cache.evict(3), 25) for num in range(0, 100, 4): self.assertEqual(cache.get(num), num) def test_pop(self): cache.clear() for num in range(5): cache.set(num, num, timeout=None) self.assertEqual(cache.pop(0), 0) self.assertEqual(cache.pop(0), None) self.assertEqual(cache.pop(0, 1), 1) self.assertEqual(cache.pop(0, default=1), 1) self.assertEqual(cache.pop(1, expire_time=True), (1, None)) self.assertEqual(cache.pop(2, tag=True), (2, None)) self.assertEqual( cache.pop(3, expire_time=True, tag=True), (3, None, None) ) self.assertEqual(cache.pop(4, retry=False), 4) def test_cache(self): subcache = cache.cache('test') directory = os.path.join(cache.directory, 'cache', 'test') self.assertEqual(subcache.directory, directory) def test_deque(self): deque = cache.deque('test') directory = os.path.join(cache.directory, 'deque', 'test') self.assertEqual(deque.directory, directory) def test_index(self): index = cache.index('test') directory = os.path.join(cache.directory, 'index', 'test') self.assertEqual(index.directory, directory) def test_memoize(self): with self.assertRaises(TypeError): @cache.memoize # <-- Missing parens! 
def test(): pass count = 1000 def fibiter(num): alpha, beta = 0, 1 for _ in range(num): alpha, beta = beta, alpha + beta return alpha @cache.memoize() def fibrec(num): if num == 0: return 0 elif num == 1: return 1 else: return fibrec(num - 1) + fibrec(num - 2) cache.stats(enable=True) for value in range(count): self.assertEqual(fibrec(value), fibiter(value)) hits1, misses1 = cache.stats() for value in range(count): self.assertEqual(fibrec(value), fibiter(value)) hits2, misses2 = cache.stats() self.assertEqual(hits2, hits1 + count) self.assertEqual(misses2, misses1) python-diskcache-5.4.0/tests/test_doctest.py000066400000000000000000000013651416346170000212040ustar00rootroot00000000000000import doctest import diskcache.core import diskcache.djangocache import diskcache.fanout import diskcache.persistent import diskcache.recipes def test_core(): failures, _ = doctest.testmod(diskcache.core) assert failures == 0 def test_djangocache(): failures, _ = doctest.testmod(diskcache.djangocache) assert failures == 0 def test_fanout(): failures, _ = doctest.testmod(diskcache.fanout) assert failures == 0 def test_persistent(): failures, _ = doctest.testmod(diskcache.persistent) assert failures == 0 def test_recipes(): failures, _ = doctest.testmod(diskcache.recipes) assert failures == 0 def test_tutorial(): failures, _ = doctest.testfile('../docs/tutorial.rst') assert failures == 0 python-diskcache-5.4.0/tests/test_fanout.py000066400000000000000000000367541416346170000210450ustar00rootroot00000000000000"""Test diskcache.fanout.FanoutCache.""" import collections as co import hashlib import io import os import os.path as op import pickle import shutil import subprocess as sp import tempfile import threading import time import warnings from unittest import mock import pytest import diskcache as dc warnings.simplefilter('error') warnings.simplefilter('ignore', category=dc.EmptyDirWarning) @pytest.fixture def cache(): with dc.FanoutCache() as cache: yield cache 
shutil.rmtree(cache.directory, ignore_errors=True) def test_init(cache): default_settings = dc.DEFAULT_SETTINGS.copy() del default_settings['size_limit'] for key, value in default_settings.items(): assert getattr(cache, key) == value assert cache.size_limit == 2 ** 27 cache.check() for key, value in dc.DEFAULT_SETTINGS.items(): setattr(cache, key, value) cache.check() def test_set_get_delete(cache): for value in range(100): cache.set(value, value) cache.check() for value in range(100): assert cache.get(value) == value cache.check() for value in range(100): assert value in cache cache.check() for value in range(100): assert cache.delete(value) assert cache.delete(100) is False cache.check() for value in range(100): cache[value] = value cache.check() for value in range(100): assert cache[value] == value cache.check() cache.clear() assert len(cache) == 0 cache.check() def test_set_timeout(cache): shards = mock.Mock() shard = mock.Mock() set_func = mock.Mock() shards.__getitem__ = mock.Mock(side_effect=lambda key: shard) shard.set = set_func set_func.side_effect = dc.Timeout with mock.patch.object(cache, '_shards', shards): assert not cache.set(0, 0) def test_touch(cache): assert cache.set(0, None, expire=60) assert cache.touch(0, expire=None) assert cache.touch(0, expire=0) assert not cache.touch(0) def test_touch_timeout(cache): shards = mock.Mock() shard = mock.Mock() touch_func = mock.Mock() shards.__getitem__ = mock.Mock(side_effect=lambda key: shard) shard.touch = touch_func touch_func.side_effect = dc.Timeout with mock.patch.object(cache, '_shards', shards): assert not cache.touch(0) def test_add(cache): assert cache.add(0, 0) assert not cache.add(0, 1) assert cache.get(0) == 0 def test_add_timeout(cache): shards = mock.Mock() shard = mock.Mock() add_func = mock.Mock() shards.__getitem__ = mock.Mock(side_effect=lambda key: shard) shard.add = add_func add_func.side_effect = dc.Timeout with mock.patch.object(cache, '_shards', shards): assert not cache.add(0, 0) 
def stress_add(cache, limit, results): total = 0 for num in range(limit): if cache.add(num, num, retry=True): total += 1 # Stop one thread from running ahead of others. time.sleep(0.001) results.append(total) def test_add_concurrent(): with dc.FanoutCache(shards=1) as cache: results = co.deque() limit = 1000 threads = [ threading.Thread(target=stress_add, args=(cache, limit, results)) for _ in range(16) ] for thread in threads: thread.start() for thread in threads: thread.join() assert sum(results) == limit cache.check() shutil.rmtree(cache.directory, ignore_errors=True) def test_incr(cache): cache.incr('key', delta=3) == 3 def test_incr_timeout(cache): shards = mock.Mock() shard = mock.Mock() incr_func = mock.Mock() shards.__getitem__ = mock.Mock(side_effect=lambda key: shard) shard.incr = incr_func incr_func.side_effect = dc.Timeout with mock.patch.object(cache, '_shards', shards): assert cache.incr('key', 1) is None def test_decr(cache): cache.decr('key', delta=2) == -2 def test_decr_timeout(cache): shards = mock.Mock() shard = mock.Mock() decr_func = mock.Mock() shards.__getitem__ = mock.Mock(side_effect=lambda key: shard) shard.decr = decr_func decr_func.side_effect = dc.Timeout with mock.patch.object(cache, '_shards', shards): assert cache.decr('key', 1) is None def stress_incr(cache, limit): for _ in range(limit): cache.incr(b'key', retry=True) time.sleep(0.001) def test_incr_concurrent(): with dc.FanoutCache(shards=1, timeout=0.001) as cache: count = 16 limit = 50 threads = [ threading.Thread(target=stress_incr, args=(cache, limit)) for _ in range(count) ] for thread in threads: thread.start() for thread in threads: thread.join() assert cache.get(b'key') == count * limit cache.check() shutil.rmtree(cache.directory, ignore_errors=True) def test_getsetdel(cache): values = [ (None, False), ((None,) * 2 ** 10, False), (1234, False), (2 ** 512, False), (56.78, False), (u'hello', False), (u'hello' * 2 ** 10, False), (b'world', False), (b'world' * 2 ** 10, False), 
(io.BytesIO(b'world' * 2 ** 10), True), ] for key, (value, file_like) in enumerate(values): assert cache.set(key, value, read=file_like) assert len(cache) == len(values) for key, (value, file_like) in enumerate(values): if file_like: assert cache[key] == value.getvalue() else: assert cache[key] == value for key, _ in enumerate(values): del cache[key] assert len(cache) == 0 for value, (key, _) in enumerate(values): cache[key] = value assert len(cache) == len(values) for value, (key, _) in enumerate(values): assert cache[key] == value for _, (key, _) in enumerate(values): del cache[key] assert len(cache) == 0 cache.check() def test_get_timeout(cache): cache.set(0, 0) shards = mock.Mock() shard = mock.Mock() get_func = mock.Mock() shards.__getitem__ = mock.Mock(side_effect=lambda key: shard) shard.get = get_func get_func.side_effect = dc.Timeout with mock.patch.object(cache, '_shards', shards): assert cache.get(0) is None def test_pop(cache): for num in range(100): cache[num] = num for num in range(100): assert cache.pop(num) == num def test_pop_timeout(cache): shards = mock.Mock() shard = mock.Mock() pop_func = mock.Mock() shards.__getitem__ = mock.Mock(side_effect=lambda key: shard) shard.pop = pop_func pop_func.side_effect = dc.Timeout with mock.patch.object(cache, '_shards', shards): assert cache.pop(0) is None def test_delete_timeout(cache): shards = mock.Mock() shard = mock.Mock() delete_func = mock.Mock() shards.__getitem__ = mock.Mock(side_effect=lambda key: shard) shard.delete = delete_func delete_func.side_effect = dc.Timeout with mock.patch.object(cache, '_shards', shards): assert not cache.delete(0) def test_delitem(cache): cache[0] = 0 assert cache[0] == 0 del cache[0] def test_delitem_keyerror(cache): with pytest.raises(KeyError): del cache[0] def test_tag_index(cache): assert cache.tag_index == 0 cache.create_tag_index() assert cache.tag_index == 1 cache.drop_tag_index() assert cache.tag_index == 0 def test_read(cache): cache.set(0, b'abcd' * 2 ** 20) 
with cache.read(0) as reader: assert reader is not None def test_read_keyerror(cache): with pytest.raises(KeyError): with cache.read(0): pass def test_getitem_keyerror(cache): with pytest.raises(KeyError): cache[0] def test_expire(cache): cache.reset('cull_limit', 0) for value in range(100): cache.set(value, value, expire=1e-9) assert len(cache) == 100 time.sleep(0.01) cache.reset('cull_limit', 10) assert cache.expire() == 100 def test_evict(cache): colors = ('red', 'blue', 'yellow') for value in range(90): assert cache.set(value, value, tag=colors[value % len(colors)]) assert len(cache) == 90 assert cache.evict('red') == 30 assert len(cache) == 60 assert len(cache.check()) == 0 def test_size_limit_with_files(cache): shards = 8 cache.reset('cull_limit', 0) size_limit = 30 * cache.disk_min_file_size cache.reset('size_limit', size_limit) value = b'foo' * cache.disk_min_file_size for key in range(40 * shards): cache.set(key, value) assert (cache.volume() // shards) > size_limit cache.cull() assert (cache.volume() // shards) <= size_limit def test_size_limit_with_database(cache): shards = 8 cache.reset('cull_limit', 0) size_limit = 2 * cache.disk_min_file_size cache.reset('size_limit', size_limit) value = b'0123456789' * 10 count = size_limit // (8 + len(value)) * shards for key in range(count): cache.set(key, value) assert (cache.volume() // shards) > size_limit cache.cull() assert (cache.volume() // shards) <= size_limit def test_clear(cache): for value in range(100): cache[value] = value assert len(cache) == 100 assert cache.clear() == 100 assert len(cache) == 0 assert len(cache.check()) == 0 def test_remove_timeout(cache): shard = mock.Mock() clear = mock.Mock() shard.clear = clear clear.side_effect = [dc.Timeout(2), 3] with mock.patch.object(cache, '_shards', [shard]): assert cache.clear() == 5 def test_reset_timeout(cache): shard = mock.Mock() reset = mock.Mock() shard.reset = reset reset.side_effect = [dc.Timeout, 0] with mock.patch.object(cache, '_shards', 
[shard]): assert cache.reset('blah', 1) == 0 def test_stats(cache): for value in range(100): cache[value] = value assert cache.stats(enable=True) == (0, 0) for value in range(100): cache[value] for value in range(100, 110): cache.get(value) assert cache.stats(reset=True) == (100, 10) assert cache.stats(enable=False) == (0, 0) for value in range(100): cache[value] for value in range(100, 110): cache.get(value) assert cache.stats() == (0, 0) assert len(cache.check()) == 0 def test_volume(cache): volume = sum(shard.volume() for shard in cache._shards) assert volume == cache.volume() def test_iter(cache): for num in range(100): cache[num] = num assert set(cache) == set(range(100)) def test_iter_expire(cache): """Test iteration with expiration. Iteration does not expire keys. """ cache.reset('cull_limit', 0) for num in range(100): cache.set(num, num, expire=1e-9) time.sleep(0.1) assert set(cache) == set(range(100)) cache.expire() assert set(cache) == set() def test_reversed(cache): for num in range(100): cache[num] = num reverse = list(reversed(cache)) assert list(cache) == list(reversed(reverse)) def test_pickle(cache): for num, val in enumerate('abcde'): cache[val] = num data = pickle.dumps(cache) other = pickle.loads(data) for key in other: assert other[key] == cache[key] def test_memoize(cache): count = 1000 def fibiter(num): alpha, beta = 0, 1 for _ in range(num): alpha, beta = beta, alpha + beta return alpha @cache.memoize(name='fib') def fibrec(num): if num == 0: return 0 elif num == 1: return 1 else: return fibrec(num - 1) + fibrec(num - 2) cache.stats(enable=True) for value in range(count): assert fibrec(value) == fibiter(value) hits1, misses1 = cache.stats() for value in range(count): assert fibrec(value) == fibiter(value) hits2, misses2 = cache.stats() assert hits2 == hits1 + count assert misses2 == misses1 def test_copy(): cache_dir1 = tempfile.mkdtemp() with dc.FanoutCache(cache_dir1) as cache1: for count in range(10): cache1[count] = str(count) for count 
in range(10, 20): cache1[count] = str(count) * int(1e5) cache_dir2 = tempfile.mkdtemp() shutil.rmtree(cache_dir2) shutil.copytree(cache_dir1, cache_dir2) with dc.FanoutCache(cache_dir2) as cache2: for count in range(10): assert cache2[count] == str(count) for count in range(10, 20): assert cache2[count] == str(count) * int(1e5) shutil.rmtree(cache_dir1, ignore_errors=True) shutil.rmtree(cache_dir2, ignore_errors=True) def run(command): print('run$ %r' % command) try: result = sp.check_output(command, stderr=sp.STDOUT) print(result) except sp.CalledProcessError as exc: print(exc.output) raise def test_rsync(): try: run(['rsync', '--version']) except OSError: return # No rsync installed. Skip test. rsync_args = ['rsync', '-a', '--checksum', '--delete', '--stats'] cache_dir1 = tempfile.mkdtemp() + os.sep cache_dir2 = tempfile.mkdtemp() + os.sep # Store some items in cache_dir1. with dc.FanoutCache(cache_dir1) as cache1: for count in range(100): cache1[count] = str(count) for count in range(100, 200): cache1[count] = str(count) * int(1e5) # Rsync cache_dir1 to cache_dir2. run(rsync_args + [cache_dir1, cache_dir2]) # Validate items in cache_dir2. with dc.FanoutCache(cache_dir2) as cache2: for count in range(100): assert cache2[count] == str(count) for count in range(100, 200): assert cache2[count] == str(count) * int(1e5) # Store more items in cache_dir2. with dc.FanoutCache(cache_dir2) as cache2: for count in range(200, 300): cache2[count] = str(count) for count in range(300, 400): cache2[count] = str(count) * int(1e5) # Rsync cache_dir2 to cache_dir1. run(rsync_args + [cache_dir2, cache_dir1]) # Validate items in cache_dir1. 
with dc.FanoutCache(cache_dir1) as cache1: for count in range(100): assert cache1[count] == str(count) for count in range(100, 200): assert cache1[count] == str(count) * int(1e5) for count in range(200, 300): assert cache1[count] == str(count) for count in range(300, 400): assert cache1[count] == str(count) * int(1e5) shutil.rmtree(cache_dir1, ignore_errors=True) shutil.rmtree(cache_dir2, ignore_errors=True) class SHA256FilenameDisk(dc.Disk): def filename(self, key=dc.UNKNOWN, value=dc.UNKNOWN): filename = hashlib.sha256(key).hexdigest()[:32] full_path = op.join(self._directory, filename) return filename, full_path def test_custom_filename_disk(): with dc.FanoutCache(disk=SHA256FilenameDisk) as cache: for count in range(100, 200): key = str(count).encode('ascii') cache[key] = str(count) * int(1e5) disk = SHA256FilenameDisk(cache.directory) for count in range(100, 200): key = str(count).encode('ascii') subdir = '%03d' % (disk.hash(key) % 8) filename = hashlib.sha256(key).hexdigest()[:32] full_path = op.join(cache.directory, subdir, filename) with open(full_path) as reader: content = reader.read() assert content == str(count) * int(1e5) shutil.rmtree(cache.directory, ignore_errors=True) python-diskcache-5.4.0/tests/test_index.py000066400000000000000000000070201416346170000206400ustar00rootroot00000000000000"""Test diskcache.persistent.Index.""" import pickle import shutil import tempfile import pytest import diskcache as dc def rmdir(directory): try: shutil.rmtree(directory) except OSError: pass @pytest.fixture def index(): index = dc.Index() yield index rmdir(index.directory) def test_init(): directory = tempfile.mkdtemp() mapping = {'a': 5, 'b': 4, 'c': 3, 'd': 2, 'e': 1} index = dc.Index(None, mapping) assert index == mapping rmdir(index.directory) del index rmdir(directory) index = dc.Index(directory, mapping) assert index.directory == directory assert index == mapping other = dc.Index(directory) assert other == index del index del other rmdir(directory) index = 
dc.Index(directory, mapping.items()) assert index == mapping del index rmdir(directory) index = dc.Index(directory, a=5, b=4, c=3, d=2, e=1) assert index == mapping def test_getsetdel(index): letters = 'abcde' assert len(index) == 0 for num, key in enumerate(letters): index[key] = num for num, key in enumerate(letters): assert index[key] == num for key in letters: del index[key] assert len(index) == 0 def test_pop(index): letters = 'abcde' assert len(index) == 0 for num, key in enumerate(letters): index[key] = num assert index.pop('a') == 0 assert index.pop('c') == 2 assert index.pop('e') == 4 assert index.pop('b') == 1 assert index.pop('d') == 3 assert len(index) == 0 def test_pop_keyerror(index): with pytest.raises(KeyError): index.pop('a') def test_popitem(index): letters = 'abcde' for num, key in enumerate(letters): index[key] = num assert index.popitem() == ('e', 4) assert index.popitem(last=True) == ('d', 3) assert index.popitem(last=False) == ('a', 0) assert len(index) == 2 def test_popitem_keyerror(index): with pytest.raises(KeyError): index.popitem() def test_setdefault(index): assert index.setdefault('a', 0) == 0 assert index.setdefault('a', 1) == 0 def test_iter(index): letters = 'abcde' for num, key in enumerate(letters): index[key] = num for num, key in enumerate(index): assert index[key] == num def test_reversed(index): letters = 'abcde' for num, key in enumerate(letters): index[key] = num for num, key in enumerate(reversed(index)): assert index[key] == (len(letters) - num - 1) def test_state(index): mapping = {'a': 5, 'b': 4, 'c': 3, 'd': 2, 'e': 1} index.update(mapping) assert index == mapping state = pickle.dumps(index) values = pickle.loads(state) assert values == mapping def test_memoize(index): count = 1000 def fibiter(num): alpha, beta = 0, 1 for _ in range(num): alpha, beta = beta, alpha + beta return alpha @index.memoize() def fibrec(num): if num == 0: return 0 elif num == 1: return 1 else: return fibrec(num - 1) + fibrec(num - 2) 
index._cache.stats(enable=True) for value in range(count): assert fibrec(value) == fibiter(value) hits1, misses1 = index._cache.stats() for value in range(count): assert fibrec(value) == fibiter(value) hits2, misses2 = index._cache.stats() assert hits2 == (hits1 + count) assert misses2 == misses1 def test_repr(index): assert repr(index).startswith('Index(') python-diskcache-5.4.0/tests/test_recipes.py000066400000000000000000000042411416346170000211650ustar00rootroot00000000000000"""Test diskcache.recipes.""" import shutil import threading import time import pytest import diskcache as dc @pytest.fixture def cache(): with dc.Cache() as cache: yield cache shutil.rmtree(cache.directory, ignore_errors=True) def test_averager(cache): nums = dc.Averager(cache, 'nums') for i in range(10): nums.add(i) assert nums.get() == 4.5 assert nums.pop() == 4.5 for i in range(20): nums.add(i) assert nums.get() == 9.5 assert nums.pop() == 9.5 def test_lock(cache): state = {'num': 0} lock = dc.Lock(cache, 'demo') def worker(): state['num'] += 1 with lock: assert lock.locked() state['num'] += 1 time.sleep(0.1) with lock: thread = threading.Thread(target=worker) thread.start() time.sleep(0.1) assert state['num'] == 1 thread.join() assert state['num'] == 2 def test_rlock(cache): state = {'num': 0} rlock = dc.RLock(cache, 'demo') def worker(): state['num'] += 1 with rlock: with rlock: state['num'] += 1 time.sleep(0.1) with rlock: thread = threading.Thread(target=worker) thread.start() time.sleep(0.1) assert state['num'] == 1 thread.join() assert state['num'] == 2 def test_semaphore(cache): state = {'num': 0} semaphore = dc.BoundedSemaphore(cache, 'demo', value=3) def worker(): state['num'] += 1 with semaphore: state['num'] += 1 time.sleep(0.1) semaphore.acquire() semaphore.acquire() with semaphore: thread = threading.Thread(target=worker) thread.start() time.sleep(0.1) assert state['num'] == 1 thread.join() assert state['num'] == 2 semaphore.release() semaphore.release() def 
test_memoize_stampede(cache): state = {'num': 0} @dc.memoize_stampede(cache, 0.1) def worker(num): time.sleep(0.01) state['num'] += 1 return num start = time.time() while (time.time() - start) < 1: worker(100) assert state['num'] > 0 python-diskcache-5.4.0/tests/timings_core_p1.txt000066400000000000000000000073251416346170000217530ustar00rootroot00000000000000 ========= ========= ========= ========= ========= ========= ========= ========= Timings for diskcache.Cache ------------------------------------------------------------------------------- Action Count Miss Median P90 P99 Max Total ========= ========= ========= ========= ========= ========= ========= ========= get 89115 8714 19.073us 25.749us 32.902us 115.395us 1.800s set 8941 0 114.918us 137.091us 241.041us 4.946ms 1.242s delete 943 111 87.976us 149.202us 219.824us 4.795ms 120.738ms Total 98999 3.163s ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= Timings for diskcache.FanoutCache(shards=4, timeout=1.0) ------------------------------------------------------------------------------- Action Count Miss Median P90 P99 Max Total ========= ========= ========= ========= ========= ========= ========= ========= get 89115 8714 21.935us 27.180us 36.001us 129.938us 2.028s set 8941 0 118.017us 170.946us 270.844us 5.129ms 1.307s delete 943 111 91.791us 153.780us 231.981us 4.883ms 119.732ms Total 98999 3.455s ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= Timings for diskcache.FanoutCache(shards=8, timeout=0.010) ------------------------------------------------------------------------------- Action Count Miss Median P90 P99 Max Total ========= ========= ========= ========= ========= ========= ========= ========= get 89115 8714 20.981us 27.180us 35.286us 128.031us 2.023s set 8941 0 116.825us 175.953us 
269.175us 5.248ms 1.367s delete 943 111 91.791us 158.787us 235.345us 4.634ms 106.991ms Total 98999 3.496s ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= Timings for pylibmc.Client ------------------------------------------------------------------------------- Action Count Miss Median P90 P99 Max Total ========= ========= ========= ========= ========= ========= ========= ========= get 89115 8714 42.915us 62.227us 79.155us 166.178us 3.826s set 8941 0 44.107us 63.896us 82.254us 121.832us 396.247ms delete 943 111 41.962us 60.797us 75.817us 92.983us 39.570ms Total 98999 4.262s ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= Timings for redis.StrictRedis ------------------------------------------------------------------------------- Action Count Miss Median P90 P99 Max Total ========= ========= ========= ========= ========= ========= ========= ========= get 89115 8714 86.069us 101.089us 144.005us 805.140us 7.722s set 8941 0 89.169us 104.189us 146.866us 408.173us 800.963ms delete 943 111 86.069us 99.182us 149.012us 327.826us 80.976ms Total 98999 8.604s ========= ========= ========= ========= ========= ========= ========= ========= python-diskcache-5.4.0/tests/timings_core_p8.txt000066400000000000000000000073171416346170000217630ustar00rootroot00000000000000 ========= ========= ========= ========= ========= ========= ========= ========= Timings for diskcache.Cache ------------------------------------------------------------------------------- Action Count Miss Median P90 P99 Max Total ========= ========= ========= ========= ========= ========= ========= ========= get 712612 69147 20.027us 28.133us 45.061us 2.792ms 15.838s set 71464 0 129.700us 1.388ms 35.831ms 1.342s 160.708s delete 7916 769 97.036us 1.340ms 21.605ms 837.003ms 13.551s Total 791992 
194.943s ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= Timings for diskcache.FanoutCache(shards=4, timeout=1.0) ------------------------------------------------------------------------------- Action Count Miss Median P90 P99 Max Total ========= ========= ========= ========= ========= ========= ========= ========= get 712612 70432 27.895us 48.876us 77.963us 12.945ms 25.443s set 71464 0 176.907us 1.416ms 9.385ms 183.997ms 65.606s delete 7916 747 132.084us 1.354ms 9.272ms 86.189ms 6.576s Total 791992 98.248s ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= Timings for diskcache.FanoutCache(shards=8, timeout=0.010) ------------------------------------------------------------------------------- Action Count Miss Median P90 P99 Max Total ========= ========= ========= ========= ========= ========= ========= ========= get 712612 69622 41.962us 71.049us 96.083us 16.896ms 36.145s set 71464 39 257.969us 1.456ms 7.132ms 19.774ms 46.160s delete 7916 773 190.020us 1.377ms 5.927ms 12.939ms 4.442s Total 791992 86.799s ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= Timings for pylibmc.Client ------------------------------------------------------------------------------- Action Count Miss Median P90 P99 Max Total ========= ========= ========= ========= ========= ========= ========= ========= get 712612 70517 95.844us 113.010us 131.130us 604.153us 69.024s set 71464 0 97.036us 114.918us 136.137us 608.921us 7.024s delete 7916 817 94.891us 112.057us 132.084us 604.153us 760.844ms Total 791992 76.809s ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= 
========= Timings for redis.StrictRedis ------------------------------------------------------------------------------- Action Count Miss Median P90 P99 Max Total ========= ========= ========= ========= ========= ========= ========= ========= get 712612 70540 187.874us 244.141us 305.891us 1.416ms 138.516s set 71464 0 192.881us 249.147us 311.136us 1.363ms 14.246s delete 7916 825 185.966us 242.949us 305.176us 519.276us 1.525s Total 791992 154.287s ========= ========= ========= ========= ========= ========= ========= ========= python-diskcache-5.4.0/tests/timings_djangocache.txt000066400000000000000000000071561416346170000226530ustar00rootroot00000000000000 ========= ========= ========= ========= ========= ========= ========= ========= Timings for locmem ------------------------------------------------------------------------------- Action Count Miss Median P90 P99 Max Total ========= ========= ========= ========= ========= ========= ========= ========= get 712770 141094 34.809us 47.922us 55.075us 15.140ms 26.159s set 71249 0 38.862us 41.008us 59.843us 8.094ms 2.725s delete 7973 0 32.902us 35.048us 51.260us 2.963ms 257.951ms Total 791992 29.142s ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= Timings for memcached ------------------------------------------------------------------------------- Action Count Miss Median P90 P99 Max Total ========= ========= ========= ========= ========= ========= ========= ========= get 712770 71873 102.043us 118.017us 182.867us 2.054ms 73.453s set 71249 0 104.904us 123.978us 182.152us 836.849us 7.592s delete 7973 0 98.944us 114.918us 176.191us 473.261us 795.398ms Total 791992 81.841s ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= Timings for redis 
------------------------------------------------------------------------------- Action Count Miss Median P90 P99 Max Total ========= ========= ========= ========= ========= ========= ========= ========= get 712770 71694 214.100us 267.982us 358.820us 1.556ms 155.709s set 71249 0 230.789us 284.195us 377.178us 1.462ms 16.764s delete 7973 790 195.742us 251.770us 345.945us 1.105ms 1.596s Total 791992 174.069s ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= Timings for diskcache ------------------------------------------------------------------------------- Action Count Miss Median P90 P99 Max Total ========= ========= ========= ========= ========= ========= ========= ========= get 712770 70909 55.075us 82.016us 106.096us 36.816ms 44.088s set 71249 0 303.984us 1.489ms 6.499ms 39.687ms 49.088s delete 7973 0 228.882us 1.409ms 5.769ms 24.750ms 4.755s Total 791992 98.465s ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= ========= Timings for filebased ------------------------------------------------------------------------------- Action Count Miss Median P90 P99 Max Total ========= ========= ========= ========= ========= ========= ========= ========= get 712792 112290 114.918us 161.171us 444.889us 61.068ms 94.438s set 71268 0 11.289ms 13.278ms 16.653ms 108.282ms 809.448s delete 7977 0 432.014us 675.917us 5.785ms 55.249ms 3.652s Total 791992 907.537s ========= ========= ========= ========= ========= ========= ========= ========= python-diskcache-5.4.0/tests/timings_glob.txt000066400000000000000000000004661416346170000213450ustar00rootroot00000000000000 ============ ============ Timings for glob.glob1 ------------------------- Count Time ============ ============ 1 1.602ms 10 2.213ms 100 8.946ms 1000 65.869ms 10000 604.972ms 100000 6.450s ============ ============ 
python-diskcache-5.4.0/tests/utils.py000066400000000000000000000044711416346170000176410ustar00rootroot00000000000000import os import subprocess as sp def percentile(sequence, percent): if not sequence: return None values = sorted(sequence) if percent == 0: return values[0] pos = int(len(values) * percent) - 1 return values[pos] def secs(value): units = ['s ', 'ms', 'us', 'ns'] if value is None: return ' 0.000ns' elif value == 0: return ' 0.000ns' else: for unit in units: if value > 1: return '%7.3f' % value + unit else: value *= 1000 def run(*args): """Run command, print output, and return output.""" print('utils$', *args) result = sp.check_output(args) print(result) return result.strip() def mount_ramdisk(size, path): """Mount RAM disk at `path` with `size` in bytes.""" sectors = size / 512 os.makedirs(path) dev_path = run('hdid', '-nomount', 'ram://%d' % sectors) run('newfs_hfs', '-v', 'RAMdisk', dev_path) run('mount', '-o', 'noatime', '-t', 'hfs', dev_path, path) return dev_path def unmount_ramdisk(dev_path, path): """Unmount RAM disk with `dev_path` and `path`.""" run('umount', path) run('diskutil', 'eject', dev_path) run('rm', '-r', path) def display(name, timings): cols = ('Action', 'Count', 'Miss', 'Median', 'P90', 'P99', 'Max', 'Total') template = ' '.join(['%9s'] * len(cols)) print() print(' '.join(['=' * 9] * len(cols))) print('Timings for %s' % name) print('-'.join(['-' * 9] * len(cols))) print(template % cols) print(' '.join(['=' * 9] * len(cols))) len_total = sum_total = 0 for action in ['get', 'set', 'delete']: values = timings[action] len_total += len(values) sum_total += sum(values) print( template % ( action, len(values), len(timings.get(action + '-miss', [])), secs(percentile(values, 0.5)), secs(percentile(values, 0.9)), secs(percentile(values, 0.99)), secs(percentile(values, 1.0)), secs(sum(values)), ) ) totals = ('Total', len_total, '', '', '', '', '', secs(sum_total)) print(template % totals) print(' '.join(['=' * 9] * len(cols))) print() 
python-diskcache-5.4.0/tox.ini000066400000000000000000000041361416346170000162760ustar00rootroot00000000000000[tox] envlist=bluecheck,doc8,docs,isortcheck,flake8,mypy,pylint,rstcheck,py36,py37,py38,py39 skip_missing_interpreters=True [testenv] commands=pytest deps= django==3.2.* pytest pytest-cov pytest-django pytest-xdist setenv= DJANGO_SETTINGS_MODULE=tests.settings PYTHONPATH={toxinidir} [testenv:blue] commands=blue {toxinidir}/setup.py {toxinidir}/diskcache {toxinidir}/tests deps=blue [testenv:bluecheck] commands=blue --check {toxinidir}/setup.py {toxinidir}/diskcache {toxinidir}/tests deps=blue [testenv:doc8] deps=doc8 commands=doc8 docs --ignore-path docs/_build [testenv:docs] allowlist_externals=make changedir=docs commands=make html deps= django==3.2.* sphinx [testenv:flake8] commands=flake8 {toxinidir}/setup.py {toxinidir}/diskcache {toxinidir}/tests deps=flake8 [testenv:isort] commands=isort {toxinidir}/setup.py {toxinidir}/diskcache {toxinidir}/tests deps=isort [testenv:isortcheck] commands=isort --check {toxinidir}/setup.py {toxinidir}/diskcache {toxinidir}/tests deps=isort [testenv:mypy] commands=mypy {toxinidir}/diskcache deps=mypy [testenv:pylint] commands=pylint {toxinidir}/diskcache deps= django==3.2.* pylint [testenv:rstcheck] commands=rstcheck {toxinidir}/README.rst deps=rstcheck [testenv:uploaddocs] allowlist_externals=rsync changedir=docs commands= rsync -azP --stats --delete _build/html/ \ grantjenks.com:/srv/www/www.grantjenks.com/public/docs/diskcache/ [isort] multi_line_output = 3 include_trailing_comma = True force_grid_wrap = 0 use_parentheses = True ensure_newline_before_comments = True line_length = 79 [pytest] addopts= -n auto --cov-branch --cov-fail-under=98 --cov-report=term-missing --cov=diskcache --doctest-glob="*.rst" --ignore docs/case-study-web-crawler.rst --ignore docs/sf-python-2017-meetup-talk.rst --ignore tests/benchmark_core.py --ignore tests/benchmark_djangocache.py --ignore tests/benchmark_glob.py --ignore 
tests/issue_85.py --ignore tests/plot.py [doc8] # ignore=D000 [flake8] exclude=tests/test_djangocache.py extend-ignore=E203 max-line-length=120