requests-cache-0.4.13/0000775000175000017500000000000013027170047017126 5ustar rharitonovrharitonov00000000000000requests-cache-0.4.13/example.py0000664000175000017500000000134713027167133021142 0ustar rharitonovrharitonov00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- import time import requests import requests_cache requests_cache.install_cache('example_cache') def main(): # Once cached, delayed page will be taken from cache # redirects also handled for i in range(5): requests.get('http://httpbin.org/delay/2') r = requests.get('http://httpbin.org/redirect/5') print(r.text) # And if we need to get fresh page or don't want to cache it? with requests_cache.disabled(): print(requests.get('http://httpbin.org/ip').text) # Debugging info about cache print(requests_cache.get_cache()) if __name__ == "__main__": t = time.time() main() print('Elapsed: %.3f seconds' % (time.time() - t))requests-cache-0.4.13/setup.py0000664000175000017500000000251413027167767020661 0ustar rharitonovrharitonov00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- import sys import os import glob try: from setuptools import setup except ImportError: from distutils.core import setup if sys.argv[-1] == 'test': os.chdir('tests') for test in glob.glob('*.py'): os.system('python %s' % test) sys.exit() setup( name='requests-cache', packages=['requests_cache', 'requests_cache.backends', 'requests_cache.backends.storage'], version='0.4.13', description='Persistent cache for requests library', author='Roman Haritonov', author_email='reclosedev@gmail.com', url='https://github.com/reclosedev/requests-cache', install_requires=['requests>=1.1.0'], keywords=['requests', 'cache', 'persistence'], license='BSD License', include_package_data=True, classifiers=[ 'License :: OSI Approved :: BSD License', 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'Topic :: Software Development :: Libraries :: Python Modules', 'Programming Language :: Python', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', ], long_description=open('README.rst').read() + '\n\n' + open('HISTORY.rst').read(), ) requests-cache-0.4.13/MANIFEST.in0000664000175000017500000000036713027167133020674 0ustar rharitonovrharitonov00000000000000include LICENSE README.rst HISTORY.rst CONTRIBUTORS.rst include *.py recursive-include tests *.py recursive-include examples *.py recursive-include docs * recursive-exclude docs *.pyc recursive-exclude docs *.pyo prune docs/_build prune temp/* requests-cache-0.4.13/requests_cache.egg-info/0000775000175000017500000000000013027170047023616 5ustar rharitonovrharitonov00000000000000requests-cache-0.4.13/requests_cache.egg-info/requires.txt0000664000175000017500000000001713027170047026214 0ustar rharitonovrharitonov00000000000000requests>=1.1.0requests-cache-0.4.13/requests_cache.egg-info/top_level.txt0000664000175000017500000000001713027170047026346 0ustar rharitonovrharitonov00000000000000requests_cache requests-cache-0.4.13/requests_cache.egg-info/dependency_links.txt0000664000175000017500000000000113027170047027664 0ustar rharitonovrharitonov00000000000000 requests-cache-0.4.13/requests_cache.egg-info/SOURCES.txt0000664000175000017500000000200413027170047025476 0ustar rharitonovrharitonov00000000000000CONTRIBUTORS.rst HISTORY.rst LICENSE MANIFEST.in README.rst example.py sandbox.py setup.cfg setup.py docs/Makefile docs/api.rst docs/conf.py docs/index.rst docs/make.bat docs/user_guide.rst docs/_static/.empty docs/_templates/.empty 
requests_cache/__init__.py
requests_cache/compat.py
requests_cache/core.py
requests_cache.egg-info/PKG-INFO
requests_cache.egg-info/SOURCES.txt
requests_cache.egg-info/dependency_links.txt
requests_cache.egg-info/requires.txt
requests_cache.egg-info/top_level.txt
requests_cache/backends/__init__.py
requests_cache/backends/base.py
requests_cache/backends/mongo.py
requests_cache/backends/redis.py
requests_cache/backends/sqlite.py
requests_cache/backends/storage/__init__.py
requests_cache/backends/storage/dbdict.py
requests_cache/backends/storage/mongodict.py
requests_cache/backends/storage/redisdict.py
tests/__init__.py
tests/test_cache.py
tests/test_custom_dict.py
tests/test_dbdict.py
tests/test_mongodict.py
tests/test_monkey_patch.py
tests/test_redisdict.py
tests/test_thread_safety.py
requests-cache-0.4.13/requests_cache.egg-info/PKG-INFO0000664000175000017500000001562013027170047024717 0ustar rharitonovrharitonov00000000000000Metadata-Version: 1.1
Name: requests-cache
Version: 0.4.13
Summary: Persistent cache for requests library
Home-page: https://github.com/reclosedev/requests-cache
Author: Roman Haritonov
Author-email: reclosedev@gmail.com
License: BSD License
Description: requests-cache
        ---------------

Requests-cache is a transparent, persistent cache for the requests_ library (version >= 1.1.0).

.. _requests: http://python-requests.org/

.. image:: https://travis-ci.org/reclosedev/requests-cache.svg?branch=master
    :target: https://travis-ci.org/reclosedev/requests-cache

.. image:: https://coveralls.io/repos/reclosedev/requests-cache/badge.svg?branch=master&service=github
    :target: https://coveralls.io/github/reclosedev/requests-cache?branch=master

Usage example
-------------

Just write:

.. code-block:: python

    import requests
    import requests_cache

    requests_cache.install_cache('demo_cache')

And all responses, including headers and cookies, will be transparently cached to the `demo_cache.sqlite` database. For example, the following code will take only 1-2 seconds instead of 10, and will run instantly on the next launch:

.. code-block:: python

    for i in range(10):
        requests.get('http://httpbin.org/delay/1')

This is useful when you are writing a simple data scraper with constantly changing parsing logic or data format and don't want to redownload pages or write complex error handling and persistence.

Note
----
``requests-cache`` ignores all cache headers; it simply caches the data for the time you specify. If you need a library that knows how to use HTTP headers and status codes, take a look at httpcache and CacheControl.

Links
-----

- **Documentation** at readthedocs.org
- **Source code and issue tracking** at `GitHub <https://github.com/reclosedev/requests-cache>`_.
- **Working example** at Real Python.
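For quick reference, here is a minimal snippet combining cache installation with an expiration time (a sketch based on the user guide below; the cache name is illustrative):

.. code-block:: python

    import requests_cache

    # Cache responses in demo_cache.sqlite and expire them after 300 seconds
    requests_cache.install_cache('demo_cache', expire_after=300)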
.. :changelog:

History
-------

0.4.13 (2016-12-23)
+++++++++++++++++++
* Support PyMongo 3, thanks to @craigls #72
* Fix streaming-related issue #68

0.4.12 (2016-03-19)
+++++++++++++++++++
* Fix ability to pass a backend instance in ``install_cache`` #61

0.4.11 (2016-03-07)
+++++++++++++++++++
* ``ignore_parameters`` feature, thanks to @themiurgo and @YetAnotherNerd (#52, #55)
* More informative message for missing backend dependencies, thanks to @Garrett-R (#60)

0.4.10 (2015-04-28)
+++++++++++++++++++
* Better transactional handling in sqlite #50, thanks to @rgant
* Compatibility with streaming in requests >= 2.6.x

0.4.9 (2015-01-17)
++++++++++++++++++
* ``expire_after`` now also accepts ``timedelta``, thanks to @femtotrader
* Added ability to include headers in the cache key (``include_get_headers`` option)
* Added string representation for ``CachedSession``

0.4.8 (2014-12-13)
++++++++++++++++++
* Fix bug in reading cached streaming response

0.4.7 (2014-12-06)
++++++++++++++++++
* Fix compatibility with Requests > 2.4.1 (json arg, response history)

0.4.6 (2014-10-13)
++++++++++++++++++
* Monkey patch now uses a class instead of a lambda (compatibility with rauth)
* Normalize (sort) parameters passed as builtin dict

0.4.5 (2014-08-22)
++++++++++++++++++
* Requests==2.3.0 compatibility, thanks to @gwillem

0.4.4 (2013-10-31)
++++++++++++++++++
* Check for backend availability in install_cache(), not at the first request
* Default storage falls back to memory if ``sqlite`` is not available

0.4.3 (2013-09-12)
++++++++++++++++++
* Fix ``response.from_cache`` not set in hooks

0.4.2 (2013-08-25)
++++++++++++++++++
* Fix ``UnpickleableError`` for gzip responses

0.4.1 (2013-08-19)
++++++++++++++++++
* ``requests_cache.enabled()`` context manager
* Compatibility with Requests 1.2.3 cookies handling

0.4.0 (2013-04-25)
++++++++++++++++++
* Redis backend. Thanks to @michaelbeaumont
* Fix for changes in Requests 1.2.0 hooks dispatching

0.3.0 (2013-02-24)
++++++++++++++++++
* Support for ``Requests`` 1.x.x
* ``CachedSession``
* Many backward-incompatible changes

0.2.1 (2013-01-13)
++++++++++++++++++
* Fix broken PyPI package

0.2.0 (2013-01-12)
++++++++++++++++++
* Last backward-compatible version for ``Requests`` 0.14.2

0.1.3 (2012-05-04)
++++++++++++++++++
* Thread safety for default ``sqlite`` backend
* Take POST parameters into account when the cache is configured with 'POST' in ``allowable_methods``

0.1.2 (2012-05-02)
++++++++++++++++++
* Reduce number of ``sqlite`` database write operations
* ``fast_save`` option for ``sqlite`` backend

0.1.1 (2012-04-11)
++++++++++++++++++
* Fix: restore responses from response.history
* Internal refactoring (``MemoryCache`` -> ``BaseCache``, ``reduce_response`` and ``restore_response`` moved to ``BaseCache``)
* ``connection`` option for ``MongoCache``

0.1.0 (2012-04-10)
++++++++++++++++++
* Initial PyPI release

Keywords: requests,cache,persistence
Platform: UNKNOWN
Classifier: License :: OSI Approved :: BSD License
Classifier: Development Status :: 4 - Beta
Classifier: Intended Audience :: Developers
Classifier: Topic :: Software Development :: Libraries :: Python Modules
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 2.6
Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3
requests-cache-0.4.13/setup.cfg0000664000175000017500000000013013027170047020741 0ustar rharitonovrharitonov00000000000000[bdist_wheel]
universal = 1

[egg_info]
tag_build = 
tag_date = 0
tag_svn_revision = 0

requests-cache-0.4.13/README.rst0000664000175000017500000000340413027167133020620 0ustar rharitonovrharitonov00000000000000requests-cache
---------------

Requests-cache is a transparent, persistent cache for the requests_ library (version >= 1.1.0).

.. _requests: http://python-requests.org/

.. image:: https://travis-ci.org/reclosedev/requests-cache.svg?branch=master
    :target: https://travis-ci.org/reclosedev/requests-cache

.. image:: https://coveralls.io/repos/reclosedev/requests-cache/badge.svg?branch=master&service=github
    :target: https://coveralls.io/github/reclosedev/requests-cache?branch=master

Usage example
-------------

Just write:

.. code-block:: python

    import requests
    import requests_cache

    requests_cache.install_cache('demo_cache')

And all responses, including headers and cookies, will be transparently cached to the `demo_cache.sqlite` database. For example, the following code will take only 1-2 seconds instead of 10, and will run instantly on the next launch:

.. code-block:: python

    for i in range(10):
        requests.get('http://httpbin.org/delay/1')

This is useful when you are writing a simple data scraper with constantly changing parsing logic or data format and don't want to redownload pages or write complex error handling and persistence.

Note
----
``requests-cache`` ignores all cache headers; it simply caches the data for the time you specify. If you need a library that knows how to use HTTP headers and status codes, take a look at httpcache and CacheControl.

Links
-----

- **Documentation** at readthedocs.org
- **Source code and issue tracking** at `GitHub <https://github.com/reclosedev/requests-cache>`_.
- **Working example** at Real Python.

requests-cache-0.4.13/LICENSE0000664000175000017500000000245013027167133020136 0ustar rharitonovrharitonov00000000000000Copyright (c) 2012, Roman Haritonov
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.requests-cache-0.4.13/docs/0000775000175000017500000000000013027170047020056 5ustar rharitonovrharitonov00000000000000requests-cache-0.4.13/docs/api.rst0000664000175000017500000000164713027167133021373 0ustar rharitonovrharitonov00000000000000API === This part of the documentation covers all the interfaces of `requests-cache` Public api ---------- .. automodule:: requests_cache.core :members: ---------------------------------------------- .. _cache_backends: Cache backends -------------- .. automodule:: requests_cache.backends.base :members: .. _backends_sqlite: .. automodule:: requests_cache.backends.sqlite :members: .. _backends_mongo: .. automodule:: requests_cache.backends.mongo :members: .. _backends_redis: .. automodule:: requests_cache.backends.redis :members: ---------------------------------------------- Internal modules which can be used outside ------------------------------------------ .. _backends_dbdict: .. automodule:: requests_cache.backends.storage.dbdict :members: .. automodule:: requests_cache.backends.storage.mongodict :members: .. automodule:: requests_cache.backends.storage.redisdict :members: requests-cache-0.4.13/docs/conf.py0000664000175000017500000002037513027167133021366 0ustar rharitonovrharitonov00000000000000# -*- coding: utf-8 -*- # # requests-cache documentation build configuration file, created by # sphinx-quickstart on Sun Apr 08 20:51:24 2012. # # This file is execfile()d with the current directory set to its containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. import sys, os # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. 
sys.path.insert(0, os.path.abspath('..')) class Mock(object): def __init__(self, *args, **kwargs): pass def __call__(self, *args, **kwargs): return Mock() @classmethod def __getattr__(self, name): if name in ('__file__', '__path__'): return os.devnull elif name[0] == name[0].upper(): return type(name, (), {}) else: return Mock() MOCK_MODULES = ['pymongo', 'redis'] for mod_name in MOCK_MODULES: sys.modules[mod_name] = Mock() from requests_cache import __version__ # -- General configuration ----------------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. #needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest'] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. #source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. project = u'requests-cache' copyright = u'2012, Roman Haritonov' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. version = __version__ # The full version, including alpha/beta/rc tags. release = __version__ # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. #language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. #today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = ['_build'] # The reST default role (used for this markup: `text`) to use for all documents. #default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. #add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). #add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. #show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. #modindex_common_prefix = [] # -- Options for HTML output --------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. html_theme = 'default' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. #html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. #html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". #html_title = None # A shorter title for the navigation bar. Default is the same as html_title. #html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. 
#html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. #html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. #html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. #html_use_smartypants = True # Custom sidebar templates, maps document names to template names. #html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. #html_additional_pages = {} # If false, no module index is generated. #html_domain_indices = True # If false, no index is generated. #html_use_index = True # If true, the index is split into individual pages for each letter. #html_split_index = False # If true, links to the reST sources are added to the pages. #html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. #html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. #html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. #html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). #html_file_suffix = None # Output file base name for HTML help builder. htmlhelp_basename = 'requests-cachedoc' # -- Options for LaTeX output -------------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). #'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). #'pointsize': '10pt', # Additional stuff for the LaTeX preamble. #'preamble': '', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). latex_documents = [ ('index', 'requests-cache.tex', u'requests-cache Documentation', u'Roman Haritonov', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. #latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. #latex_use_parts = False # If true, show page references after internal links. #latex_show_pagerefs = False # If true, show URL addresses after external links. #latex_show_urls = False # Documents to append as an appendix to all manuals. #latex_appendices = [] # If false, no module index is generated. #latex_domain_indices = True # -- Options for manual page output -------------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ ('index', 'requests-cache', u'requests-cache Documentation', [u'Roman Haritonov'], 1) ] # If true, show URL addresses after external links. #man_show_urls = False # -- Options for Texinfo output ------------------------------------------------ # Grouping the document tree into Texinfo files. 
List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ ('index', 'requests-cache', u'requests-cache Documentation', u'Roman Haritonov', 'requests-cache', 'One line description of project.', 'Miscellaneous'), ] # Documents to append as an appendix to all manuals. #texinfo_appendices = [] # If false, no module index is generated. #texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. #texinfo_show_urls = 'footnote' autoclass_content = 'both' autodoc_member_order = 'bysource'requests-cache-0.4.13/docs/make.bat0000664000175000017500000001177013027167133021473 0ustar rharitonovrharitonov00000000000000@ECHO OFF REM Command file for Sphinx documentation if "%SPHINXBUILD%" == "" ( set SPHINXBUILD=sphinx-build ) set BUILDDIR=_build set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . set I18NSPHINXOPTS=%SPHINXOPTS% . if NOT "%PAPER%" == "" ( set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% ) if "%1" == "" goto help if "%1" == "help" ( :help echo.Please use `make ^` where ^ is one of echo. html to make standalone HTML files echo. dirhtml to make HTML files named index.html in directories echo. singlehtml to make a single large HTML file echo. pickle to make pickle files echo. json to make JSON files echo. htmlhelp to make HTML files and a HTML help project echo. qthelp to make HTML files and a qthelp project echo. devhelp to make HTML files and a Devhelp project echo. epub to make an epub echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter echo. text to make text files echo. man to make manual pages echo. texinfo to make Texinfo files echo. gettext to make PO message catalogs echo. changes to make an overview over all changed/added/deprecated items echo. linkcheck to check all external links for integrity echo. doctest to run all doctests embedded in the documentation if enabled goto end ) if "%1" == "clean" ( for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i del /q /s %BUILDDIR%\* goto end ) if "%1" == "html" ( %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/html. goto end ) if "%1" == "dirhtml" ( %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. goto end ) if "%1" == "singlehtml" ( %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. goto end ) if "%1" == "pickle" ( %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can process the pickle files. goto end ) if "%1" == "json" ( %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can process the JSON files. goto end ) if "%1" == "htmlhelp" ( %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can run HTML Help Workshop with the ^ .hhp project file in %BUILDDIR%/htmlhelp. goto end ) if "%1" == "qthelp" ( %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp if errorlevel 1 exit /b 1 echo. 
echo.Build finished; now you can run "qcollectiongenerator" with the ^ .qhcp project file in %BUILDDIR%/qthelp, like this: echo.^> qcollectiongenerator %BUILDDIR%\qthelp\requests-cache.qhcp echo.To view the help file: echo.^> assistant -collectionFile %BUILDDIR%\qthelp\requests-cache.ghc goto end ) if "%1" == "devhelp" ( %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp if errorlevel 1 exit /b 1 echo. echo.Build finished. goto end ) if "%1" == "epub" ( %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub if errorlevel 1 exit /b 1 echo. echo.Build finished. The epub file is in %BUILDDIR%/epub. goto end ) if "%1" == "latex" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex if errorlevel 1 exit /b 1 echo. echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. goto end ) if "%1" == "text" ( %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text if errorlevel 1 exit /b 1 echo. echo.Build finished. The text files are in %BUILDDIR%/text. goto end ) if "%1" == "man" ( %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man if errorlevel 1 exit /b 1 echo. echo.Build finished. The manual pages are in %BUILDDIR%/man. goto end ) if "%1" == "texinfo" ( %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo if errorlevel 1 exit /b 1 echo. echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. goto end ) if "%1" == "gettext" ( %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale if errorlevel 1 exit /b 1 echo. echo.Build finished. The message catalogs are in %BUILDDIR%/locale. goto end ) if "%1" == "changes" ( %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes if errorlevel 1 exit /b 1 echo. echo.The overview file is in %BUILDDIR%/changes. goto end ) if "%1" == "linkcheck" ( %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck if errorlevel 1 exit /b 1 echo. echo.Link check complete; look for any errors in the above output ^ or in %BUILDDIR%/linkcheck/output.txt. goto end ) if "%1" == "doctest" ( %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest if errorlevel 1 exit /b 1 echo. echo.Testing of doctests in the sources finished, look at the ^ results in %BUILDDIR%/doctest/output.txt. goto end ) :end requests-cache-0.4.13/docs/Makefile0000664000175000017500000001273413027167133021527 0ustar rharitonovrharitonov00000000000000# Makefile for Sphinx documentation # # You can set these variables from the command line. SPHINXOPTS = SPHINXBUILD = sphinx-build PAPER = BUILDDIR = _build # Internal variables. PAPEROPT_a4 = -D latex_paper_size=a4 PAPEROPT_letter = -D latex_paper_size=letter ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . # the i18n builder cannot share the environment and doctrees with the others I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 
.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext help: @echo "Please use \`make ' where is one of" @echo " html to make standalone HTML files" @echo " dirhtml to make HTML files named index.html in directories" @echo " singlehtml to make a single large HTML file" @echo " pickle to make pickle files" @echo " json to make JSON files" @echo " htmlhelp to make HTML files and a HTML help project" @echo " qthelp to make HTML files and a qthelp project" @echo " devhelp to make HTML files and a Devhelp project" @echo " epub to make an epub" @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" @echo " latexpdf to make LaTeX files and run them through pdflatex" @echo " text to make text files" @echo " man to make manual pages" @echo " texinfo to make Texinfo files" @echo " info to make Texinfo files and run them through makeinfo" @echo " gettext to make PO message catalogs" @echo " changes to make an overview of all changed/added/deprecated items" @echo " linkcheck to check all external links for integrity" @echo " doctest to run all doctests embedded in the documentation (if enabled)" clean: -rm -rf $(BUILDDIR)/* html: $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." dirhtml: $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." singlehtml: $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml @echo @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." pickle: $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle @echo @echo "Build finished; now you can process the pickle files." json: $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json @echo @echo "Build finished; now you can process the JSON files." htmlhelp: $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp @echo @echo "Build finished; now you can run HTML Help Workshop with the" \ ".hhp project file in $(BUILDDIR)/htmlhelp." qthelp: $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp @echo @echo "Build finished; now you can run "qcollectiongenerator" with the" \ ".qhcp project file in $(BUILDDIR)/qthelp, like this:" @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/requests-cache.qhcp" @echo "To view the help file:" @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/requests-cache.qhc" devhelp: $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp @echo @echo "Build finished." @echo "To view the help file:" @echo "# mkdir -p $$HOME/.local/share/devhelp/requests-cache" @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/requests-cache" @echo "# devhelp" epub: $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub @echo @echo "Build finished. The epub file is in $(BUILDDIR)/epub." latex: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." @echo "Run \`make' in that directory to run these through (pdf)latex" \ "(use \`make latexpdf' here to do that automatically)." latexpdf: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through pdflatex..." $(MAKE) -C $(BUILDDIR)/latex all-pdf @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." text: $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text @echo @echo "Build finished. The text files are in $(BUILDDIR)/text." 
man: $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man @echo @echo "Build finished. The manual pages are in $(BUILDDIR)/man." texinfo: $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo @echo @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." @echo "Run \`make' in that directory to run these through makeinfo" \ "(use \`make info' here to do that automatically)." info: $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo @echo "Running Texinfo files through makeinfo..." make -C $(BUILDDIR)/texinfo info @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." gettext: $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale @echo @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." changes: $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes @echo @echo "The overview file is in $(BUILDDIR)/changes." linkcheck: $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck @echo @echo "Link check complete; look for any errors in the above output " \ "or in $(BUILDDIR)/linkcheck/output.txt." doctest: $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest @echo "Testing of doctests in the sources finished, look at the " \ "results in $(BUILDDIR)/doctest/output.txt." requests-cache-0.4.13/docs/index.rst0000664000175000017500000000134113027167133021720 0ustar rharitonovrharitonov00000000000000.. requests-cache documentation master file, created by sphinx-quickstart on Sun Apr 08 20:51:24 2012. You can adapt this file completely to your liking, but it should at least contain the root `toctree` directive. Requests-cache documentation ============================ `Requests-cache `_ is a transparent persistent cache for requests_ (version >= 1.1.0) library. Source code and issue tracking can be found at `GitHub `_. **Contents:** .. toctree:: :maxdepth: 2 user_guide api Indices and tables ================== * :ref:`genindex` * :ref:`modindex` * :ref:`search` .. _requests: http://docs.python-requests.org/requests-cache-0.4.13/docs/_templates/0000775000175000017500000000000013027170047022213 5ustar rharitonovrharitonov00000000000000requests-cache-0.4.13/docs/_templates/.empty0000664000175000017500000000000013027167133023342 0ustar rharitonovrharitonov00000000000000requests-cache-0.4.13/docs/user_guide.rst0000664000175000017500000001267413027167133022757 0ustar rharitonovrharitonov00000000000000.. _user_guide: User guide ========== Installation ------------ Install with pip_ or easy_install_:: pip install --upgrade requests-cache or download latest version from version control:: git clone git://github.com/reclosedev/requests-cache.git cd requests-cache python setup.py install .. warning:: Version updates of ``requests``, ``urllib3`` or ``requests_cache`` itself can break existing cache database (see https://github.com/reclosedev/requests-cache/issues/56 ). So if your code relies on cache, or is expensive in terms of time and traffic, please be sure to use something like ``virtualenv`` and pin your requirements. .. _pip: http://pypi.python.org/pypi/pip/ .. _easy_install: http://pypi.python.org/pypi/setuptools Usage ----- .. 
currentmodule:: requests_cache.core

There are two ways of using :mod:`requests_cache`:

- Using :class:`CachedSession` instead of ``requests.Session``
- Monkey patching ``requests`` to use :class:`CachedSession` by default

Monkey-patching lets you add caching to an existing program by adding just two lines: import :mod:`requests_cache` and call :func:`install_cache`::

    import requests
    import requests_cache

    requests_cache.install_cache()

After that you can keep using ``requests``, and all responses will be cached transparently! For example, the following code will take only 1-2 seconds instead of 10::

    for i in range(10):
        requests.get('http://httpbin.org/delay/1')

The cache can be configured with options such as the cache filename, backend (sqlite, mongodb, redis, memory) and expiration time. E.g. a cache stored in a sqlite database (the default format) named ``'test_cache.sqlite'`` with expiration set to 300 seconds can be configured as::

    requests_cache.install_cache('test_cache', backend='sqlite', expire_after=300)

.. seealso:: The full list of options can be found in the :func:`requests_cache.install_cache` reference

Transparent caching is achieved by monkey-patching the ``requests`` library. It is possible to uninstall this patch with :func:`requests_cache.uninstall_cache`. Also, you can use the :func:`requests_cache.disabled` context manager to temporarily disable caching::

    with requests_cache.disabled():
        print(requests.get('http://httpbin.org/ip').text)

If a ``Response`` is taken from the cache, its ``from_cache`` attribute will be ``True``::

    >>> import requests
    >>> import requests_cache
    >>> requests_cache.install_cache()
    >>> requests_cache.clear()
    >>> r = requests.get('http://httpbin.org/get')
    >>> r.from_cache
    False
    >>> r = requests.get('http://httpbin.org/get')
    >>> r.from_cache
    True

This can be used, for example, for request throttling with the help of the ``requests`` hook system::

    import time
    import requests
    import requests_cache

    def make_throttle_hook(timeout=1.0):
        """
        Returns a response hook function which sleeps for `timeout` seconds
        if the response is not cached
        """
        def hook(response, *args, **kwargs):
            if not getattr(response, 'from_cache', False):
                print('sleeping')
                time.sleep(timeout)
            return response
        return hook

    if __name__ == '__main__':
        requests_cache.install_cache('wait_test')
        requests_cache.clear()

        s = requests_cache.CachedSession()
        s.hooks = {'response': make_throttle_hook(0.1)}
        s.get('http://httpbin.org/get')
        s.get('http://httpbin.org/get')

.. seealso:: ``example.py`` in the repository root

.. note:: ``requests_cache`` prefetches response content; be aware of this if your code uses streaming requests.

.. _persistence:

Persistence
-----------

:mod:`requests_cache` is designed to support different backends for persistent storage. By default it uses a ``sqlite`` database. The type of storage can be selected with the ``backend`` argument of :func:`install_cache`.

List of available backends:

- ``'sqlite'`` - sqlite database (**default**)
- ``'memory'`` - not persistent, stores all data in a Python ``dict`` in memory
- ``'mongodb'`` - (**experimental**) MongoDB database (``pymongo`` required)
- ``'redis'`` - stores all data in a redis data store (``redis`` required)

You can write your own backend and pass an instance to :func:`install_cache` or the :class:`CachedSession` constructor. See the :ref:`cache_backends` API documentation and sources.

.. _expiration:

Expiration
----------

If you are using the cache with the ``expire_after`` parameter set, responses are removed from the storage only when the same request is made.
Since the store sizes can get out of control pretty quickly with expired items, you can remove them using :func:`remove_expired_responses` or :meth:`BaseCache.remove_old_entries`::

    expire_after = timedelta(hours=1)
    requests_cache.install_cache(expire_after=expire_after)
    ...
    requests_cache.remove_expired_responses()
    # or
    requests_cache.get_cache().remove_old_entries(datetime.utcnow() - expire_after)
    # when used as a session
    session = CachedSession(..., expire_after=expire_after)
    ...
    session.cache.remove_old_entries(datetime.utcnow() - expire_after)

For more information see the :doc:`API reference <api>`.
requests-cache-0.4.13/docs/_static/0000775000175000017500000000000013027170047021504 5ustar rharitonovrharitonov00000000000000requests-cache-0.4.13/docs/_static/.empty0000664000175000017500000000000013027167133022633 0ustar rharitonovrharitonov00000000000000requests-cache-0.4.13/sandbox.py0000664000175000017500000000047313027167133021144 0ustar rharitonovrharitonov00000000000000#!/usr/bin/env python
# -*- coding: utf-8 -*-
import requests
from requests_cache import CachedSession

cs = CachedSession(allowable_methods=('GET', 'POST'))
cs.cache.clear()
for i in range(2):
    r = cs.get("http://httpbin.org/get?p1=v1", params={'p2': 'v2', 'p3': 'cyrЯЯ'})
    print(r)
    print(r.from_cache)
requests-cache-0.4.13/requests_cache/0000775000175000017500000000000013027170047022124 5ustar rharitonovrharitonov00000000000000requests-cache-0.4.13/requests_cache/__init__.py0000664000175000017500000000142013027167767024251 0ustar rharitonovrharitonov00000000000000#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
    requests_cache
    ~~~~~~~~~~~~~~

    Transparent cache for the ``requests`` library with persistence and async support

    Just write::

        import requests_cache
        requests_cache.install_cache()

    And requests to resources will be cached for faster repeated access::

        import requests
        for i in range(10):
            r = requests.get('http://httpbin.org/delay/5')
        # will take approximately 5 seconds instead of 50

    :copyright: (c) 2012 by Roman Haritonov.
    :license: BSD, see LICENSE for more details.
"""
__docformat__ = 'restructuredtext'
__version__ = '0.4.13'

from .core import (
    CachedSession, install_cache, uninstall_cache,
    disabled, enabled, get_cache, clear, configure
)
requests-cache-0.4.13/requests_cache/backends/0000775000175000017500000000000013027170047023676 5ustar rharitonovrharitonov00000000000000requests-cache-0.4.13/requests_cache/backends/mongo.py0000664000175000017500000000142313027167133025371 0ustar rharitonovrharitonov00000000000000#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
    requests_cache.backends.mongo
    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    ``mongo`` cache backend
"""
from .base import BaseCache
from .storage.mongodict import MongoDict, MongoPickleDict


class MongoCache(BaseCache):
    """ ``mongo`` cache backend.
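    A short usage sketch (illustrative; it assumes a MongoDB server is
    reachable with default connection options)::

        import requests_cache
        requests_cache.install_cache('requests-cache', backend='mongodb')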
""" def __init__(self, db_name='requests-cache', **options): """ :param db_name: database name (default: ``'requests-cache'``) :param connection: (optional) ``pymongo.Connection`` """ super(MongoCache, self).__init__(**options) self.responses = MongoPickleDict(db_name, 'responses', options.get('connection')) self.keys_map = MongoDict(db_name, 'urls', self.responses.connection) requests-cache-0.4.13/requests_cache/backends/storage/0000775000175000017500000000000013027170047025342 5ustar rharitonovrharitonov00000000000000requests-cache-0.4.13/requests_cache/backends/storage/__init__.py0000664000175000017500000000000013027167133027443 0ustar rharitonovrharitonov00000000000000requests-cache-0.4.13/requests_cache/backends/storage/mongodict.py0000664000175000017500000000473513027167133027712 0ustar rharitonovrharitonov00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- """ requests_cache.backends.mongodict ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Dictionary-like objects for saving large data sets to ``mongodb`` database """ from collections import MutableMapping try: import cPickle as pickle except ImportError: import pickle # Use PyMongo 3 if present try: from pymongo import MongoClient except ImportError: from pymongo import Connection as MongoClient class MongoDict(MutableMapping): """ MongoDict - a dictionary-like interface for ``mongo`` database """ def __init__(self, db_name, collection_name='mongo_dict_data', connection=None): """ :param db_name: database name (be careful with production databases) :param collection_name: collection name (default: mongo_dict_data) :param connection: ``pymongo.Connection`` instance. If it's ``None`` (default) new connection with default options will be created """ if connection is not None: self.connection = connection else: self.connection = MongoClient() self.db = self.connection[db_name] self.collection = self.db[collection_name] def __getitem__(self, key): result = self.collection.find_one({'_id': key}) if result is None: raise KeyError return result['data'] def __setitem__(self, key, item): self.collection.save({'_id': key, 'data': item}) def __delitem__(self, key): spec = {'_id': key} if hasattr(self.collection, "find_one_and_delete"): res = self.collection.find_one_and_delete(spec, {'_id': True}) else: res = self.collection.find_and_modify(spec, remove=True, fields={'_id': True}) if res is None: raise KeyError def __len__(self): return self.collection.count() def __iter__(self): for d in self.collection.find({}, {'_id': True}): yield d['_id'] def clear(self): self.collection.drop() def __str__(self): return str(dict(self.items())) class MongoPickleDict(MongoDict): """ Same as :class:`MongoDict`, but pickles values before saving """ def __setitem__(self, key, item): super(MongoPickleDict, self).__setitem__(key, pickle.dumps(item)) def __getitem__(self, key): return pickle.loads(bytes(super(MongoPickleDict, self).__getitem__(key))) requests-cache-0.4.13/requests_cache/backends/storage/redisdict.py0000664000175000017500000000422213027167133027670 0ustar rharitonovrharitonov00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- """ requests_cache.backends.redisdict ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Dictionary-like objects for saving large data sets to ``redis`` key-store """ from collections import MutableMapping try: import cPickle as pickle except ImportError: import pickle from redis import StrictRedis as Redis class RedisDict(MutableMapping): """ RedisDict - a dictionary-like interface for ``redis`` key-stores """ def __init__(self, namespace, 
collection_name='redis_dict_data', connection=None):
        """
        The actual key name on the redis server will be
        ``namespace``:``collection_name``.

        In order to deal with how redis stores data/keys, everything,
        i.e. keys and data, must be pickled.

        :param namespace: namespace to use
        :param collection_name: name of the hash map stored in redis
                                (default: redis_dict_data)
        :param connection: ``redis.StrictRedis`` instance.
                           If it's ``None`` (default), a new connection with
                           default options will be created
        """
        if connection is not None:
            self.connection = connection
        else:
            self.connection = Redis()
        self._self_key = ':'.join([namespace, collection_name])

    def __getitem__(self, key):
        result = self.connection.hget(self._self_key, pickle.dumps(key))
        if result is None:
            raise KeyError
        return pickle.loads(bytes(result))

    def __setitem__(self, key, item):
        self.connection.hset(self._self_key, pickle.dumps(key),
                             pickle.dumps(item))

    def __delitem__(self, key):
        if not self.connection.hdel(self._self_key, pickle.dumps(key)):
            raise KeyError

    def __len__(self):
        return self.connection.hlen(self._self_key)

    def __iter__(self):
        for v in self.connection.hkeys(self._self_key):
            yield pickle.loads(bytes(v))

    def clear(self):
        self.connection.delete(self._self_key)

    def __str__(self):
        return str(dict(self.items()))
requests-cache-0.4.13/requests_cache/backends/storage/dbdict.py0000664000175000017500000001266713027167133027157 0ustar rharitonovrharitonov00000000000000#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
    requests_cache.backends.dbdict
    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    Dictionary-like objects for saving large data sets to an `sqlite` database
"""
from collections import MutableMapping
import sqlite3 as sqlite
from contextlib import contextmanager
try:
    import threading
except ImportError:
    import dummy_threading as threading
try:
    import cPickle as pickle
except ImportError:
    import pickle

from requests_cache.compat import bytes


class DbDict(MutableMapping):
    """ DbDict - a dictionary-like object for saving large datasets to an `sqlite` database

    It's possible to create multiple DbDict instances, which will be stored as
    separate tables in one database::

        d1 = DbDict('test', 'table1')
        d2 = DbDict('test', 'table2')
        d3 = DbDict('test', 'table3')

    All data will be stored in the ``test.sqlite`` database in the
    corresponding tables: ``table1``, ``table2`` and ``table3``.
    """

    def __init__(self, filename, table_name='data', fast_save=False, **options):
        """
        :param filename: filename for the database (without extension)
        :param table_name: table name
        :param fast_save: If it's True, then sqlite will be configured with
                          ``"PRAGMA synchronous = 0;"`` to speed up cache saving,
                          but be careful: it's dangerous. Tests showed that the
                          insertion order of records can be wrong with this option.
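        A quick sketch (illustrative; ``fast_save`` trades durability for
        write speed)::

            d = DbDict('test', 'table1', fast_save=True)
            d['key'] = 'value'  # written without waiting for a disk sync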
""" self.filename = filename self.table_name = table_name self.fast_save = fast_save #: Transactions can be commited if this property is set to `True` self.can_commit = True self._bulk_commit = False self._pending_connection = None self._lock = threading.RLock() with self.connection() as con: con.execute("create table if not exists `%s` (key PRIMARY KEY, value)" % self.table_name) @contextmanager def connection(self, commit_on_success=False): with self._lock: if self._bulk_commit: if self._pending_connection is None: self._pending_connection = sqlite.connect(self.filename) con = self._pending_connection else: con = sqlite.connect(self.filename) try: if self.fast_save: con.execute("PRAGMA synchronous = 0;") yield con if commit_on_success and self.can_commit: con.commit() finally: if not self._bulk_commit: con.close() def commit(self, force=False): """ Commits pending transaction if :attr:`can_commit` or `force` is `True` :param force: force commit, ignore :attr:`can_commit` """ if force or self.can_commit: if self._pending_connection is not None: self._pending_connection.commit() @contextmanager def bulk_commit(self): """ Context manager used to speedup insertion of big number of records :: >>> d1 = DbDict('test') >>> with d1.bulk_commit(): ... for i in range(1000): ... d1[i] = i * 2 """ self._bulk_commit = True self.can_commit = False try: yield self.commit(True) finally: self._bulk_commit = False self.can_commit = True self._pending_connection.close() self._pending_connection = None def __getitem__(self, key): with self.connection() as con: row = con.execute("select value from `%s` where key=?" % self.table_name, (key,)).fetchone() if not row: raise KeyError return row[0] def __setitem__(self, key, item): with self.connection(True) as con: con.execute("insert or replace into `%s` (key,value) values (?,?)" % self.table_name, (key, item)) def __delitem__(self, key): with self.connection(True) as con: cur = con.execute("delete from `%s` where key=?" 
% self.table_name, (key,)) if not cur.rowcount: raise KeyError def __iter__(self): with self.connection() as con: for row in con.execute("select key from `%s`" % self.table_name): yield row[0] def __len__(self): with self.connection() as con: return con.execute("select count(key) from `%s`" % self.table_name).fetchone()[0] def clear(self): with self.connection(True) as con: con.execute("drop table `%s`" % self.table_name) con.execute("create table `%s` (key PRIMARY KEY, value)" % self.table_name) def __str__(self): return str(dict(self.items())) class DbPickleDict(DbDict): """ Same as :class:`DbDict`, but pickles values before saving """ def __setitem__(self, key, item): super(DbPickleDict, self).__setitem__(key, sqlite.Binary(pickle.dumps(item))) def __getitem__(self, key): return pickle.loads(bytes(super(DbPickleDict, self).__getitem__(key))) requests-cache-0.4.13/requests_cache/backends/__init__.py0000664000175000017500000000275513027167133026022 0ustar rharitonovrharitonov00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- """ requests_cache.backends ~~~~~~~~~~~~~~~~~~~~~~~ Classes and functions for cache persistence """ from .base import BaseCache registry = { 'memory': BaseCache, } _backend_dependencies = { 'sqlite': 'sqlite3', 'mongo': 'pymongo', 'redis': 'redis' } try: # Heroku doesn't allow the SQLite3 module to be installed from .sqlite import DbCache registry['sqlite'] = DbCache except ImportError: DbCache = None try: from .mongo import MongoCache registry['mongo'] = registry['mongodb'] = MongoCache except ImportError: MongoCache = None try: from .redis import RedisCache registry['redis'] = RedisCache except ImportError: RedisCache = None def create_backend(backend_name, cache_name, options): if isinstance(backend_name, BaseCache): return backend_name if backend_name is None: backend_name = _get_default_backend_name() try: return registry[backend_name](cache_name, **options) except KeyError: if backend_name in _backend_dependencies: raise ImportError('You must install the python package: %s' % _backend_dependencies[backend_name]) else: raise ValueError('Unsupported backend "%s" try one of: %s' % (backend_name, ', '.join(registry.keys()))) def _get_default_backend_name(): if 'sqlite' in registry: return 'sqlite' return 'memory' requests-cache-0.4.13/requests_cache/backends/base.py0000664000175000017500000002166613027167133025177 0ustar rharitonovrharitonov00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- """ requests_cache.backends.base ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Contains BaseCache class which can be used as in-memory cache backend or extended to support persistence. """ from datetime import datetime import hashlib from copy import copy from io import BytesIO import requests from ..compat import is_py2, urlencode, urlparse, urlunparse, parse_qsl, bytes, str _DEFAULT_HEADERS = requests.utils.default_headers() class BaseCache(object): """ Base class for cache implementations, can be used as in-memory cache. To extend it you can provide dictionary-like objects for :attr:`keys_map` and :attr:`responses` or override public methods. """ def __init__(self, *args, **kwargs): #: `key` -> `key_in_responses` mapping self.keys_map = {} #: `key_in_cache` -> `response` mapping self.responses = {} self._include_get_headers = kwargs.get("include_get_headers", False) self._ignored_parameters = set(kwargs.get("ignored_parameters") or []) def save_response(self, key, response): """ Save response to cache :param key: key for this response :param response: response to save .. 
note:: Response is reduced before saving (with :meth:`reduce_response`) to make it picklable """ self.responses[key] = self.reduce_response(response), datetime.utcnow() def add_key_mapping(self, new_key, key_to_response): """ Adds mapping of `new_key` to `key_to_response` to make it possible to associate many keys with single response :param new_key: new key (e.g. url from redirect) :param key_to_response: key which can be found in :attr:`responses` :return: """ self.keys_map[new_key] = key_to_response def get_response_and_time(self, key, default=(None, None)): """ Retrieves response and timestamp for `key` if it's stored in cache, otherwise returns `default` :param key: key of resource :param default: return this if `key` not found in cache :returns: tuple (response, datetime) .. note:: Response is restored after unpickling with :meth:`restore_response` """ try: if key not in self.responses: key = self.keys_map[key] response, timestamp = self.responses[key] except KeyError: return default return self.restore_response(response), timestamp def delete(self, key): """ Delete `key` from cache. Also deletes all responses from response history """ try: if key in self.responses: response, _ = self.responses[key] del self.responses[key] else: response, _ = self.responses[self.keys_map[key]] del self.keys_map[key] for r in response.history: del self.keys_map[self.create_key(r.request)] except KeyError: pass def delete_url(self, url): """ Delete response associated with `url` from cache. Also deletes all responses from response history. Works only for GET requests """ self.delete(self._url_to_key(url)) def clear(self): """ Clear cache """ self.responses.clear() self.keys_map.clear() def remove_old_entries(self, created_before): """ Deletes entries from cache with creation time older than ``created_before`` """ keys_to_delete = set() for key in self.responses: try: response, created_at = self.responses[key] except KeyError: continue if created_at < created_before: keys_to_delete.add(key) for key in keys_to_delete: self.delete(key) def has_key(self, key): """ Returns `True` if cache has `key`, `False` otherwise """ return key in self.responses or key in self.keys_map def has_url(self, url): """ Returns `True` if cache has `url`, `False` otherwise. Works only for GET request urls """ return self.has_key(self._url_to_key(url)) def _url_to_key(self, url): session = requests.Session() return self.create_key(session.prepare_request(requests.Request('GET', url))) _response_attrs = ['_content', 'url', 'status_code', 'cookies', 'headers', 'encoding', 'request', 'reason', 'raw'] _raw_response_attrs = ['_original_response', 'decode_content', 'headers', 'reason', 'status', 'strict', 'version'] def reduce_response(self, response, seen=None): """ Reduce response object to make it compatible with ``pickle`` """ if seen is None: seen = {} try: return seen[id(response)] except KeyError: pass result = _Store() # prefetch content = response.content for field in self._response_attrs: setattr(result, field, self._picklable_field(response, field)) seen[id(response)] = result result.history = tuple(self.reduce_response(r, seen) for r in response.history) # Emulate stream fp is not consumed yet. 
See #68 if response.raw is not None: response.raw._fp = BytesIO(content) return result def _picklable_field(self, response, name): value = getattr(response, name) if name == 'request': value = copy(value) value.hooks = [] elif name == 'raw': result = _RawStore() for field in self._raw_response_attrs: setattr(result, field, getattr(value, field, None)) if result._original_response is not None: setattr(result._original_response, "fp", None) # _io.BufferedReader is not picklable value = result return value def restore_response(self, response, seen=None): """ Restore response object after unpickling """ if seen is None: seen = {} try: return seen[id(response)] except KeyError: pass result = requests.Response() for field in self._response_attrs: setattr(result, field, getattr(response, field, None)) result.raw._cached_content_ = result.content seen[id(response)] = result result.history = tuple(self.restore_response(r, seen) for r in response.history) return result def _remove_ignored_parameters(self, request): def filter_ignored_parameters(data): return [(k, v) for k, v in data if k not in self._ignored_parameters] url = urlparse(request.url) query = parse_qsl(url.query) query = filter_ignored_parameters(query) query = urlencode(query) url = urlunparse((url.scheme, url.netloc, url.path, url.params, query, url.fragment)) body = request.body content_type = request.headers.get('content-type') if body and content_type: if content_type == 'application/x-www-form-urlencoded': body = parse_qsl(body) body = filter_ignored_parameters(body) body = urlencode(body) elif content_type == 'application/json': import json if not is_py2 and isinstance(body, bytes): body = str(body, "utf8") # TODO how to get body encoding? body = json.loads(body) body = filter_ignored_parameters(sorted(body.items())) body = json.dumps(body) return url, body def create_key(self, request): if self._ignored_parameters: url, body = self._remove_ignored_parameters(request) else: url, body = request.url, request.body key = hashlib.sha256() key.update(_to_bytes(request.method.upper())) key.update(_to_bytes(url)) if request.body: key.update(_to_bytes(body)) else: if self._include_get_headers and request.headers != _DEFAULT_HEADERS: for name, value in sorted(request.headers.items()): key.update(_to_bytes(name)) key.update(_to_bytes(value)) return key.hexdigest() def __str__(self): return 'keys: %s\nresponses: %s' % (self.keys_map, self.responses) # used for saving response attributes class _Store(object): pass class _RawStore(object): # noop for cached response def release_conn(self): pass # for streaming requests support def read(self, chunk_size=1): if not hasattr(self, "_io_with_content_"): self._io_with_content_ = BytesIO(self._cached_content_) return self._io_with_content_.read(chunk_size) def _to_bytes(s, encoding='utf-8'): if is_py2 or isinstance(s, bytes): return s return bytes(s, encoding) requests-cache-0.4.13/requests_cache/backends/sqlite.py0000664000175000017500000000205413027167133025554 0ustar rharitonovrharitonov00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- """ requests_cache.backends.sqlite ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ ``sqlite3`` cache backend """ from .base import BaseCache from .storage.dbdict import DbDict, DbPickleDict class DbCache(BaseCache): """ sqlite cache backend. Reading is fast, saving is a bit slower. It can store big amount of data with low memory usage. 
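    A usage sketch (names are illustrative; ``.sqlite`` is the default
    extension, so the data below goes to ``demo_cache.sqlite``)::

        from requests_cache.backends.sqlite import DbCache
        cache = DbCache(location='demo_cache')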
""" def __init__(self, location='cache', fast_save=False, extension='.sqlite', **options): """ :param location: database filename prefix (default: ``'cache'``) :param fast_save: Speedup cache saving up to 50 times but with possibility of data loss. See :ref:`backends.DbDict ` for more info :param extension: extension for filename (default: ``'.sqlite'``) """ super(DbCache, self).__init__(**options) self.responses = DbPickleDict(location + extension, 'responses', fast_save=fast_save) self.keys_map = DbDict(location + extension, 'urls') requests-cache-0.4.13/requests_cache/backends/redis.py0000664000175000017500000000137713027167133025370 0ustar rharitonovrharitonov00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- """ requests_cache.backends.redis ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ ``redis`` cache backend """ from .base import BaseCache from .storage.redisdict import RedisDict class RedisCache(BaseCache): """ ``redis`` cache backend. """ def __init__(self, namespace='requests-cache', **options): """ :param namespace: redis namespace (default: ``'requests-cache'``) :param connection: (optional) ``redis.StrictRedis`` """ super(RedisCache, self).__init__(**options) self.responses = RedisDict(namespace, 'responses', options.get('connection')) self.keys_map = RedisDict(namespace, 'urls', self.responses.connection) requests-cache-0.4.13/requests_cache/compat.py0000664000175000017500000000405513027167133023767 0ustar rharitonovrharitonov00000000000000# -*- coding: utf-8 -*- # taken from requests library: https://github.com/kennethreitz/requests """ pythoncompat """ import sys # ------- # Pythons # ------- # Syntax sugar. _ver = sys.version_info #: Python 2.x? is_py2 = (_ver[0] == 2) #: Python 3.x? is_py3 = (_ver[0] == 3) #: Python 3.0.x is_py30 = (is_py3 and _ver[1] == 0) #: Python 3.1.x is_py31 = (is_py3 and _ver[1] == 1) #: Python 3.2.x is_py32 = (is_py3 and _ver[1] == 2) #: Python 3.3.x is_py33 = (is_py3 and _ver[1] == 3) #: Python 3.4.x is_py34 = (is_py3 and _ver[1] == 4) #: Python 2.7.x is_py27 = (is_py2 and _ver[1] == 7) #: Python 2.6.x is_py26 = (is_py2 and _ver[1] == 6) #: Python 2.5.x is_py25 = (is_py2 and _ver[1] == 5) #: Python 2.4.x is_py24 = (is_py2 and _ver[1] == 4) # I'm assuming this is not by choice. # --------- # Platforms # --------- # Syntax sugar. _ver = sys.version.lower() is_pypy = ('pypy' in _ver) is_jython = ('jython' in _ver) is_ironpython = ('iron' in _ver) # Assume CPython, if nothing else. is_cpython = not any((is_pypy, is_jython, is_ironpython)) # Windows-based system. is_windows = 'win32' in str(sys.platform).lower() # Standard Linux 2+ system. is_linux = ('linux' in str(sys.platform).lower()) is_osx = ('darwin' in str(sys.platform).lower()) is_hpux = ('hpux' in str(sys.platform).lower()) # Complete guess. is_solaris = ('solar==' in str(sys.platform).lower()) # Complete guess. 
# --------- # Specifics # --------- if is_py2: from urllib import quote, unquote, urlencode from urlparse import urlparse, urlunparse, urljoin, urlsplit, parse_qsl from urllib2 import parse_http_list import cookielib from StringIO import StringIO bytes = str str = unicode basestring = basestring elif is_py3: from urllib.parse import urlparse, urlunparse, urljoin, urlsplit, urlencode, quote, unquote, parse_qsl from urllib.request import parse_http_list from http import cookiejar as cookielib from http.cookies import SimpleCookie from io import StringIO str = str bytes = bytes basestring = (str, bytes) requests-cache-0.4.13/requests_cache/core.py0000664000175000017500000002315713027167133023440 0ustar rharitonovrharitonov00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- """ requests_cache.core ~~~~~~~~~~~~~~~~~~~ Core functions for configuring cache and monkey patching ``requests`` """ from contextlib import contextmanager from datetime import datetime, timedelta from operator import itemgetter import requests from requests import Session as OriginalSession from requests.hooks import dispatch_hook from requests_cache import backends from requests_cache.compat import basestring try: ver = tuple(map(int, requests.__version__.split("."))) except ValueError: pass else: # We don't need to dispatch the hook ourselves in Requests <= 1.1.0 if ver < (1, 2, 0): dispatch_hook = lambda key, hooks, hook_data, *a, **kw: hook_data del ver class CachedSession(OriginalSession): """ Requests ``Sessions`` with caching support. """ def __init__(self, cache_name='cache', backend=None, expire_after=None, allowable_codes=(200,), allowable_methods=('GET',), old_data_on_error=False, **backend_options): """ :param cache_name: for ``sqlite`` backend: cache file will start with this prefix, e.g. ``cache.sqlite`` for ``mongodb``: it's used as database name for ``redis``: it's used as the namespace. This means all keys are prefixed with ``'cache_name:'`` :param backend: cache backend name e.g. ``'sqlite'``, ``'mongodb'``, ``'redis'``, ``'memory'``. (see :ref:`persistence`). Or instance of backend implementation. Default value is ``None``, which means use ``'sqlite'`` if available, otherwise fallback to ``'memory'``. :param expire_after: ``timedelta`` or number of seconds after which a cached entry expires, or `None` (default) to ignore expiration :type expire_after: float :param allowable_codes: cache only responses with these status codes (default: 200) :type allowable_codes: tuple :param allowable_methods: cache only requests made with these methods (default: 'GET') :type allowable_methods: tuple :kwarg backend_options: options for chosen backend. See corresponding :ref:`sqlite `, :ref:`mongo ` and :ref:`redis ` backends API documentation :param include_get_headers: If `True`, headers become part of the cache key. E.g. after ``get('some_link', headers={'Accept': 'application/json'})``, a subsequent ``get('some_link', headers={'Accept': 'application/xml'})`` is not served from cache. :param ignored_parameters: List of parameters to be excluded from the cache key. Useful when requesting the same resource through different credentials or access tokens, passed as parameters.
:param old_data_on_error: If `True`, return the expired cached response if the update fails """ self.cache = backends.create_backend(backend, cache_name, backend_options) self._cache_name = cache_name if expire_after is not None and not isinstance(expire_after, timedelta): expire_after = timedelta(seconds=expire_after) self._cache_expire_after = expire_after self._cache_allowable_codes = allowable_codes self._cache_allowable_methods = allowable_methods self._return_old_data_on_error = old_data_on_error self._is_cache_disabled = False super(CachedSession, self).__init__() def send(self, request, **kwargs): if (self._is_cache_disabled or request.method not in self._cache_allowable_methods): response = super(CachedSession, self).send(request, **kwargs) response.from_cache = False return response cache_key = self.cache.create_key(request) def send_request_and_cache_response(): response = super(CachedSession, self).send(request, **kwargs) if response.status_code in self._cache_allowable_codes: self.cache.save_response(cache_key, response) response.from_cache = False return response response, timestamp = self.cache.get_response_and_time(cache_key) if response is None: return send_request_and_cache_response() if self._cache_expire_after is not None: is_expired = datetime.utcnow() - timestamp > self._cache_expire_after if is_expired: if not self._return_old_data_on_error: self.cache.delete(cache_key) return send_request_and_cache_response() try: new_response = send_request_and_cache_response() except Exception: return response else: if new_response.status_code not in self._cache_allowable_codes: return response return new_response # dispatch hook here, because we've removed it before pickling response.from_cache = True response = dispatch_hook('response', request.hooks, response, **kwargs) return response def request(self, method, url, params=None, data=None, **kwargs): response = super(CachedSession, self).request( method, url, _normalize_parameters(params), _normalize_parameters(data), **kwargs ) if self._is_cache_disabled: return response main_key = self.cache.create_key(response.request) for r in response.history: self.cache.add_key_mapping( self.cache.create_key(r.request), main_key ) return response @contextmanager def cache_disabled(self): """ Context manager for temporarily disabling the cache :: >>> s = CachedSession() >>> with s.cache_disabled(): ... s.get('http://httpbin.org/ip') """ self._is_cache_disabled = True try: yield finally: self._is_cache_disabled = False def remove_expired_responses(self): """ Removes expired responses from storage """ if not self._cache_expire_after: return self.cache.remove_old_entries(datetime.utcnow() - self._cache_expire_after) def __repr__(self): return ( "<CachedSession(%s('%s', ...), expire_after=%s, allowable_methods=%s)>" % ( self.cache.__class__.__name__, self._cache_name, self._cache_expire_after, self._cache_allowable_methods ) )
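# Illustrative sketch (not part of the original source): a typical CachedSession
# setup; 'api_cache' is a hypothetical cache name. Entries expire after five
# minutes, and stale data is served if the refresh attempt raises:
#
#     import requests_cache
#
#     session = requests_cache.CachedSession(
#         'api_cache', backend='sqlite', expire_after=300, old_data_on_error=True)
#     response = session.get('http://httpbin.org/get')
#     print(response.from_cache)  # False on the first call, True within the next 5 minutes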
def install_cache(cache_name='cache', backend=None, expire_after=None, allowable_codes=(200,), allowable_methods=('GET',), session_factory=CachedSession, **backend_options): """ Installs cache for all ``Requests`` requests by monkey-patching ``Session`` Parameters are the same as in :class:`CachedSession`. Additional parameters: :param session_factory: Session factory. It must be a class that inherits :class:`CachedSession` (default) """ if backend: backend = backends.create_backend(backend, cache_name, backend_options) class _ConfiguredCachedSession(session_factory): def __init__(self): super(_ConfiguredCachedSession, self).__init__( cache_name=cache_name, backend=backend, expire_after=expire_after, allowable_codes=allowable_codes, allowable_methods=allowable_methods, **backend_options ) _patch_session_factory(_ConfiguredCachedSession) # backward compatibility configure = install_cache def uninstall_cache(): """ Restores ``requests.Session`` and disables cache """ _patch_session_factory(OriginalSession) @contextmanager def disabled(): """ Context manager for temporarily disabling the globally installed cache .. warning:: not thread-safe :: >>> with requests_cache.disabled(): ... requests.get('http://httpbin.org/ip') ... requests.get('http://httpbin.org/get') """ previous = requests.Session uninstall_cache() try: yield finally: _patch_session_factory(previous) @contextmanager def enabled(*args, **kwargs): """ Context manager for temporarily installing a global cache. Accepts the same arguments as :func:`install_cache` .. warning:: not thread-safe :: >>> with requests_cache.enabled('cache_db'): ... requests.get('http://httpbin.org/get') """ install_cache(*args, **kwargs) try: yield finally: uninstall_cache() def get_cache(): """ Returns internal cache object from globally installed ``CachedSession`` """ return requests.Session().cache def clear(): """ Clears globally installed cache """ get_cache().clear() def remove_expired_responses(): """ Removes expired responses from storage """ return requests.Session().remove_expired_responses() def _patch_session_factory(session_factory=CachedSession): requests.Session = requests.sessions.Session = session_factory def _normalize_parameters(params): """ If a builtin dict is passed as a parameter, returns a sorted list of key-value pairs """ if type(params) is dict: return sorted(params.items(), key=itemgetter(0)) return params
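# Illustrative sketch (not part of the original source): install_cache() accepts
# any CachedSession subclass as ``session_factory``. A hypothetical subclass
# that bypasses the cache for login endpoints might look like:
#
#     import requests_cache
#
#     class SelectiveCachedSession(requests_cache.CachedSession):
#         def send(self, request, **kwargs):
#             if '/login' in request.url:  # hypothetical rule
#                 with self.cache_disabled():
#                     return super(SelectiveCachedSession, self).send(request, **kwargs)
#             return super(SelectiveCachedSession, self).send(request, **kwargs)
#
#     requests_cache.install_cache('selective_cache', session_factory=SelectiveCachedSession)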
requests-cache-0.4.13/HISTORY.rst0000664000175000017500000000557113027167767021040 0ustar rharitonovrharitonov00000000000000.. :changelog: History ------- 0.4.13 (2016-12-23) +++++++++++++++++++ * Support PyMongo3, thanks to @craigls #72 * Fix streaming-related issue #68 0.4.12 (2016-03-19) +++++++++++++++++++ * Fix ability to pass backend instance in ``install_cache`` #61 0.4.11 (2016-03-07) +++++++++++++++++++ * ``ignore_parameters`` feature, thanks to @themiurgo and @YetAnotherNerd (#52, #55) * More informative message for missing backend dependencies, thanks to @Garrett-R (#60) 0.4.10 (2015-04-28) +++++++++++++++++++ * Better transactional handling in sqlite #50, thanks to @rgant * Compatibility with streaming in requests >= 2.6.x 0.4.9 (2015-01-17) ++++++++++++++++++ * ``expire_after`` now also accepts ``timedelta``, thanks to @femtotrader * Added ability to include headers in the cache key (``include_get_headers`` option) * Added string representation for ``CachedSession`` 0.4.8 (2014-12-13) ++++++++++++++++++ * Fix bug in reading cached streaming response 0.4.7 (2014-12-06) ++++++++++++++++++ * Fix compatibility with Requests > 2.4.1 (json arg, response history) 0.4.6 (2014-10-13) ++++++++++++++++++ * Monkey patch now uses a class instead of a lambda (compatibility with rauth) * Normalize (sort) parameters passed as builtin dict 0.4.5 (2014-08-22) ++++++++++++++++++ * Requests==2.3.0 compatibility, thanks to @gwillem 0.4.4 (2013-10-31) ++++++++++++++++++ * Check for backend availability in install_cache(), not at the first request * Default storage falls back to memory if ``sqlite`` is not available 0.4.3 (2013-09-12) ++++++++++++++++++ * Fix ``response.from_cache`` not set in hooks 0.4.2 (2013-08-25) ++++++++++++++++++ * Fix ``UnpickleableError`` for gzip responses 0.4.1 (2013-08-19) ++++++++++++++++++ * ``requests_cache.enabled()`` context manager * Compatibility with Requests 1.2.3 cookies handling 0.4.0 (2013-04-25) ++++++++++++++++++ * Redis backend.
Thanks to @michaelbeaumont * Fix for changes in Requests 1.2.0 hooks dispatching 0.3.0 (2013-02-24) ++++++++++++++++++ * Support for ``Requests`` 1.x.x * ``CachedSession`` * Many backward incompatible changes 0.2.1 (2013-01-13) ++++++++++++++++++ * Fix broken PyPi package 0.2.0 (2013-01-12) ++++++++++++++++++ * Last backward compatible version for ``Requests`` 0.14.2 0.1.3 (2012-05-04) ++++++++++++++++++ * Thread safety for default ``sqlite`` backend * Take into account the POST parameters when cache is configured with 'POST' in ``allowable_methods`` 0.1.2 (2012-05-02) ++++++++++++++++++ * Reduce number of ``sqlite`` database write operations * ``fast_save`` option for ``sqlite`` backend 0.1.1 (2012-04-11) ++++++++++++++++++ * Fix: restore responses from response.history * Internal refactoring (``MemoryCache`` -> ``BaseCache``, ``reduce_response`` and ``restore_response`` moved to ``BaseCache``) * ``connection`` option for ``MongoCache`` 0.1.0 (2012-04-10) ++++++++++++++++++ * initial PyPI release requests-cache-0.4.13/CONTRIBUTORS.rst0000664000175000017500000000070213027167133021616 0ustar rharitonovrharitonov00000000000000Patches and Suggestions ``````````````````````` - `Daniel Rech `_ - `Wouter Vanden Hove `_ - `Massimo Santini `_ - `Nathan Cahill `_ - `Honza Javorek `_ - `@michaelbeaumont `_ - `Alex Morega `_ - `@paulfurley `_ requests-cache-0.4.13/PKG-INFO0000664000175000017500000001562013027170047020227 0ustar rharitonovrharitonov00000000000000Metadata-Version: 1.1 Name: requests-cache Version: 0.4.13 Summary: Persistent cache for requests library Home-page: https://github.com/reclosedev/requests-cache Author: Roman Haritonov Author-email: reclosedev@gmail.com License: BSD License Description: requests-cache --------------- Requests-cache is a transparent persistent cache for requests_ (version >= 1.1.0) library. .. _requests: http://python-requests.org/ .. image:: https://travis-ci.org/reclosedev/requests-cache.svg?branch=master :target: https://travis-ci.org/reclosedev/requests-cache .. image:: https://coveralls.io/repos/reclosedev/requests-cache/badge.svg?branch=master&service=github :target: https://coveralls.io/github/reclosedev/requests-cache?branch=master Usage example ------------- Just write: .. code-block:: python import requests import requests_cache requests_cache.install_cache('demo_cache') And all responses with headers and cookies will be transparently cached to `demo_cache.sqlite` database. For example, following code will take only 1-2 seconds instead of 10, and will run instantly on next launch: .. code-block:: python for i in range(10): requests.get('http://httpbin.org/delay/1') It can be useful when you are creating some simple data scraper with constantly changing parsing logic or data format, and don't want to redownload pages or write complex error handling and persistence. Note ---- ``requests-cache`` ignores all cache headers, it just caches the data for the time you specify. If you need library which knows how to use HTTP headers and status codes, take a look at `httpcache `_ and `CacheControl `_. Links ----- - **Documentation** at `readthedocs.org `_ - **Source code and issue tracking** at `GitHub `_. - **Working example** at `Real Python `_. .. 
:changelog: History ------- 0.4.13 (2016-12-23) +++++++++++++++++++ * Support PyMongo3, thanks to @craigls #72 * Fix streaming-related issue #68 0.4.12 (2016-03-19) +++++++++++++++++++ * Fix ability to pass backend instance in ``install_cache`` #61 0.4.11 (2016-03-07) +++++++++++++++++++ * ``ignore_parameters`` feature, thanks to @themiurgo and @YetAnotherNerd (#52, #55) * More informative message for missing backend dependencies, thanks to @Garrett-R (#60) 0.4.10 (2015-04-28) +++++++++++++++++++ * Better transactional handling in sqlite #50, thanks to @rgant * Compatibility with streaming in requests >= 2.6.x 0.4.9 (2015-01-17) ++++++++++++++++++ * ``expire_after`` now also accepts ``timedelta``, thanks to @femtotrader * Added ability to include headers in the cache key (``include_get_headers`` option) * Added string representation for ``CachedSession`` 0.4.8 (2014-12-13) ++++++++++++++++++ * Fix bug in reading cached streaming response 0.4.7 (2014-12-06) ++++++++++++++++++ * Fix compatibility with Requests > 2.4.1 (json arg, response history) 0.4.6 (2014-10-13) ++++++++++++++++++ * Monkey patch now uses a class instead of a lambda (compatibility with rauth) * Normalize (sort) parameters passed as builtin dict 0.4.5 (2014-08-22) ++++++++++++++++++ * Requests==2.3.0 compatibility, thanks to @gwillem 0.4.4 (2013-10-31) ++++++++++++++++++ * Check for backend availability in install_cache(), not at the first request * Default storage falls back to memory if ``sqlite`` is not available 0.4.3 (2013-09-12) ++++++++++++++++++ * Fix ``response.from_cache`` not set in hooks 0.4.2 (2013-08-25) ++++++++++++++++++ * Fix ``UnpickleableError`` for gzip responses 0.4.1 (2013-08-19) ++++++++++++++++++ * ``requests_cache.enabled()`` context manager * Compatibility with Requests 1.2.3 cookies handling 0.4.0 (2013-04-25) ++++++++++++++++++ * Redis backend.
Thanks to @michaelbeaumont * Fix for changes in Requests 1.2.0 hooks dispatching 0.3.0 (2013-02-24) ++++++++++++++++++ * Support for ``Requests`` 1.x.x * ``CachedSession`` * Many backward incompatible changes 0.2.1 (2013-01-13) ++++++++++++++++++ * Fix broken PyPi package 0.2.0 (2013-01-12) ++++++++++++++++++ * Last backward compatible version for ``Requests`` 0.14.2 0.1.3 (2012-05-04) ++++++++++++++++++ * Thread safety for default ``sqlite`` backend * Take into account the POST parameters when cache is configured with 'POST' in ``allowable_methods`` 0.1.2 (2012-05-02) ++++++++++++++++++ * Reduce number of ``sqlite`` database write operations * ``fast_save`` option for ``sqlite`` backend 0.1.1 (2012-04-11) ++++++++++++++++++ * Fix: restore responses from response.history * Internal refactoring (``MemoryCache`` -> ``BaseCache``, ``reduce_response`` and ``restore_response`` moved to ``BaseCache``) * ``connection`` option for ``MongoCache`` 0.1.0 (2012-04-10) ++++++++++++++++++ * initial PyPI release Keywords: requests,cache,persistence Platform: UNKNOWN Classifier: License :: OSI Approved :: BSD License Classifier: Development Status :: 4 - Beta Classifier: Intended Audience :: Developers Classifier: Topic :: Software Development :: Libraries :: Python Modules Classifier: Programming Language :: Python Classifier: Programming Language :: Python :: 2.6 Classifier: Programming Language :: Python :: 2.7 Classifier: Programming Language :: Python :: 3 requests-cache-0.4.13/tests/0000775000175000017500000000000013027170047020270 5ustar rharitonovrharitonov00000000000000requests-cache-0.4.13/tests/test_thread_safety.py0000664000175000017500000000254113027167133024527 0ustar rharitonovrharitonov00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Path hack import os, sys sys.path.insert(0, os.path.abspath('..')) try: import unittest2 as unittest except ImportError: import unittest from threading import Thread from requests_cache import CachedSession CACHE_NAME = 'requests_cache_test' class ThreadSafetyTestCase(unittest.TestCase): def test_caching_with_threads(self): def do_tests_for(backend): s = CachedSession(CACHE_NAME, backend) s.cache.clear() n_threads = 10 url = 'http://httpbin.org/get' def do_requests(url, params): for i in range(10): # for testing write and read from cache s.get(url, params=params) for _ in range(20): # stress test threads = [Thread(target=do_requests, args=(url, {'param': i})) for i in range(n_threads)] for t in threads: t.start() for t in threads: t.join() for i in range(n_threads): self.assert_(s.cache.has_url('%s?param=%s' % (url, i))) for backend in ('sqlite', 'mongodb'): try: do_tests_for(backend) except Exception: print("Failed to test %s" % backend) if __name__ == '__main__': unittest.main() requests-cache-0.4.13/tests/test_cache.py0000664000175000017500000004215313027167133022753 0ustar rharitonovrharitonov00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Path hack import os, sys sys.path.insert(0, os.path.abspath('..')) try: import unittest2 as unittest except ImportError: import unittest import time import json from collections import defaultdict from datetime import datetime, timedelta import mock import requests from requests import Request import requests_cache from requests_cache import CachedSession from requests_cache.compat import bytes, str, is_py3 CACHE_BACKEND = 'sqlite' CACHE_NAME = 'requests_cache_test' FAST_SAVE = False HTTPBIN_URL = os.getenv('HTTPBIN_URL', 'http://httpbin.org/') def httpbin(*suffix): """Returns url for HTTPBIN 
resource.""" return HTTPBIN_URL + '/'.join(suffix) class CacheTestCase(unittest.TestCase): def setUp(self): self.s = CachedSession(CACHE_NAME, backend=CACHE_BACKEND, fast_save=FAST_SAVE) self.s.cache.clear() requests_cache.uninstall_cache() @classmethod def tearDownClass(cls): super(CacheTestCase, cls).tearDownClass() filename = "{0}.{1}".format(CACHE_NAME, CACHE_BACKEND) if os.path.exists(filename): try: os.unlink(filename) except OSError: pass def test_expire_cache(self): delay = 1 url = httpbin('delay/%s' % delay) s = CachedSession(CACHE_NAME, backend=CACHE_BACKEND, expire_after=0.06) t = time.time() r = s.get(url) delta = time.time() - t self.assertGreaterEqual(delta, delay) time.sleep(0.5) t = time.time() r = s.get(url) delta = time.time() - t self.assertGreaterEqual(delta, delay) def test_delete_urls(self): url = httpbin('relative-redirect/3') r = self.s.get(url) for i in range(1, 4): self.assert_(self.s.cache.has_url(httpbin('relative-redirect/%s' % i))) self.s.cache.delete_url(url) self.assert_(not self.s.cache.has_url(url)) def test_unregistered_backend(self): with self.assertRaises(ValueError): CachedSession(CACHE_NAME, backend='nonexistent') @mock.patch('requests_cache.backends.registry') def test_missing_backend_dependency(self, mocked_registry): # Testing that the correct error is thrown when a user does not have # the Python package `redis` installed. We mock out the registry # to simulate `redis` not being installed. mocked_registry.__getitem__.side_effect = KeyError with self.assertRaises(ImportError): CachedSession(CACHE_NAME, backend='redis') def test_hooks(self): state = defaultdict(int) for hook in ('response',): # TODO it's only one hook here def hook_func(r, *args, **kwargs): state[hook] += 1 return r n = 5 for i in range(n): r = self.s.get(httpbin('get'), hooks={hook: hook_func}) self.assertEqual(state[hook], n) def test_attr_from_cache_in_hook(self): state = defaultdict(int) hook = 'response' def hook_func(r, *args, **kwargs): if state[hook] > 0: self.assert_(r.from_cache, True) state[hook] += 1 return r n = 5 for i in range(n): r = self.s.get(httpbin('get'), hooks={hook: hook_func}) self.assertEqual(state[hook], n) def test_post(self): url = httpbin('post') r1 = json.loads(self.s.post(url, data={'test1': 'test1'}).text) r2 = json.loads(self.s.post(url, data={'test2': 'test2'}).text) self.assertIn('test2', r2['form']) req = Request('POST', url).prepare() self.assert_(not self.s.cache.has_key(self.s.cache.create_key(req))) def test_disabled(self): url = httpbin('get') requests_cache.install_cache(CACHE_NAME, backend=CACHE_BACKEND, fast_save=FAST_SAVE) requests.get(url) with requests_cache.disabled(): for i in range(2): r = requests.get(url) self.assertFalse(getattr(r, 'from_cache', False)) with self.s.cache_disabled(): for i in range(2): r = self.s.get(url) self.assertFalse(getattr(r, 'from_cache', False)) r = self.s.get(url) self.assertTrue(getattr(r, 'from_cache', False)) def test_enabled(self): url = httpbin('get') options = dict(cache_name=CACHE_NAME, backend=CACHE_BACKEND, fast_save=FAST_SAVE) with requests_cache.enabled(**options): r = requests.get(url) self.assertFalse(getattr(r, 'from_cache', False)) for i in range(2): r = requests.get(url) self.assertTrue(getattr(r, 'from_cache', False)) r = requests.get(url) self.assertFalse(getattr(r, 'from_cache', False)) def test_content_and_cookies(self): requests_cache.install_cache(CACHE_NAME, CACHE_BACKEND) s = requests.session() def js(url): return json.loads(s.get(url).text) r1 = 
js(httpbin('cookies/set/test1/test2')) with requests_cache.disabled(): r2 = js(httpbin('cookies')) self.assertEqual(r1, r2) r3 = js(httpbin('cookies')) with requests_cache.disabled(): r4 = js(httpbin('cookies/set/test3/test4')) # from cache self.assertEqual(r3, js(httpbin('cookies'))) # updated with requests_cache.disabled(): self.assertEqual(r4, js(httpbin('cookies'))) def test_response_history(self): r1 = self.s.get(httpbin('relative-redirect/3')) def test_redirect_history(url): r2 = self.s.get(url) self.assertTrue(r2.from_cache) for r11, r22 in zip(r1.history, r2.history): self.assertEqual(r11.url, r22.url) test_redirect_history(httpbin('relative-redirect/3')) test_redirect_history(httpbin('relative-redirect/2')) r3 = requests.get(httpbin('relative-redirect/1')) self.assertEqual(len(r3.history), 1) def test_response_history_simple(self): r1 = self.s.get(httpbin('relative-redirect/2')) r2 = self.s.get(httpbin('relative-redirect/1')) self.assertTrue(r2.from_cache) def post(self, data): return json.loads(self.s.post(httpbin('post'), data=data).text) def test_post_params(self): # issue #2 self.s = CachedSession(CACHE_NAME, CACHE_BACKEND, allowable_methods=('GET', 'POST')) d = {'param1': 'test1'} for _ in range(2): self.assertEqual(self.post(d)['form'], d) d = {'param1': 'test1', 'param3': 'test3'} self.assertEqual(self.post(d)['form'], d) self.assertTrue(self.s.post(httpbin('post'), data=d).from_cache) d.update({'something': 'else'}) self.assertFalse(self.s.post(httpbin('post'), data=d).from_cache) def test_post_data(self): # issue #2, raw payload self.s = CachedSession(CACHE_NAME, CACHE_BACKEND, allowable_methods=('GET', 'POST')) d1 = json.dumps({'param1': 'test1'}) d2 = json.dumps({'param1': 'test1', 'param2': 'test2'}) d3 = str('some unicode data') if is_py3: bin_data = bytes('some binary data', 'utf8') else: bin_data = bytes('some binary data') for d in (d1, d2, d3): self.assertEqual(self.post(d)['data'], d) r = self.s.post(httpbin('post'), data=d) self.assert_(hasattr(r, 'from_cache')) self.assertEqual(self.post(bin_data)['data'], bin_data.decode('utf8')) r = self.s.post(httpbin('post'), data=bin_data) self.assert_(hasattr(r, 'from_cache')) def test_get_params_as_argument(self): for _ in range(5): p = {'arg1': 'value1'} r = self.s.get(httpbin('get'), params=p) self.assert_(self.s.cache.has_url( httpbin('get?arg1=value1'))) def test_https_support(self): n = 10 delay = 1 url = 'https://httpbin.org/delay/%s?ar1=value1' % delay t = time.time() for _ in range(n): r = self.s.get(url, verify=False) self.assertLessEqual(time.time() - t, delay * n / 2) def test_from_cache_attribute(self): url = httpbin('get?q=1') self.assertFalse(self.s.get(url).from_cache) self.assertTrue(self.s.get(url).from_cache) self.s.cache.clear() self.assertFalse(self.s.get(url).from_cache) def test_gzip_response(self): url = httpbin('gzip') self.assertFalse(self.s.get(url).from_cache) self.assertTrue(self.s.get(url).from_cache) def test_close_response(self): for _ in range(3): r = self.s.get(httpbin("get")) r.close() def test_get_parameters_normalization(self): url = httpbin("get") params = {"a": "a", "b": ["1", "2", "3"], "c": "4"} self.assertFalse(self.s.get(url, params=params).from_cache) r = self.s.get(url, params=params) self.assertTrue(r.from_cache) self.assertEquals(r.json()["args"], params) self.assertFalse(self.s.get(url, params={"a": "b"}).from_cache) self.assertTrue(self.s.get(url, params=sorted(params.items())).from_cache) class UserSubclass(dict): def items(self): return sorted(super(UserSubclass, 
self).items(), reverse=True) params["z"] = "5" custom_dict = UserSubclass(params) self.assertFalse(self.s.get(url, params=custom_dict).from_cache) self.assertTrue(self.s.get(url, params=custom_dict).from_cache) def test_post_parameters_normalization(self): params = {"a": "a", "b": ["1", "2", "3"], "c": "4"} url = httpbin("post") s = CachedSession(CACHE_NAME, CACHE_BACKEND, allowable_methods=('GET', 'POST')) self.assertFalse(s.post(url, data=params).from_cache) self.assertTrue(s.post(url, data=params).from_cache) self.assertTrue(s.post(url, data=sorted(params.items())).from_cache) self.assertFalse(s.post(url, data=sorted(params.items(), reverse=True)).from_cache) def test_stream_requests_support(self): n = 100 url = httpbin("stream/%s" % n) r = self.s.get(url, stream=True) first_char = r.raw.read(1) lines = list(r.iter_lines()) self.assertTrue(first_char) self.assertEquals(len(lines), n) for i in range(2): r = self.s.get(url, stream=True) first_char_cached = r.raw.read(1) self.assertTrue(r.from_cache) cached_lines = list(r.iter_lines()) self.assertEquals(cached_lines, lines) self.assertEquals(first_char, first_char_cached) def test_headers_in_get_query(self): url = httpbin("get") s = CachedSession(CACHE_NAME, CACHE_BACKEND, include_get_headers=True) headers = {"Accept": "text/json"} self.assertFalse(s.get(url, headers=headers).from_cache) self.assertTrue(s.get(url, headers=headers).from_cache) headers["Accept"] = "text/xml" self.assertFalse(s.get(url, headers=headers).from_cache) self.assertTrue(s.get(url, headers=headers).from_cache) headers["X-custom-header"] = "custom" self.assertFalse(s.get(url, headers=headers).from_cache) self.assertTrue(s.get(url, headers=headers).from_cache) self.assertFalse(s.get(url).from_cache) self.assertTrue(s.get(url).from_cache) def test_str_and_repr(self): s = repr(CachedSession(CACHE_NAME, CACHE_BACKEND, expire_after=10)) self.assertIn(CACHE_NAME, s) self.assertIn("10", s) @mock.patch("requests_cache.core.datetime") def test_return_old_data_on_error(self, datetime_mock): datetime_mock.utcnow.return_value = datetime.utcnow() expire_after = 100 url = httpbin("get") s = CachedSession(CACHE_NAME, CACHE_BACKEND, old_data_on_error=True, expire_after=expire_after) header = "X-Tst" def get(n): return s.get(url, headers={header: n}).json()["headers"][header] get("expired") self.assertEquals(get("2"), "expired") datetime_mock.utcnow.return_value = datetime.utcnow() + timedelta(seconds=expire_after * 2) with mock.patch.object(s.cache, "save_response", side_effect=Exception): self.assertEquals(get("3"), "expired") with mock.patch("requests_cache.core.OriginalSession.send") as send_mock: resp_mock = requests.Response() request = requests.Request("GET", url) resp_mock.request = request.prepare() resp_mock.status_code = 400 resp_mock._content = '{"other": "content"}' send_mock.return_value = resp_mock self.assertEquals(get("3"), "expired") resp_mock.status_code = 200 self.assertIs(s.get(url).content, resp_mock.content) # default behaviour datetime_mock.return_value = datetime.utcnow() + timedelta(seconds=expire_after * 2) s = CachedSession(CACHE_NAME, CACHE_BACKEND, old_data_on_error=False, expire_after=100) with mock.patch.object(s.cache, "save_response", side_effect=Exception): with self.assertRaises(Exception): s.get(url) def test_ignore_parameters_get(self): url = httpbin("get") ignored_param = "ignored" usual_param = "some" params = {ignored_param: "1", usual_param: "1"} s = CachedSession(CACHE_NAME, CACHE_BACKEND, ignored_parameters=[ignored_param]) r = s.get(url, 
params=params) self.assertIn(ignored_param, r.json()['args'].keys()) self.assertFalse(r.from_cache) self.assertTrue(s.get(url, params=params).from_cache) params[ignored_param] = "new" self.assertTrue(s.get(url, params=params).from_cache) params[usual_param] = "new" self.assertFalse(s.get(url, params=params).from_cache) def test_ignore_parameters_post(self): url = httpbin("post") ignored_param = "ignored" usual_param = "some" d = {ignored_param: "1", usual_param: "1"} # note: must be a tuple; a bare ('POST') is just the string 'POST' s = CachedSession(CACHE_NAME, CACHE_BACKEND, allowable_methods=('POST',), ignored_parameters=[ignored_param]) r = s.post(url, data=d) self.assertIn(ignored_param, r.json()['form'].keys()) self.assertFalse(r.from_cache) self.assertTrue(s.post(url, data=d).from_cache) d[ignored_param] = "new" self.assertTrue(s.post(url, data=d).from_cache) d[usual_param] = "new" self.assertFalse(s.post(url, data=d).from_cache) def test_ignore_parameters_post_json(self): url = httpbin("post") ignored_param = "ignored" usual_param = "some" d = {ignored_param: "1", usual_param: "1"} s = CachedSession(CACHE_NAME, CACHE_BACKEND, allowable_methods=('POST',), ignored_parameters=[ignored_param]) r = s.post(url, json=d) self.assertIn(ignored_param, json.loads(r.json()['data']).keys()) self.assertFalse(r.from_cache) self.assertTrue(s.post(url, json=d).from_cache) d[ignored_param] = "new" self.assertTrue(s.post(url, json=d).from_cache) d[usual_param] = "new" self.assertFalse(s.post(url, json=d).from_cache) def test_ignore_parameters_post_raw(self): url = httpbin("post") ignored_param = "ignored" raw_data = "raw test data" s = CachedSession(CACHE_NAME, CACHE_BACKEND, allowable_methods=('POST',), ignored_parameters=[ignored_param]) self.assertFalse(s.post(url, data=raw_data).from_cache) self.assertTrue(s.post(url, data=raw_data).from_cache) raw_data = "new raw data" self.assertFalse(s.post(url, data=raw_data).from_cache) @mock.patch("requests_cache.backends.base.datetime") @mock.patch("requests_cache.core.datetime") def test_remove_expired_entries(self, datetime_mock, datetime_mock2): expire_after = timedelta(minutes=10) start_time = datetime.utcnow().replace(year=2010, minute=0) datetime_mock.utcnow.return_value = start_time datetime_mock2.utcnow.return_value = start_time s = CachedSession(CACHE_NAME, CACHE_BACKEND, expire_after=expire_after) s.get(httpbin('get')) s.get(httpbin('relative-redirect/3')) datetime_mock.utcnow.return_value = start_time + expire_after * 2 datetime_mock2.utcnow.return_value = datetime_mock.utcnow.return_value ok_url = 'get?x=1' s.get(httpbin(ok_url)) self.assertEqual(len(s.cache.responses), 3) self.assertEqual(len(s.cache.keys_map), 3) s.remove_expired_responses() self.assertEqual(len(s.cache.responses), 1) self.assertEqual(len(s.cache.keys_map), 0) self.assertIn(ok_url, list(s.cache.responses.values())[0][0].url) if __name__ == '__main__': unittest.main() requests-cache-0.4.13/tests/__init__.py0000664000175000017500000000007613027167133022406 0ustar rharitonovrharitonov00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- #date: 08.04.12 requests-cache-0.4.13/tests/test_redisdict.py0000664000175000017500000000111013027167133023650 0ustar rharitonovrharitonov00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Path hack import os, sys sys.path.insert(0, os.path.abspath('..')) try: import unittest2 as unittest except ImportError: import unittest from tests.test_custom_dict import BaseCustomDictTestCase try: from requests_cache.backends.storage.redisdict import RedisDict except ImportError: print("Redis not installed")
else: class RedisDictTestCase(BaseCustomDictTestCase, unittest.TestCase): dict_class = RedisDict pickled_dict_class = RedisDict if __name__ == '__main__': unittest.main() requests-cache-0.4.13/tests/test_monkey_patch.py0000664000175000017500000000563213027167133024372 0ustar rharitonovrharitonov00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Path hack import os, sys sys.path.insert(0, os.path.abspath('..')) try: import unittest2 as unittest except ImportError: import unittest import requests from requests.sessions import Session as OriginalSession import requests_cache from requests_cache import CachedSession from requests_cache.backends import BaseCache CACHE_NAME = 'requests_cache_test' CACHE_BACKEND = 'sqlite' FAST_SAVE = False class MonkeyPatchTestCase(unittest.TestCase): def setUp(self): requests_cache.install_cache(name=CACHE_NAME, backend=CACHE_BACKEND) requests.Session().cache.clear() requests_cache.uninstall_cache() def test_install_uninstall(self): for _ in range(2): requests_cache.install_cache(name=CACHE_NAME, backend=CACHE_BACKEND) self.assertTrue(isinstance(requests.Session(), CachedSession)) self.assertTrue(isinstance(requests.sessions.Session(), CachedSession)) self.assertTrue(isinstance(requests.session(), CachedSession)) requests_cache.uninstall_cache() self.assertFalse(isinstance(requests.Session(), CachedSession)) self.assertFalse(isinstance(requests.sessions.Session(), CachedSession)) self.assertFalse(isinstance(requests.session(), CachedSession)) def test_requests_from_cache(self): requests_cache.install_cache(name=CACHE_NAME, backend=CACHE_BACKEND) r = requests.get('http://httpbin.org/get') self.assertFalse(r.from_cache) r = requests.get('http://httpbin.org/get') self.assertTrue(r.from_cache) def test_session_is_a_class_with_original_attributes(self): requests_cache.install_cache(name=CACHE_NAME, backend=CACHE_BACKEND) self.assertTrue(isinstance(requests.Session, type)) for attribute in dir(OriginalSession): self.assertTrue(hasattr(requests.Session, attribute)) self.assertTrue(isinstance(requests.Session(), CachedSession)) def test_inheritance_after_monkey_patch(self): requests_cache.install_cache(name=CACHE_NAME, backend=CACHE_BACKEND) class FooSession(requests.Session): __attrs__ = requests.Session.__attrs__ + ["new_one"] def __init__(self, param): self.param = param super(FooSession, self).__init__() s = FooSession(1) self.assertEquals(s.param, 1) self.assertIn("new_one", s.__attrs__) self.assertTrue(isinstance(s, CachedSession)) def test_passing_backend_instance_support(self): class MyCache(BaseCache): pass backend = MyCache() requests_cache.install_cache(name=CACHE_NAME, backend=backend) self.assertIs(requests.Session().cache, backend) session = CachedSession(backend=backend) self.assertIs(session.cache, backend) if __name__ == '__main__': unittest.main() requests-cache-0.4.13/tests/test_dbdict.py0000664000175000017500000000522113027167133023134 0ustar rharitonovrharitonov00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Path hack import os, sys sys.path.insert(0, os.path.abspath('..')) try: import unittest2 as unittest except ImportError: import unittest from threading import Thread from tests.test_custom_dict import BaseCustomDictTestCase from requests_cache.backends.storage.dbdict import DbDict, DbPickleDict class DbdictTestCase(BaseCustomDictTestCase, unittest.TestCase): def test_bulk_commit(self): d = DbDict(self.NAMESPACE, self.TABLES[0]) d.clear() n = 1000 with d.bulk_commit(): for i in range(n): d[i] = i 
self.assertEqual(list(d.keys()), list(range(n))) def test_switch_commit(self): d = DbDict(self.NAMESPACE) d.clear() d[1] = 1 d = DbDict(self.NAMESPACE) self.assertIn(1, d) d.can_commit = False d[2] = 2 d = DbDict(self.NAMESPACE) self.assertNotIn(2, d) self.assert_(d.can_commit) def test_fast_save(self): d1 = DbDict(self.NAMESPACE, fast_save=True) d2 = DbDict(self.NAMESPACE, self.TABLES[1], fast_save=True) d1.clear() n = 1000 for i in range(n): d1[i] = i d2[i * 2] = i # HACK if we will not sort, fast save can produce different order of records self.assertEqual(sorted(d1.keys()), list(range(n))) self.assertEqual(sorted(d2.values()), list(range(n))) def test_usage_with_threads(self): def do_test_for(d, n_threads=5): d.clear() fails = [] def do_inserts(values): try: for v in values: d[v] = v except Exception: fails.append(1) raise def values(x, n): return [i * x for i in range(n)] threads = [Thread(target=do_inserts, args=(values(i, n_threads),)) for i in range(n_threads)] for t in threads: t.start() for t in threads: t.join() self.assert_(not fails) for i in range(n_threads): for x in values(i, n_threads): self.assertEqual(d[x], x) do_test_for(DbDict(self.NAMESPACE, fast_save=True), 20) do_test_for(DbPickleDict(self.NAMESPACE, fast_save=True), 10) d1 = DbDict(self.NAMESPACE, fast_save=True) d2 = DbDict(self.NAMESPACE, self.TABLES[1], fast_save=True) do_test_for(d1) do_test_for(d2) do_test_for(DbDict(self.NAMESPACE)) if __name__ == '__main__': unittest.main() requests-cache-0.4.13/tests/test_custom_dict.py0000664000175000017500000000554013027167133024224 0ustar rharitonovrharitonov00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Path hack import os, sys sys.path.insert(0, os.path.abspath('..')) from requests_cache.backends.storage.dbdict import DbDict, DbPickleDict class BaseCustomDictTestCase(object): dict_class = DbDict pickled_dict_class = DbPickleDict NAMESPACE = 'requests-cache-temporary-db-test-will-be-deleted' TABLES = ['table%s' % i for i in range(5)] def tearDown(self): if self.dict_class is DbDict: try: os.unlink(self.NAMESPACE) except: pass return for table in self.TABLES: d = self.dict_class(self.NAMESPACE, table) d.clear() super(BaseCustomDictTestCase, self).tearDown() def test_set_get(self): d1 = self.dict_class(self.NAMESPACE, self.TABLES[0]) d2 = self.dict_class(self.NAMESPACE, self.TABLES[1]) d3 = self.dict_class(self.NAMESPACE, self.TABLES[2]) d1[1] = 1 d2[2] = 2 d3[3] = 3 self.assertEqual(list(d1.keys()), [1]) self.assertEqual(list(d2.keys()), [2]) self.assertEqual(list(d3.keys()), [3]) with self.assertRaises(KeyError): a = d1[4] def test_str(self): d = self.dict_class(self.NAMESPACE) d.clear() d[1] = 1 d[2] = 2 self.assertEqual(str(d), '{1: 1, 2: 2}') def test_del(self): d = self.dict_class(self.NAMESPACE) d.clear() for i in range(5): d[i] = i del d[0] del d[1] del d[2] self.assertEqual(list(d.keys()), list(range(3, 5))) self.assertEqual(list(d.values()), list(range(3, 5))) with self.assertRaises(KeyError): del d[0] def test_picklable_dict(self): d = self.pickled_dict_class(self.NAMESPACE) d[1] = ForPickle() d = self.pickled_dict_class(self.NAMESPACE) self.assertEqual(d[1].a, 1) self.assertEqual(d[1].b, 2) def test_clear_and_work_again(self): d = self.dict_class(self.NAMESPACE) for _ in range(3): d.clear() d.clear() self.assertEqual(len(d), 0) n = 5 for i in range(n): d[i] = i * 2 self.assertEqual(len(d), n) self.assertEqual(d[2], 4) d.clear() self.assertEqual(len(d), 0) def test_same_settings(self): d1 = self.dict_class(self.NAMESPACE) d2 = 
self.dict_class(self.NAMESPACE, connection=d1.connection) d1.clear() d2.clear() d1[1] = 1 d2[2] = 2 self.assertEqual(d1, d2) def test_len(self): n = 5 d = self.dict_class(self.NAMESPACE) d.clear() for i in range(n): d[i] = i self.assertEqual(len(d), 5) class ForPickle(object): a = 1 b = 2 requests-cache-0.4.13/tests/test_mongodict.py0000664000175000017500000000114113027167133023663 0ustar rharitonovrharitonov00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Path hack import os, sys sys.path.insert(0, os.path.abspath('..')) try: import unittest2 as unittest except ImportError: import unittest from tests.test_custom_dict import BaseCustomDictTestCase try: from requests_cache.backends.storage.mongodict import MongoDict, MongoPickleDict except ImportError: print("pymongo not installed") else: class MongoDictTestCase(BaseCustomDictTestCase, unittest.TestCase): dict_class = MongoDict pickled_dict_class = MongoPickleDict if __name__ == '__main__': unittest.main()
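# Illustrative sketch (not part of the original source): what these storage
# tests rely on -- a backend is just a pair of dict-like mappings, so extending
# BaseCache is enough to customize behaviour. CountingCache is hypothetical:
#
#     from requests_cache import CachedSession
#     from requests_cache.backends.base import BaseCache
#
#     class CountingCache(BaseCache):
#         """In-memory backend that counts how many responses were saved."""
#         def __init__(self, **options):
#             super(CountingCache, self).__init__(**options)
#             self.saves = 0
#         def save_response(self, key, response):
#             self.saves += 1
#             super(CountingCache, self).save_response(key, response)
#
#     session = CachedSession(backend=CountingCache())  # backend instances are accepted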