==> oslo.cache-3.7.0/.coveragerc <==
[run]
branch = True
source = oslo_cache
omit = oslo_cache/tests/*

[report]
ignore_errors = True

==> oslo.cache-3.7.0/.mailmap <==
# Format is:
# <preferred e-mail> <other e-mail 1>
# <preferred e-mail> <other e-mail 2>

==> oslo.cache-3.7.0/.pre-commit-config.yaml <==
repos:
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v4.5.0
    hooks:
      - id: trailing-whitespace
      # Replaces or checks mixed line ending
      - id: mixed-line-ending
        args: ['--fix', 'lf']
        exclude: '.*\.(svg)$'
      # Forbid files which have a UTF-8 byte-order marker
      - id: check-byte-order-marker
      # Checks that non-binary executables have a proper shebang
      - id: check-executables-have-shebangs
      # Check for files that contain merge conflict strings.
      - id: check-merge-conflict
      # Check for debugger imports and py37+ breakpoint()
      # calls in python source
      - id: debug-statements
      - id: check-yaml
        files: .*\.(yaml|yml)$
  - repo: https://opendev.org/openstack/hacking
    rev: 6.1.0
    hooks:
      - id: hacking
        additional_dependencies: []
  - repo: https://github.com/PyCQA/bandit
    rev: 1.7.6
    hooks:
      - id: bandit
        args: ['-x', 'tests']

==> oslo.cache-3.7.0/.stestr.conf <==
[DEFAULT]
test_path=${STESTR_TEST_PATH:-./oslo_cache/tests/unit}

==> oslo.cache-3.7.0/.zuul.yaml <==
## Functional Tests env vars
#
# PIFPAF_DAEMON:
#   The binary to be installed by bindep filtered with
#   [tests-functional-{PIFPAF_DAEMON}] and executed by pifpaf.
#
# OSLO_BACKEND:
#   The functional test directory to pass to STESTR_TEST_PATH

- job:
    name: oslo.cache-functional
    parent: openstack-tox
    abstract: true
    pre-run: playbooks/tests/functional/pre.yml
    vars:
      tox_envlist: functional

- job:
    name: oslo.cache-functional-etcd3gw
    parent: oslo.cache-functional
    vars:
      tox_environment:
        PIFPAF_DAEMON: etcd
        OSLO_BACKEND: etcd3gw

- job:
    name: oslo.cache-functional-memcached
    parent: oslo.cache-functional
    vars:
      tox_environment:
        PIFPAF_DAEMON: memcached

- job:
    name: oslo.cache-functional-dogpile.cache.bmemcached
    parent: oslo.cache-functional-memcached
    vars:
      tox_environment:
        OSLO_BACKEND: dogpile_cache_bmemcached

- job:
    name: oslo.cache-functional-dogpile.cache.pymemcache
    parent: oslo.cache-functional-memcached
    vars:
      tox_environment:
        OSLO_BACKEND: dogpile_cache_pymemcache

- job:
    name: oslo.cache-functional-memcache_pool
    parent: oslo.cache-functional-memcached
    vars:
      tox_environment:
        OSLO_BACKEND: memcache_pool

- job:
    name: oslo.cache-functional-redis
    parent: oslo.cache-functional
    vars:
      tox_environment:
        PIFPAF_DAEMON: redis

- job:
    name: oslo.cache-functional-dogpile.cache.redis
    parent: oslo.cache-functional-redis
    vars:
      tox_environment:
        OSLO_BACKEND: dogpile_cache_redis

- job:
    name: oslo.cache-functional-dogpile.cache.redis_sentinel
    parent: oslo.cache-functional-redis
    vars:
      tox_environment:
        OSLO_BACKEND: dogpile_cache_redis_sentinel
        PIFPAF_OPTS: --sentinel

- project:
    templates:
      - check-requirements
      - lib-forward-testing-python3
      - openstack-python3-jobs
      - periodic-stable-jobs
      - publish-openstack-docs-pti
      - release-notes-jobs-python3
    check:
      jobs:
        - oslo.cache-functional-etcd3gw
        - oslo.cache-functional-dogpile.cache.bmemcached
        - oslo.cache-functional-dogpile.cache.pymemcache
        - oslo.cache-functional-memcache_pool
        - oslo.cache-functional-dogpile.cache.redis
        - oslo.cache-functional-dogpile.cache.redis_sentinel

==> oslo.cache-3.7.0/AUTHORS <==
Adam
Alex Gaynor
Alexander Makarov
Andreas Jaeger
Andreas Jaeger
Andrii Ostapenko
Anh Tran
Arun Kant
Ben Nemec
Boris Bobrov
Boris Bobrov
Brant Knudson
ChangBo Guo(gcb)
Corey Bryant
Damian Dabrowski
Daniel Bengtsson
Davanum Srinivas
Dave Chen
David Stanek
Dolph Mathews
Dong Ma
Doug Hellmann
Dougal Matthews
Eric Brown
Eric Guo
Eric Larson
Flavio Percoco
George Silvis, III
Ghanshyam
Ghanshyam Mann
Hervé Beraud
Ilya Pekelny
James E. Blair
Jamie Lennox
Jamie Lennox
Jeremy Stanley
Jonathan Rosser
Jordan Pittier
Joshua Harlow
Joshua Harlow
Joshua Harlow
Juan Antonio Osorio
Kenneth Giusti
Khaled Hussein
Kirill Bespalov
Lance Bragstad
Lars Butler
Luong Anh Tuan
Matt Riedemann
Matthew Thode
Michal Arbet
Mitya_Eremeev
Moisés Guimarães de Medeiros
Monty Taylor
Morgan Fainberg
Morgan Fainberg
Nguyen Hai
OpenStack Release Bot
Pavel Kholkin
Pierre Riteau
Radosław Piliszek
Rodolfo Alonso Hernandez
Sai Krishna
Sean McGinnis
Sean McGinnis
Sergey Nikitin
Stephen Finucane
Steve Martinelli
Swapnil Kulkarni (coolsvap)
Takashi Kajinami
Takashi Kajinami
Thomas Goirand
Tony Breeds
Tovin Seven
Vieri <15050873171@163.com>
Vincent Untz
Vu Cong Tuan
Yuriy Taraday
ZhongShengping
avnish
caoyuan
dengzhaosen
jacky06
kairat_kushaev
lingyongxu
loooosy
melissaml
ricolin
root
songwenping
termie
tomas
wangqi
wangxiyuan
yangyawei

==> oslo.cache-3.7.0/CONTRIBUTING.rst <==
If you would like to contribute to the development of oslo's libraries,
first you must take a look at this page:

   https://specs.openstack.org/openstack/oslo-specs/specs/policy/contributing.html

If you would like to contribute to the development of OpenStack, you must
follow the steps on this page:

   https://docs.openstack.org/infra/manual/developers.html

Once those steps have been completed, changes to OpenStack should be
submitted for review via the Gerrit tool, following the workflow documented
at:

   https://docs.openstack.org/infra/manual/developers.html#development-workflow

Pull requests submitted through GitHub will be ignored.

Bugs should be filed on Launchpad, not GitHub:

   https://bugs.launchpad.net/oslo.cache

==> oslo.cache-3.7.0/ChangeLog <==
CHANGES
=======

3.7.0
-----

* Fix incomplete exception message
* Prevent potential ReDoS attack
* config: Document backends supporting specific options
* Ensure requirements are capped by upper constraints
* Display coverage report
* reno: Update master for unmaintained/yoga
* redis: Add functional tests
* Add support for Redis Sentinel backend
* Add native options for redis backend
* memcache: Remove "default" username and password
* Automate TLS certificates settings for redis backend
* Fail if tls\_enabled is True but backend does not support it
* Fix minor typos in parameter help texts
* pre-commit: Integrate bandit
* pre-commit: Bump versions
* Bump hacking (again)
* Add a new option to enforce the OpenSSL FIPS mode
* Bump hacking
* Update python classifier in setup.cfg

3.6.0
-----

* Fix wrong path in coveragerc
* Clean-up memcache connection sockets
* Update master for stable/2023.2

3.5.0
-----

* Imported Translations from Zanata
* Bump bandit
* Imported Translations from Zanata

3.4.0
-----

* Revert "Moves supported python runtimes from version 3.8 to 3.10"
* Moves supported python runtimes from version 3.8 to 3.10
* [cache] memcache\_password should be secret
* Do not mark hosts as alive when they are all dead
* Update master for stable/2023.1
* Allow bmemcached to be optional for memcache\_pool
* Imported Translations from Zanata

3.3.1
-----

* Fix issues related to tox4
* Cleanup py27 support

3.3.0
-----

* Imported Translations from Zanata
* Add Python3 antelope unit tests
* Update master for stable/zed
* Add bmemcached as optional dependency

3.1.0
-----

3.0.0
-----

* Imported Translations from Zanata
* Drop python3.6/3.7 support in testing runtime
* Support SASL protocol for memcached

2.11.0
------

* Remove unnecessary unicode prefixes
* Fix description of memcache\_servers
* Add Python3 zed unit tests
* Update master for stable/yoga

2.10.1
------

* Use the right backend for TLS tests
* Add Python 3 only classifier

2.10.0
------

* Expose pymemcache's HashClient public params
* Add retry mechanisms to oslo.cache
* Add socket keepalive options to oslo.cache

2.9.0
-----

* Update python testing classifier
* Drop pymongo useless and removed attribute
* Add Python3 yoga unit tests
* Update master for stable/xena
* fix typo

2.8.1
-----

* Don't import module "etcd3gw" if etcd3gw backend is not used

2.8.0
-----

* setup.cfg: Replace dashes with underscores
* Move flake8 as a pre-commit local target
* Add Python3 xena unit tests
* Update master for stable/wallaby
* Adding technical details about the memcache pool backend
* Add bug comment and fix nits
* Dropping lower constraints testing

2.7.0
-----

* Drop use of deprecated collections classes
* Add dogpile.cache.pymemcache backend
* Use py3 as the default runtime for tox
* Do not hardcode flush\_on\_reconnect, move to oslo.cache config
* Dropping lower constraints testing
* Imported Translations from Zanata
* Add Python3 wallaby unit tests
* Update master for stable/victoria

2.6.1
-----

* Adding functional tests for memcache\_pool backend
* Add Memcached functional tests to gate

2.6.0
-----

* Add python-binary-memcached to support SASL
* Drop hard choice on py38 for functional tests
* Bump dogpile.cache's version for Memcached TLS support
* Add pre-commit to pep8 gate
* Apply fixes for pre-commit compliance in all code
* Uses commit hash instead of version tags in pre-commit
* Removes pre-commit hooks from Lucas-C
* Bump bandit version
* Introducing usage of pre-commit

2.5.0
-----

* Update lower-constraints versions
* Add TLS context creation from config
* Add TLS options to oslo.cache
* Fix pygments style

2.4.1
-----

* Stop to use the \_\_future\_\_ module

2.4.0
-----

* Refactoring functional tests based on standard usages
* Switch to newer openstackdocstheme and reno versions
* Remove the unused coding style modules
* Fix pep8 failure
* Adding functional tests
* Small cleanups in tox.ini
* Remove unneeded babel.cfg etc
* Remove six
* Switch to Victoria tests
* Align contributing doc with oslo's policy
* [doc] tell to prefix ipv6 hosts with inet6
* Add release notes links to doc index
* Imported Translations from Zanata
* Update master for stable/ussuri

2.3.0
-----

* Use unittest.mock instead of third party mock
* Update hacking for Python3

2.2.0
-----

* Add memcache\_pool backend job
* Revert "Switch from python-memcached to pymemcache."
* Remove dogpile kwarg test
* Fix python2 syntaxe in doc
* Fix pymemcache release note python package name

2.1.0
-----

* Switch from python-memcached to pymemcache
* trivial: Cleanup tox.ini
* remove outdated header

2.0.0
-----

* [ussuri][goal] Drop python 2.7 support and testing
* Ignore reno artifacts
* tox: Trivial cleanup

1.38.1
------

* Imported Translations from Zanata

1.38.0
------

* tox: Keeping going with docs
* Update master for stable/train
* Update memcached configuration help text

1.37.0
------

* Lower the memcache\_socket\_timeout to one second
* Add Python 3 Train unit tests

1.36.0
------

* Pass server\_max\_value\_length to memcached client

1.35.0
------

* Cap Bandit below 1.6.0 and update Sphinx requirement
* Replace git.openstack.org URLs with opendev.org URLs
* tox: Use the new hotness for constraints
* Avoid tox\_install.sh for constraints support
* Fix lower-constraints

1.34.0
------

* OpenDev Migration Patch
* Dropping the py35 testing
* Pass \`flush\_on\_reconnect\` to memcache pooled backend
* Update master for stable/stein
* Update hacking version
* add python 3.7 unit test job

1.33.2
------

* Fix memcache pool client in monkey-patched environments

1.33.1
------

* fix dogpile.cache issue due to the new usage of decorate in the lib
* Remove stale comment in \_memcache\_pool

1.33.0
------

* uncap dogpile

1.32.0
------

* Update the requirements to match global-requirements for dogpile.cache
* Use template for lower-constraints

1.31.2
------

* Update mailinglist from dev to discuss
* Only split url arg with memcache backend

1.31.1
------

* Config dict arguments.url must be a list
* Clean up .gitignore references to personal tools
* Don't quote {posargs} in tox.ini
* Always build universal wheels

1.31.0
------

* Imported Translations from Zanata
* Drop expired connections before retrieving from the queue
* add lib-forward-testing-python3 test job
* add python 3.6 unit test job
* import zuul job settings from project-config
* Update reno for stable/rocky
* tox: Fix typo
* Switch to stestr
* Add release notes link to README
* fix tox python3 overrides

1.30.1
------

* Imported Translations from Zanata
* Don't let tox\_install.sh error if there is nothing to do
* Remove stale pip-missing-reqs tox test
* Trivial: Update pypi url to new url
* Avoid use of autodoc\_tree\_index\_modules
* Switch pep8 job to python 3
* Fix incompatible requirement in lower constraints

1.30.0
------

* Change memcache\_socket\_timeout to a float
* add lower-constraints job
* Clean old output before new doc builds
* Updated from global requirements

1.29.0
------

* Update links in README
* Imported Translations from Zanata
* Imported Translations from Zanata
* Update reno for stable/queens
* Updated from global requirements
* Updated from global requirements

1.28.0
------

* Follow the new PTI for document build
* add bandit to pep8 job
* Updated from global requirements

1.27.0
------

* Remove setting of version/release from releasenotes
* Updated from global requirements
* Add more cache backend options
* Updated from global requirements
* Imported Translations from Zanata
* Imported Translations from Zanata
* Updated from global requirements
* Updated from global requirements

1.26.0
------

* Updated from global requirements
* Update the documentation link for doc migration
* Imported Translations from Zanata
* Update reno for stable/pike
* Updated from global requirements

1.25.0
------

* Imported Translations from Zanata
* update the doc URLs

1.24.0
------

* turn on warning-is-error in doc build
* switch from oslosphinx to openstackdocstheme
* rearrange content to fit the new standard layout
* Change locations of docs for intersphinx
* Improve backend configuration help text
* Add kwarg function key generator
* Fix error in cache.configure\_cache\_region usage doc
* Updated from global requirements

1.23.0
------

* Updated from global requirements
* Updated from global requirements
* etcd3 : get rid of ast\_literal

1.22.0
------

* etcd3 driver for oslo.cache
* Updated from global requirements
* Updated from global requirements

1.21.0
------

* Remove unused/duplicated logging import
* Optimize the link address
* Updated from global requirements
* Check reStructuredText documents for common style issues

1.20.0
------

* Remove log translations

1.19.0
------

* Updated from global requirements
* Replaces uuid.uuid4 with uuidutils.generate\_uuid()

1.18.0
------

* Updated from global requirements
* [Fix gate]Update test requirement
* Updated from global requirements
* pbr.version.VersionInfo needs package name (oslo.xyz and not oslo\_xyz)
* Update reno for stable/ocata
* Switch to oslo\_log

1.17.0
------

* Remove references to Python 3.4
* Add Constraints support
* Show team and repo badges on README

1.16.0
------

* Updated from global requirements
* Updated from global requirements
* Imported Translations from Zanata
* TrivialFix: Remove default=None when set value in Config
* Changed the home-page link

1.15.0
------

* Enable release notes translation
* Updated from global requirements
* Updated from global requirements
* Update reno for stable/newton
* Updated from global requirements

1.14.0
------

* Add usage example to documentation
* Fix docstring for get\_memoization\_decorator
* Correct help text for backend option

1.13.0
------

* Updated from global requirements

1.12.0
------

* Updated from global requirements
* Fix OpenStack capitalization
* Add Python 3.5 classifier and venv
* Imported Translations from Zanata
* Updated from global requirements

1.11.0
------

* Updated from global requirements

1.10.0
------

* Handle empty memcache pool corner case
* Updated from global requirements
* Imported Translations from Zanata
* Fixup intersphinx docs
* Use entrypoints for backends

1.9.0
-----

* Updated from global requirements
* Add reno for releasenotes management

1.8.0
-----

* Trivial: ignore openstack/common in flake8 exclude list

1.7.0
-----

* Imported Translations from Zanata
* Remove direct dependency on babel
* Imported Translations from Zanata
* Updated from global requirements
* If caching is globally disabled force dogpile to use the null backend
* Updated from global requirements
* Updated from global requirements

1.5.0
-----

* Updated from global requirements
* Updated from global requirements

1.4.0
-----

* Updated from global requirements

1.3.0
-----

* Move some of these noisy logs to trace level
* Revert "Use assertTrue/False instead of assertEqual(T/F)"
* Update translation setup
* Updated from global requirements
* Updated from global requirements
* Updated from global requirements
* Updated from global requirements
* Delete python bytecode before every test run

1.2.0
-----

* Use assertTrue/False instead of assertEqual(T/F)
* Updated from global requirements
* Replace deprecated LOG.warn with LOG.warning
* Updated from global requirements

1.1.0
-----

* Updated from global requirements
* Updated from global requirements
* Trival: Remove 'MANIFEST.in'

1.0.0
-----

* Updated from global requirements
* Remove Python 2.6 classifier
* Remove python 2.6 and cleanup tox.ini
* Updated from global requirements
* Updated from global requirements
* Updated from global requirements

0.9.0
-----

* Updated from global requirements
* Export functions from \_\_init\_\_

0.8.0
-----

* Make CacheIsolatingProxy public
* Add shields.io version/downloads links/badges into README.rst
* add auto-generated docs for config options
* Change ignore-errors to ignore\_errors
* Updated from global requirements

0.7.0
-----

* Updated from global requirements
* Implement get/set/delete multiple keys for dictionary backend

0.6.0
-----

* Move optional backend dependencies into setup.cfg [extras]
* Ignore the .eggs/ directory
* Remove remnants of noop backend registration
* Updated from global requirements
* Updated from global requirements
* Setup translations
* flake8 - remove unused rules and fix imports
* Updated from global requirements

0.5.0
-----

* Updated from global requirements
* Updated from global requirements
* Remove NoOp cache from oslo.cache

0.4.0
-----

* Added NO\_VALUE to core file
* Updated from global requirements
* Updated from global requirements
* Updated from global requirements
* Fix some reminders of 'keystone' in oslo.cache
* Updated from global requirements

0.3.0
-----

* Updated from global requirements
* Fixes up the API docs and module index
* Region handling
* Don't register backends on import
* Changed some comments and docs in DictCacheBackend
* Implemented DictCacheBackend
* Fixed typos

0.2.0
-----

* Usage docs
* Document the backends
* Remove unused \_on\_arguments
* Fix up option help
* Register backends in a cycle
* Use "group" rather than "section"
* swap the order of conf for a function
* Don't use global conf
* Make DebugProxy private
* make REGION public
* Fix docstring for configure\_cache\_region
* Remove useless docs from private functions
* Fix docs for get\_memoization\_decorator
* Docs for configure()

0.1.2
-----

* Make symbols private
* Module docs
* Cleanup exceptions
* handle utf8 encoding properly
* Expose list\_opts entry points for oslo-config-generator
* Remove unnecessary file
* Add tests for \_memcache\_pool

0.1.1
-----

* remove non-cache related options
* Add oslo.i18n to requirements
* Add tox target to find missing requirements
* Update i18n domain to correct project name

0.1.0
-----

* Enable all commented out tests
* Updated from global requirements
* Move files into the right spot and get tests working
* fix branch name in .gitreview
* Updated from global requirements
* Updated from global requirements
* Remove unnecessary openstack-common.conf
* Remove empty script reference
* Fix name of oslotest base test case
* Get project basics configured
* Fixes use of dict methods for Python3
* Handles Python3 builtin changes
* Make memcache client reusable across threads
* Make memcache client reusable across threads
* Work with pymongo 3.0
* Initial commit from oslo cookiecutter template
* Work with pymongo 3.0
* Prefer . to setattr()/getattr()
* Address nits for default cache time more explicit
* Fix the wrong order of parameters when using assertEqual
* Fixes minor whitespace issues
* Make the default cache time more explicit in code
* Use dict comprehensions instead of dict constructor
* Consistently use oslo\_config.cfg.CONF
* Fix invalid super() usage in memcache pool
* Use oslo.log instead of incubator
* Move existing tests to unit
* Change oslo.utils to oslo\_utils
* Memcache connection pool excess check
* Adds missing log hints for level E/I/W
* Remove nonexistant param from docstring
* Use importutils from oslo.utils
* Address some late comments for memcache clients
* Add a pool of memcached clients
* Use functions in oslo.utils
* Introduce pragma no cover to asbtract classes
* Use oslo.i18n
* Do not support toggling key\_manglers in cache layer
* Adds hacking check for debug logging translations
* remove a few backslash line continuations
* Fixed some typos throughout the codebase
* Fix cache configuration checks
* Safer noqa handling
* Uses explicit imports for \_
* Support for mongo as dogpile cache backend
* Remove copyright from empty files
* Remove vim header
* Documentation cleanup
* Style improvements to logging format strings
* Handle unicode at the caching layer more elegantly
* Implement basic caching around assignment CRUD
* Implement caching for Tokens and Token Validation
* Add Memory Isolating Cache Proxy
* Implement Caching for Token Revocation List
* Keystone Caching Layer for Manager Calls
* initial
* Initial
* first commit

==> oslo.cache-3.7.0/HACKING.rst <==
Style Commandments
==================

Read the OpenStack Style Commandments https://docs.openstack.org/hacking/latest/

==> oslo.cache-3.7.0/LICENSE <==

                              Apache License
                        Version 2.0, January 2004
                     http://www.apache.org/licenses/

TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

1. Definitions.

"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document.

"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License.

"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity.

"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License.

"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files.

"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types.

"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below).

"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof.

"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution."

"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work.

2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form.

3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed.

4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions:

   (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and

   (b) You must cause any modified files to carry prominent notices stating that You changed the files; and

   (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and

   (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License.

   You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License.

5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions.

6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file.

7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License.

8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages.

9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability.
==> oslo.cache-3.7.0/PKG-INFO <==
Metadata-Version: 2.1
Name: oslo.cache
Version: 3.7.0
Summary: Cache storage for OpenStack projects.
Home-page: https://docs.openstack.org/oslo.cache/latest
Author: OpenStack
Author-email: openstack-discuss@lists.openstack.org
License: UNKNOWN
Description:
    ========================
    Team and repository tags
    ========================

    .. image:: https://governance.openstack.org/tc/badges/oslo.cache.svg
        :target: https://governance.openstack.org/tc/reference/tags/index.html

    .. Change things from this point on

    ==========
    oslo.cache
    ==========

    .. image:: https://img.shields.io/pypi/v/oslo.cache.svg
        :target: https://pypi.org/project/oslo.cache/
        :alt: Latest Version

    .. image:: https://img.shields.io/pypi/dm/oslo.cache.svg
        :target: https://pypi.org/project/oslo.cache/
        :alt: Downloads

    `oslo.cache` aims to provide a generic caching mechanism for OpenStack
    projects by wrapping the `dogpile.cache
    <https://dogpilecache.readthedocs.io/en/latest/>`_ library. The
    dogpile.cache library provides support for memoization, key value
    storage and interfaces to common caching backends such as
    `Memcached <https://memcached.org/>`_.

    * Free software: Apache license
    * Documentation: https://docs.openstack.org/oslo.cache/latest/
    * Source: https://opendev.org/openstack/oslo.cache/
    * Bugs: https://bugs.launchpad.net/oslo.cache
    * Release notes: https://docs.openstack.org/releasenotes/oslo.cache/

Platform: UNKNOWN
Classifier: Environment :: OpenStack
Classifier: Intended Audience :: Information Technology
Classifier: Intended Audience :: System Administrators
Classifier: License :: OSI Approved :: Apache Software License
Classifier: Operating System :: POSIX :: Linux
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 3 :: Only
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Requires-Python: >=3.8
Provides-Extra: dogpile
Provides-Extra: etcd3gw
Provides-Extra: mongo
Provides-Extra: test

==> oslo.cache-3.7.0/README.rst <==
========================
Team and repository tags
========================

.. image:: https://governance.openstack.org/tc/badges/oslo.cache.svg
    :target: https://governance.openstack.org/tc/reference/tags/index.html

.. Change things from this point on

==========
oslo.cache
==========

.. image:: https://img.shields.io/pypi/v/oslo.cache.svg
    :target: https://pypi.org/project/oslo.cache/
    :alt: Latest Version

.. image:: https://img.shields.io/pypi/dm/oslo.cache.svg
    :target: https://pypi.org/project/oslo.cache/
    :alt: Downloads

`oslo.cache` aims to provide a generic caching mechanism for OpenStack
projects by wrapping the `dogpile.cache
<https://dogpilecache.readthedocs.io/en/latest/>`_ library. The
dogpile.cache library provides support for memoization, key value storage
and interfaces to common caching backends such as
`Memcached <https://memcached.org/>`_.

* Free software: Apache license
* Documentation: https://docs.openstack.org/oslo.cache/latest/
* Source: https://opendev.org/openstack/oslo.cache/
* Bugs: https://bugs.launchpad.net/oslo.cache
* Release notes: https://docs.openstack.org/releasenotes/oslo.cache/

==> oslo.cache-3.7.0/bindep.txt <==
# This file contains runtime (non-python) dependencies
# More info at: https://docs.openstack.org/infra/bindep/readme.html

etcd [tests-functional-etcd]
memcached [tests-functional-memcached]
redis [platform:rpm tests-functional-redis]
redis-server [platform:dpkg tests-functional-redis]
redis-sentinel [platform:dpkg tests-functional-redis]

==> oslo.cache-3.7.0/doc/requirements.txt <==
# The order of packages is significant, because pip processes them in the order
# of appearance. Changing the order has an impact on the overall integration
# process, which may cause wedges in the gate later.

# For generating sphinx documentation
openstackdocstheme>=2.2.0 # Apache-2.0
sphinx>=2.0.0,!=2.1.0 # BSD
reno>=3.1.0 # Apache-2.0
sphinxcontrib-apidoc>=0.2.0 # BSD

# For autodoc builds
mock>=2.0.0 # BSD
oslotest>=3.2.0 # Apache-2.0
pymemcache>=3.5.0 # Apache-2.0
python-binary-memcached>=0.29.0 # MIT
python-memcached>=1.56 # PSF
etcd3gw>=0.2.0 # Apache-2.0
==> oslo.cache-3.7.0/doc/source/conf.py <==
# -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import sys

sys.path.insert(0, os.path.abspath('../..'))

# -- General configuration ---------------------------------------------------

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [
    'sphinxcontrib.apidoc',
    'sphinx.ext.intersphinx',
    'openstackdocstheme',
    'oslo_config.sphinxext',
]

# openstackdocstheme options
openstackdocs_repo_name = 'openstack/oslo.cache'
openstackdocs_bug_project = 'oslo.cache'
openstackdocs_bug_tag = ''

# sphinxcontrib.apidoc options
apidoc_module_dir = '../../oslo_cache'
apidoc_output_dir = 'reference/api'
apidoc_excluded_paths = [
    'tests',
]

# The suffix of source filenames.
source_suffix = '.rst'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = 'oslo.cache'
copyright = '2014, OpenStack Foundation'

# If true, '()' will be appended to :func: etc. cross-reference text.
add_function_parentheses = True

# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
add_module_names = True

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'native'

# A list of ignored prefixes for module index sorting.
modindex_common_prefix = ['oslo_cache.']

# -- Options for HTML output -------------------------------------------------

# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
# html_theme_path = ["."]
html_theme = 'openstackdocs'
# html_static_path = ['static']

# Output file base name for HTML help builder.
htmlhelp_basename = '%sdoc' % project

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass
# [howto/manual]).
latex_documents = [
    ('index',
     '%s.tex' % project,
     '%s Documentation' % project,
     'OpenStack Foundation', 'manual'),
]

intersphinx_mapping = {
    'python': ('https://docs.python.org/', None),
    'osloconfig': ('http://docs.openstack.org/oslo.config/latest/', None),
    'dogpilecache': ('https://dogpilecache.readthedocs.io/en/latest/', None),
}

==> oslo.cache-3.7.0/doc/source/configuration/index.rst <==
=====================
Configuration Options
=====================

oslo.cache uses oslo.config to define and manage configuration options
to allow the deployer to control how an application uses this library.

.. show-options:: oslo.cache

==> oslo.cache-3.7.0/doc/source/contributor/index.rst <==
============
Contributing
============

.. include:: ../../../CONTRIBUTING.rst

==> oslo.cache-3.7.0/doc/source/index.rst <==
==========
oslo.cache
==========

Cache storage for OpenStack projects.

Contents
========

.. toctree::
   :maxdepth: 2

   install/index
   contributor/index
   configuration/index
   user/index
   reference/index

Release Notes
=============

Read also the `oslo.cache Release Notes
<https://docs.openstack.org/releasenotes/oslo.cache/>`_.

Indices and tables
==================

* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`

==> oslo.cache-3.7.0/doc/source/install/index.rst <==
============
Installation
============

At the command line::

    $ pip install oslo.cache
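Several backends need optional client libraries. These can be pulled in
through the setuptools extras the package declares (``dogpile``,
``etcd3gw`` and ``mongo``, per ``Provides-Extra`` in PKG-INFO), for
example::

    $ pip install 'oslo.cache[dogpile]'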
==> oslo.cache-3.7.0/doc/source/reference/index.rst <==
.. _using:

=========
Reference
=========

.. toctree::
   :maxdepth: 2

   Modules <api/modules>

==> oslo.cache-3.7.0/doc/source/user/history.rst <==
.. include:: ../../../ChangeLog

==> oslo.cache-3.7.0/doc/source/user/index.rst <==
================
Using oslo.cache
================

.. toctree::
   :maxdepth: 2

   usage

.. toctree::
   :maxdepth: 1

   history

==> oslo.cache-3.7.0/doc/source/user/usage.rst <==
=====
Usage
=====

A simple example of oslo.cache in use::

    from oslo_cache import core as cache
    from oslo_config import cfg

    CONF = cfg.CONF

    caching = cfg.BoolOpt('caching', default=True)
    cache_time = cfg.IntOpt('cache_time', default=3600)

    CONF.register_opts([caching, cache_time], "feature-name")

    cache.configure(CONF)
    example_cache_region = cache.create_region()
    MEMOIZE = cache.get_memoization_decorator(
        CONF, example_cache_region, "feature-name")

    # Load config file here

    cache.configure_cache_region(CONF, example_cache_region)

    @MEMOIZE
    def f(x):
        print(x)
        return x

An example config file for this is::

    [cache]
    enabled = true
    backend = dogpile.cache.memory

    [feature-name]
    caching = True
    cache_time = 7200

There is some subtlety in the order of the calls in the example above. The
requirements are: ``configure`` must be done first; ``create_region`` must
come before both ``get_memoization_decorator`` and ``configure_cache_region``
(because they use its output); the config file must be fully loaded before
``configure_cache_region`` is called; and all of these calls must complete
before a decorated function is actually invoked. In principle, there are
several different orders this can happen in. In practice, the decorator will
be used at import time, and the config file will be loaded later, so the
above order is the only possible one.
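A region can also be used directly, without the memoization decorator. A
minimal sketch building on the example above (``compute_value`` is a
stand-in for your own function; ``NO_VALUE`` is the cache-miss marker
oslo.cache re-exports)::

    value = example_cache_region.get('key')
    if value is cache.NO_VALUE:
        # Miss (or the backend dropped the key): recompute and store.
        value = compute_value()
        example_cache_region.set('key', value)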
==> oslo.cache-3.7.0/oslo.cache.egg-info/PKG-INFO <==
(identical to oslo.cache-3.7.0/PKG-INFO above)

==> oslo.cache-3.7.0/oslo.cache.egg-info/SOURCES.txt <==
.coveragerc
.mailmap
.pre-commit-config.yaml
.stestr.conf
.zuul.yaml
AUTHORS
CONTRIBUTING.rst
ChangeLog
HACKING.rst
LICENSE
README.rst
bindep.txt
requirements.txt
setup.cfg
setup.py
test-requirements.txt
tox.ini
doc/requirements.txt
doc/source/conf.py
doc/source/index.rst
doc/source/configuration/index.rst
doc/source/contributor/index.rst
doc/source/install/index.rst
doc/source/reference/index.rst
doc/source/user/history.rst
doc/source/user/index.rst
doc/source/user/usage.rst
oslo.cache.egg-info/PKG-INFO
oslo.cache.egg-info/SOURCES.txt
oslo.cache.egg-info/dependency_links.txt
oslo.cache.egg-info/entry_points.txt
oslo.cache.egg-info/not-zip-safe
oslo.cache.egg-info/pbr.json
oslo.cache.egg-info/requires.txt
oslo.cache.egg-info/top_level.txt
oslo_cache/__init__.py
oslo_cache/_bmemcache_pool.py
oslo_cache/_i18n.py
oslo_cache/_memcache_pool.py
oslo_cache/_opts.py
oslo_cache/core.py
oslo_cache/exception.py
oslo_cache/testing.py
oslo_cache/version.py
oslo_cache/backends/__init__.py
oslo_cache/backends/dictionary.py
oslo_cache/backends/etcd3gw.py
oslo_cache/backends/memcache_pool.py
oslo_cache/backends/mongo.py
oslo_cache/locale/de/LC_MESSAGES/oslo_cache.po
oslo_cache/locale/en_GB/LC_MESSAGES/oslo_cache.po
oslo_cache/locale/es/LC_MESSAGES/oslo_cache.po
oslo_cache/locale/fr/LC_MESSAGES/oslo_cache.po
oslo_cache/locale/it/LC_MESSAGES/oslo_cache.po
oslo_cache/locale/ko_KR/LC_MESSAGES/oslo_cache.po
oslo_cache/locale/pt_BR/LC_MESSAGES/oslo_cache.po
oslo_cache/locale/ru/LC_MESSAGES/oslo_cache.po
oslo_cache/locale/tr_TR/LC_MESSAGES/oslo_cache.po
oslo_cache/locale/zh_CN/LC_MESSAGES/oslo_cache.po
oslo_cache/locale/zh_TW/LC_MESSAGES/oslo_cache.po
oslo_cache/tests/__init__.py
oslo_cache/tests/test_cache.py
oslo_cache/tests/functional/__init__.py
oslo_cache/tests/functional/test_base.py
oslo_cache/tests/functional/dogpile_cache_bmemcached/__init__.py
oslo_cache/tests/functional/dogpile_cache_bmemcached/test_cache_backend.py
oslo_cache/tests/functional/dogpile_cache_pymemcache/__init__.py
oslo_cache/tests/functional/dogpile_cache_pymemcache/test_cache_backend.py
oslo_cache/tests/functional/dogpile_cache_redis/__init__.py
oslo_cache/tests/functional/dogpile_cache_redis/test_cache_backend.py
oslo_cache/tests/functional/dogpile_cache_redis_sentinel/__init__.py
oslo_cache/tests/functional/dogpile_cache_redis_sentinel/test_cache_backend.py
oslo_cache/tests/functional/etcd3gw/__init__.py
oslo_cache/tests/functional/etcd3gw/test_cache_backend.py
oslo_cache/tests/functional/memcache_pool/__init__.py
oslo_cache/tests/functional/memcache_pool/test_cache_backend.py
oslo_cache/tests/unit/__init__.py
oslo_cache/tests/unit/test_cache_backend_mongo.py
oslo_cache/tests/unit/test_cache_basics.py
oslo_cache/tests/unit/test_connection_pool.py
oslo_cache/tests/unit/test_dict_backend.py
playbooks/tests/functional/Debian.yaml
playbooks/tests/functional/RedHat.yaml
playbooks/tests/functional/pre.yml
releasenotes/notes/add-dogpile.cache.pymemcache-backend-627d31a76013f8e1.yaml
releasenotes/notes/add_reno-3b4ae0789e9c45b4.yaml
releasenotes/notes/bug-1743036-320ed918d5fb4325.yaml
releasenotes/notes/bug-1819957-ccff6b0ec9d1cbf2.yaml
releasenotes/notes/bug-1888394-5a53e7a9cb25375b.yaml
releasenotes/notes/bug-1991250-23bc3463273e5a91.yaml
releasenotes/notes/drop-python-2-7-73d3113c69d724d6.yaml
releasenotes/notes/enable-sasl-protocol-46d11530b87e7832.yaml
releasenotes/notes/enforce_fips_mode-c3296a0cc1fb7ad9.yaml
releasenotes/notes/etcd3gw_driver-8ba4511ae9553a91.yaml
releasenotes/notes/fix-memcache-pool-backend-b9e6aaab08075d68.yaml
releasenotes/notes/lower_socket_timeout-ff5680a6be23bdb2.yaml
releasenotes/notes/memcache_socket_timeout-a7db772f052c107e.yaml
releasenotes/notes/pymemcache_hashclient_configure-f6f48c5ca38bce47.yaml
releasenotes/notes/pymemcache_retry_mecchanisms-fa969d1ac6f64096.yaml
releasenotes/notes/pymemcache_socket_keepalive-f91c69770961e2b6.yaml
releasenotes/notes/redis-backend-opts-27915f2b672512c9.yaml
releasenotes/notes/redis-sentinel-18ba4a0da83dabc7.yaml
releasenotes/notes/redis-ssl-ca14b4b99c2e5a84.yaml
releasenotes/notes/switch-from-python-memcached-to-pymemcache-566e70b224f92b73.yaml
releasenotes/source/2023.1.rst
releasenotes/source/2023.2.rst
releasenotes/source/conf.py
releasenotes/source/index.rst
releasenotes/source/newton.rst
releasenotes/source/ocata.rst
releasenotes/source/pike.rst
releasenotes/source/queens.rst
releasenotes/source/rocky.rst
releasenotes/source/stein.rst
releasenotes/source/train.rst
releasenotes/source/unreleased.rst
releasenotes/source/ussuri.rst
releasenotes/source/victoria.rst
releasenotes/source/wallaby.rst
releasenotes/source/xena.rst
releasenotes/source/yoga.rst
releasenotes/source/zed.rst
releasenotes/source/_static/.placeholder
releasenotes/source/_templates/.placeholder
releasenotes/source/locale/de/LC_MESSAGES/releasenotes.po
releasenotes/source/locale/en_GB/LC_MESSAGES/releasenotes.po
releasenotes/source/locale/fr/LC_MESSAGES/releasenotes.po
releasenotes/source/locale/ko_KR/LC_MESSAGES/releasenotes.po
tools/setup-etcd-env.sh

==> oslo.cache-3.7.0/oslo.cache.egg-info/dependency_links.txt <==
==> oslo.cache-3.7.0/oslo.cache.egg-info/entry_points.txt <==
[dogpile.cache]
oslo_cache.dict = oslo_cache.backends.dictionary:DictCacheBackend
oslo_cache.etcd3gw = oslo_cache.backends.etcd3gw:Etcd3gwCacheBackend
oslo_cache.memcache_pool = oslo_cache.backends.memcache_pool:PooledMemcachedBackend
oslo_cache.mongo = oslo_cache.backends.mongo:MongoCacheBackend

[oslo.config.opts]
oslo.cache = oslo_cache._opts:list_opts
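These [dogpile.cache] entry points are what let an application select a
backend by name in the [cache] section. A minimal sketch of that wiring,
configuring the options programmatically with oslo.config's set_override
rather than a config file (option names as in the usage documentation
above):

    from oslo_cache import core as cache
    from oslo_config import cfg

    CONF = cfg.CONF
    cache.configure(CONF)  # registers the [cache] options

    # 'oslo_cache.dict' resolves through the [dogpile.cache] entry point
    # above to oslo_cache.backends.dictionary:DictCacheBackend.
    CONF.set_override('enabled', True, group='cache')
    CONF.set_override('backend', 'oslo_cache.dict', group='cache')

    region = cache.create_region()
    cache.configure_cache_region(CONF, region)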
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Thread-safe connection pool for python-binary-memcached.""" try: import eventlet except ImportError: eventlet = None import bmemcached from oslo_cache._memcache_pool import MemcacheClientPool from oslo_log import log LOG = log.getLogger(__name__) class _BMemcacheClient(bmemcached.Client): """Thread global memcache client As client is inherited from threading.local we have to restore object methods overloaded by threading.local so we can reuse clients in different threads """ __delattr__ = object.__delattr__ __getattribute__ = object.__getattribute__ __setattr__ = object.__setattr__ # Hack for lp 1812935 if eventlet and eventlet.patcher.is_monkey_patched('thread'): # NOTE(bnemec): I'm not entirely sure why this works in a # monkey-patched environment and not with vanilla stdlib, but it does. def __new__(cls, *args, **kwargs): return object.__new__(cls) else: __new__ = object.__new__ def __del__(self): pass class BMemcacheClientPool(MemcacheClientPool): def __init__(self, urls, arguments, **kwargs): MemcacheClientPool.__init__(self, urls, arguments, **kwargs) self._arguments = { 'username': arguments.get('username', None), 'password': arguments.get('password', None), 'tls_context': arguments.get('tls_context', None), } def _create_connection(self): return _BMemcacheClient(self.urls, **self._arguments) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0 oslo.cache-3.7.0/oslo_cache/_i18n.py0000664000175000017500000000152300000000000017152 0ustar00zuulzuul00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """oslo.i18n integration module. See https://docs.openstack.org/oslo.i18n/latest/user/index.html """ import oslo_i18n _translators = oslo_i18n.TranslatorFactory(domain='oslo_cache') # The primary translation function using the well-known name "_" _ = _translators.primary ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0 oslo.cache-3.7.0/oslo_cache/_memcache_pool.py0000664000175000017500000002462600000000000021177 0ustar00zuulzuul00000000000000# Copyright 2014 Mirantis Inc # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the # License for the specific language governing permissions and limitations # under the License. """Thread-safe connection pool for python-memcached.""" import collections import contextlib import itertools import queue import threading import time try: import eventlet except ImportError: eventlet = None import memcache from oslo_log import log from oslo_cache._i18n import _ from oslo_cache import exception LOG = log.getLogger(__name__) class _MemcacheClient(memcache.Client): """Thread global memcache client As client is inherited from threading.local we have to restore object methods overloaded by threading.local so we can reuse clients in different threads """ __delattr__ = object.__delattr__ __getattribute__ = object.__getattribute__ __setattr__ = object.__setattr__ # Hack for lp 1812935 if eventlet and eventlet.patcher.is_monkey_patched('thread'): # NOTE(bnemec): I'm not entirely sure why this works in a # monkey-patched environment and not with vanilla stdlib, but it does. def __new__(cls, *args, **kwargs): return object.__new__(cls) else: __new__ = object.__new__ def __del__(self): pass _PoolItem = collections.namedtuple('_PoolItem', ['ttl', 'connection']) class ConnectionPool(queue.Queue): """Base connection pool class This class implements the basic connection pool logic as an abstract base class. """ def __init__(self, maxsize, unused_timeout, conn_get_timeout=None): """Initialize the connection pool. :param maxsize: maximum number of client connections for the pool :type maxsize: int :param unused_timeout: idle time to live for unused clients (in seconds). If a client connection object has been in the pool and idle for longer than the unused_timeout, it will be reaped. This is to ensure resources are released as utilization goes down. :type unused_timeout: int :param conn_get_timeout: maximum time in seconds to wait for a connection. If set to `None` timeout is indefinite. :type conn_get_timeout: int """ # super() cannot be used here because Queue in stdlib is an # old-style class queue.Queue.__init__(self, maxsize) self._unused_timeout = unused_timeout self._connection_get_timeout = conn_get_timeout self._acquired = 0 def __del__(self): """Delete the connection pool. Destroy all connections left in the queue. """ while True: # As per https://docs.python.org/3/library/collections.html # self.queue.pop() will raise IndexError when no elements are # present, ending the while True: loop. # The logic loops over all connections in the queue, but it does # not retry a connection whose closure fails; it leaves that one # and processes the next. try: conn = self.queue.pop().connection self._destroy_connection(conn) except IndexError: break except Exception as e: self._do_log( LOG.warning, "Unable to cleanup a connection: %s", e) def _create_connection(self): """Returns a connection instance. This is called when the pool needs another instance created. :returns: a new connection instance """ raise NotImplementedError def _destroy_connection(self, conn): """Destroy and cleanup a connection instance. This is called when the pool wishes to get rid of an existing connection. This is the opportunity for a subclass to free up resources and cleanup after itself.
:param conn: the connection object to destroy """ raise NotImplementedError def _do_log(self, level, msg, *args, **kwargs): if LOG.isEnabledFor(level): thread_id = threading.current_thread().ident args = (id(self), thread_id) + args prefix = 'Memcached pool %s, thread %s: ' LOG.log(level, prefix + msg, *args, **kwargs) def _debug_logger(self, msg, *args, **kwargs): self._do_log(log.DEBUG, msg, *args, **kwargs) def _trace_logger(self, msg, *args, **kwargs): self._do_log(log.TRACE, msg, *args, **kwargs) @contextlib.contextmanager def acquire(self): self._trace_logger('Acquiring connection') self._drop_expired_connections() try: conn = self.get(timeout=self._connection_get_timeout) except queue.Empty: raise exception.QueueEmpty( _('Unable to get a connection from pool id %(id)s after ' '%(seconds)s seconds.') % {'id': id(self), 'seconds': self._connection_get_timeout}) self._trace_logger('Acquired connection %s', id(conn)) try: yield conn finally: self._trace_logger('Releasing connection %s', id(conn)) try: # super() cannot be used here because Queue in stdlib is an # old-style class queue.Queue.put(self, conn, block=False) except queue.Full: self._trace_logger('Reaping exceeding connection %s', id(conn)) self._destroy_connection(conn) def _qsize(self): if self.maxsize: return self.maxsize - self._acquired else: # A value indicating there is always a free connection # if maxsize is None or 0 return 1 # NOTE(dstanek): stdlib and eventlet Queue implementations # have different names for the qsize method. This ensures # that we override both of them. if not hasattr(queue.Queue, '_qsize'): qsize = _qsize def _get(self): try: conn = self.queue.pop().connection except IndexError: conn = self._create_connection() self._acquired += 1 return conn def _drop_expired_connections(self): """Drop all expired connections from the left end of the queue.""" now = time.time() try: while self.queue[0].ttl < now: conn = self.queue.popleft().connection self._trace_logger('Reaping connection %s', id(conn)) self._destroy_connection(conn) except IndexError: # NOTE(amakarov): This is an expected exception, so there's no # need to react. We have to handle the exception rather than # check the queue length, because an IndexError can result from # a race condition as well as from mere queue depletion. pass def _put(self, conn): self.queue.append(_PoolItem( ttl=time.time() + self._unused_timeout, connection=conn, )) self._acquired -= 1 class MemcacheClientPool(ConnectionPool): def __init__(self, urls, arguments, **kwargs): # super() cannot be used here because Queue in stdlib is an # old-style class ConnectionPool.__init__(self, **kwargs) self.urls = urls self._arguments = { 'dead_retry': arguments.get('dead_retry', 5 * 60), 'socket_timeout': arguments.get('socket_timeout', 3.0), 'server_max_value_length': arguments.get('server_max_value_length'), 'flush_on_reconnect': arguments.get( 'pool_flush_on_reconnect', False), } # NOTE(morganfainberg): The host objects expect an int for the # deaduntil value. Initialize this at 0 for each host with 0 indicating # the host is not dead.
self._hosts_deaduntil = [0] * len(urls) def _create_connection(self): return _MemcacheClient(self.urls, **self._arguments) def _destroy_connection(self, conn): conn.disconnect_all() def _get(self): # super() cannot be used here because Queue in stdlib is an # old-style class conn = ConnectionPool._get(self) try: # Propagate host state known to us to this client's list now = time.time() for deaduntil, host in zip(self._hosts_deaduntil, conn.servers): if deaduntil > now and host.deaduntil <= now: host.mark_dead('propagating death mark from the pool') host.deaduntil = deaduntil except Exception: # We need to be sure that connection doesn't leak from the pool. # This code runs before we enter context manager's try-finally # block, so we need to explicitly release it here. # super() cannot be used here because Queue in stdlib is an # old-style class ConnectionPool._put(self, conn) raise return conn def _put(self, conn): try: # If this client found that one of the hosts is dead, mark it as # such in our internal list now = time.time() for i, host in zip(itertools.count(), conn.servers): deaduntil = self._hosts_deaduntil[i] # Do nothing if we already know this host is dead if deaduntil <= now: if host.deaduntil > now: self._hosts_deaduntil[i] = host.deaduntil self._debug_logger( 'Marked host %s dead until %s', self.urls[i], host.deaduntil) else: self._hosts_deaduntil[i] = 0 finally: # super() cannot be used here because Queue in stdlib is an # old-style class ConnectionPool._put(self, conn) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0 oslo.cache-3.7.0/oslo_cache/_opts.py0000664000175000017500000003575600000000000017377 0ustar00zuulzuul00000000000000# Copyright 2012 OpenStack Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_config import cfg _DEFAULT_BACKEND = 'dogpile.cache.null' FILE_OPTIONS = { 'cache': [ cfg.StrOpt('config_prefix', default='cache.oslo', help='Prefix for building the configuration dictionary ' 'for the cache region. This should not need to be ' 'changed unless there is another dogpile.cache ' 'region with the same configuration name.'), cfg.IntOpt('expiration_time', default=600, help='Default TTL, in seconds, for any cached item in ' 'the dogpile.cache region. This applies to any ' 'cached method that doesn\'t have an explicit ' 'cache expiration time defined for it.'), # NOTE(morganfainberg): It is recommended that either Redis or # Memcached are used as the dogpile backend for real workloads. To # prevent issues with the memory cache ending up in "production" # unintentionally, we register a no-op as the default caching backend. 
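# For illustration only (not part of the option definitions below): with
# these options registered, a deployment that wants real caching would
# typically override the no-op default in its configuration file, e.g.:
#
#   [cache]
#   enabled = true
#   backend = dogpile.cache.memcached
#   memcache_servers = 192.0.2.10:11211
#
# The server address above is a placeholder; the option names and the
# accepted backend values are exactly those defined next.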
cfg.StrOpt('backend', default=_DEFAULT_BACKEND, choices=['oslo_cache.memcache_pool', 'oslo_cache.dict', 'oslo_cache.mongo', 'oslo_cache.etcd3gw', 'dogpile.cache.pymemcache', 'dogpile.cache.memcached', 'dogpile.cache.pylibmc', 'dogpile.cache.bmemcached', 'dogpile.cache.dbm', 'dogpile.cache.redis', 'dogpile.cache.redis_sentinel', 'dogpile.cache.memory', 'dogpile.cache.memory_pickle', 'dogpile.cache.null'], help='Cache backend module. For eventlet-based environments ' 'or environments with hundreds of threaded servers, Memcache ' 'with pooling (oslo_cache.memcache_pool) is recommended. ' 'For environments with fewer than 100 threaded servers, ' 'Memcached (dogpile.cache.memcached) or Redis ' '(dogpile.cache.redis) is recommended. Test environments ' 'with a single instance of the server can use the ' 'dogpile.cache.memory backend.'), cfg.MultiStrOpt('backend_argument', default=[], secret=True, help='Arguments supplied to the backend module. ' 'Specify this option once per argument to be ' 'passed to the dogpile.cache backend. Example ' 'format: "<argname>:<value>".'), cfg.ListOpt('proxies', default=[], help='Proxy classes to import that will affect the way ' 'the dogpile.cache backend functions. See the ' 'dogpile.cache documentation on ' 'changing-backend-behavior.'), cfg.BoolOpt('enabled', default=False, help='Global toggle for caching.'), cfg.BoolOpt('debug_cache_backend', default=False, help='Extra debugging from the cache backend (cache ' 'keys, get/set/delete/etc calls). This is only ' 'really useful if you need to see the specific ' 'cache-backend get/set/delete calls with the ' 'keys/values. Typically this should be left set ' 'to false.'), cfg.ListOpt('memcache_servers', default=['localhost:11211'], help='Memcache servers in the format of "host:port". ' 'This is used by backends dependent on Memcached. ' 'If ``dogpile.cache.memcached`` or ' '``oslo_cache.memcache_pool`` is used and a given ' 'host refers to an IPv6 address or a given domain ' 'resolves to an IPv6 address, then you should prefix ' 'the given address with ' 'the address family (``inet6``) ' '(e.g. ``inet6:[::1]:11211``, ' '``inet6:[fd12:3456:789a:1::1]:11211``, ' '``inet6:[controller-0.internalapi]:11211``). ' 'If the address family is not given then these ' 'backends will use the default ``inet`` address ' 'family, which corresponds to IPv4.'), cfg.IntOpt('memcache_dead_retry', default=5 * 60, help='Number of seconds a memcached server is considered' ' dead before it is tried again. (dogpile.cache.memcache and' ' oslo_cache.memcache_pool backends only).'), cfg.FloatOpt('memcache_socket_timeout', default=1.0, help='Timeout in seconds for every call to a server.' ' (dogpile.cache.memcache and oslo_cache.memcache_pool' ' backends only).'), cfg.IntOpt('memcache_pool_maxsize', default=10, help='Max total number of open connections to every' ' memcached server. (oslo_cache.memcache_pool backend' ' only).'), cfg.IntOpt('memcache_pool_unused_timeout', default=60, help='Number of seconds a connection to memcached is held' ' unused in the pool before it is closed.' ' (oslo_cache.memcache_pool backend only).'), cfg.IntOpt('memcache_pool_connection_get_timeout', default=10, help='Number of seconds that an operation will wait to get ' 'a memcache client connection.'), cfg.BoolOpt('memcache_pool_flush_on_reconnect', default=False, help='Global toggle if memcache will be flushed' ' on reconnect.'
' (oslo_cache.memcache_pool backend only).'), cfg.BoolOpt('memcache_sasl_enabled', default=False, help='Enable the SASL (Simple Authentication and Security ' 'Layer) protocol when set to true; otherwise it is ' 'disabled.'), cfg.StrOpt('memcache_username', help='The username to use when SASL is enabled for ' 'memcached.'), cfg.StrOpt('memcache_password', secret=True, help='The password to use when SASL is enabled for ' 'memcached.'), cfg.StrOpt('redis_server', default='localhost:6379', help='Redis server in the format of "host:port".'), cfg.StrOpt('redis_username', help='The username to connect to Redis.'), cfg.StrOpt('redis_password', secret=True, help='The password to connect to Redis.'), cfg.ListOpt('redis_sentinels', default=['localhost:26379'], help='Redis sentinel servers in the format of ' '"host:port".'), cfg.FloatOpt('redis_socket_timeout', default=1.0, help='Timeout in seconds for every call to a server.' ' (dogpile.cache.redis and dogpile.cache.redis_sentinel ' 'backends only).'), cfg.StrOpt('redis_sentinel_service_name', default='mymaster', help='Service name of the redis sentinel cluster.'), cfg.BoolOpt('tls_enabled', default=False, help='Global toggle for TLS usage when communicating with' ' the caching servers. Currently supported by ' '``dogpile.cache.bmemcache``, ' '``dogpile.cache.pymemcache``, ' '``oslo_cache.memcache_pool``, ' '``dogpile.cache.redis`` and ' '``dogpile.cache.redis_sentinel``.'), cfg.StrOpt('tls_cafile', default=None, help='Path to a file of concatenated CA certificates in PEM' ' format necessary to establish the caching servers\'' ' authenticity. If tls_enabled is False, this option is' ' ignored.'), cfg.StrOpt('tls_certfile', default=None, help='Path to a single file in PEM format containing the' ' client\'s certificate as well as any number of CA' ' certificates needed to establish the certificate\'s' ' authenticity. This file is only required when client side' ' authentication is necessary. If tls_enabled is False,' ' this option is ignored.'), cfg.StrOpt('tls_keyfile', default=None, help='Path to a single file containing the client\'s' ' private key. Otherwise the private key will be taken' ' from the file specified in tls_certfile. If tls_enabled' ' is False, this option is ignored.'), cfg.StrOpt('tls_allowed_ciphers', default=None, help='Set the available ciphers for sockets created with' ' the TLS context. It should be a string in the OpenSSL' ' cipher list format. If not specified, all OpenSSL enabled' ' ciphers will be available. Currently supported by ' '``dogpile.cache.bmemcache``, ' '``dogpile.cache.pymemcache`` and ' '``oslo_cache.memcache_pool``.'), cfg.BoolOpt( 'enable_socket_keepalive', default=False, help="Global toggle for the socket keepalive of " "dogpile's pymemcache backend."), cfg.IntOpt( 'socket_keepalive_idle', default=1, min=0, help='The time (in seconds) the connection needs to ' 'remain idle before TCP starts sending keepalive probes. ' 'Should be a positive integer greater than zero.'), cfg.IntOpt( 'socket_keepalive_interval', default=1, min=0, help='The time (in seconds) between individual keepalive ' 'probes. Should be a positive integer greater ' 'than zero.'), cfg.IntOpt( 'socket_keepalive_count', default=1, min=0, help='The maximum number of keepalive probes TCP should ' 'send before dropping the connection. Should be a ' 'positive integer greater than zero.'), cfg.BoolOpt( 'enable_retry_client', default=False, help='Enable retry client mechanisms to handle failure. ' 'Those mechanisms can be used to wrap all kinds of pymemcache ' 'clients.
The wrapper allows you to define how many attempts ' 'to make and how long to wait between attempts.'), cfg.IntOpt( 'retry_attempts', min=1, default=2, help='Number of times to attempt an action before failing.'), cfg.FloatOpt( 'retry_delay', default=0, help='Number of seconds to sleep between each attempt.'), cfg.IntOpt( 'hashclient_retry_attempts', min=1, default=2, help='Number of times a client should be tried ' 'before it is marked dead and removed from the pool in ' 'the HashClient\'s internal mechanisms.'), cfg.FloatOpt( 'hashclient_retry_delay', default=1, help='Time in seconds that should pass between ' 'retry attempts in the HashClient\'s internal mechanisms.'), cfg.FloatOpt( 'dead_timeout', default=60, help='Time in seconds before attempting to add a node ' 'back in the pool in the HashClient\'s internal mechanisms.'), cfg.BoolOpt('enforce_fips_mode', default=False, help='Global toggle for enforcing the OpenSSL FIPS mode. ' 'This feature requires Python support. ' 'This is available in Python 3.9 in all ' 'environments and may have been backported to older ' 'Python versions on select environments. If the Python ' 'executable used does not support OpenSSL FIPS mode, ' 'an exception will be raised. Currently supported by ' '``dogpile.cache.bmemcache``, ' '``dogpile.cache.pymemcache`` and ' '``oslo_cache.memcache_pool``.'), ], } def configure(conf): for section in FILE_OPTIONS: for option in FILE_OPTIONS[section]: conf.register_opt(option, group=section) def set_defaults(conf, memcache_pool_flush_on_reconnect=False): """Set defaults for configuration variables. Overrides default options values. :param conf: Configuration object, managed by the caller. :type conf: oslo.config.cfg.ConfigOpts :param memcache_pool_flush_on_reconnect: The default state for the ``flush_on_reconnect`` flag. Deactivated by default. :type memcache_pool_flush_on_reconnect: bool """ conf.register_opts(FILE_OPTIONS['cache'], group='cache') cfg.set_defaults( FILE_OPTIONS['cache'], memcache_pool_flush_on_reconnect=memcache_pool_flush_on_reconnect) def list_opts(): """Return a list of oslo_config options. The returned list includes all oslo_config options which are registered as the "FILE_OPTIONS". Each object in the list is a two element tuple. The first element of each tuple is the name of the group under which the list of options in the second element will be registered. A group name of None corresponds to the [DEFAULT] group in config files. This function is also discoverable via the 'oslo.cache' entry point under the 'oslo.config.opts' namespace. The purpose of this is to allow tools like the Oslo sample config file generator to discover the options exposed to users by this library.
:returns: a list of (group_name, opts) tuples """ return list(FILE_OPTIONS.items()) ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1708932956.1663947 oslo.cache-3.7.0/oslo_cache/backends/0000775000175000017500000000000000000000000017433 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0 oslo.cache-3.7.0/oslo_cache/backends/__init__.py0000664000175000017500000000000000000000000021532 0ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0 oslo.cache-3.7.0/oslo_cache/backends/dictionary.py0000664000175000017500000000633200000000000022156 0ustar00zuulzuul00000000000000# Copyright 2015 Mirantis Inc # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """dogpile.cache backend that uses dictionary for storage""" from dogpile.cache import api from oslo_cache import core from oslo_utils import timeutils __all__ = [ 'DictCacheBackend' ] _NO_VALUE = core.NO_VALUE class DictCacheBackend(api.CacheBackend): """A DictCacheBackend based on dictionary. Arguments accepted in the arguments dictionary: :param expiration_time: interval in seconds to indicate maximum time-to-live value for each key in DictCacheBackend. Default expiration_time value is 0, that means that all keys have infinite time-to-live value. :type expiration_time: real """ def __init__(self, arguments): self.expiration_time = arguments.get('expiration_time', 0) self.cache = {} def get(self, key): """Retrieves the value for a key. :param key: dictionary key :returns: value for a key or :data:`oslo_cache.core.NO_VALUE` for nonexistent or expired keys. """ (value, timeout) = self.cache.get(key, (_NO_VALUE, 0)) if self.expiration_time > 0 and timeutils.utcnow_ts() >= timeout: self.cache.pop(key, None) return _NO_VALUE return value def get_multi(self, keys): """Retrieves the value for a list of keys.""" return [self.get(key) for key in keys] def set(self, key, value): """Sets the value for a key. Expunges expired keys during each set. :param key: dictionary key :param value: value associated with the key """ self.set_multi({key: value}) def set_multi(self, mapping): """Set multiple values in the cache. Expunges expired keys during each set. :param mapping: dictionary with key/value pairs """ self._clear() timeout = 0 if self.expiration_time > 0: timeout = timeutils.utcnow_ts() + self.expiration_time for key, value in mapping.items(): self.cache[key] = (value, timeout) def delete(self, key): """Deletes the value associated with the key if it exists. :param key: dictionary key """ self.cache.pop(key, None) def delete_multi(self, keys): """Deletes the value associated with each key in list if it exists. 
:param keys: list of dictionary keys """ for key in keys: self.cache.pop(key, None) def _clear(self): """Expunges expired keys.""" now = timeutils.utcnow_ts() for k in list(self.cache): (_value, timeout) = self.cache[k] if timeout > 0 and now >= timeout: del self.cache[k] ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0 oslo.cache-3.7.0/oslo_cache/backends/etcd3gw.py0000664000175000017500000000464600000000000021357 0ustar00zuulzuul00000000000000# Copyright 2015 Mirantis Inc # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """dogpile.cache backend that uses etcd 3.x for storage""" from dogpile.cache import api from oslo_cache import core from oslo_serialization import jsonutils __all__ = [ 'Etcd3gwCacheBackend' ] _NO_VALUE = core.NO_VALUE class Etcd3gwCacheBackend(api.CacheBackend): #: Default socket/lock/member/leader timeout used when none is provided. DEFAULT_TIMEOUT = 30 #: Default hostname used when none is provided. DEFAULT_HOST = "localhost" #: Default port used if none provided (4001 or 2379 are the common ones). DEFAULT_PORT = 2379 def __init__(self, arguments): self.host = arguments.get('host', self.DEFAULT_HOST) self.port = arguments.get('port', self.DEFAULT_PORT) self.timeout = int(arguments.get('timeout', self.DEFAULT_TIMEOUT)) # module etcd3gw is only required when etcd3gw backend is used import etcd3gw self._client = etcd3gw.client(host=self.host, port=self.port, timeout=self.timeout) def get(self, key): values = self._client.get(key, False) if not values: return core.NO_VALUE value, metadata = jsonutils.loads(values[0]) return api.CachedValue(value, metadata) def get_multi(self, keys): """Retrieves the value for a list of keys.""" return [self.get(key) for key in keys] def set(self, key, value): self.set_multi({key: value}) def set_multi(self, mapping): lease = None if self.timeout: lease = self._client.lease(ttl=self.timeout) for key, value in mapping.items(): self._client.put(key, jsonutils.dumps(value), lease) def delete(self, key): self._client.delete(key) def delete_multi(self, keys): for key in keys: self._client.delete(key) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0 oslo.cache-3.7.0/oslo_cache/backends/memcache_pool.py0000664000175000017500000000731100000000000022602 0ustar00zuulzuul00000000000000# Copyright 2014 Mirantis Inc # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
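# A minimal usage sketch for the dictionary backend defined above (a hedged
# illustration, not part of the original module): 'oslo_cache.dict' is the
# dogpile entry point this package registers in entry_points.txt, and the
# backend-level 'expiration_time' argument is the one documented on
# DictCacheBackend.
from dogpile.cache import make_region

dict_region = make_region().configure(
    'oslo_cache.dict',
    expiration_time=300,                 # dogpile-level TTL (seconds)
    arguments={'expiration_time': 300},  # DictCacheBackend per-key TTL
)
dict_region.set('spam', 'eggs')
assert dict_region.get('spam') == 'eggs'  # returns NO_VALUE once expired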
"""dogpile.cache backend that uses Memcached connection pool""" import functools from dogpile.cache.backends import memcached as memcached_backend try: from oslo_cache import _bmemcache_pool except ImportError as e: if str(e) == "No module named 'bmemcached'": _bmemcache_pool = None else: raise from oslo_cache import _memcache_pool from oslo_cache import exception # Helper to ease backend refactoring class ClientProxy(object): def __init__(self, client_pool): self.client_pool = client_pool def _run_method(self, __name, *args, **kwargs): with self.client_pool.acquire() as client: return getattr(client, __name)(*args, **kwargs) def __getattr__(self, name): return functools.partial(self._run_method, name) class PooledMemcachedBackend(memcached_backend.MemcachedBackend): """Memcached backend that does connection pooling. This memcached backend only allows for reuse of a client object, prevents too many client object from being instantiated, and maintains proper tracking of dead servers so as to limit delays when a server (or all servers) become unavailable. This backend doesn't allow to load balance things between servers. Memcached isn't HA. Values aren't automatically replicated between servers unless the client went out and wrote the value multiple time. The memcache server to use is determined by `python-memcached` itself by picking the host to use (from the given server list) based on a key hash. """ # Composed from GenericMemcachedBackend's and MemcacheArgs's __init__ def __init__(self, arguments): super(PooledMemcachedBackend, self).__init__(arguments) if arguments.get('sasl_enabled', False): if (arguments.get('username') is None or arguments.get('password') is None): raise exception.ConfigurationError( 'username and password should be configured to use SASL ' 'authentication.') if not _bmemcache_pool: raise ImportError("python-binary-memcached package is missing") self.client_pool = _bmemcache_pool.BMemcacheClientPool( self.url, arguments, maxsize=arguments.get('pool_maxsize', 10), unused_timeout=arguments.get('pool_unused_timeout', 60), conn_get_timeout=arguments.get('pool_connection_get_timeout', 10), ) else: self.client_pool = _memcache_pool.MemcacheClientPool( self.url, arguments, maxsize=arguments.get('pool_maxsize', 10), unused_timeout=arguments.get('pool_unused_timeout', 60), conn_get_timeout=arguments.get('pool_connection_get_timeout', 10), ) # Since all methods in backend just call one of methods of client, this # lets us avoid need to hack it too much @property def client(self): return ClientProxy(self.client_pool) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0 oslo.cache-3.7.0/oslo_cache/backends/mongo.py0000664000175000017500000005716300000000000021140 0ustar00zuulzuul00000000000000# Copyright 2014 Hewlett-Packard Development Company, L.P. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import abc import datetime from dogpile.cache import api from dogpile import util as dp_util from oslo_cache import core from oslo_log import log from oslo_utils import importutils from oslo_utils import timeutils from oslo_cache._i18n import _ from oslo_cache import exception __all__ = [ 'MongoCacheBackend' ] _NO_VALUE = core.NO_VALUE LOG = log.getLogger(__name__) class MongoCacheBackend(api.CacheBackend): """A MongoDB based caching backend implementing dogpile backend APIs. Arguments accepted in the arguments dictionary: :param db_hosts: string (required), hostname or IP address of the MongoDB server instance. This can be a single MongoDB connection URI, or a list of MongoDB connection URIs. :param db_name: string (required), the name of the database to be used. :param cache_collection: string (required), the name of collection to store cached data. *Note:* Different collection name can be provided if there is need to create separate container (i.e. collection) for cache data. So region configuration is done per collection. Following are optional parameters for MongoDB backend configuration, :param username: string, the name of the user to authenticate. :param password: string, the password of the user to authenticate. :param max_pool_size: integer, the maximum number of connections that the pool will open simultaneously. By default the pool size is 10. :param w: integer, write acknowledgement for MongoDB client If not provided, then no default is set on MongoDB and then write acknowledgement behavior occurs as per MongoDB default. This parameter name is same as what is used in MongoDB docs. This value is specified at collection level so its applicable to `cache_collection` db write operations. If this is a replica set, write operations will block until they have been replicated to the specified number or tagged set of servers. Setting w=0 disables write acknowledgement and all other write concern options. :param read_preference: string, the read preference mode for MongoDB client Expected value is ``primary``, ``primaryPreferred``, ``secondary``, ``secondaryPreferred``, or ``nearest``. This read_preference is specified at collection level so its applicable to `cache_collection` db read operations. :param use_replica: boolean, flag to indicate if replica client to be used. Default is `False`. `replicaset_name` value is required if `True`. :param replicaset_name: string, name of replica set. Becomes required if `use_replica` is `True` :param son_manipulator: string, name of class with module name which implements MongoDB SONManipulator. Default manipulator used is :class:`.BaseTransform`. This manipulator is added per database. In multiple cache configurations, the manipulator name should be same if same database name ``db_name`` is used in those configurations. SONManipulator is used to manipulate custom data types as they are saved or retrieved from MongoDB. Custom impl is only needed if cached data is custom class and needs transformations when saving or reading from db. If dogpile cached value contains built-in data types, then BaseTransform class is sufficient as it already handles dogpile CachedValue class transformation. :param mongo_ttl_seconds: integer, interval in seconds to indicate maximum time-to-live value. If value is greater than 0, then its assumed that cache_collection needs to be TTL type (has index at 'doc_date' field). By default, the value is -1 and its disabled. Reference: .. 
NOTE:: This parameter is different from Dogpile own expiration_time, which is the number of seconds after which Dogpile will consider the value to be expired. When Dogpile considers a value to be expired, it continues to use the value until generation of a new value is complete, when using CacheRegion.get_or_create(). Therefore, if you are setting `mongo_ttl_seconds`, you will want to make sure it is greater than expiration_time by at least enough seconds for new values to be generated, else the value would not be available during a regeneration, forcing all threads to wait for a regeneration each time a value expires. :param ssl: boolean, If True, create the connection to the server using SSL. Default is `False`. Client SSL connection parameters depends on server side SSL setup. For further reference on SSL configuration: :param ssl_keyfile: string, the private keyfile used to identify the local connection against mongod. If included with the certfile then only the `ssl_certfile` is needed. Used only when `ssl` is `True`. :param ssl_certfile: string, the certificate file used to identify the local connection against mongod. Used only when `ssl` is `True`. :param ssl_ca_certs: string, the ca_certs file contains a set of concatenated 'certification authority' certificates, which are used to validate certificates passed from the other end of the connection. Used only when `ssl` is `True`. :param ssl_cert_reqs: string, the parameter cert_reqs specifies whether a certificate is required from the other side of the connection, and whether it will be validated if provided. It must be one of the three values ``ssl.CERT_NONE`` (certificates ignored), ``ssl.CERT_OPTIONAL`` (not required, but validated if provided), or ``ssl.CERT_REQUIRED`` (required and validated). If the value of this parameter is not ``ssl.CERT_NONE``, then the ssl_ca_certs parameter must point to a file of CA certificates. Used only when `ssl` is `True`. Rest of arguments are passed to mongo calls for read, write and remove. So related options can be specified to pass to these operations. Further details of various supported arguments can be referred from """ def __init__(self, arguments): self.api = MongoApi(arguments) @dp_util.memoized_property def client(self): """Initializes MongoDB connection and collection defaults. This initialization is done only once and performed as part of lazy inclusion of MongoDB dependency i.e. add imports only if related backend is used. :return: :class:`.MongoApi` instance """ self.api.get_cache_collection() return self.api def get(self, key): """Retrieves the value for a key. :param key: key to be retrieved. :returns: value for a key or :data:`oslo_cache.core.NO_VALUE` for nonexistent or expired keys. """ value = self.client.get(key) if value is None: return _NO_VALUE else: return value def get_multi(self, keys): """Return multiple values from the cache, based on the given keys. :param keys: sequence of keys to be retrieved. :returns: returns values (or :data:`oslo_cache.core.NO_VALUE`) as a list matching the keys given. """ values = self.client.get_multi(keys) return [ _NO_VALUE if key not in values else values[key] for key in keys ] def set(self, key, value): self.client.set(key, value) def set_multi(self, mapping): self.client.set_multi(mapping) def delete(self, key): self.client.delete(key) def delete_multi(self, keys): self.client.delete_multi(keys) class MongoApi(object): """Class handling MongoDB specific functionality. 
This class uses PyMongo APIs internally to create database connection with configured pool size, ensures unique index on key, does database authentication and ensure TTL collection index if configured so. This class also serves as handle to cache collection for dogpile cache APIs. In a single deployment, multiple cache configuration can be defined. In that case of multiple cache collections usage, db client connection pool is shared when cache collections are within same database. """ # class level attributes for re-use of db client connection and collection _DB = {} # dict of db_name: db connection reference _MONGO_COLLS = {} # dict of cache_collection : db collection reference def __init__(self, arguments): self._init_args(arguments) self._data_manipulator = None def _init_args(self, arguments): """Helper logic for collecting and parsing MongoDB specific arguments. The arguments passed in are separated out in connection specific setting and rest of arguments are passed to create/update/delete db operations. """ self.conn_kwargs = {} # connection specific arguments self.hosts = arguments.pop('db_hosts', None) if self.hosts is None: msg = _('db_hosts value is required') raise exception.ConfigurationError(msg) self.db_name = arguments.pop('db_name', None) if self.db_name is None: msg = _('database db_name is required') raise exception.ConfigurationError(msg) self.cache_collection = arguments.pop('cache_collection', None) if self.cache_collection is None: msg = _('cache_collection name is required') raise exception.ConfigurationError(msg) self.username = arguments.pop('username', None) self.password = arguments.pop('password', None) self.max_pool_size = arguments.pop('max_pool_size', 10) self.w = arguments.pop('w', -1) try: self.w = int(self.w) except ValueError: msg = _('integer value expected for w (write concern attribute)') raise exception.ConfigurationError(msg) self.read_preference = arguments.pop('read_preference', None) self.use_replica = arguments.pop('use_replica', False) if self.use_replica: if arguments.get('replicaset_name') is None: msg = _('replicaset_name required when use_replica is True') raise exception.ConfigurationError(msg) self.replicaset_name = arguments.get('replicaset_name') self.son_manipulator = arguments.pop('son_manipulator', None) # set if mongo collection needs to be TTL type. # This needs to be max ttl for any cache entry. # By default, -1 means don't use TTL collection. 
# With ttl set, it creates related index and have doc_date field with # needed expiration interval self.ttl_seconds = arguments.pop('mongo_ttl_seconds', -1) try: self.ttl_seconds = int(self.ttl_seconds) except ValueError: msg = _('integer value expected for mongo_ttl_seconds') raise exception.ConfigurationError(msg) self.conn_kwargs['ssl'] = arguments.pop('ssl', False) if self.conn_kwargs['ssl']: ssl_keyfile = arguments.pop('ssl_keyfile', None) ssl_certfile = arguments.pop('ssl_certfile', None) ssl_ca_certs = arguments.pop('ssl_ca_certs', None) ssl_cert_reqs = arguments.pop('ssl_cert_reqs', None) if ssl_keyfile: self.conn_kwargs['ssl_keyfile'] = ssl_keyfile if ssl_certfile: self.conn_kwargs['ssl_certfile'] = ssl_certfile if ssl_ca_certs: self.conn_kwargs['ssl_ca_certs'] = ssl_ca_certs if ssl_cert_reqs: self.conn_kwargs['ssl_cert_reqs'] = ( self._ssl_cert_req_type(ssl_cert_reqs)) # rest of arguments are passed to mongo crud calls self.meth_kwargs = arguments def _ssl_cert_req_type(self, req_type): try: import ssl except ImportError: raise exception.ConfigurationError(_('no ssl support available')) req_type = req_type.upper() try: return { 'NONE': ssl.CERT_NONE, 'OPTIONAL': ssl.CERT_OPTIONAL, 'REQUIRED': ssl.CERT_REQUIRED }[req_type] except KeyError: msg = _('Invalid ssl_cert_reqs value of %s, must be one of ' '"NONE", "OPTIONAL", "REQUIRED"') % req_type raise exception.ConfigurationError(msg) def _get_db(self): # defer imports until backend is used global pymongo import pymongo if self.use_replica: connection = pymongo.MongoReplicaSetClient( host=self.hosts, replicaSet=self.replicaset_name, max_pool_size=self.max_pool_size, **self.conn_kwargs) else: # used for standalone node or mongos in sharded setup connection = pymongo.MongoClient( host=self.hosts, max_pool_size=self.max_pool_size, **self.conn_kwargs) database = getattr(connection, self.db_name) self._assign_data_mainpulator() database.add_son_manipulator(self._data_manipulator) if self.username and self.password: database.authenticate(self.username, self.password) return database def _assign_data_mainpulator(self): if self._data_manipulator is None: if self.son_manipulator: self._data_manipulator = importutils.import_object( self.son_manipulator) else: self._data_manipulator = BaseTransform() def _get_doc_date(self): if self.ttl_seconds > 0: expire_delta = datetime.timedelta(seconds=self.ttl_seconds) doc_date = timeutils.utcnow() + expire_delta else: doc_date = timeutils.utcnow() return doc_date def get_cache_collection(self): if self.cache_collection not in self._MONGO_COLLS: global pymongo import pymongo # re-use db client connection if already defined as part of # earlier dogpile cache configuration if self.db_name not in self._DB: self._DB[self.db_name] = self._get_db() coll = getattr(self._DB[self.db_name], self.cache_collection) self._assign_data_mainpulator() if self.read_preference: # pymongo 3.0 renamed mongos_enum to read_pref_mode_from_name f = getattr(pymongo.read_preferences, 'read_pref_mode_from_name', None) if not f: f = pymongo.read_preferences.mongos_enum self.read_preference = f(self.read_preference) coll.read_preference = self.read_preference if self.w > -1: coll.write_concern['w'] = self.w if self.ttl_seconds > 0: kwargs = {'expireAfterSeconds': self.ttl_seconds} coll.ensure_index('doc_date', cache_for=5, **kwargs) else: self._validate_ttl_index(coll, self.cache_collection, self.ttl_seconds) self._MONGO_COLLS[self.cache_collection] = coll return self._MONGO_COLLS[self.cache_collection] def _get_cache_entry(self, 
key, value, meta, doc_date): """MongoDB cache data representation. Storing cache key as ``_id`` field as MongoDB by default creates unique index on this field. So no need to create separate field and index for storing cache key. Cache data has additional ``doc_date`` field for MongoDB TTL collection support. """ return dict(_id=key, value=value, meta=meta, doc_date=doc_date) def _validate_ttl_index(self, collection, coll_name, ttl_seconds): """Checks if existing TTL index is removed on a collection. This logs warning when existing collection has TTL index defined and new cache configuration tries to disable index with ``mongo_ttl_seconds < 0``. In that case, existing index needs to be addressed first to make new configuration effective. Refer to MongoDB documentation around TTL index for further details. """ indexes = collection.index_information() for indx_name, index_data in indexes.items(): if all(k in index_data for k in ('key', 'expireAfterSeconds')): existing_value = index_data['expireAfterSeconds'] fld_present = 'doc_date' in index_data['key'][0] if fld_present and existing_value > -1 and ttl_seconds < 1: msg = ('TTL index already exists on db collection ' '<%(c_name)s>, remove index <%(indx_name)s> ' 'first to make updated mongo_ttl_seconds value ' 'to be effective') LOG.warning(msg, {'c_name': coll_name, 'indx_name': indx_name}) def get(self, key): criteria = {'_id': key} result = self.get_cache_collection().find_one(spec_or_id=criteria, **self.meth_kwargs) if result: return result['value'] else: return None def get_multi(self, keys): db_results = self._get_results_as_dict(keys) return {doc['_id']: doc['value'] for doc in db_results.values()} def _get_results_as_dict(self, keys): criteria = {'_id': {'$in': keys}} db_results = self.get_cache_collection().find(spec=criteria, **self.meth_kwargs) return {doc['_id']: doc for doc in db_results} def set(self, key, value): doc_date = self._get_doc_date() ref = self._get_cache_entry(key, value.payload, value.metadata, doc_date) spec = {'_id': key} # find and modify does not have manipulator support # so need to do conversion as part of input document ref = self._data_manipulator.transform_incoming(ref, self) self.get_cache_collection().find_and_modify(spec, ref, upsert=True, **self.meth_kwargs) def set_multi(self, mapping): """Insert multiple documents specified as key, value pairs. In this case, multiple documents can be added via insert provided they do not exist. Update of multiple existing documents is done one by one """ doc_date = self._get_doc_date() insert_refs = [] update_refs = [] existing_docs = self._get_results_as_dict(list(mapping.keys())) for key, value in mapping.items(): ref = self._get_cache_entry(key, value.payload, value.metadata, doc_date) if key in existing_docs: ref['_id'] = existing_docs[key]['_id'] update_refs.append(ref) else: insert_refs.append(ref) if insert_refs: self.get_cache_collection().insert(insert_refs, manipulate=True, **self.meth_kwargs) for upd_doc in update_refs: self.get_cache_collection().save(upd_doc, manipulate=True, **self.meth_kwargs) def delete(self, key): criteria = {'_id': key} self.get_cache_collection().remove(spec_or_id=criteria, **self.meth_kwargs) def delete_multi(self, keys): criteria = {'_id': {'$in': keys}} self.get_cache_collection().remove(spec_or_id=criteria, **self.meth_kwargs) class AbstractManipulator(object, metaclass=abc.ABCMeta): """Abstract class with methods which need to be implemented for custom manipulation. 
Adding this as a base class for :class:`.BaseTransform` instead of adding import dependency of pymongo specific class i.e. `pymongo.son_manipulator.SONManipulator` and using that as base class. This is done to avoid pymongo dependency if MongoDB backend is not used. """ @abc.abstractmethod def transform_incoming(self, son, collection): """Used while saving data to MongoDB. :param son: the SON object to be inserted into the database :param collection: the collection the object is being inserted into :returns: transformed SON object """ raise NotImplementedError() # pragma: no cover @abc.abstractmethod def transform_outgoing(self, son, collection): """Used while reading data from MongoDB. :param son: the SON object being retrieved from the database :param collection: the collection this object was stored in :returns: transformed SON object """ raise NotImplementedError() # pragma: no cover def will_copy(self): """Will this SON manipulator make a copy of the incoming document? Derived classes that do need to make a copy should override this method, returning `True` instead of `False`. :returns: boolean """ return False class BaseTransform(AbstractManipulator): """Base transformation class to store and read dogpile cached data from MongoDB. This is needed as dogpile internally stores data as a custom class i.e. dogpile.cache.api.CachedValue Note: Custom manipulator needs to always override ``transform_incoming`` and ``transform_outgoing`` methods. MongoDB manipulator logic specifically checks that overridden method in instance and its super are different. """ def transform_incoming(self, son, collection): """Used while saving data to MongoDB.""" for (key, value) in list(son.items()): if isinstance(value, api.CachedValue): son[key] = value.payload # key is 'value' field here son['meta'] = value.metadata elif isinstance(value, dict): # Make sure we recurse into sub-docs son[key] = self.transform_incoming(value, collection) return son def transform_outgoing(self, son, collection): """Used while reading data from MongoDB.""" metadata = None # make sure its top level dictionary with all expected fields names # present if isinstance(son, dict) and all(k in son for k in ('_id', 'value', 'meta', 'doc_date')): payload = son.pop('value', None) metadata = son.pop('meta', None) for (key, value) in list(son.items()): if isinstance(value, dict): son[key] = self.transform_outgoing(value, collection) if metadata is not None: son['value'] = api.CachedValue(payload, metadata) return son ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0 oslo.cache-3.7.0/oslo_cache/core.py0000664000175000017500000006056700000000000017201 0ustar00zuulzuul00000000000000# Copyright 2013 Metacloud # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Caching Layer Implementation. To use this library: You must call :func:`configure`. 
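A minimal sketch of the workflow described below (the config object, the group name, and the decorated function are illustrative)::

    from oslo_config import cfg
    from oslo_cache import core

    CONF = cfg.CONF
    core.configure(CONF)
    region = core.create_region()
    core.configure_cache_region(CONF, region)

    MEMOIZE = core.get_memoization_decorator(CONF, region, group='mygroup')

    @MEMOIZE
    def expensive_lookup(argument):
        ...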
Inside your application code, decorate the methods that you want the results to be cached with a memoization decorator created with :func:`get_memoization_decorator`. This function takes a group name from the config. Register [`group`] ``caching`` and [`group`] ``cache_time`` options for the groups that your decorators use so that caching can be configured. This library's configuration options must be registered in your application's :class:`oslo_config.cfg.ConfigOpts` instance. Do this by passing the ConfigOpts instance to :func:`configure`. The library has special public value for nonexistent or expired keys called :data:`NO_VALUE`. To use this value you should import it from oslo_cache.core:: from oslo_cache import core NO_VALUE = core.NO_VALUE """ import re import ssl import urllib.parse import dogpile.cache from dogpile.cache import api from dogpile.cache import proxy from dogpile.cache import util from oslo_log import log from oslo_utils import importutils from oslo_cache._i18n import _ from oslo_cache import _opts from oslo_cache import exception __all__ = [ 'configure', 'configure_cache_region', 'create_region', 'get_memoization_decorator', 'NO_VALUE', ] NO_VALUE = api.NO_VALUE """Value returned for nonexistent or expired keys.""" _LOG = log.getLogger(__name__) class _DebugProxy(proxy.ProxyBackend): """Extra Logging ProxyBackend.""" # NOTE(morganfainberg): Pass all key/values through repr to ensure we have # a clean description of the information. Without use of repr, it might # be possible to run into encode/decode error(s). For logging/debugging # purposes encode/decode is irrelevant and we should be looking at the # data exactly as it stands. def get(self, key): value = self.proxied.get(key) _LOG.debug('CACHE_GET: Key: "%(key)r" Value: "%(value)r"', {'key': key, 'value': value}) return value def get_multi(self, keys): values = self.proxied.get_multi(keys) _LOG.debug('CACHE_GET_MULTI: "%(keys)r" Values: "%(values)r"', {'keys': keys, 'values': values}) return values def set(self, key, value): _LOG.debug('CACHE_SET: Key: "%(key)r" Value: "%(value)r"', {'key': key, 'value': value}) return self.proxied.set(key, value) def set_multi(self, keys): _LOG.debug('CACHE_SET_MULTI: "%r"', keys) self.proxied.set_multi(keys) def delete(self, key): self.proxied.delete(key) _LOG.debug('CACHE_DELETE: "%r"', key) def delete_multi(self, keys): _LOG.debug('CACHE_DELETE_MULTI: "%r"', keys) self.proxied.delete_multi(keys) def _parse_sentinel(sentinel): # IPv6 (eg. [::1]:6379 ) match = re.search(r'^\[(\S+)\]:(\d+)$', sentinel) if match: return (match[1], int(match[2])) # IPv4 or hostname (eg. 127.0.0.1:6379 or localhost:6379) match = re.search(r'^(\S+):(\d+)$', sentinel) if match: return (match[1], int(match[2])) raise exception.ConfigurationError('Malformed sentinel server format') def _build_cache_config(conf): """Build the cache region dictionary configuration. :returns: dict """ prefix = conf.cache.config_prefix conf_dict = {} conf_dict['%s.backend' % prefix] = _opts._DEFAULT_BACKEND if conf.cache.enabled is True: conf_dict['%s.backend' % prefix] = conf.cache.backend conf_dict['%s.expiration_time' % prefix] = conf.cache.expiration_time for argument in conf.cache.backend_argument: try: (argname, argvalue) = argument.split(':', 1) except ValueError: msg = ('Unable to build cache config-key. Expected format ' '":". 
Skipping unknown format: %s') _LOG.error(msg, argument) continue arg_key = '.'.join([prefix, 'arguments', argname]) # NOTE(morgan): The handling of the URL data in memcache is bad and # only takes cases where the values are a list. This explicitly # checks for the base dogpile.cache.memcached backend and does the # split if needed. Other backends such as redis get the same # previous behavior. Overall the fact that the backends opaquely # take data and do not handle processing/validation as expected # directly makes for odd behaviors when wrapping dogpile.cache in # a library like oslo.cache if (conf.cache.backend in ('dogpile.cache.memcached', 'oslo_cache.memcache_pool') and argname == 'url'): argvalue = argvalue.split(',') conf_dict[arg_key] = argvalue _LOG.debug('Oslo Cache Config: %s', conf_dict) if conf.cache.backend == 'dogpile.cache.redis': if conf.cache.redis_password is None: netloc = conf.cache.redis_server else: if conf.cache.redis_username: netloc = '%s:%s@%s' % (conf.cache.redis_username, conf.cache.redis_password, conf.cache.redis_server) else: netloc = ':%s@%s' % (conf.cache.redis_password, conf.cache.redis_server) parts = urllib.parse.ParseResult( scheme=('rediss' if conf.cache.tls_enabled else 'redis'), netloc=netloc, path='', params='', query='', fragment='') conf_dict.setdefault( '%s.arguments.url' % prefix, urllib.parse.urlunparse(parts) ) for arg in ('socket_timeout',): value = getattr(conf.cache, 'redis_' + arg) conf_dict['%s.arguments.%s' % (prefix, arg)] = value elif conf.cache.backend == 'dogpile.cache.redis_sentinel': for arg in ('password', 'socket_timeout'): value = getattr(conf.cache, 'redis_' + arg) conf_dict['%s.arguments.%s' % (prefix, arg)] = value if conf.cache.redis_username: # TODO(tkajinam): Update dogpile.cache to add username argument, # similarly to password. conf_dict['%s.arguments.connection_kwargs' % prefix] = \ {'username': conf.cache.redis_username} conf_dict['%s.arguments.sentinel_kwargs' % prefix] = \ {'username': conf.cache.redis_username} conf_dict['%s.arguments.service_name' % prefix] = \ conf.cache.redis_sentinel_service_name if conf.cache.redis_sentinels: conf_dict['%s.arguments.sentinels' % prefix] = [ _parse_sentinel(s) for s in conf.cache.redis_sentinels] else: # NOTE(yorik-sar): these arguments will be used for memcache-related # backends. Use setdefault for url to support old-style setting through # backend_argument=url:127.0.0.1:11211 # # NOTE(morgan): If requested by config, 'flush_on_reconnect' will be # set for pooled connections. This can ensure that stale data is never # consumed from a server that pops in/out due to a network partition # or disconnect. # # See the help from python-memcached: # # param flush_on_reconnect: optional flag which prevents a # scenario that can cause stale data to be read: If there's more # than one memcached server and the connection to one is # interrupted, keys that mapped to that server will get # reassigned to another. If the first server comes back, those # keys will map to it again. If it still has its data, get()s # can read stale data that was overwritten on another # server. This flag is off by default for backwards # compatibility. 
        #
        # The normal non-pooled clients connect explicitly on each use and
        # do not need the explicit flush_on_reconnect
        conf_dict.setdefault('%s.arguments.url' % prefix,
                             conf.cache.memcache_servers)

        for arg in ('dead_retry', 'socket_timeout', 'pool_maxsize',
                    'pool_unused_timeout', 'pool_connection_get_timeout',
                    'pool_flush_on_reconnect', 'sasl_enabled', 'username',
                    'password'):
            value = getattr(conf.cache, 'memcache_' + arg)
            conf_dict['%s.arguments.%s' % (prefix, arg)] = value

    if conf.cache.tls_enabled:
        if conf.cache.backend in ('dogpile.cache.bmemcached',
                                  'dogpile.cache.pymemcache',
                                  'oslo_cache.memcache_pool'):
            _LOG.debug('Oslo Cache TLS - CA: %s', conf.cache.tls_cafile)
            tls_context = ssl.create_default_context(
                cafile=conf.cache.tls_cafile)

            if conf.cache.enforce_fips_mode:
                if hasattr(ssl, 'FIPS_mode'):
                    _LOG.info("Enforcing the use of the OpenSSL FIPS mode")
                    ssl.FIPS_mode_set(1)
                else:
                    raise exception.ConfigurationError(
                        "OpenSSL FIPS mode is not supported by your Python "
                        "version. You must either change the Python "
                        "executable used to a version with FIPS mode "
                        "support or disable FIPS mode by setting "
                        "the '[cache] enforce_fips_mode' configuration "
                        "option to 'False'.")

            if conf.cache.tls_certfile is not None:
                _LOG.debug('Oslo Cache TLS - cert: %s',
                           conf.cache.tls_certfile)
                _LOG.debug('Oslo Cache TLS - key: %s', conf.cache.tls_keyfile)
                tls_context.load_cert_chain(
                    conf.cache.tls_certfile,
                    conf.cache.tls_keyfile,
                )

            if conf.cache.tls_allowed_ciphers is not None:
                _LOG.debug(
                    'Oslo Cache TLS - ciphers: %s',
                    conf.cache.tls_allowed_ciphers,
                )
                tls_context.set_ciphers(conf.cache.tls_allowed_ciphers)

            conf_dict['%s.arguments.tls_context' % prefix] = tls_context
        elif conf.cache.backend in ('dogpile.cache.redis',
                                    'dogpile.cache.redis_sentinel'):
            if conf.cache.tls_allowed_ciphers is not None:
                raise exception.ConfigurationError(
                    "Limiting allowed ciphers is not supported by "
                    "the %s backend" % conf.cache.backend)
            if conf.cache.enforce_fips_mode:
                raise exception.ConfigurationError(
                    "FIPS mode is not supported by the %s backend" %
                    conf.cache.backend)

            conn_kwargs = {}
            if conf.cache.tls_cafile is not None:
                _LOG.debug('Oslo Cache TLS - CA: %s', conf.cache.tls_cafile)
                conn_kwargs['ssl_ca_certs'] = conf.cache.tls_cafile
            if conf.cache.tls_certfile is not None:
                _LOG.debug('Oslo Cache TLS - cert: %s',
                           conf.cache.tls_certfile)
                _LOG.debug('Oslo Cache TLS - key: %s', conf.cache.tls_keyfile)
                conn_kwargs.update({
                    'ssl_certfile': conf.cache.tls_certfile,
                    'ssl_keyfile': conf.cache.tls_keyfile
                })
            if conf.cache.backend == 'dogpile.cache.redis_sentinel':
                conn_kwargs.update({'ssl': True})
                conf_dict.setdefault(
                    '%s.arguments.connection_kwargs' % prefix,
                    {}).update(conn_kwargs)
                conf_dict.setdefault(
                    '%s.arguments.sentinel_kwargs' % prefix,
                    {}).update(conn_kwargs)
            else:
                conf_dict.setdefault(
                    '%s.arguments.connection_kwargs' % prefix,
                    {}).update(conn_kwargs)
        else:
            raise exception.ConfigurationError(
                "TLS setting via [cache] tls_enabled is not supported by "
                "the %s backend. Set [cache] tls_enabled=False or use a "
                "different backend." % conf.cache.backend
            )

    # NOTE(hberaud): pymemcache supports socket keepalive; if it is enabled
    # in our config, then configure it to enable this feature.
    # The socket keepalive feature means that pymemcache will be able to
    # check your connected socket and determine whether the connection is
    # still up and running or if it has broken.
    # This could be used by users who want to handle fine-grained failures.
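    #
    # For example, keepalive probing is enabled with something like this
    # (illustrative values; the option names are the [cache] options
    # consumed by the code that follows):
    #
    #   [cache]
    #   backend = dogpile.cache.pymemcache
    #   enable_socket_keepalive = true
    #   socket_keepalive_idle = 10
    #   socket_keepalive_interval = 5
    #   socket_keepalive_count = 3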
    if conf.cache.enable_socket_keepalive:
        if conf.cache.backend != 'dogpile.cache.pymemcache':
            msg = _(
                "Socket keepalive is only supported by the "
                "'dogpile.cache.pymemcache' backend."
            )
            raise exception.ConfigurationError(msg)
        import pymemcache
        socket_keepalive = pymemcache.KeepaliveOpts(
            idle=conf.cache.socket_keepalive_idle,
            intvl=conf.cache.socket_keepalive_interval,
            cnt=conf.cache.socket_keepalive_count)
        # As with the TLS context above, the config dict below will be
        # consumed by dogpile.cache that will be used as a proxy between
        # oslo.cache and pymemcache.
        conf_dict['%s.arguments.socket_keepalive' % prefix] = socket_keepalive

    # NOTE(hberaud): The pymemcache library comes with retry mechanisms that
    # can be used to wrap all kinds of pymemcache clients. The retry wrapper
    # allows you to define how many attempts to make and how long to wait
    # between attempts. The section below will pass our config
    # to dogpile.cache to set up the pymemcache retry client wrapper.
    if conf.cache.enable_retry_client:
        if conf.cache.backend != 'dogpile.cache.pymemcache':
            msg = _(
                "Retry client is only supported by the "
                "'dogpile.cache.pymemcache' backend."
            )
            raise exception.ConfigurationError(msg)
        import pymemcache
        conf_dict['%s.arguments.enable_retry_client' % prefix] = True
        conf_dict['%s.arguments.retry_attempts' % prefix] = \
            conf.cache.retry_attempts
        conf_dict['%s.arguments.retry_delay' % prefix] = \
            conf.cache.retry_delay
        conf_dict['%s.arguments.hashclient_retry_attempts' % prefix] = \
            conf.cache.hashclient_retry_attempts
        conf_dict['%s.arguments.hashclient_retry_delay' % prefix] = \
            conf.cache.hashclient_retry_delay
        conf_dict['%s.arguments.dead_timeout' % prefix] = \
            conf.cache.dead_timeout

    return conf_dict


def _sha1_mangle_key(key):
    """Wrapper for dogpile's sha1_mangle_key.

    dogpile's sha1_mangle_key function expects an encoded string, so we
    should take steps to properly handle multiple inputs before passing
    the key through.
    """
    try:
        key = key.encode('utf-8', errors='xmlcharrefreplace')
    except (UnicodeError, AttributeError):
        # NOTE(stevemar): if encoding fails just continue anyway.
        pass
    return util.sha1_mangle_key(key)


def _key_generate_to_str(s):
    # NOTE(morganfainberg): Since we need to stringify all arguments, attempt
    # to stringify and handle the Unicode error explicitly as needed.
    try:
        return str(s)
    except UnicodeEncodeError:
        return s.encode('utf-8')


def function_key_generator(namespace, fn, to_str=_key_generate_to_str):
    # NOTE(morganfainberg): This wraps dogpile.cache's default
    # function_key_generator to change the default to_str mechanism.
    return util.function_key_generator(namespace, fn, to_str=to_str)


def kwarg_function_key_generator(namespace, fn, to_str=_key_generate_to_str):
    # NOTE(ralonsoh): This wraps dogpile.cache's default
    # kwarg_function_key_generator to change the default to_str mechanism.
    return util.kwarg_function_key_generator(namespace, fn, to_str=to_str)


def create_region(function=function_key_generator):
    """Create a region.

    This is just dogpile.cache.make_region, but the key generator has a
    different to_str mechanism.

    .. note::

        You must call :func:`configure_cache_region` with this region before
        a memoized method is called.

    :param function: function used to generate a unique key depending on the
                     arguments of the decorated function
    :type function: function
    :returns: The new region.
    :rtype: :class:`dogpile.cache.region.CacheRegion`
    """
    return dogpile.cache.make_region(function_key_generator=function)


def configure_cache_region(conf, region):
    """Configure a cache region.
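
    A typical call sequence looks like this (a minimal sketch; ``conf`` is
    an :class:`oslo_config.cfg.ConfigOpts` that has already been passed to
    :func:`configure`)::

        from oslo_cache import core

        region = core.create_region()
        core.configure_cache_region(conf, region)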

    If the cache region is already configured, this function does nothing.
    Otherwise, the region is configured.

    :param conf: config object, must have had :func:`configure` called on it.
    :type conf: oslo_config.cfg.ConfigOpts
    :param region: Cache region to configure (see :func:`create_region`).
    :type region: dogpile.cache.region.CacheRegion
    :raises oslo_cache.exception.ConfigurationError: If the region parameter
        is not a dogpile.cache.CacheRegion.
    :returns: The region.
    :rtype: :class:`dogpile.cache.region.CacheRegion`
    """
    if not isinstance(region, dogpile.cache.CacheRegion):
        raise exception.ConfigurationError(
            _('region not type dogpile.cache.CacheRegion'))

    if not region.is_configured:
        # NOTE(morganfainberg): this is how you tell if a region is
        # configured. There is a request logged with dogpile.cache upstream
        # to make this easier / less ugly.

        config_dict = _build_cache_config(conf)
        region.configure_from_config(config_dict,
                                     '%s.' % conf.cache.config_prefix)

        if conf.cache.debug_cache_backend:
            region.wrap(_DebugProxy)

        # NOTE(morganfainberg): if the backend requests the use of a
        # key_mangler, we should respect that key_mangler function. If a
        # key_mangler is not defined by the backend, use the sha1_mangle_key
        # mangler provided by dogpile.cache. This ensures we always use a
        # fixed size cache-key.
        if region.key_mangler is None:
            region.key_mangler = _sha1_mangle_key

        for class_path in conf.cache.proxies:
            # NOTE(morganfainberg): if we have any proxy wrappers, we should
            # ensure they are added to the cache region's backend. Since
            # configure_from_config doesn't handle the wrap argument, we need
            # to manually add the Proxies. For information on how the
            # ProxyBackends work, see the dogpile.cache documents on
            # "changing-backend-behavior"
            cls = importutils.import_class(class_path)
            _LOG.debug("Adding cache-proxy '%s' to backend.", class_path)
            region.wrap(cls)

    return region


def _get_should_cache_fn(conf, group):
    """Build a function that returns a config group's caching status.

    For any given object that has caching capabilities, a boolean config
    option for that object's group should exist and default to ``True``.
    This function will use that value to tell the caching decorator if
    caching for that object is enabled. To properly use this with the
    decorator, pass this function the configuration group and assign the
    result to a variable. Pass the new variable to the caching decorator
    as the named argument ``should_cache_fn``.

    :param conf: config object, must have had :func:`configure` called on it.
    :type conf: oslo_config.cfg.ConfigOpts
    :param group: name of the configuration group to examine
    :type group: string
    :returns: function reference
    """
    def should_cache(value):
        if not conf.cache.enabled:
            return False
        conf_group = getattr(conf, group)
        return getattr(conf_group, 'caching', True)
    return should_cache


def _get_expiration_time_fn(conf, group):
    """Build a function that returns a config group's expiration time status.

    For any given object that has caching capabilities, an int config option
    called ``cache_time`` for that driver's group should exist and typically
    default to ``None``. This function will use that value to tell the
    caching decorator of the TTL override for caching the resulting objects.
    If the value of the config option is ``None`` the default value provided
    in the ``[cache] expiration_time`` option will be used by the decorator.
    The default may be set to something other than ``None`` in cases where
    the caching TTL should not be tied to the global default(s).
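
    For example, a hypothetical ``identity`` group could override the global
    TTL with its own value (an illustrative config snippet)::

        [identity]
        caching = true
        cache_time = 7200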

    To properly use this with the decorator, pass this function the
    configuration group and assign the result to a variable. Pass the new
    variable to the caching decorator as the named argument
    ``expiration_time``.

    :param group: name of the configuration group to examine
    :type group: string
    :rtype: function reference
    """
    def get_expiration_time():
        conf_group = getattr(conf, group)
        return getattr(conf_group, 'cache_time', None)
    return get_expiration_time


def get_memoization_decorator(conf, region, group, expiration_group=None):
    """Build a function based on the `cache_on_arguments` decorator.

    The memoization decorator that gets created by this function is a
    :meth:`dogpile.cache.region.CacheRegion.cache_on_arguments` decorator,
    where

    * The ``should_cache_fn`` is set to a function that returns True if both
      the ``[cache] enabled`` option is true and [`group`] ``caching`` is
      True.

    * The ``expiration_time`` is set from the
      [`expiration_group`] ``cache_time`` option if ``expiration_group`` is
      passed in and the value is set, or [`group`] ``cache_time`` if
      ``expiration_group`` is not passed in and the value is set, or
      ``[cache] expiration_time`` otherwise.

    Example usage::

        import oslo_cache.core

        MEMOIZE = oslo_cache.core.get_memoization_decorator(
            conf, region, group='group1')

        @MEMOIZE
        def function(arg1, arg2):
            ...

        ALTERNATE_MEMOIZE = oslo_cache.core.get_memoization_decorator(
            conf, region, group='group2', expiration_group='group3')

        @ALTERNATE_MEMOIZE
        def function2(arg1, arg2):
            ...

    :param conf: config object, must have had :func:`configure` called on it.
    :type conf: oslo_config.cfg.ConfigOpts
    :param region: region as created by :func:`create_region`.
    :type region: dogpile.cache.region.CacheRegion
    :param group: name of the configuration group to examine
    :type group: string
    :param expiration_group: name of the configuration group to examine
                             for the expiration option. This will fall back
                             to using ``group`` if the value is unspecified
                             or ``None``
    :type expiration_group: string
    :rtype: function reference
    """
    if expiration_group is None:
        expiration_group = group
    should_cache = _get_should_cache_fn(conf, group)
    expiration_time = _get_expiration_time_fn(conf, expiration_group)

    memoize = region.cache_on_arguments(should_cache_fn=should_cache,
                                        expiration_time=expiration_time)

    # Make sure the actual "should_cache" and "expiration_time" methods are
    # available. This is potentially interesting/useful to pre-seed cache
    # values.
    memoize.should_cache = should_cache
    memoize.get_expiration_time = expiration_time

    return memoize


def configure(conf):
    """Configure the library.

    Register the required oslo.cache config options into an oslo.config CONF
    object. This must be called before :py:func:`configure_cache_region`.

    :param conf: The configuration object.
    :type conf: oslo_config.cfg.ConfigOpts
    """
    _opts.configure(conf)
././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0
oslo.cache-3.7.0/oslo_cache/exception.py0000664000175000017500000000140200000000000020226 0ustar00zuulzuul00000000000000# Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the # License for the specific language governing permissions and limitations # under the License. class ConfigurationError(Exception): """Raised when the cache isn't configured correctly.""" class QueueEmpty(Exception): """Raised when a connection cannot be acquired.""" ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1708932956.1583943 oslo.cache-3.7.0/oslo_cache/locale/0000775000175000017500000000000000000000000017120 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1708932956.1583943 oslo.cache-3.7.0/oslo_cache/locale/de/0000775000175000017500000000000000000000000017510 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1708932956.1663947 oslo.cache-3.7.0/oslo_cache/locale/de/LC_MESSAGES/0000775000175000017500000000000000000000000021275 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0 oslo.cache-3.7.0/oslo_cache/locale/de/LC_MESSAGES/oslo_cache.po0000664000175000017500000000350300000000000023735 0ustar00zuulzuul00000000000000# Tom Cocozzello , 2015. #zanata # Alex Eng , 2016. #zanata msgid "" msgstr "" "Project-Id-Version: oslo.cache 1.9.1.dev1\n" "Report-Msgid-Bugs-To: https://bugs.launchpad.net/openstack-i18n/\n" "POT-Creation-Date: 2016-06-12 08:30+0000\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: 8bit\n" "PO-Revision-Date: 2016-06-03 05:50+0000\n" "Last-Translator: Alex Eng \n" "Language-Team: German\n" "Language: de\n" "X-Generator: Zanata 3.7.3\n" "Plural-Forms: nplurals=2; plural=(n != 1)\n" #, python-format msgid "" "Invalid ssl_cert_reqs value of %s, must be one of \"NONE\", \"OPTIONAL\", " "\"REQUIRED\"" msgstr "" "Ungültiger Wert %s für ssl_cert_reqs, muss lauten \"NONE\", \"OPTIONAL\", " "\"REQUIRED\"" #, python-format msgid "" "Unable to get a connection from pool id %(id)s after %(seconds)s seconds." msgstr "" "Verbindung konnte von Pool-ID %(id)s nach %(seconds)s nicht abgerufen werden." 
msgid "cache_collection name is required" msgstr "Ein Name für cache_collection ist erforderlich" msgid "database db_name is required" msgstr "Die Datenbank db_name ist erforderlich" msgid "db_hosts value is required" msgstr "Ein Wert für db_hosts ist erforderlich" msgid "integer value expected for mongo_ttl_seconds" msgstr "Ganzzahlwert für mongo_ttl_seconds erwartet" msgid "integer value expected for w (write concern attribute)" msgstr "Ganzzahlwert für Attribut 'w' ('write concern'-Attribut) erwartet" msgid "no ssl support available" msgstr "Keine SSL-Unterstützung verfügbar" msgid "region not type dogpile.cache.CacheRegion" msgstr "Region weist nicht den Typ 'dogpile.cache.CacheRegion' auf" msgid "replicaset_name required when use_replica is True" msgstr "replicaset_name erforderlich, wenn use_replica 'True' ist" ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1708932956.1583943 oslo.cache-3.7.0/oslo_cache/locale/en_GB/0000775000175000017500000000000000000000000020072 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1708932956.1663947 oslo.cache-3.7.0/oslo_cache/locale/en_GB/LC_MESSAGES/0000775000175000017500000000000000000000000021657 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0 oslo.cache-3.7.0/oslo_cache/locale/en_GB/LC_MESSAGES/oslo_cache.po0000664000175000017500000000413000000000000024314 0ustar00zuulzuul00000000000000# Andi Chandler , 2016. #zanata # Andi Chandler , 2022. #zanata msgid "" msgstr "" "Project-Id-Version: oslo.cache VERSION\n" "Report-Msgid-Bugs-To: https://bugs.launchpad.net/openstack-i18n/\n" "POT-Creation-Date: 2022-05-11 15:54+0000\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: 8bit\n" "PO-Revision-Date: 2022-06-13 07:43+0000\n" "Last-Translator: Andi Chandler \n" "Language-Team: English (United Kingdom)\n" "Language: en_GB\n" "X-Generator: Zanata 4.3.3\n" "Plural-Forms: nplurals=2; plural=(n != 1)\n" #, python-format msgid "" "Invalid ssl_cert_reqs value of %s, must be one of \"NONE\", \"OPTIONAL\", " "\"REQUIRED\"" msgstr "" "Invalid ssl_cert_reqs value of %s, must be one of \"NONE\", \"OPTIONAL\", " "\"REQUIRED\"" msgid "" "Retry client is only supported by the 'dogpile.cache.pymemcache' backend." msgstr "" "Retry client is only supported by the 'dogpile.cache.pymemcache' backend." msgid "" "Socket keepalive is only supported by the 'dogpile.cache.pymemcache' backend." msgstr "" "Socket keepalive is only supported by the 'dogpile.cache.pymemcache' backend." #, python-format msgid "" "Unable to get a connection from pool id %(id)s after %(seconds)s seconds." msgstr "" "Unable to get a connection from pool id %(id)s after %(seconds)s seconds." 
msgid "cache_collection name is required" msgstr "cache_collection name is required" msgid "database db_name is required" msgstr "database db_name is required" msgid "db_hosts value is required" msgstr "db_hosts value is required" msgid "integer value expected for mongo_ttl_seconds" msgstr "integer value expected for mongo_ttl_seconds" msgid "integer value expected for w (write concern attribute)" msgstr "integer value expected for w (write concern attribute)" msgid "no ssl support available" msgstr "no SSL support available" msgid "region not type dogpile.cache.CacheRegion" msgstr "region not type dogpile.cache.CacheRegion" msgid "replicaset_name required when use_replica is True" msgstr "replicaset_name required when use_replica is True" ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1708932956.1583943 oslo.cache-3.7.0/oslo_cache/locale/es/0000775000175000017500000000000000000000000017527 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1708932956.1663947 oslo.cache-3.7.0/oslo_cache/locale/es/LC_MESSAGES/0000775000175000017500000000000000000000000021314 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0 oslo.cache-3.7.0/oslo_cache/locale/es/LC_MESSAGES/oslo_cache.po0000664000175000017500000000370000000000000023753 0ustar00zuulzuul00000000000000# OpenStack Infra , 2015. #zanata # Tom Cocozzello , 2015. #zanata # Alex Eng , 2016. #zanata # KATO Tomoyuki , 2016. #zanata msgid "" msgstr "" "Project-Id-Version: oslo.cache 1.10.1.dev2\n" "Report-Msgid-Bugs-To: https://bugs.launchpad.net/openstack-i18n/\n" "POT-Creation-Date: 2016-07-11 22:41+0000\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: 8bit\n" "PO-Revision-Date: 2016-07-02 08:19+0000\n" "Last-Translator: KATO Tomoyuki \n" "Language-Team: Spanish\n" "Language: es\n" "X-Generator: Zanata 3.7.3\n" "Plural-Forms: nplurals=2; plural=(n != 1)\n" #, python-format msgid "" "Invalid ssl_cert_reqs value of %s, must be one of \"NONE\", \"OPTIONAL\", " "\"REQUIRED\"" msgstr "" "Valor ssl_cert_reqs no válido de %s, debe ser uno de \"NONE\", \"OPTIONAL\", " "\"REQUIRED\"" #, python-format msgid "" "Unable to get a connection from pool id %(id)s after %(seconds)s seconds." msgstr "" "No se puede obtener una conexión del ID de agrupación %(id)s después de " "%(seconds)s segundos." 
msgid "cache_collection name is required" msgstr "el nombre de cache_collection es necesario" msgid "database db_name is required" msgstr "base de datos db_name es necesario" msgid "db_hosts value is required" msgstr "El valor db_hosts es necesario" msgid "integer value expected for mongo_ttl_seconds" msgstr "se esperaba un valor entero para mongo_ttl_seconds" msgid "integer value expected for w (write concern attribute)" msgstr "se esperaba un valor entero para w (atributo en cuestión write)" msgid "no ssl support available" msgstr "Soporte SSL no disponible" msgid "region not type dogpile.cache.CacheRegion" msgstr "región no tipo dogpile.cache.CacheRegion" msgid "replicaset_name required when use_replica is True" msgstr "se necesita replicaset_name cuando use_replica es True (verdadero)" ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1708932956.1583943 oslo.cache-3.7.0/oslo_cache/locale/fr/0000775000175000017500000000000000000000000017527 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1708932956.1663947 oslo.cache-3.7.0/oslo_cache/locale/fr/LC_MESSAGES/0000775000175000017500000000000000000000000021314 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0 oslo.cache-3.7.0/oslo_cache/locale/fr/LC_MESSAGES/oslo_cache.po0000664000175000017500000000366400000000000023764 0ustar00zuulzuul00000000000000# OpenStack Infra , 2015. #zanata # Tom Cocozzello , 2015. #zanata # Alex Eng , 2016. #zanata # KATO Tomoyuki , 2016. #zanata msgid "" msgstr "" "Project-Id-Version: oslo.cache 1.10.1.dev2\n" "Report-Msgid-Bugs-To: https://bugs.launchpad.net/openstack-i18n/\n" "POT-Creation-Date: 2016-07-11 22:41+0000\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: 8bit\n" "PO-Revision-Date: 2016-07-02 08:20+0000\n" "Last-Translator: KATO Tomoyuki \n" "Language-Team: French\n" "Language: fr\n" "X-Generator: Zanata 3.7.3\n" "Plural-Forms: nplurals=2; plural=(n > 1)\n" #, python-format msgid "" "Invalid ssl_cert_reqs value of %s, must be one of \"NONE\", \"OPTIONAL\", " "\"REQUIRED\"" msgstr "" "Valeur de ssl_cert_reqs non valide (%s), doit être l'une des valeurs " "suivantes: \"NONE\", \"OPTIONAL\", \"REQUIRED\"" #, python-format msgid "" "Unable to get a connection from pool id %(id)s after %(seconds)s seconds." msgstr "" "Impossible d'établir une connexion à partir de l'ID de pool %(id)s après " "%(seconds)s secondes." 
msgid "cache_collection name is required" msgstr "Nom cache_collection est requis" msgid "database db_name is required" msgstr "db_name database est requis" msgid "db_hosts value is required" msgstr "Valeur db_hosts est requis" msgid "integer value expected for mongo_ttl_seconds" msgstr "valeur entière attendue pour mongo_ttl_seconds" msgid "integer value expected for w (write concern attribute)" msgstr "valeur entière attendue pour w (attribut d'écriture)" msgid "no ssl support available" msgstr "pas de support du ssl" msgid "region not type dogpile.cache.CacheRegion" msgstr "la région n'est pas de type dogpile.cache.CacheRegion" msgid "replicaset_name required when use_replica is True" msgstr "replicaset_name requis si use_replica a la valeur True" ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1708932956.1583943 oslo.cache-3.7.0/oslo_cache/locale/it/0000775000175000017500000000000000000000000017534 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1708932956.1663947 oslo.cache-3.7.0/oslo_cache/locale/it/LC_MESSAGES/0000775000175000017500000000000000000000000021321 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0 oslo.cache-3.7.0/oslo_cache/locale/it/LC_MESSAGES/oslo_cache.po0000664000175000017500000000356100000000000023765 0ustar00zuulzuul00000000000000# Tom Cocozzello , 2015. #zanata # Alex Eng , 2016. #zanata # KATO Tomoyuki , 2016. #zanata msgid "" msgstr "" "Project-Id-Version: oslo.cache 1.10.1.dev2\n" "Report-Msgid-Bugs-To: https://bugs.launchpad.net/openstack-i18n/\n" "POT-Creation-Date: 2016-07-11 22:41+0000\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: 8bit\n" "PO-Revision-Date: 2016-07-02 08:21+0000\n" "Last-Translator: KATO Tomoyuki \n" "Language-Team: Italian\n" "Language: it\n" "X-Generator: Zanata 3.7.3\n" "Plural-Forms: nplurals=2; plural=(n != 1)\n" #, python-format msgid "" "Invalid ssl_cert_reqs value of %s, must be one of \"NONE\", \"OPTIONAL\", " "\"REQUIRED\"" msgstr "" "Valore ssl_cert_reqs di %s non valido; deve essere uno tra \"NONE\", " "\"OPTIONAL\", \"REQUIRED\"" #, python-format msgid "" "Unable to get a connection from pool id %(id)s after %(seconds)s seconds." msgstr "" "Impossibile ottenere una connessione dall'ID pool %(id)s dopo %(seconds)s " "secondi." 
msgid "cache_collection name is required" msgstr "Il nome cache_collection è obbligatorio" msgid "database db_name is required" msgstr "Il database db_name è obbligatorio" msgid "db_hosts value is required" msgstr "Il valore db_hosts è obbligatorio" msgid "integer value expected for mongo_ttl_seconds" msgstr "valore intero previsto per mongo_ttl_seconds" msgid "integer value expected for w (write concern attribute)" msgstr "valore intero previsto per w (attributo di scrittura)" msgid "no ssl support available" msgstr "nessun supporto ssl disponibile" msgid "region not type dogpile.cache.CacheRegion" msgstr "regione non tipo dogpile.cache.CacheRegion" msgid "replicaset_name required when use_replica is True" msgstr "replicaset_name è obbligatorio quando use_replica è True" ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1708932956.1583943 oslo.cache-3.7.0/oslo_cache/locale/ko_KR/0000775000175000017500000000000000000000000020125 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1708932956.1663947 oslo.cache-3.7.0/oslo_cache/locale/ko_KR/LC_MESSAGES/0000775000175000017500000000000000000000000021712 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0 oslo.cache-3.7.0/oslo_cache/locale/ko_KR/LC_MESSAGES/oslo_cache.po0000664000175000017500000000356300000000000024360 0ustar00zuulzuul00000000000000# Lucas Palm , 2015. #zanata # Alex Eng , 2016. #zanata # KATO Tomoyuki , 2016. #zanata msgid "" msgstr "" "Project-Id-Version: oslo.cache VERSION\n" "Report-Msgid-Bugs-To: https://bugs.launchpad.net/openstack-i18n/\n" "POT-Creation-Date: 2018-02-08 23:08+0000\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: 8bit\n" "PO-Revision-Date: 2016-07-02 08:22+0000\n" "Last-Translator: KATO Tomoyuki \n" "Language-Team: Korean (South Korea)\n" "Language: ko_KR\n" "X-Generator: Zanata 4.3.3\n" "Plural-Forms: nplurals=1; plural=0\n" #, python-format msgid "" "Invalid ssl_cert_reqs value of %s, must be one of \"NONE\", \"OPTIONAL\", " "\"REQUIRED\"" msgstr "" "%s의 ssl_cert_reqs 값이 올바르지 않음, \"NONE\", \"OPTIONAL\", \"REQUIRED\" " "중 하나여야 함 " #, python-format msgid "" "Unable to get a connection from pool id %(id)s after %(seconds)s seconds." msgstr "풀 id %(id)s에서 %(seconds)s분 후에 연결할 수 없습니다." 
msgid "cache_collection name is required" msgstr "cache_collection 이름이 필요함" msgid "database db_name is required" msgstr "database db_name이 필요함" msgid "db_hosts value is required" msgstr "db_hosts 값이 필요함" msgid "integer value expected for mongo_ttl_seconds" msgstr "mongo_ttl_seconds 에 대해 정수 값이 예상됨 " msgid "integer value expected for w (write concern attribute)" msgstr "w(write concern 속성)에 대해 정수 값이 예상됨" msgid "no ssl support available" msgstr "사용 가능한 ssl 지원이 없음" msgid "region not type dogpile.cache.CacheRegion" msgstr "리젼이 dogpile.cache.CacheRegion 유형이 아님 " msgid "replicaset_name required when use_replica is True" msgstr "use_replica가 True인 경우 replicaset_name이 필요함 " ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1708932956.1583943 oslo.cache-3.7.0/oslo_cache/locale/pt_BR/0000775000175000017500000000000000000000000020126 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1708932956.1663947 oslo.cache-3.7.0/oslo_cache/locale/pt_BR/LC_MESSAGES/0000775000175000017500000000000000000000000021713 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0 oslo.cache-3.7.0/oslo_cache/locale/pt_BR/LC_MESSAGES/oslo_cache.po0000664000175000017500000000367400000000000024364 0ustar00zuulzuul00000000000000# Lucas Palm , 2015. #zanata # OpenStack Infra , 2015. #zanata # Alex Eng , 2016. #zanata # KATO Tomoyuki , 2016. #zanata msgid "" msgstr "" "Project-Id-Version: oslo.cache VERSION\n" "Report-Msgid-Bugs-To: https://bugs.launchpad.net/openstack-i18n/\n" "POT-Creation-Date: 2018-02-08 23:08+0000\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: 8bit\n" "PO-Revision-Date: 2016-07-02 08:23+0000\n" "Last-Translator: KATO Tomoyuki \n" "Language-Team: Portuguese (Brazil)\n" "Language: pt_BR\n" "X-Generator: Zanata 4.3.3\n" "Plural-Forms: nplurals=2; plural=(n != 1)\n" #, python-format msgid "" "Invalid ssl_cert_reqs value of %s, must be one of \"NONE\", \"OPTIONAL\", " "\"REQUIRED\"" msgstr "" "valor ssl_cert_reqs inválido de %s, deve ser um de \"NONE\", \"OPTIMAL\", " "\"REQUIRED\"" #, python-format msgid "" "Unable to get a connection from pool id %(id)s after %(seconds)s seconds." msgstr "" "Não é possível obter uma conexão do ID do conjunto %(id)s após %(seconds)s " "segundos." 
msgid "cache_collection name is required" msgstr "nome cache_collection é necessário" msgid "database db_name is required" msgstr "banco de dados db_name é necessário" msgid "db_hosts value is required" msgstr "valor db_hosts é necessário" msgid "integer value expected for mongo_ttl_seconds" msgstr "valor de número inteiro esperado para mongo_ttl_seconds" msgid "integer value expected for w (write concern attribute)" msgstr "valor inteiro esperado para w (atributo relativo a gravação)" msgid "no ssl support available" msgstr "suporte ssl não disponível" msgid "region not type dogpile.cache.CacheRegion" msgstr "região não é do tipo dogpile.cache.CacheRegion" msgid "replicaset_name required when use_replica is True" msgstr "replicaset_name necessário quando use_replica for True" ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1708932956.1583943 oslo.cache-3.7.0/oslo_cache/locale/ru/0000775000175000017500000000000000000000000017546 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1708932956.1703951 oslo.cache-3.7.0/oslo_cache/locale/ru/LC_MESSAGES/0000775000175000017500000000000000000000000021333 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0 oslo.cache-3.7.0/oslo_cache/locale/ru/LC_MESSAGES/oslo_cache.po0000664000175000017500000000455500000000000024003 0ustar00zuulzuul00000000000000# Lucas Palm , 2015. #zanata # Alex Eng , 2016. #zanata # KATO Tomoyuki , 2016. #zanata msgid "" msgstr "" "Project-Id-Version: oslo.cache 1.10.1.dev2\n" "Report-Msgid-Bugs-To: https://bugs.launchpad.net/openstack-i18n/\n" "POT-Creation-Date: 2016-07-11 22:41+0000\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: 8bit\n" "PO-Revision-Date: 2016-07-02 08:24+0000\n" "Last-Translator: KATO Tomoyuki \n" "Language-Team: Russian\n" "Language: ru\n" "X-Generator: Zanata 3.7.3\n" "Plural-Forms: nplurals=3; plural=(n%10==1 && n%100!=11 ? 0 : n%10>=2 && n" "%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2)\n" #, python-format msgid "" "Invalid ssl_cert_reqs value of %s, must be one of \"NONE\", \"OPTIONAL\", " "\"REQUIRED\"" msgstr "" "Недопустимое значение ssl_cert_reqs, %s, необходимо указать одно из " "значений: \"NONE\", \"OPTIONAL\", \"REQUIRED\"" #, python-format msgid "" "Unable to get a connection from pool id %(id)s after %(seconds)s seconds." msgstr "" "Не удалось получить соединение из пула с ИД %(id)s за %(seconds)s секунд." 
msgid "cache_collection name is required" msgstr "имя cache_collection является обязательным" msgid "database db_name is required" msgstr "db_name базы данных является обязательным" msgid "db_hosts value is required" msgstr "Значение db_hosts является обязательным" msgid "integer value expected for mongo_ttl_seconds" msgstr "для атрибута mongo_ttl_seconds ожидается целочисленное значение" msgid "integer value expected for w (write concern attribute)" msgstr "для w (атрибут участия в записи) ожидается целочисленное значение" msgid "no ssl support available" msgstr "отсутствует поддержка ssl" msgid "region not type dogpile.cache.CacheRegion" msgstr "регион не относится к типу dogpile.cache.CacheRegion" msgid "replicaset_name required when use_replica is True" msgstr "" "replicaset_name является обязательным, если для use_replica задано значение " "True" ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1708932956.1583943 oslo.cache-3.7.0/oslo_cache/locale/tr_TR/0000775000175000017500000000000000000000000020152 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1708932956.1703951 oslo.cache-3.7.0/oslo_cache/locale/tr_TR/LC_MESSAGES/0000775000175000017500000000000000000000000021737 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0 oslo.cache-3.7.0/oslo_cache/locale/tr_TR/LC_MESSAGES/oslo_cache.po0000664000175000017500000000352500000000000024403 0ustar00zuulzuul00000000000000# OpenStack Infra , 2015. #zanata # Alex Eng , 2016. #zanata # KATO Tomoyuki , 2016. #zanata msgid "" msgstr "" "Project-Id-Version: oslo.cache VERSION\n" "Report-Msgid-Bugs-To: https://bugs.launchpad.net/openstack-i18n/\n" "POT-Creation-Date: 2018-02-08 23:08+0000\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: 8bit\n" "PO-Revision-Date: 2016-07-02 08:25+0000\n" "Last-Translator: KATO Tomoyuki \n" "Language-Team: Turkish (Turkey)\n" "Language: tr_TR\n" "X-Generator: Zanata 4.3.3\n" "Plural-Forms: nplurals=2; plural=(n>1)\n" #, python-format msgid "" "Invalid ssl_cert_reqs value of %s, must be one of \"NONE\", \"OPTIONAL\", " "\"REQUIRED\"" msgstr "" "%s değerinde geçersiz ssl_cert_reqs, \"HİÇBİRİ\", \"İSTEĞE BAĞLI\", " "\"GEREKLİ\" den biri olmalı" #, python-format msgid "" "Unable to get a connection from pool id %(id)s after %(seconds)s seconds." msgstr "%(seconds)s saniye sonra havuz %(id)s'den bağlantı alınamadı." 
msgid "cache_collection name is required" msgstr "cache_collection ismi gerekli" msgid "database db_name is required" msgstr "veri tabanı db_name gerekli" msgid "db_hosts value is required" msgstr "db_hosts değeri gerekli" msgid "integer value expected for mongo_ttl_seconds" msgstr "mongo_ttl_seconds için tam sayı değer bekleniyor" msgid "integer value expected for w (write concern attribute)" msgstr "w için tam sayı değer bekleniyor (yazma ilgisi özniteliği)" msgid "no ssl support available" msgstr "ssl desteği yok" msgid "region not type dogpile.cache.CacheRegion" msgstr "bölge dogpile.cache.CacheRegion türünde değil" msgid "replicaset_name required when use_replica is True" msgstr "use_replica True olduğunda replicaset_name gereklidir" ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1708932956.1583943 oslo.cache-3.7.0/oslo_cache/locale/zh_CN/0000775000175000017500000000000000000000000020121 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1708932956.1703951 oslo.cache-3.7.0/oslo_cache/locale/zh_CN/LC_MESSAGES/0000775000175000017500000000000000000000000021706 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0 oslo.cache-3.7.0/oslo_cache/locale/zh_CN/LC_MESSAGES/oslo_cache.po0000664000175000017500000000342600000000000024352 0ustar00zuulzuul00000000000000# Lucas Palm , 2015. #zanata # Alex Eng , 2016. #zanata # KATO Tomoyuki , 2016. #zanata msgid "" msgstr "" "Project-Id-Version: oslo.cache VERSION\n" "Report-Msgid-Bugs-To: https://bugs.launchpad.net/openstack-i18n/\n" "POT-Creation-Date: 2018-02-08 23:08+0000\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: 8bit\n" "PO-Revision-Date: 2016-07-02 08:27+0000\n" "Last-Translator: KATO Tomoyuki \n" "Language-Team: Chinese (China)\n" "Language: zh_CN\n" "X-Generator: Zanata 4.3.3\n" "Plural-Forms: nplurals=1; plural=0\n" #, python-format msgid "" "Invalid ssl_cert_reqs value of %s, must be one of \"NONE\", \"OPTIONAL\", " "\"REQUIRED\"" msgstr "" "ssl_cert_reqs 值 %s 无效,必须是下列其中一项:“NONE”、“OPTIONAL”和“REQUIRED”" #, python-format msgid "" "Unable to get a connection from pool id %(id)s after %(seconds)s seconds." 
msgstr "在 %(seconds)s 秒之后,无法根据池标识 %(id)s 获取连接。" msgid "cache_collection name is required" msgstr "需要 cache_collection 名称" msgid "database db_name is required" msgstr "需要数据库 db_name" msgid "db_hosts value is required" msgstr "需要 db_hosts 值" msgid "integer value expected for mongo_ttl_seconds" msgstr "mongo_ttl_seconds 需要整数值" msgid "integer value expected for w (write concern attribute)" msgstr "w(写相关属性)需要整数值" msgid "no ssl support available" msgstr "未提供 ssl 支持" msgid "region not type dogpile.cache.CacheRegion" msgstr "区域的类型不是 dogpile.cache.CacheRegion" msgid "replicaset_name required when use_replica is True" msgstr "当 use_replica 为 True 时,需要 replicaset_name" ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1708932956.1583943 oslo.cache-3.7.0/oslo_cache/locale/zh_TW/0000775000175000017500000000000000000000000020153 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1708932956.1703951 oslo.cache-3.7.0/oslo_cache/locale/zh_TW/LC_MESSAGES/0000775000175000017500000000000000000000000021740 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0 oslo.cache-3.7.0/oslo_cache/locale/zh_TW/LC_MESSAGES/oslo_cache.po0000664000175000017500000000342200000000000024400 0ustar00zuulzuul00000000000000# Lucas Palm , 2015. #zanata # Alex Eng , 2016. #zanata # KATO Tomoyuki , 2016. #zanata msgid "" msgstr "" "Project-Id-Version: oslo.cache VERSION\n" "Report-Msgid-Bugs-To: https://bugs.launchpad.net/openstack-i18n/\n" "POT-Creation-Date: 2018-02-08 23:08+0000\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: 8bit\n" "PO-Revision-Date: 2016-07-02 08:26+0000\n" "Last-Translator: KATO Tomoyuki \n" "Language-Team: Chinese (Taiwan)\n" "Language: zh_TW\n" "X-Generator: Zanata 4.3.3\n" "Plural-Forms: nplurals=1; plural=0\n" #, python-format msgid "" "Invalid ssl_cert_reqs value of %s, must be one of \"NONE\", \"OPTIONAL\", " "\"REQUIRED\"" msgstr "" "%s 的 ssl_cert_reqs 值無效,必須是 \"NONE\"、\"OPTIONAL\" 及 \"REQUIRED\" 的" "其中之一" #, python-format msgid "" "Unable to get a connection from pool id %(id)s after %(seconds)s seconds." msgstr "在 %(seconds)s 秒之後,無法從儲存區 ID %(id)s 取得連線。" msgid "cache_collection name is required" msgstr "需要 cache_collection 名稱" msgid "database db_name is required" msgstr "需要資料庫 db_name" msgid "db_hosts value is required" msgstr "需要 db_hosts 值" msgid "integer value expected for mongo_ttl_seconds" msgstr "mongo_ttl_seconds 預期整數值" msgid "integer value expected for w (write concern attribute)" msgstr "w(WriteConcern 屬性)預期整數值" msgid "no ssl support available" msgstr "無法使用 SSL 支援" msgid "region not type dogpile.cache.CacheRegion" msgstr "區域不是 dogpile.cache.CacheRegion 類型" msgid "replicaset_name required when use_replica is True" msgstr "use_replica 為 True 時需要 replicaset_name" ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0 oslo.cache-3.7.0/oslo_cache/testing.py0000664000175000017500000000456400000000000017721 0ustar00zuulzuul00000000000000# Copyright 2013 Metacloud # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
# You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""Items useful for external testing."""

import copy

from dogpile.cache import proxy

from oslo_cache import core as cache

__all__ = [
    'CacheIsolatingProxy',
]

NO_VALUE = cache.NO_VALUE


def _copy_value(value):
    if value is not NO_VALUE:
        value = copy.deepcopy(value)
    return value


# NOTE(morganfainberg): WARNING - It is not recommended to use the Memory
# backend for dogpile.cache in a real deployment under any circumstances. The
# backend does no cleanup of expired values and therefore will leak memory.
# The backend is not implemented in a way to share data across processes
# (e.g. Keystone in HTTPD). This proxy is a hack to get around the lack of
# isolation of values in memory. Currently it blindly stores and retrieves
# the values from the cache, and modifications to dicts/lists/etc returned
# can result in changes to the cached values. In short, do not use the
# dogpile.cache.memory backend unless you are running tests or expecting
# odd/strange results.
class CacheIsolatingProxy(proxy.ProxyBackend):
    """Proxy that forces a memory copy of stored values.

    The default in-memory cache-region does not perform a copy on values it
    is meant to cache. Therefore if the value is modified after set or after
    get, the cached value also is modified. This proxy does a copy as the
    last thing before storing data.

    In your application's tests, you'll want to set this as a proxy for the
    in-memory cache, like this::

        self.config_fixture.config(
            group='cache',
            backend='dogpile.cache.memory',
            enabled=True,
            proxies=['oslo_cache.testing.CacheIsolatingProxy'])

    """
    def get(self, key):
        return _copy_value(self.proxied.get(key))

    def set(self, key, value):
        self.proxied.set(key, _copy_value(value))
././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1708932956.1703951
oslo.cache-3.7.0/oslo_cache/tests/0000775000175000017500000000000000000000000017023 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0
oslo.cache-3.7.0/oslo_cache/tests/__init__.py0000664000175000017500000000116600000000000021140 0ustar00zuulzuul00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
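
# The package-level configure() call below registers the [cache] options on
# the global CONF object once, before any individual test module uses them.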
from oslo_cache import core from oslo_config import cfg core.configure(cfg.CONF) ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1708932956.1703951 oslo.cache-3.7.0/oslo_cache/tests/functional/0000775000175000017500000000000000000000000021165 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0 oslo.cache-3.7.0/oslo_cache/tests/functional/__init__.py0000664000175000017500000000000000000000000023264 0ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1708932956.1703951 oslo.cache-3.7.0/oslo_cache/tests/functional/dogpile_cache_bmemcached/0000775000175000017500000000000000000000000026063 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0 oslo.cache-3.7.0/oslo_cache/tests/functional/dogpile_cache_bmemcached/__init__.py0000664000175000017500000000000000000000000030162 0ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0 oslo.cache-3.7.0/oslo_cache/tests/functional/dogpile_cache_bmemcached/test_cache_backend.py0000664000175000017500000000215500000000000032211 0ustar00zuulzuul00000000000000# Copyright 2020 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_cache.tests.functional import test_base class TestDogpileCacheBMemcachedBackend(test_base.BaseTestCaseCacheBackend): def setUp(self): self.config_fixture.config( group="cache", backend="dogpile.cache.bmemcached", memcache_servers="localhost:11212", ) # NOTE(hberaud): super must be called after all to ensure that # config fixture is properly initialized with value related to # the current backend in use. super().setUp() ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1708932956.1703951 oslo.cache-3.7.0/oslo_cache/tests/functional/dogpile_cache_pymemcache/0000775000175000017500000000000000000000000026126 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0 oslo.cache-3.7.0/oslo_cache/tests/functional/dogpile_cache_pymemcache/__init__.py0000664000175000017500000000000000000000000030225 0ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0 oslo.cache-3.7.0/oslo_cache/tests/functional/dogpile_cache_pymemcache/test_cache_backend.py0000664000175000017500000000215500000000000032254 0ustar00zuulzuul00000000000000# Copyright 2021 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_cache.tests.functional import test_base class TestDogpileCachePyMemcacheBackend(test_base.BaseTestCaseCacheBackend): def setUp(self): self.config_fixture.config( group="cache", backend="dogpile.cache.pymemcache", memcache_servers="localhost:11212", ) # NOTE(hberaud): super must be called after all to ensure that # config fixture is properly initialized with value related to # the current backend in use. super().setUp() ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1708932956.1703951 oslo.cache-3.7.0/oslo_cache/tests/functional/dogpile_cache_redis/0000775000175000017500000000000000000000000025121 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0 oslo.cache-3.7.0/oslo_cache/tests/functional/dogpile_cache_redis/__init__.py0000664000175000017500000000000000000000000027220 0ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0 oslo.cache-3.7.0/oslo_cache/tests/functional/dogpile_cache_redis/test_cache_backend.py0000664000175000017500000000213600000000000031246 0ustar00zuulzuul00000000000000# Copyright 2024 OpenStack Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_cache.tests.functional import test_base class TestRedisCacheBackend(test_base.BaseTestCaseCacheBackend): def setUp(self): self.config_fixture.config( group='cache', backend='dogpile.cache.redis', redis_server='127.0.0.1:6379', ) # NOTE(hberaud): super must be called after all to ensure that # config fixture is properly initialized with value related to # the current backend in use. 
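        #
        # Note that redis_server above is a single "host:port" pair; it is
        # turned into a redis:// URL (or rediss:// when [cache] tls_enabled
        # is set) when the dogpile.cache arguments are built.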
super().setUp() ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1708932956.1703951 oslo.cache-3.7.0/oslo_cache/tests/functional/dogpile_cache_redis_sentinel/0000775000175000017500000000000000000000000027022 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0 oslo.cache-3.7.0/oslo_cache/tests/functional/dogpile_cache_redis_sentinel/__init__.py0000664000175000017500000000000000000000000031121 0ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0 oslo.cache-3.7.0/oslo_cache/tests/functional/dogpile_cache_redis_sentinel/test_cache_backend.py0000664000175000017500000000224500000000000033150 0ustar00zuulzuul00000000000000# Copyright 2024 OpenStack Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_cache.tests.functional import test_base class TestRedisSentinelCacheBackend(test_base.BaseTestCaseCacheBackend): def setUp(self): self.config_fixture.config( group='cache', backend='dogpile.cache.redis_sentinel', redis_sentinels=['127.0.0.1:6380'], redis_sentinel_service_name='pifpaf' ) # NOTE(hberaud): super must be called after all to ensure that # config fixture is properly initialized with value related to # the current backend in use. super().setUp() ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1708932956.1703951 oslo.cache-3.7.0/oslo_cache/tests/functional/etcd3gw/0000775000175000017500000000000000000000000022525 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0 oslo.cache-3.7.0/oslo_cache/tests/functional/etcd3gw/__init__.py0000664000175000017500000000000000000000000024624 0ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0 oslo.cache-3.7.0/oslo_cache/tests/functional/etcd3gw/test_cache_backend.py0000664000175000017500000000215600000000000026654 0ustar00zuulzuul00000000000000# Copyright 2020 OpenStack Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
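
# This backend is configured through the generic [cache] backend_argument
# option; each entry uses the "<argname>:<value>" format parsed by
# oslo_cache.core._build_cache_config().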
from oslo_cache.tests.functional import test_base class TestEtcdCacheBackend(test_base.BaseTestCaseCacheBackend): def setUp(self): self.config_fixture.config( group='cache', backend='oslo_cache.etcd3gw', backend_argument=['host:127.0.0.1', 'port:2379'] ) # NOTE(hberaud): super must be called after all to ensure that # config fixture is properly initialized with value related to # the current backend in use. super().setUp() ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1708932956.1703951 oslo.cache-3.7.0/oslo_cache/tests/functional/memcache_pool/0000775000175000017500000000000000000000000023760 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0 oslo.cache-3.7.0/oslo_cache/tests/functional/memcache_pool/__init__.py0000664000175000017500000000000000000000000026057 0ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0 oslo.cache-3.7.0/oslo_cache/tests/functional/memcache_pool/test_cache_backend.py0000664000175000017500000000364500000000000030113 0ustar00zuulzuul00000000000000# Copyright 2020 OpenStack Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import os from oslo_cache.tests.functional import test_base class TestMemcachePoolCacheBackend(test_base.BaseTestCaseCacheBackend): def setUp(self): MEMCACHED_PORT = os.getenv("OSLO_CACHE_TEST_MEMCACHED_PORT", "11211") self.config_fixture.config( group='cache', backend='oslo_cache.memcache_pool', enabled=True, memcache_servers=[f'localhost:{MEMCACHED_PORT}'] ) # NOTE(hberaud): super must be called after all to ensure that # config fixture is properly initialized with value related to # the current backend in use. super(TestMemcachePoolCacheBackend, self).setUp() class TestBMemcachePoolCacheBackend(test_base.BaseTestCaseCacheBackend): def setUp(self): MEMCACHED_PORT = os.getenv("OSLO_CACHE_TEST_MEMCACHED_PORT", "11211") # If the cache support the sasl, the memcache_sasl_enabled # should be True. self.config_fixture.config( group='cache', backend='oslo_cache.memcache_pool', enabled=True, memcache_servers=[f'localhost:{MEMCACHED_PORT}'], memcache_sasl_enabled=False, memcache_username='sasl_name', memcache_password='sasl_pswd' ) super(TestBMemcachePoolCacheBackend, self).setUp() ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0 oslo.cache-3.7.0/oslo_cache/tests/functional/test_base.py0000664000175000017500000002204400000000000023512 0ustar00zuulzuul00000000000000# Copyright 2014 Hewlett-Packard Development Company, L.P. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_config import fixture from oslo_utils import uuidutils from oslotest import base from oslo_cache import core as cache NO_VALUE = cache.NO_VALUE class BaseTestCaseCacheBackend(base.BaseTestCase): def setUp(self): super().setUp() self.conf = self.config_fixture.conf self.region = cache.create_region() self.region_kwargs = cache.create_region( function=cache.kwarg_function_key_generator ) cache.configure_cache_region(self.conf, self.region) cache.configure_cache_region(self.conf, self.region_kwargs) @property def config_fixture(self): if not hasattr(self, "_config_fixture"): self._config_fixture = self.useFixture(fixture.Config()) # default config for all tests self._config_fixture.config( group="cache", enabled=True, ) return self._config_fixture def test_backend_get_missing_data(self): random_key = uuidutils.generate_uuid(dashed=False) # should return NO_VALUE as key does not exist in cache self.assertEqual(NO_VALUE, self.region.get(random_key)) def test_backend_set_data(self): random_key = uuidutils.generate_uuid(dashed=False) self.region.set(random_key, "dummyValue") self.assertEqual("dummyValue", self.region.get(random_key)) def test_backend_set_none_as_data(self): random_key = uuidutils.generate_uuid(dashed=False) self.region.set(random_key, None) self.assertIsNone(self.region.get(random_key)) def test_backend_set_blank_as_data(self): random_key = uuidutils.generate_uuid(dashed=False) self.region.set(random_key, "") self.assertEqual("", self.region.get(random_key)) def test_backend_set_same_key_multiple_times(self): random_key = uuidutils.generate_uuid(dashed=False) self.region.set(random_key, "dummyValue") self.assertEqual("dummyValue", self.region.get(random_key)) dict_value = {'key1': 'value1'} self.region.set(random_key, dict_value) self.assertEqual(dict_value, self.region.get(random_key)) self.region.set(random_key, "dummyValue2") self.assertEqual("dummyValue2", self.region.get(random_key)) def test_backend_multi_set_data(self): random_key = uuidutils.generate_uuid(dashed=False) random_key1 = uuidutils.generate_uuid(dashed=False) random_key2 = uuidutils.generate_uuid(dashed=False) random_key3 = uuidutils.generate_uuid(dashed=False) mapping = {random_key1: 'dummyValue1', random_key2: 'dummyValue2', random_key3: 'dummyValue3'} self.region.set_multi(mapping) # should return NO_VALUE as key does not exist in cache self.assertEqual(NO_VALUE, self.region.get(random_key)) self.assertFalse(self.region.get(random_key)) self.assertEqual("dummyValue1", self.region.get(random_key1)) self.assertEqual("dummyValue2", self.region.get(random_key2)) self.assertEqual("dummyValue3", self.region.get(random_key3)) def test_backend_multi_get_data(self): random_key = uuidutils.generate_uuid(dashed=False) random_key1 = uuidutils.generate_uuid(dashed=False) random_key2 = uuidutils.generate_uuid(dashed=False) random_key3 = uuidutils.generate_uuid(dashed=False) mapping = {random_key1: 'dummyValue1', random_key2: '', random_key3: 'dummyValue3'} self.region.set_multi(mapping) keys = [random_key, random_key1, random_key2, random_key3] results = self.region.get_multi(keys) # should return NO_VALUE as key 
does not exist in cache self.assertEqual(NO_VALUE, results[0]) self.assertEqual("dummyValue1", results[1]) self.assertEqual("", results[2]) self.assertEqual("dummyValue3", results[3]) def test_backend_multi_set_should_update_existing(self): random_key = uuidutils.generate_uuid(dashed=False) random_key1 = uuidutils.generate_uuid(dashed=False) random_key2 = uuidutils.generate_uuid(dashed=False) random_key3 = uuidutils.generate_uuid(dashed=False) mapping = {random_key1: 'dummyValue1', random_key2: 'dummyValue2', random_key3: 'dummyValue3'} self.region.set_multi(mapping) # should return NO_VALUE as key does not exist in cache self.assertEqual(NO_VALUE, self.region.get(random_key)) self.assertEqual("dummyValue1", self.region.get(random_key1)) self.assertEqual("dummyValue2", self.region.get(random_key2)) self.assertEqual("dummyValue3", self.region.get(random_key3)) mapping = {random_key1: 'dummyValue4', random_key2: 'dummyValue5'} self.region.set_multi(mapping) self.assertEqual(NO_VALUE, self.region.get(random_key)) self.assertEqual("dummyValue4", self.region.get(random_key1)) self.assertEqual("dummyValue5", self.region.get(random_key2)) self.assertEqual("dummyValue3", self.region.get(random_key3)) def test_backend_multi_set_get_with_blanks_none(self): random_key = uuidutils.generate_uuid(dashed=False) random_key1 = uuidutils.generate_uuid(dashed=False) random_key2 = uuidutils.generate_uuid(dashed=False) random_key3 = uuidutils.generate_uuid(dashed=False) random_key4 = uuidutils.generate_uuid(dashed=False) mapping = {random_key1: 'dummyValue1', random_key2: None, random_key3: '', random_key4: 'dummyValue4'} self.region.set_multi(mapping) # should return NO_VALUE as key does not exist in cache self.assertEqual(NO_VALUE, self.region.get(random_key)) self.assertEqual("dummyValue1", self.region.get(random_key1)) self.assertIsNone(self.region.get(random_key2)) self.assertEqual("", self.region.get(random_key3)) self.assertEqual("dummyValue4", self.region.get(random_key4)) keys = [random_key, random_key1, random_key2, random_key3, random_key4] results = self.region.get_multi(keys) # should return NO_VALUE as key does not exist in cache self.assertEqual(NO_VALUE, results[0]) self.assertEqual("dummyValue1", results[1]) self.assertIsNone(results[2]) self.assertEqual("", results[3]) self.assertEqual("dummyValue4", results[4]) mapping = {random_key1: 'dummyValue5', random_key2: 'dummyValue6'} self.region.set_multi(mapping) self.assertEqual(NO_VALUE, self.region.get(random_key)) self.assertEqual("dummyValue5", self.region.get(random_key1)) self.assertEqual("dummyValue6", self.region.get(random_key2)) self.assertEqual("", self.region.get(random_key3)) def test_backend_delete_data(self): random_key = uuidutils.generate_uuid(dashed=False) self.region.set(random_key, "dummyValue") self.assertEqual("dummyValue", self.region.get(random_key)) self.region.delete(random_key) # should return NO_VALUE as key no longer exists in cache self.assertEqual(NO_VALUE, self.region.get(random_key)) def test_backend_multi_delete_data(self): random_key = uuidutils.generate_uuid(dashed=False) random_key1 = uuidutils.generate_uuid(dashed=False) random_key2 = uuidutils.generate_uuid(dashed=False) random_key3 = uuidutils.generate_uuid(dashed=False) mapping = {random_key1: 'dummyValue1', random_key2: 'dummyValue2', random_key3: 'dummyValue3'} self.region.set_multi(mapping) # should return NO_VALUE as key does not exist in cache self.assertEqual(NO_VALUE, self.region.get(random_key)) self.assertEqual("dummyValue1", 
self.region.get(random_key1)) self.assertEqual("dummyValue2", self.region.get(random_key2)) self.assertEqual("dummyValue3", self.region.get(random_key3)) self.assertEqual(NO_VALUE, self.region.get("InvalidKey")) keys = mapping.keys() self.region.delete_multi(keys) self.assertEqual(NO_VALUE, self.region.get("InvalidKey")) # should return NO_VALUE as keys no longer exist in cache self.assertEqual(NO_VALUE, self.region.get(random_key1)) self.assertEqual(NO_VALUE, self.region.get(random_key2)) self.assertEqual(NO_VALUE, self.region.get(random_key3)) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0 oslo.cache-3.7.0/oslo_cache/tests/test_cache.py0000664000175000017500000000224500000000000021502 0ustar00zuulzuul00000000000000# -*- coding: utf-8 -*- # Copyright 2013 Metacloud # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_config import fixture as config_fixture from oslotest import base class BaseTestCase(base.BaseTestCase): def setUp(self): super(BaseTestCase, self).setUp() self.config_fixture = self.useFixture(config_fixture.Config()) self.config_fixture.config( # TODO(morganfainberg): Make Cache Testing a separate test case # in tempest, and move it out of the base unit tests. group='cache', backend='dogpile.cache.memory', enabled=True, proxies=['oslo_cache.testing.CacheIsolatingProxy']) ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1708932956.1703951 oslo.cache-3.7.0/oslo_cache/tests/unit/0000775000175000017500000000000000000000000020002 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0 oslo.cache-3.7.0/oslo_cache/tests/unit/__init__.py0000664000175000017500000000000000000000000022101 0ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0 oslo.cache-3.7.0/oslo_cache/tests/unit/test_cache_backend_mongo.py0000664000175000017500000006660300000000000025337 0ustar00zuulzuul00000000000000# Copyright 2014 Hewlett-Packard Development Company, L.P. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import collections.abc import copy import functools from dogpile.cache import region as dp_region from oslo_utils import uuidutils from oslo_cache.backends import mongo from oslo_cache import core from oslo_cache import exception from oslo_cache.tests import test_cache # Mock database structure sample where 'ks_cache' is database and # 'cache' is collection. 
Dogpile CachedValue data is divided in two # fields `value` (CachedValue.payload) and `meta` (CachedValue.metadata) ks_cache = { "cache": [ { "value": { "serviceType": "identity", "allVersionsUrl": "https://dummyUrl", "dateLastModified": "ISODDate(2014-02-08T18:39:13.237Z)", "serviceName": "Identity", "enabled": "True" }, "meta": { "v": 1, "ct": 1392371422.015121 }, "doc_date": "ISODate('2014-02-14T09:50:22.015Z')", "_id": "8251dc95f63842719c077072f1047ddf" }, { "value": "dummyValueX", "meta": { "v": 1, "ct": 1392371422.014058 }, "doc_date": "ISODate('2014-02-14T09:50:22.014Z')", "_id": "66730b9534d146f0804d23729ad35436" } ] } COLLECTIONS = {} SON_MANIPULATOR = None NO_VALUE = core.NO_VALUE class MockCursor(object): def __init__(self, collection, dataset_factory): super(MockCursor, self).__init__() self.collection = collection self._factory = dataset_factory self._dataset = self._factory() self._limit = None self._skip = None def __iter__(self): return self def __next__(self): if self._skip: for _ in range(self._skip): next(self._dataset) self._skip = None if self._limit is not None and self._limit <= 0: raise StopIteration() if self._limit is not None: self._limit -= 1 return next(self._dataset) next = __next__ def __getitem__(self, index): arr = [x for x in self._dataset] self._dataset = iter(arr) return arr[index] class MockCollection(object): def __init__(self, db, name): super(MockCollection, self).__init__() self.name = name self._collection_database = db self._documents = {} self.write_concern = {} def __getattr__(self, name): if name == 'database': return self._collection_database def ensure_index(self, key_or_list, *args, **kwargs): pass def index_information(self): return {} def find_one(self, spec_or_id=None, *args, **kwargs): if spec_or_id is None: spec_or_id = {} if not isinstance(spec_or_id, collections.abc.Mapping): spec_or_id = {'_id': spec_or_id} try: return next(self.find(spec_or_id, *args, **kwargs)) except StopIteration: return None def find(self, spec=None, *args, **kwargs): return MockCursor(self, functools.partial(self._get_dataset, spec)) def _get_dataset(self, spec): dataset = (self._copy_doc(document, dict) for document in self._iter_documents(spec)) return dataset def _iter_documents(self, spec=None): return (SON_MANIPULATOR.transform_outgoing(document, self) for document in self._documents.values() if self._apply_filter(document, spec)) def _apply_filter(self, document, query): for key, search in query.items(): doc_val = document.get(key) if isinstance(search, dict): op_dict = {'$in': lambda dv, sv: dv in sv} is_match = all( op_str in op_dict and op_dict[op_str](doc_val, search_val) for op_str, search_val in search.items() ) else: is_match = doc_val == search return is_match def _copy_doc(self, obj, container): if isinstance(obj, list): new = [] for item in obj: new.append(self._copy_doc(item, container)) return new if isinstance(obj, dict): new = container() for key, value in list(obj.items()): new[key] = self._copy_doc(value, container) return new else: return copy.copy(obj) def insert(self, data, manipulate=True, **kwargs): if isinstance(data, list): return [self._insert(element) for element in data] return self._insert(data) def save(self, data, manipulate=True, **kwargs): return self._insert(data) def _insert(self, data): if '_id' not in data: data['_id'] = uuidutils.generate_uuid(dashed=False) object_id = data['_id'] self._documents[object_id] = self._internalize_dict(data) return object_id def find_and_modify(self, spec, document, upsert=False, **kwargs): 
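        # NOTE: this mock simply delegates to update(); the document a real
        # pymongo find_and_modify() would return is not modelled here.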
self.update(spec, document, upsert, **kwargs) def update(self, spec, document, upsert=False, **kwargs): existing_docs = [doc for doc in self._documents.values() if self._apply_filter(doc, spec)] if existing_docs: existing_doc = existing_docs[0] # should find only 1 match _id = existing_doc['_id'] existing_doc.clear() existing_doc['_id'] = _id existing_doc.update(self._internalize_dict(document)) elif upsert: existing_doc = self._documents[self._insert(document)] def _internalize_dict(self, d): return {k: copy.deepcopy(v) for k, v in d.items()} def remove(self, spec_or_id=None, search_filter=None): """Remove objects matching spec_or_id from the collection.""" if spec_or_id is None: spec_or_id = search_filter if search_filter else {} if not isinstance(spec_or_id, dict): spec_or_id = {'_id': spec_or_id} to_delete = list(self.find(spec=spec_or_id)) for doc in to_delete: doc_id = doc['_id'] del self._documents[doc_id] return { "connectionId": uuidutils.generate_uuid(dashed=False), "n": len(to_delete), "ok": 1.0, "err": None, } class MockMongoDB(object): def __init__(self, dbname): self._dbname = dbname def authenticate(self, username, password): pass def add_son_manipulator(self, manipulator): global SON_MANIPULATOR SON_MANIPULATOR = manipulator def __getattr__(self, name): if name == 'authenticate': return self.authenticate elif name == 'name': return self._dbname elif name == 'add_son_manipulator': return self.add_son_manipulator else: return get_collection(self._dbname, name) def __getitem__(self, name): return get_collection(self._dbname, name) class MockMongoClient(object): def __init__(self, *args, **kwargs): pass def __getattr__(self, dbname): return MockMongoDB(dbname) def get_collection(db_name, collection_name): mongo_collection = MockCollection(MockMongoDB(db_name), collection_name) return mongo_collection def pymongo_override(): global pymongo import pymongo if pymongo.MongoClient is not MockMongoClient: pymongo.MongoClient = MockMongoClient class MyTransformer(mongo.BaseTransform): """Added here just to check manipulator logic is used correctly.""" def transform_incoming(self, son, collection): return super(MyTransformer, self).transform_incoming(son, collection) def transform_outgoing(self, son, collection): return super(MyTransformer, self).transform_outgoing(son, collection) class MongoCache(test_cache.BaseTestCase): def setUp(self): super(MongoCache, self).setUp() global COLLECTIONS COLLECTIONS = {} mongo.MongoApi._DB = {} mongo.MongoApi._MONGO_COLLS = {} pymongo_override() # using typical configuration self.arguments = { 'db_hosts': 'localhost:27017', 'db_name': 'ks_cache', 'cache_collection': 'cache', 'username': 'test_user', 'password': 'test_password' } def test_missing_db_hosts(self): self.arguments.pop('db_hosts') region = dp_region.make_region() self.assertRaises(exception.ConfigurationError, region.configure, 'oslo_cache.mongo', arguments=self.arguments) def test_missing_db_name(self): self.arguments.pop('db_name') region = dp_region.make_region() self.assertRaises(exception.ConfigurationError, region.configure, 'oslo_cache.mongo', arguments=self.arguments) def test_missing_cache_collection_name(self): self.arguments.pop('cache_collection') region = dp_region.make_region() self.assertRaises(exception.ConfigurationError, region.configure, 'oslo_cache.mongo', arguments=self.arguments) def test_incorrect_write_concern(self): self.arguments['w'] = 'one value' region = dp_region.make_region() self.assertRaises(exception.ConfigurationError, region.configure, 
'oslo_cache.mongo', arguments=self.arguments) def test_correct_write_concern(self): self.arguments['w'] = 1 region = dp_region.make_region().configure('oslo_cache.mongo', arguments=self.arguments) random_key = uuidutils.generate_uuid(dashed=False) region.set(random_key, "dummyValue10") # There is no proxy so can access MongoCacheBackend directly self.assertEqual(1, region.backend.api.w) def test_incorrect_read_preference(self): self.arguments['read_preference'] = 'inValidValue' region = dp_region.make_region().configure('oslo_cache.mongo', arguments=self.arguments) # As per delayed loading of pymongo, read_preference value should # still be string and NOT enum self.assertEqual('inValidValue', region.backend.api.read_preference) random_key = uuidutils.generate_uuid(dashed=False) self.assertRaises(ValueError, region.set, random_key, "dummyValue10") def test_correct_read_preference(self): self.arguments['read_preference'] = 'secondaryPreferred' region = dp_region.make_region().configure('oslo_cache.mongo', arguments=self.arguments) # As per delayed loading of pymongo, read_preference value should # still be string and NOT enum self.assertEqual('secondaryPreferred', region.backend.api.read_preference) random_key = uuidutils.generate_uuid(dashed=False) region.set(random_key, "dummyValue10") # Now as pymongo is loaded so expected read_preference value is enum. # There is no proxy so can access MongoCacheBackend directly self.assertEqual(3, region.backend.api.read_preference) def test_missing_replica_set_name(self): self.arguments['use_replica'] = True region = dp_region.make_region() self.assertRaises(exception.ConfigurationError, region.configure, 'oslo_cache.mongo', arguments=self.arguments) def test_provided_replica_set_name(self): self.arguments['use_replica'] = True self.arguments['replicaset_name'] = 'my_replica' dp_region.make_region().configure('oslo_cache.mongo', arguments=self.arguments) self.assertTrue(True) # reached here means no initialization error def test_incorrect_mongo_ttl_seconds(self): self.arguments['mongo_ttl_seconds'] = 'sixty' region = dp_region.make_region() self.assertRaises(exception.ConfigurationError, region.configure, 'oslo_cache.mongo', arguments=self.arguments) def test_cache_configuration_values_assertion(self): self.arguments['use_replica'] = True self.arguments['replicaset_name'] = 'my_replica' self.arguments['mongo_ttl_seconds'] = 60 self.arguments['ssl'] = False region = dp_region.make_region().configure('oslo_cache.mongo', arguments=self.arguments) # There is no proxy so can access MongoCacheBackend directly self.assertEqual('localhost:27017', region.backend.api.hosts) self.assertEqual('ks_cache', region.backend.api.db_name) self.assertEqual('cache', region.backend.api.cache_collection) self.assertEqual('test_user', region.backend.api.username) self.assertEqual('test_password', region.backend.api.password) self.assertEqual(True, region.backend.api.use_replica) self.assertEqual('my_replica', region.backend.api.replicaset_name) self.assertEqual(False, region.backend.api.conn_kwargs['ssl']) self.assertEqual(60, region.backend.api.ttl_seconds) def test_multiple_region_cache_configuration(self): arguments1 = copy.copy(self.arguments) arguments1['cache_collection'] = 'cache_region1' region1 = dp_region.make_region().configure('oslo_cache.mongo', arguments=arguments1) # There is no proxy so can access MongoCacheBackend directly self.assertEqual('localhost:27017', region1.backend.api.hosts) self.assertEqual('ks_cache', region1.backend.api.db_name) 
self.assertEqual('cache_region1', region1.backend.api.cache_collection) self.assertEqual('test_user', region1.backend.api.username) self.assertEqual('test_password', region1.backend.api.password) # Should be None because of delayed initialization self.assertIsNone(region1.backend.api._data_manipulator) random_key1 = uuidutils.generate_uuid(dashed=False) region1.set(random_key1, "dummyValue10") self.assertEqual("dummyValue10", region1.get(random_key1)) # Now should have initialized self.assertIsInstance(region1.backend.api._data_manipulator, mongo.BaseTransform) class_name = '%s.%s' % (MyTransformer.__module__, "MyTransformer") arguments2 = copy.copy(self.arguments) arguments2['cache_collection'] = 'cache_region2' arguments2['son_manipulator'] = class_name region2 = dp_region.make_region().configure('oslo_cache.mongo', arguments=arguments2) # There is no proxy so can access MongoCacheBackend directly self.assertEqual('localhost:27017', region2.backend.api.hosts) self.assertEqual('ks_cache', region2.backend.api.db_name) self.assertEqual('cache_region2', region2.backend.api.cache_collection) # Should be None because of delayed initialization self.assertIsNone(region2.backend.api._data_manipulator) random_key = uuidutils.generate_uuid(dashed=False) region2.set(random_key, "dummyValue20") self.assertEqual("dummyValue20", region2.get(random_key)) # Now should have initialized self.assertIsInstance(region2.backend.api._data_manipulator, MyTransformer) region1.set(random_key1, "dummyValue22") self.assertEqual("dummyValue22", region1.get(random_key1)) def test_typical_configuration(self): dp_region.make_region().configure( 'oslo_cache.mongo', arguments=self.arguments ) self.assertTrue(True) # reached here means no initialization error def test_backend_get_missing_data(self): region = dp_region.make_region().configure( 'oslo_cache.mongo', arguments=self.arguments ) random_key = uuidutils.generate_uuid(dashed=False) # should return NO_VALUE as key does not exist in cache self.assertEqual(NO_VALUE, region.get(random_key)) def test_backend_set_data(self): region = dp_region.make_region().configure( 'oslo_cache.mongo', arguments=self.arguments ) random_key = uuidutils.generate_uuid(dashed=False) region.set(random_key, "dummyValue") self.assertEqual("dummyValue", region.get(random_key)) def test_backend_set_data_with_string_as_valid_ttl(self): self.arguments['mongo_ttl_seconds'] = '3600' region = dp_region.make_region().configure('oslo_cache.mongo', arguments=self.arguments) self.assertEqual(3600, region.backend.api.ttl_seconds) random_key = uuidutils.generate_uuid(dashed=False) region.set(random_key, "dummyValue") self.assertEqual("dummyValue", region.get(random_key)) def test_backend_set_data_with_int_as_valid_ttl(self): self.arguments['mongo_ttl_seconds'] = 1800 region = dp_region.make_region().configure('oslo_cache.mongo', arguments=self.arguments) self.assertEqual(1800, region.backend.api.ttl_seconds) random_key = uuidutils.generate_uuid(dashed=False) region.set(random_key, "dummyValue") self.assertEqual("dummyValue", region.get(random_key)) def test_backend_set_none_as_data(self): region = dp_region.make_region().configure( 'oslo_cache.mongo', arguments=self.arguments ) random_key = uuidutils.generate_uuid(dashed=False) region.set(random_key, None) self.assertIsNone(region.get(random_key)) def test_backend_set_blank_as_data(self): region = dp_region.make_region().configure( 'oslo_cache.mongo', arguments=self.arguments ) random_key = uuidutils.generate_uuid(dashed=False) region.set(random_key, "") 
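        # an empty string is a legitimate cached value and must round-trip
        # unchanged rather than being reported as a miss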
self.assertEqual("", region.get(random_key)) def test_backend_set_same_key_multiple_times(self): region = dp_region.make_region().configure( 'oslo_cache.mongo', arguments=self.arguments ) random_key = uuidutils.generate_uuid(dashed=False) region.set(random_key, "dummyValue") self.assertEqual("dummyValue", region.get(random_key)) dict_value = {'key1': 'value1'} region.set(random_key, dict_value) self.assertEqual(dict_value, region.get(random_key)) region.set(random_key, "dummyValue2") self.assertEqual("dummyValue2", region.get(random_key)) def test_backend_multi_set_data(self): region = dp_region.make_region().configure( 'oslo_cache.mongo', arguments=self.arguments ) random_key = uuidutils.generate_uuid(dashed=False) random_key1 = uuidutils.generate_uuid(dashed=False) random_key2 = uuidutils.generate_uuid(dashed=False) random_key3 = uuidutils.generate_uuid(dashed=False) mapping = {random_key1: 'dummyValue1', random_key2: 'dummyValue2', random_key3: 'dummyValue3'} region.set_multi(mapping) # should return NO_VALUE as key does not exist in cache self.assertEqual(NO_VALUE, region.get(random_key)) self.assertFalse(region.get(random_key)) self.assertEqual("dummyValue1", region.get(random_key1)) self.assertEqual("dummyValue2", region.get(random_key2)) self.assertEqual("dummyValue3", region.get(random_key3)) def test_backend_multi_get_data(self): region = dp_region.make_region().configure( 'oslo_cache.mongo', arguments=self.arguments ) random_key = uuidutils.generate_uuid(dashed=False) random_key1 = uuidutils.generate_uuid(dashed=False) random_key2 = uuidutils.generate_uuid(dashed=False) random_key3 = uuidutils.generate_uuid(dashed=False) mapping = {random_key1: 'dummyValue1', random_key2: '', random_key3: 'dummyValue3'} region.set_multi(mapping) keys = [random_key, random_key1, random_key2, random_key3] results = region.get_multi(keys) # should return NO_VALUE as key does not exist in cache self.assertEqual(NO_VALUE, results[0]) self.assertEqual("dummyValue1", results[1]) self.assertEqual("", results[2]) self.assertEqual("dummyValue3", results[3]) def test_backend_multi_set_should_update_existing(self): region = dp_region.make_region().configure( 'oslo_cache.mongo', arguments=self.arguments ) random_key = uuidutils.generate_uuid(dashed=False) random_key1 = uuidutils.generate_uuid(dashed=False) random_key2 = uuidutils.generate_uuid(dashed=False) random_key3 = uuidutils.generate_uuid(dashed=False) mapping = {random_key1: 'dummyValue1', random_key2: 'dummyValue2', random_key3: 'dummyValue3'} region.set_multi(mapping) # should return NO_VALUE as key does not exist in cache self.assertEqual(NO_VALUE, region.get(random_key)) self.assertEqual("dummyValue1", region.get(random_key1)) self.assertEqual("dummyValue2", region.get(random_key2)) self.assertEqual("dummyValue3", region.get(random_key3)) mapping = {random_key1: 'dummyValue4', random_key2: 'dummyValue5'} region.set_multi(mapping) self.assertEqual(NO_VALUE, region.get(random_key)) self.assertEqual("dummyValue4", region.get(random_key1)) self.assertEqual("dummyValue5", region.get(random_key2)) self.assertEqual("dummyValue3", region.get(random_key3)) def test_backend_multi_set_get_with_blanks_none(self): region = dp_region.make_region().configure( 'oslo_cache.mongo', arguments=self.arguments ) random_key = uuidutils.generate_uuid(dashed=False) random_key1 = uuidutils.generate_uuid(dashed=False) random_key2 = uuidutils.generate_uuid(dashed=False) random_key3 = uuidutils.generate_uuid(dashed=False) random_key4 = uuidutils.generate_uuid(dashed=False) 
        mapping = {random_key1: 'dummyValue1',
                   random_key2: None,
                   random_key3: '',
                   random_key4: 'dummyValue4'}
        region.set_multi(mapping)
        # should return NO_VALUE as key does not exist in cache
        self.assertEqual(NO_VALUE, region.get(random_key))
        self.assertEqual("dummyValue1", region.get(random_key1))
        self.assertIsNone(region.get(random_key2))
        self.assertEqual("", region.get(random_key3))
        self.assertEqual("dummyValue4", region.get(random_key4))

        keys = [random_key, random_key1, random_key2, random_key3,
                random_key4]
        results = region.get_multi(keys)
        # should return NO_VALUE as key does not exist in cache
        self.assertEqual(NO_VALUE, results[0])
        self.assertEqual("dummyValue1", results[1])
        self.assertIsNone(results[2])
        self.assertEqual("", results[3])
        self.assertEqual("dummyValue4", results[4])

        mapping = {random_key1: 'dummyValue5',
                   random_key2: 'dummyValue6'}
        region.set_multi(mapping)
        self.assertEqual(NO_VALUE, region.get(random_key))
        self.assertEqual("dummyValue5", region.get(random_key1))
        self.assertEqual("dummyValue6", region.get(random_key2))
        self.assertEqual("", region.get(random_key3))

    def test_backend_delete_data(self):
        region = dp_region.make_region().configure(
            'oslo_cache.mongo',
            arguments=self.arguments
        )

        random_key = uuidutils.generate_uuid(dashed=False)
        region.set(random_key, "dummyValue")
        self.assertEqual("dummyValue", region.get(random_key))

        region.delete(random_key)
        # should return NO_VALUE as key no longer exists in cache
        self.assertEqual(NO_VALUE, region.get(random_key))

    def test_backend_multi_delete_data(self):
        region = dp_region.make_region().configure(
            'oslo_cache.mongo',
            arguments=self.arguments
        )

        random_key = uuidutils.generate_uuid(dashed=False)
        random_key1 = uuidutils.generate_uuid(dashed=False)
        random_key2 = uuidutils.generate_uuid(dashed=False)
        random_key3 = uuidutils.generate_uuid(dashed=False)
        mapping = {random_key1: 'dummyValue1',
                   random_key2: 'dummyValue2',
                   random_key3: 'dummyValue3'}
        region.set_multi(mapping)
        # should return NO_VALUE as key does not exist in cache
        self.assertEqual(NO_VALUE, region.get(random_key))
        self.assertEqual("dummyValue1", region.get(random_key1))
        self.assertEqual("dummyValue2", region.get(random_key2))
        self.assertEqual("dummyValue3", region.get(random_key3))
        self.assertEqual(NO_VALUE, region.get("InvalidKey"))

        keys = mapping.keys()

        region.delete_multi(keys)

        self.assertEqual(NO_VALUE, region.get("InvalidKey"))
        # should return NO_VALUE as keys no longer exist in cache
        self.assertEqual(NO_VALUE, region.get(random_key1))
        self.assertEqual(NO_VALUE, region.get(random_key2))
        self.assertEqual(NO_VALUE, region.get(random_key3))

    def test_additional_crud_method_arguments_support(self):
        """Additional arguments should work across find/insert/update."""
        self.arguments['wtimeout'] = 30000
        self.arguments['j'] = True
        self.arguments['continue_on_error'] = True
        self.arguments['secondary_acceptable_latency_ms'] = 60
        region = dp_region.make_region().configure(
            'oslo_cache.mongo',
            arguments=self.arguments
        )

        # There is no proxy, so we can access MongoCacheBackend directly
        api_methargs = region.backend.api.meth_kwargs
        self.assertEqual(30000, api_methargs['wtimeout'])
        self.assertEqual(True, api_methargs['j'])
        self.assertEqual(True, api_methargs['continue_on_error'])
        self.assertEqual(60, api_methargs['secondary_acceptable_latency_ms'])

        random_key = uuidutils.generate_uuid(dashed=False)
        region.set(random_key, "dummyValue1")
        self.assertEqual("dummyValue1", region.get(random_key))

        region.set(random_key, "dummyValue2")
        self.assertEqual("dummyValue2", region.get(random_key))
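        # A fresh key exercises the insert path once more, so the extra
        # keyword arguments are covered across find, insert and update.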
random_key = uuidutils.generate_uuid(dashed=False) region.set(random_key, "dummyValue3") self.assertEqual("dummyValue3", region.get(random_key)) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0 oslo.cache-3.7.0/oslo_cache/tests/unit/test_cache_basics.py0000664000175000017500000011507400000000000024012 0ustar00zuulzuul00000000000000# -*- coding: utf-8 -*- # Copyright 2013 Metacloud # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import copy import ssl import time from unittest import mock from dogpile.cache import proxy from oslo_config import cfg from oslo_utils import uuidutils from pymemcache import KeepaliveOpts from oslo_cache import _opts from oslo_cache import core as cache from oslo_cache import exception from oslo_cache.tests import test_cache NO_VALUE = cache.NO_VALUE TEST_GROUP = uuidutils.generate_uuid(dashed=False) TEST_GROUP2 = uuidutils.generate_uuid(dashed=False) def _copy_value(value): if value is not NO_VALUE: value = copy.deepcopy(value) return value class TestProxy(proxy.ProxyBackend): def get(self, key): value = _copy_value(self.proxied.get(key)) if value is not NO_VALUE: if isinstance(value[0], TestProxyValue): value[0].cached = True return value class TestProxyValue(object): def __init__(self, value): self.value = value self.cached = False class CacheRegionTest(test_cache.BaseTestCase): def setUp(self): super(CacheRegionTest, self).setUp() self.region = cache.create_region() cache.configure_cache_region(self.config_fixture.conf, self.region) self.region.wrap(TestProxy) self.region_kwargs = cache.create_region( function=cache.kwarg_function_key_generator) cache.configure_cache_region(self.config_fixture.conf, self.region_kwargs) self.region_kwargs.wrap(TestProxy) self.test_value = TestProxyValue('Decorator Test') def _add_test_caching_option(self): self.config_fixture.register_opt( cfg.BoolOpt('caching', default=True), group='cache') def _add_dummy_config_group(self): self.config_fixture.register_opt( cfg.IntOpt('cache_time'), group=TEST_GROUP) self.config_fixture.register_opt( cfg.IntOpt('cache_time'), group=TEST_GROUP2) def _get_cacheable_function(self, region=None): region = region if region else self.region memoize = cache.get_memoization_decorator( self.config_fixture.conf, region, group='cache') @memoize def cacheable_function(value=0, **kw): return value return cacheable_function def test_region_built_with_proxy_direct_cache_test(self): # Verify cache regions are properly built with proxies. test_value = TestProxyValue('Direct Cache Test') self.region.set('cache_test', test_value) cached_value = self.region.get('cache_test') self.assertTrue(cached_value.cached) def test_cache_region_no_error_multiple_config(self): # Verify configuring the CacheRegion again doesn't error. 
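        # (configure_cache_region() is expected to be idempotent, so a second
        # call on an already configured region must not raise)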
        cache.configure_cache_region(self.config_fixture.conf, self.region)
        cache.configure_cache_region(self.config_fixture.conf, self.region)

    def _get_cache_fallthrough_fn(self, cache_time):
        memoize = cache.get_memoization_decorator(
            self.config_fixture.conf,
            self.region,
            group='cache',
            expiration_group=TEST_GROUP2)

        class _test_obj(object):
            def __init__(self, value):
                self.test_value = value

            @memoize
            def get_test_value(self):
                return self.test_value

        def _do_test(value):
            test_obj = _test_obj(value)

            # Ensure the value has been cached
            test_obj.get_test_value()
            # Get the now cached value
            cached_value = test_obj.get_test_value()
            self.assertTrue(cached_value.cached)
            self.assertEqual(value.value, cached_value.value)
            self.assertEqual(cached_value.value, test_obj.test_value.value)
            # Change the underlying value on the test object.
            test_obj.test_value = TestProxyValue(
                uuidutils.generate_uuid(dashed=False))
            self.assertEqual(cached_value.value,
                             test_obj.get_test_value().value)
            # override the system time to ensure the non-cached new value
            # is returned
            new_time = time.time() + (cache_time * 2)
            with mock.patch.object(time, 'time', return_value=new_time):
                overridden_cache_value = test_obj.get_test_value()
                self.assertNotEqual(cached_value.value,
                                    overridden_cache_value.value)
                self.assertEqual(test_obj.test_value.value,
                                 overridden_cache_value.value)

        return _do_test

    def test_cache_no_fallthrough_expiration_time_fn(self):
        self._add_dummy_config_group()
        # Since we do not re-configure the cache region, for ease of testing
        # this value is set the same as the expiration_time default in the
        # [cache] group
        cache_time = 600
        expiration_time = cache._get_expiration_time_fn(
            self.config_fixture.conf, TEST_GROUP)
        do_test = self._get_cache_fallthrough_fn(cache_time)
        # Run the test with the dummy group cache_time value
        self.config_fixture.config(cache_time=cache_time, group=TEST_GROUP)
        test_value = TestProxyValue(uuidutils.generate_uuid(dashed=False))
        self.assertEqual(cache_time, expiration_time())
        do_test(value=test_value)

    def test_cache_fallthrough_expiration_time_fn(self):
        self._add_dummy_config_group()
        # Since we do not re-configure the cache region, for ease of testing
        # this value is set to just under the expiration_time default in the
        # [cache] group
        cache_time = 599
        expiration_time = cache._get_expiration_time_fn(
            self.config_fixture.conf, TEST_GROUP)
        do_test = self._get_cache_fallthrough_fn(cache_time)
        # Run the test with the dummy group cache_time value set to None and
        # the global value set.
        self.config_fixture.config(cache_time=None, group=TEST_GROUP)
        test_value = TestProxyValue(
            uuidutils.generate_uuid(dashed=False))
        self.assertIsNone(expiration_time())
        do_test(value=test_value)

    def test_should_cache_fn_global_cache_enabled(self):
        # Verify should_cache_fn generates a sane function for subsystem and
        # functions as expected with caching globally enabled.
        cacheable_function = self._get_cacheable_function()

        self.config_fixture.config(group='cache', enabled=True)
        cacheable_function(self.test_value)
        cached_value = cacheable_function(self.test_value)
        self.assertTrue(cached_value.cached)

    def test_should_cache_fn_global_cache_disabled(self):
        # Verify should_cache_fn generates a sane function for subsystem and
        # functions as expected with caching globally disabled.
cacheable_function = self._get_cacheable_function() self.config_fixture.config(group='cache', enabled=False) cacheable_function(self.test_value) cached_value = cacheable_function(self.test_value) self.assertFalse(cached_value.cached) def test_should_cache_fn_global_cache_disabled_group_cache_enabled(self): # Verify should_cache_fn generates a sane function for subsystem and # functions as expected with caching globally disabled and the specific # group caching enabled. cacheable_function = self._get_cacheable_function() self._add_test_caching_option() self.config_fixture.config(group='cache', enabled=False) self.config_fixture.config(group='cache', caching=True) cacheable_function(self.test_value) cached_value = cacheable_function(self.test_value) self.assertFalse(cached_value.cached) def test_should_cache_fn_global_cache_enabled_group_cache_disabled(self): # Verify should_cache_fn generates a sane function for subsystem and # functions as expected with caching globally enabled and the specific # group caching disabled. cacheable_function = self._get_cacheable_function() self._add_test_caching_option() self.config_fixture.config(group='cache', enabled=True) self.config_fixture.config(group='cache', caching=False) cacheable_function(self.test_value) cached_value = cacheable_function(self.test_value) self.assertFalse(cached_value.cached) def test_should_cache_fn_global_cache_enabled_group_cache_enabled(self): # Verify should_cache_fn generates a sane function for subsystem and # functions as expected with caching globally enabled and the specific # group caching enabled. cacheable_function = self._get_cacheable_function() self._add_test_caching_option() self.config_fixture.config(group='cache', enabled=True) self.config_fixture.config(group='cache', caching=True) cacheable_function(self.test_value) cached_value = cacheable_function(self.test_value) self.assertTrue(cached_value.cached) def test_cache_dictionary_config_builder(self): """Validate we build a sane dogpile.cache dictionary config.""" self.config_fixture.config(group='cache', config_prefix='test_prefix', backend='oslo_cache.dict', expiration_time=86400, backend_argument=['arg1:test', 'arg2:test:test', 'arg3.invalid']) config_dict = cache._build_cache_config(self.config_fixture.conf) self.assertEqual( self.config_fixture.conf.cache.backend, config_dict['test_prefix.backend']) self.assertEqual( self.config_fixture.conf.cache.expiration_time, config_dict['test_prefix.expiration_time']) self.assertEqual('test', config_dict['test_prefix.arguments.arg1']) self.assertEqual('test:test', config_dict['test_prefix.arguments.arg2']) self.assertNotIn('test_prefix.arguments.arg3', config_dict) def test_cache_dictionary_config_builder_global_disabled(self): """Validate the backend is reset to default if caching is disabled.""" self.config_fixture.config(group='cache', enabled=False, config_prefix='test_prefix', backend='oslo_cache.dict') self.assertFalse(self.config_fixture.conf.cache.enabled) config_dict = cache._build_cache_config(self.config_fixture.conf) self.assertEqual( _opts._DEFAULT_BACKEND, config_dict['test_prefix.backend']) def test_cache_dictionary_config_builder_tls_disabled(self): """Validate the backend is reset to default if caching is disabled.""" self.config_fixture.config(group='cache', enabled=True, config_prefix='test_prefix', backend='dogpile.cache.pymemcache', tls_cafile='path_to_ca_file', tls_keyfile='path_to_key_file', tls_certfile='path_to_cert_file', tls_allowed_ciphers='allowed_ciphers') with mock.patch.object(ssl, 
'create_default_context'): config_dict = cache._build_cache_config(self.config_fixture.conf) self.assertFalse(self.config_fixture.conf.cache.tls_enabled) ssl.create_default_context.assert_not_called() self.assertNotIn('test_prefix.arguments.tls_context', config_dict) def test_cache_dictionary_config_builder_tls_disabled_redis(self): """Validate the backend is reset to default if caching is disabled.""" self.config_fixture.config(group='cache', enabled=True, config_prefix='test_prefix', backend='dogpile.cache.redis', tls_cafile='path_to_ca_file', tls_keyfile='path_to_key_file', tls_certfile='path_to_cert_file', tls_allowed_ciphers='allowed_ciphers') config_dict = cache._build_cache_config(self.config_fixture.conf) self.assertEqual( 'redis://localhost:6379', config_dict['test_prefix.arguments.url']) self.assertFalse(self.config_fixture.conf.cache.tls_enabled) self.assertNotIn('test_prefix.arguments.connection_kwargs', config_dict) def test_cache_dictionary_config_builder_tls_disabled_redis_sentinel(self): """Validate the backend is reset to default if caching is disabled.""" self.config_fixture.config(group='cache', enabled=True, config_prefix='test_prefix', backend='dogpile.cache.redis_sentinel', tls_cafile='path_to_ca_file', tls_keyfile='path_to_key_file', tls_certfile='path_to_cert_file') config_dict = cache._build_cache_config(self.config_fixture.conf) self.assertFalse(self.config_fixture.conf.cache.tls_enabled) self.assertNotIn('test_prefix.arguments.connection_kwargs', config_dict) self.assertNotIn('test_prefix.arguments.sentinel_kwargs', config_dict) def test_cache_dictionary_config_builder_tls_enabled(self): """Validate the backend is reset to default if caching is disabled.""" self.config_fixture.config(group='cache', enabled=True, config_prefix='test_prefix', backend='dogpile.cache.pymemcache', tls_enabled=True) fake_context = mock.Mock() with mock.patch.object(ssl, 'create_default_context', return_value=fake_context): config_dict = cache._build_cache_config(self.config_fixture.conf) self.assertTrue(self.config_fixture.conf.cache.tls_enabled) ssl.create_default_context.assert_called_with(cafile=None) fake_context.load_cert_chain.assert_not_called() fake_context.set_ciphers.assert_not_called() self.assertEqual( fake_context, config_dict['test_prefix.arguments.tls_context'], ) def test_cache_dictionary_config_builder_tls_enabled_redis(self): """Validate the backend is reset to default if caching is disabled.""" self.config_fixture.config(group='cache', enabled=True, config_prefix='test_prefix', backend='dogpile.cache.redis', tls_enabled=True, tls_cafile='path_to_ca_file', tls_keyfile='path_to_key_file', tls_certfile='path_to_cert_file') config_dict = cache._build_cache_config(self.config_fixture.conf) self.assertTrue(self.config_fixture.conf.cache.tls_enabled) self.assertIn('test_prefix.arguments.connection_kwargs', config_dict) self.assertEqual( 'rediss://localhost:6379', config_dict['test_prefix.arguments.url']) self.assertEqual( { 'ssl_ca_certs': 'path_to_ca_file', 'ssl_keyfile': 'path_to_key_file', 'ssl_certfile': 'path_to_cert_file' }, config_dict['test_prefix.arguments.connection_kwargs']) self.assertNotIn('test_prefix.arguments.sentinel_kwargs', config_dict) def test_cache_dictionary_config_builder_tls_enabled_redis_sentinel(self): """Validate the backend is reset to default if caching is disabled.""" self.config_fixture.config(group='cache', enabled=True, config_prefix='test_prefix', backend='dogpile.cache.redis_sentinel', tls_enabled=True, tls_cafile='path_to_ca_file', 
tls_keyfile='path_to_key_file', tls_certfile='path_to_cert_file') config_dict = cache._build_cache_config(self.config_fixture.conf) self.assertTrue(self.config_fixture.conf.cache.tls_enabled) self.assertIn('test_prefix.arguments.connection_kwargs', config_dict) self.assertEqual( { 'ssl': True, 'ssl_ca_certs': 'path_to_ca_file', 'ssl_keyfile': 'path_to_key_file', 'ssl_certfile': 'path_to_cert_file' }, config_dict['test_prefix.arguments.connection_kwargs']) self.assertIn('test_prefix.arguments.sentinel_kwargs', config_dict) self.assertEqual( { 'ssl': True, 'ssl_ca_certs': 'path_to_ca_file', 'ssl_keyfile': 'path_to_key_file', 'ssl_certfile': 'path_to_cert_file' }, config_dict['test_prefix.arguments.sentinel_kwargs']) @mock.patch('oslo_cache.core._LOG') def test_cache_dictionary_config_builder_fips_mode_supported(self, log): """Validate the FIPS mode is supported.""" self.config_fixture.config(group='cache', enabled=True, config_prefix='test_prefix', backend='dogpile.cache.pymemcache', tls_enabled=True, enforce_fips_mode=True) # Ensure that we emulate FIPS_mode even if it doesn't exist with mock.patch.object(ssl, 'FIPS_mode', create=True, return_value=True): # Ensure that we are able to set FIPS_mode with mock.patch.object(ssl, 'FIPS_mode_set', create=True): cache._build_cache_config(self.config_fixture.conf) log.info.assert_called_once_with( "Enforcing the use of the OpenSSL FIPS mode") @mock.patch('oslo_cache.core._LOG') def test_cache_dictionary_config_builder_fips_mode_unsupported(self, log): """Validate the FIPS mode is not supported.""" self.config_fixture.config(group='cache', enabled=True, config_prefix='test_prefix', backend='dogpile.cache.pymemcache', tls_enabled=True, enforce_fips_mode=True) with mock.patch.object(cache, 'ssl') as ssl_: del ssl_.FIPS_mode # We do this test only if FIPS mode is not supported to # ensure that we hard fail. 
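            # (enforce_fips_mode=True must hard fail with a
            # ConfigurationError rather than being silently ignored)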
            self.assertRaises(exception.ConfigurationError,
                              cache._build_cache_config,
                              self.config_fixture.conf)

    def test_cache_dictionary_config_builder_fips_mode_unsupported_redis(self):
        """Validate an error is raised when FIPS mode is unsupported."""
        self.config_fixture.config(group='cache',
                                   enabled=True,
                                   config_prefix='test_prefix',
                                   backend='dogpile.cache.redis',
                                   tls_enabled=True,
                                   enforce_fips_mode=True)

        self.assertRaises(exception.ConfigurationError,
                          cache._build_cache_config,
                          self.config_fixture.conf)

    def test_cache_dictionary_config_builder_tls_enabled_unsupported(self):
        """Validate tls_enabled is rejected by an unsupported backend."""
        self.config_fixture.config(group='cache',
                                   enabled=True,
                                   config_prefix='test_prefix',
                                   backend='oslo_cache.dict',
                                   tls_enabled=True)

        with mock.patch.object(ssl, 'create_default_context'):
            self.assertRaises(exception.ConfigurationError,
                              cache._build_cache_config,
                              self.config_fixture.conf)

            ssl.create_default_context.assert_not_called()

    def test_cache_dictionary_config_builder_tls_enabled_with_config(self):
        """Validate the TLS context is built from the configured options."""
        self.config_fixture.config(group='cache',
                                   enabled=True,
                                   config_prefix='test_prefix',
                                   backend='dogpile.cache.pymemcache',
                                   tls_enabled=True,
                                   tls_cafile='path_to_ca_file',
                                   tls_keyfile='path_to_key_file',
                                   tls_certfile='path_to_cert_file',
                                   tls_allowed_ciphers='allowed_ciphers')

        fake_context = mock.Mock()
        with mock.patch.object(ssl, 'create_default_context',
                               return_value=fake_context):
            config_dict = cache._build_cache_config(self.config_fixture.conf)

            self.assertTrue(self.config_fixture.conf.cache.tls_enabled)

            ssl.create_default_context.assert_called_with(
                cafile='path_to_ca_file',
            )
            fake_context.load_cert_chain.assert_called_with(
                'path_to_cert_file',
                'path_to_key_file',
            )
            fake_context.set_ciphers.assert_called_with(
                'allowed_ciphers'
            )

            self.assertEqual(
                fake_context,
                config_dict['test_prefix.arguments.tls_context'],
            )

    def test_cache_pymemcache_socket_keepalive_enabled_with_wrong_backend(self):
        """Validate enabling socket keepalive with an unsupported backend
        raises a configuration error.
        """
        self.config_fixture.config(group='cache',
                                   enabled=True,
                                   config_prefix='test_prefix',
                                   backend='oslo_cache.dict',
                                   enable_socket_keepalive=True)

        self.assertRaises(
            exception.ConfigurationError,
            cache._build_cache_config,
            self.config_fixture.conf
        )

    def test_cache_pymemcache_socket_keepalive_disabled(self):
        """Validate we build a dogpile.cache dict config without keepalive."""
        self.config_fixture.config(group='cache',
                                   enabled=True,
                                   config_prefix='test_prefix',
                                   backend='dogpile.cache.pymemcache',
                                   socket_keepalive_idle=2,
                                   socket_keepalive_interval=2,
                                   socket_keepalive_count=2)

        config_dict = cache._build_cache_config(self.config_fixture.conf)
        self.assertFalse(
            self.config_fixture.conf.cache.enable_socket_keepalive)
        self.assertNotIn(
            'test_prefix.arguments.socket_keepalive', config_dict)

    def test_cache_pymemcache_socket_keepalive_enabled(self):
        """Validate we build a dogpile.cache dict config with keepalive."""
        self.config_fixture.config(group='cache',
                                   enabled=True,
                                   config_prefix='test_prefix',
                                   backend='dogpile.cache.pymemcache',
                                   enable_socket_keepalive=True)

        config_dict = cache._build_cache_config(self.config_fixture.conf)
        self.assertTrue(
            self.config_fixture.conf.cache.enable_socket_keepalive)
        self.assertIsInstance(
            config_dict['test_prefix.arguments.socket_keepalive'],
            KeepaliveOpts
        )

    def test_cache_pymemcache_socket_keepalive_with_config(self):
        """Validate we build a socket keepalive with the right config."""
        self.config_fixture.config(group='cache',
                                   enabled=True,
                                   config_prefix='test_prefix',
                                   backend='dogpile.cache.pymemcache',
                                   enable_socket_keepalive=True,
                                   socket_keepalive_idle=12,
                                   socket_keepalive_interval=38,
                                   socket_keepalive_count=42)

        config_dict = cache._build_cache_config(self.config_fixture.conf)
        self.assertTrue(
            self.config_fixture.conf.cache.enable_socket_keepalive)
        self.assertIsInstance(
            config_dict['test_prefix.arguments.socket_keepalive'],
            KeepaliveOpts
        )
        self.assertEqual(
            12,
            config_dict['test_prefix.arguments.socket_keepalive'].idle
        )
        self.assertEqual(
            38,
            config_dict['test_prefix.arguments.socket_keepalive'].intvl
        )
        self.assertEqual(
            42,
            config_dict['test_prefix.arguments.socket_keepalive'].cnt
        )

    def test_cache_pymemcache_retry_enabled_with_wrong_backend(self):
        """Validate enabling the retry client with an unsupported backend
        raises a configuration error.
        """
        self.config_fixture.config(group='cache',
                                   enabled=True,
                                   config_prefix='test_prefix',
                                   backend='oslo_cache.dict',
                                   enable_retry_client=True,
                                   retry_attempts=2,
                                   retry_delay=2)

        self.assertRaises(
            exception.ConfigurationError,
            cache._build_cache_config,
            self.config_fixture.conf
        )

    def test_cache_pymemcache_retry_disabled(self):
        """Validate we build a config without the retry option when retry is
        disabled.
""" self.config_fixture.config(group='cache', enabled=True, config_prefix='test_prefix', backend='dogpile.cache.pymemcache', retry_attempts=2, retry_delay=2) config_dict = cache._build_cache_config(self.config_fixture.conf) opts = ['enable_retry_client', 'retry_attempts', 'retry_delay'] for el in opts: self.assertNotIn('test_prefix.arguments.{}'.format(el), config_dict) def test_cache_pymemcache_retry_enabled(self): """Validate we build a dogpile.cache dict config with retry.""" self.config_fixture.config(group='cache', enabled=True, config_prefix='test_prefix', backend='dogpile.cache.pymemcache', enable_retry_client=True) config_dict = cache._build_cache_config(self.config_fixture.conf) opts = ['enable_retry_client', 'retry_attempts', 'retry_delay'] for el in opts: self.assertIn('test_prefix.arguments.{}'.format(el), config_dict) def test_cache_pymemcache_retry_with_opts(self): """Validate we build a valid config for the retry client.""" self.config_fixture.config(group='cache', enabled=True, config_prefix='test_prefix', backend='dogpile.cache.pymemcache', enable_retry_client=True, retry_attempts=42, retry_delay=42) config_dict = cache._build_cache_config(self.config_fixture.conf) self.assertTrue( self.config_fixture.conf.cache.enable_retry_client) self.assertEqual( config_dict['test_prefix.arguments.retry_attempts'], 42 ) self.assertEqual( config_dict['test_prefix.arguments.retry_delay'], 42 ) def test_cache_pymemcache_retry_with_extra_opts(self): """Validate we build a valid config for the retry client.""" self.config_fixture.config(group='cache', enabled=True, config_prefix='test_prefix', backend='dogpile.cache.pymemcache', enable_retry_client=True, retry_attempts=42, retry_delay=42, hashclient_retry_attempts=100, hashclient_retry_delay=100, dead_timeout=100) config_dict = cache._build_cache_config(self.config_fixture.conf) self.assertTrue( self.config_fixture.conf.cache.enable_retry_client) self.assertEqual( config_dict['test_prefix.arguments.retry_attempts'], 42 ) self.assertEqual( config_dict['test_prefix.arguments.retry_delay'], 42 ) self.assertEqual( config_dict['test_prefix.arguments.hashclient_retry_attempts'], 100 ) self.assertEqual( config_dict['test_prefix.arguments.hashclient_retry_delay'], 100 ) self.assertEqual( config_dict['test_prefix.arguments.dead_timeout'], 100 ) def test_cache_dictionary_config_builder_flush_on_reconnect_enabled(self): """Validate we build a sane dogpile.cache dictionary config.""" self.config_fixture.config(group='cache', enabled=True, config_prefix='test_prefix', backend='oslo_cache.dict', memcache_pool_flush_on_reconnect=True) config_dict = cache._build_cache_config(self.config_fixture.conf) self.assertTrue(self.config_fixture.conf.cache. memcache_pool_flush_on_reconnect) self.assertTrue(config_dict['test_prefix.arguments' '.pool_flush_on_reconnect']) def test_cache_dictionary_config_builder_flush_on_reconnect_disabled(self): """Validate we build a sane dogpile.cache dictionary config.""" self.config_fixture.config(group='cache', enabled=True, config_prefix='test_prefix', backend='oslo_cache.dict', memcache_pool_flush_on_reconnect=False) config_dict = cache._build_cache_config(self.config_fixture.conf) self.assertFalse(self.config_fixture.conf.cache. 
memcache_pool_flush_on_reconnect) self.assertFalse(config_dict['test_prefix.arguments' '.pool_flush_on_reconnect']) def test_cache_dictionary_config_builder_redis(self): """Validate the backend is reset to default if caching is disabled.""" self.config_fixture.config(group='cache', config_prefix='test_prefix', backend='dogpile.cache.redis', redis_server='[::1]:6379', redis_username='user', redis_password='secrete') config_dict = cache._build_cache_config(self.config_fixture.conf) self.assertEqual( 'redis://user:secrete@[::1]:6379', config_dict['test_prefix.arguments.url']) self.assertEqual( 1.0, config_dict['test_prefix.arguments.socket_timeout']) def test_cache_dictionary_config_builder_redis_with_auth(self): """Validate the backend is reset to default if caching is disabled.""" self.config_fixture.config(group='cache', config_prefix='test_prefix', backend='dogpile.cache.redis', redis_server='[::1]:6379', redis_username='user', redis_password='secrete') config_dict = cache._build_cache_config(self.config_fixture.conf) self.assertEqual( 'redis://user:secrete@[::1]:6379', config_dict['test_prefix.arguments.url']) def test_cache_dictionary_config_builder_redis_sentinel(self): """Validate the backend is reset to default if caching is disabled.""" self.config_fixture.config(group='cache', enabled=True, config_prefix='test_prefix', backend='dogpile.cache.redis_sentinel') config_dict = cache._build_cache_config(self.config_fixture.conf) self.assertFalse(self.config_fixture.conf.cache.tls_enabled) self.assertEqual( 'mymaster', config_dict['test_prefix.arguments.service_name']) self.assertEqual([ ('localhost', 26379) ], config_dict['test_prefix.arguments.sentinels']) self.assertEqual( 1.0, config_dict['test_prefix.arguments.socket_timeout']) self.assertNotIn('test_prefix.arguments.connection_kwargs', config_dict) self.assertNotIn('test_prefix.arguments.sentinel_kwargs', config_dict) def test_cache_dictionary_config_builder_redis_sentinel_with_auth(self): """Validate the backend is reset to default if caching is disabled.""" self.config_fixture.config(group='cache', enabled=True, config_prefix='test_prefix', backend='dogpile.cache.redis_sentinel', redis_username='user', redis_password='secrete', redis_sentinels=[ '127.0.0.1:26379', '[::1]:26379', 'localhost:26379' ], redis_sentinel_service_name='cluster') config_dict = cache._build_cache_config(self.config_fixture.conf) self.assertFalse(self.config_fixture.conf.cache.tls_enabled) self.assertEqual( 'cluster', config_dict['test_prefix.arguments.service_name']) self.assertEqual([ ('127.0.0.1', 26379), ('::1', 26379), ('localhost', 26379), ], config_dict['test_prefix.arguments.sentinels']) self.assertEqual( 'secrete', config_dict['test_prefix.arguments.password']) self.assertEqual({ 'username': 'user' }, config_dict['test_prefix.arguments.connection_kwargs']) self.assertEqual({ 'username': 'user' }, config_dict['test_prefix.arguments.sentinel_kwargs']) def test_cache_debug_proxy(self): single_value = 'Test Value' single_key = 'testkey' multi_values = {'key1': 1, 'key2': 2, 'key3': 3} self.region.set(single_key, single_value) self.assertEqual(single_value, self.region.get(single_key)) self.region.delete(single_key) self.assertEqual(NO_VALUE, self.region.get(single_key)) self.region.set_multi(multi_values) cached_values = self.region.get_multi(multi_values.keys()) for value in multi_values.values(): self.assertIn(value, cached_values) self.assertEqual(len(multi_values.values()), len(cached_values)) self.region.delete_multi(multi_values.keys()) for value 
in self.region.get_multi(multi_values.keys()): self.assertEqual(NO_VALUE, value) def test_configure_non_region_object_raises_error(self): self.assertRaises(exception.ConfigurationError, cache.configure_cache_region, self.config_fixture.conf, "bogus") def test_kwarg_function_key_generator_no_kwargs(self): cacheable_function = self._get_cacheable_function( region=self.region_kwargs) self.config_fixture.config(group='cache', enabled=True) cacheable_function(self.test_value) cached_value = cacheable_function(self.test_value) self.assertTrue(cached_value.cached) def test_kwarg_function_key_generator_with_kwargs(self): cacheable_function = self._get_cacheable_function( region=self.region_kwargs) self.config_fixture.config(group='cache', enabled=True) cacheable_function(value=self.test_value) cached_value = cacheable_function(value=self.test_value) self.assertTrue(cached_value.cached) class UTF8KeyManglerTests(test_cache.BaseTestCase): def test_key_is_utf8_encoded(self): key = 'fäké1' encoded = cache._sha1_mangle_key(key) self.assertIsNotNone(encoded) def test_key_is_bytestring(self): key = b'\xcf\x84o\xcf\x81\xce\xbdo\xcf\x82' encoded = cache._sha1_mangle_key(key) self.assertIsNotNone(encoded) def test_key_is_string(self): key = 'fake' encoded = cache._sha1_mangle_key(key) self.assertIsNotNone(encoded) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0 oslo.cache-3.7.0/oslo_cache/tests/unit/test_connection_pool.py0000664000175000017500000001543400000000000024612 0ustar00zuulzuul00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import queue import threading import time from unittest import mock import testtools from testtools import matchers from oslo_cache import _bmemcache_pool from oslo_cache import _memcache_pool from oslo_cache import exception from oslo_cache.tests import test_cache class _TestConnectionPool(_memcache_pool.ConnectionPool): destroyed_value = 'destroyed' def _create_connection(self): return mock.MagicMock() def _destroy_connection(self, conn): conn(self.destroyed_value) class TestConnectionPool(test_cache.BaseTestCase): def setUp(self): super(TestConnectionPool, self).setUp() self.unused_timeout = 10 self.maxsize = 2 self.connection_pool = _TestConnectionPool( maxsize=self.maxsize, unused_timeout=self.unused_timeout) self.addCleanup(self.cleanup_instance('connection_pool')) def cleanup_instance(self, *names): """Create a function suitable for use with self.addCleanup. 
:returns: a callable that uses a closure to delete instance attributes """ def cleanup(): for name in names: if hasattr(self, name): delattr(self, name) return cleanup def test_get_context_manager(self): self.assertThat(self.connection_pool.queue, matchers.HasLength(0)) with self.connection_pool.acquire() as conn: self.assertEqual(1, self.connection_pool._acquired) self.assertEqual(0, self.connection_pool._acquired) self.assertThat(self.connection_pool.queue, matchers.HasLength(1)) self.assertEqual(conn, self.connection_pool.queue[0].connection) def test_cleanup_pool(self): self.test_get_context_manager() newtime = time.time() + self.unused_timeout * 2 non_expired_connection = _memcache_pool._PoolItem( ttl=(newtime * 2), connection=mock.MagicMock()) self.connection_pool.queue.append(non_expired_connection) self.assertThat(self.connection_pool.queue, matchers.HasLength(2)) with mock.patch.object(time, 'time', return_value=newtime): conn = self.connection_pool.queue[0].connection with self.connection_pool.acquire(): pass conn.assert_has_calls( [mock.call(self.connection_pool.destroyed_value)]) self.assertThat(self.connection_pool.queue, matchers.HasLength(1)) self.assertEqual(0, non_expired_connection.connection.call_count) def test_acquire_conn_exception_returns_acquired_count(self): class TestException(Exception): pass with mock.patch.object(_TestConnectionPool, '_create_connection', side_effect=TestException): with testtools.ExpectedException(TestException): with self.connection_pool.acquire(): pass self.assertThat(self.connection_pool.queue, matchers.HasLength(0)) self.assertEqual(0, self.connection_pool._acquired) def test_connection_pool_limits_maximum_connections(self): # NOTE(morganfainberg): To ensure we don't lockup tests until the # job limit, explicitly call .get_nowait() and .put_nowait() in this # case. conn1 = self.connection_pool.get_nowait() conn2 = self.connection_pool.get_nowait() # Use a nowait version to raise an Empty exception indicating we would # not get another connection until one is placed back into the queue. self.assertRaises(queue.Empty, self.connection_pool.get_nowait) # Place the connections back into the pool. self.connection_pool.put_nowait(conn1) self.connection_pool.put_nowait(conn2) # Make sure we can get a connection out of the pool again. self.connection_pool.get_nowait() def test_connection_pool_maximum_connection_get_timeout(self): connection_pool = _TestConnectionPool( maxsize=1, unused_timeout=self.unused_timeout, conn_get_timeout=0) def _acquire_connection(): with connection_pool.acquire(): pass # Make sure we've consumed the only available connection from the pool conn = connection_pool.get_nowait() self.assertRaises(exception.QueueEmpty, _acquire_connection) # Put the connection back and ensure we can acquire the connection # after it is available. 
connection_pool.put_nowait(conn) _acquire_connection() class TestMemcacheClientOverrides(test_cache.BaseTestCase): def test_client_stripped_of_threading_local(self): """threading.local overrides are restored for _MemcacheClient""" client_class = _memcache_pool._MemcacheClient # get the genuine thread._local from MRO thread_local = client_class.__mro__[2] self.assertTrue(thread_local is threading.local) for field in thread_local.__dict__.keys(): if field not in ('__dict__', '__weakref__'): self.assertNotEqual(id(getattr(thread_local, field, None)), id(getattr(client_class, field, None))) def test_can_create_with_kwargs(self): """Test for lp 1812935 Note that in order to reproduce the bug, it is necessary to add the following to the top of oslo_cache/tests/__init__.py:: import eventlet eventlet.monkey_patch() This should happen before any other imports in that file. """ client = _memcache_pool._MemcacheClient('foo', check_keys=False) # Make sure kwargs are properly processed by the client self.assertFalse(client.do_check_key) # Make sure our __new__ override still results in the right type self.assertIsInstance(client, _memcache_pool._MemcacheClient) class TestBMemcacheClient(test_cache.BaseTestCase): def test_can_create_with_kwargs(self): client = _bmemcache_pool._BMemcacheClient('foo', password='123456') # Make sure kwargs are properly processed by the client self.assertEqual('123456', client.password) # Make sure our __new__ override still results in the right type self.assertIsInstance(client, _bmemcache_pool._BMemcacheClient) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0 oslo.cache-3.7.0/oslo_cache/tests/unit/test_dict_backend.py0000664000175000017500000001013200000000000024002 0ustar00zuulzuul00000000000000# Copyright 2015 Mirantis Inc # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
from dogpile.cache import region as dp_region from oslo_cache import core from oslo_cache.tests import test_cache from oslo_config import fixture as config_fixture from oslo_utils import fixture as time_fixture NO_VALUE = core.NO_VALUE KEY = 'test_key' VALUE = 'test_value' class CacheDictBackendTest(test_cache.BaseTestCase): def setUp(self): super(CacheDictBackendTest, self).setUp() self.config_fixture = self.useFixture(config_fixture.Config()) self.config_fixture.config(group='cache', backend='oslo_cache.dict') self.time_fixture = self.useFixture(time_fixture.TimeFixture()) self.region = dp_region.make_region() self.region.configure( 'oslo_cache.dict', arguments={'expiration_time': 0.5}) def test_dict_backend(self): self.assertIs(NO_VALUE, self.region.get(KEY)) self.region.set(KEY, VALUE) self.assertEqual(VALUE, self.region.get(KEY)) self.region.delete(KEY) self.assertIs(NO_VALUE, self.region.get(KEY)) def test_dict_backend_expiration_time(self): self.region.set(KEY, VALUE) self.assertEqual(VALUE, self.region.get(KEY)) self.time_fixture.advance_time_seconds(1) self.assertIs(NO_VALUE, self.region.get(KEY)) def test_dict_backend_clear_cache(self): self.region.set(KEY, VALUE) self.time_fixture.advance_time_seconds(1) self.assertEqual(1, len(self.region.backend.cache)) self.region.backend._clear() self.assertEqual(0, len(self.region.backend.cache)) def test_dict_backend_zero_expiration_time(self): self.region = dp_region.make_region() self.region.configure( 'oslo_cache.dict', arguments={'expiration_time': 0}) self.region.set(KEY, VALUE) self.time_fixture.advance_time_seconds(1) self.assertEqual(VALUE, self.region.get(KEY)) self.assertEqual(1, len(self.region.backend.cache)) self.region.backend._clear() self.assertEqual(VALUE, self.region.get(KEY)) self.assertEqual(1, len(self.region.backend.cache)) def test_dict_backend_multi_keys(self): self.region.set('key1', 'value1') self.region.set('key2', 'value2') self.time_fixture.advance_time_seconds(1) self.region.set('key3', 'value3') self.assertEqual(1, len(self.region.backend.cache)) self.assertIs(NO_VALUE, self.region.get('key1')) self.assertIs(NO_VALUE, self.region.get('key2')) self.assertEqual('value3', self.region.get('key3')) def test_dict_backend_multi_keys_in_one_call(self): single_value = 'Test Value' single_key = 'testkey' multi_values = {'key1': 1, 'key2': 2, 'key3': 3} self.region.set(single_key, single_value) self.assertEqual(single_value, self.region.get(single_key)) self.region.delete(single_key) self.assertEqual(NO_VALUE, self.region.get(single_key)) self.region.set_multi(multi_values) cached_values = self.region.get_multi(multi_values.keys()) for value in multi_values.values(): self.assertIn(value, cached_values) self.assertEqual(len(multi_values.values()), len(cached_values)) self.region.delete_multi(multi_values.keys()) for value in self.region.get_multi(multi_values.keys()): self.assertEqual(NO_VALUE, value) def test_dict_backend_rewrite_value(self): self.region.set(KEY, 'value1') self.region.set(KEY, 'value2') self.assertEqual('value2', self.region.get(KEY)) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0 oslo.cache-3.7.0/oslo_cache/version.py0000664000175000017500000000126200000000000017721 0ustar00zuulzuul00000000000000# Copyright 2016 OpenStack Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import pbr.version version_info = pbr.version.VersionInfo('oslo.cache') ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1708932956.1583943 oslo.cache-3.7.0/playbooks/0000775000175000017500000000000000000000000015565 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1708932956.1583943 oslo.cache-3.7.0/playbooks/tests/0000775000175000017500000000000000000000000016727 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1708932956.1703951 oslo.cache-3.7.0/playbooks/tests/functional/0000775000175000017500000000000000000000000021071 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0 oslo.cache-3.7.0/playbooks/tests/functional/Debian.yaml0000664000175000017500000000017300000000000023140 0ustar00zuulzuul00000000000000--- backend_services_map: redis: - redis-server - redis-sentinel memcached: - memcached etcd: - etcd ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0 oslo.cache-3.7.0/playbooks/tests/functional/RedHat.yaml0000664000175000017500000000016400000000000023125 0ustar00zuulzuul00000000000000--- backend_services_map: redis: - redis - redis-sentinel memcached: - memcached etcd: - etcd ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0 oslo.cache-3.7.0/playbooks/tests/functional/pre.yml0000664000175000017500000000146000000000000022403 0ustar00zuulzuul00000000000000- hosts: all vars: oslo_cache_backend_daemon: "{{ tox_environment.PIFPAF_DAEMON }}" roles: - role: bindep bindep_profile: "tests-functional-{{ oslo_cache_backend_daemon }}" tasks: - name: Include OS-specific variables include_vars: "{{ ansible_os_family }}.yaml" # NOTE(yoctozepto): Debian and Ubuntu have this nasty policy of starting # installed services for us. We don't rely on system-wide service and use # pifpaf. Unfortunately, default port may conflict with system-wide service. # So, for sanity and resource conservation, let's stop it before tests run. 
- name: "Stop backend services" service: name: "{{ item }}" state: stopped enabled: no become: yes loop: "{{ backend_services_map[oslo_cache_backend_daemon] }}" ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1708932956.1583943 oslo.cache-3.7.0/releasenotes/0000775000175000017500000000000000000000000016253 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1708932956.1743953 oslo.cache-3.7.0/releasenotes/notes/0000775000175000017500000000000000000000000017403 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0 oslo.cache-3.7.0/releasenotes/notes/add-dogpile.cache.pymemcache-backend-627d31a76013f8e1.yaml0000664000175000017500000000014000000000000031246 0ustar00zuulzuul00000000000000--- features: - | Added a new memcached driver that uses pymemcache through dogpile.cache.././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0 oslo.cache-3.7.0/releasenotes/notes/add_reno-3b4ae0789e9c45b4.yaml0000664000175000017500000000007100000000000024264 0ustar00zuulzuul00000000000000--- other: - Switch to reno for managing release notes.././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0 oslo.cache-3.7.0/releasenotes/notes/bug-1743036-320ed918d5fb4325.yaml0000664000175000017500000000076100000000000024031 0ustar00zuulzuul00000000000000--- other: - > [`bug 1743036 `_] The `backend_argument` value(s) for `url` when configuring memcache did not properly handle multiple servers. This is because the URL was passed as a string (comma delimited) instead of a list to the memcache library/client. The `url` argument is now special cased and will split the string on a comma so that it mirrors the behavior of the ListOpt used by `memcache_servers` option.././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0 oslo.cache-3.7.0/releasenotes/notes/bug-1819957-ccff6b0ec9d1cbf2.yaml0000664000175000017500000000243100000000000024503 0ustar00zuulzuul00000000000000--- fixes: - | [`bug 1819957 `_] If a memcache server disappears and then reconnects when multiple memcache servers are used (specific to the python-memcached based backends) it is possible that the server will contain stale data. The default is now to supply the ``flush_on_reconnect`` optional argument to the backend. This means that when the service connects to a memcache server, it will flush all cached data in the server. This change only impacts the pooled backend as it is the most likely (with heavy use of greenlet) to be impacted by the problem and is the recommended production configuration. See the help from python-memcached: @param flush_on_reconnect: optional flag which prevents a scenario that can cause stale data to be read: If there's more than one memcached server and the connection to one is interrupted, keys that mapped to that server will get reassigned to another. If the first server comes back, those keys will map to it again. If it still has its data, get()s can read stale data that was overwritten on another server. This flag is off by default for backwards compatibility. 
././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0 oslo.cache-3.7.0/releasenotes/notes/bug-1888394-5a53e7a9cb25375b.yaml0000664000175000017500000000177600000000000024137 0ustar00zuulzuul00000000000000--- fixes: - | [`bug 1888394 `_] If a memcache server disappears and then reconnects when multiple memcache servers are used (specific to the python-memcached based backends) it is possible that the server will contain stale data. To avoid this, the ``flush_on_reconnect`` parameter was used in the code. Unfortunately this option causes another issue: if a memcache server disappears, or a client's connection to it breaks, clients start to flush the server on reconnect. This means that network connections spike and can overload the server until memcached becomes unresponsive. Simply put, this option can cause loops of flushes and overloaded memcached servers. This change moves the optional ``flush_on_reconnect`` parameter into the oslo.cache configuration. features: - Configuration option ``memcache_pool_flush_on_reconnect`` added to control whether a flush is sent to the memcached server after reconnect. ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0 oslo.cache-3.7.0/releasenotes/notes/bug-1991250-23bc3463273e5a91.yaml0000664000175000017500000000046200000000000023746 0ustar00zuulzuul00000000000000--- fixes: - | [`bug 1991250 `_] The python-binary-memcached package is only required if sasl_enabled=True. When sasl_enabled=False (default), the memcache_pool backend can be used without the python-binary-memcached package installed. ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0 oslo.cache-3.7.0/releasenotes/notes/drop-python-2-7-73d3113c69d724d6.yaml0000664000175000017500000000021100000000000025131 0ustar00zuulzuul00000000000000--- upgrade: - | Python 2.7 support has been dropped. The minimum version of Python now supported by oslo.cache is Python 3.6. ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0 oslo.cache-3.7.0/releasenotes/notes/enable-sasl-protocol-46d11530b87e7832.yaml0000664000175000017500000000016100000000000026311 0ustar00zuulzuul00000000000000--- features: - | Add SASL support to oslo.cache to improve authentication security. ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0 oslo.cache-3.7.0/releasenotes/notes/enforce_fips_mode-c3296a0cc1fb7ad9.yaml0000664000175000017500000000044000000000000026304 0ustar00zuulzuul00000000000000--- features: - | Adding a new option, ``[cache] enforce_fips_mode``, to the cache drivers to enforce the OpenSSL FIPS mode if supported by the version of Python. security: - | We are now able to enforce the OpenSSL FIPS mode by using ``[cache] enforce_fips_mode``.
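A minimal sketch of enabling the FIPS option from the note above alongside TLS. The backend choice and certificate path are illustrative assumptions, and whether FIPS mode can actually be enforced depends on the Python/OpenSSL build in use.

    from oslo_cache import core as cache
    from oslo_config import cfg

    conf = cfg.ConfigOpts()
    cache.configure(conf)
    conf([])

    conf.set_override('enabled', True, group='cache')
    conf.set_override('backend', 'dogpile.cache.pymemcache', group='cache')
    conf.set_override('tls_enabled', True, group='cache')
    conf.set_override('tls_cafile', '/etc/pki/tls/cacert.pem', group='cache')
    # Ask oslo.cache to put OpenSSL into FIPS mode; configuration is expected
    # to fail if the interpreter cannot enforce it (assumed behaviour, check
    # your runtime).
    conf.set_override('enforce_fips_mode', True, group='cache')

    region = cache.create_region()
    cache.configure_cache_region(conf, region)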
././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0 oslo.cache-3.7.0/releasenotes/notes/etcd3gw_driver-8ba4511ae9553a91.yaml0000664000175000017500000000021000000000000025335 0ustar00zuulzuul00000000000000--- features: - | Added a new etcd3gw driver that uses the etcd 3.x grpc-gateway HTTP "/v3alpha" API to cache key/value pairs.././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0 oslo.cache-3.7.0/releasenotes/notes/fix-memcache-pool-backend-b9e6aaab08075d68.yaml0000664000175000017500000000044300000000000027450 0ustar00zuulzuul00000000000000--- fixes: - | Fix the memcache_pool backend, which was broken in oslo.cache 2.1.0 by the switch from a python-memcached based client to a pymemcache based client, by reintroducing the python-memcached based client as the default for the memcache_pool dogpile backend. ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0 oslo.cache-3.7.0/releasenotes/notes/lower_socket_timeout-ff5680a6be23bdb2.yaml0000664000175000017500000000057200000000000027112 0ustar00zuulzuul00000000000000--- upgrade: - | The default value for ``memcache_socket_timeout`` has been lowered from 3 seconds to 1 second. The positive side-effect of this can be found in downstream `changes `_. If your deployment relies explicitly on a timeout of 3 seconds, please set that override in your configuration. ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0 oslo.cache-3.7.0/releasenotes/notes/memcache_socket_timeout-a7db772f052c107e.yaml0000664000175000017500000000015700000000000027362 0ustar00zuulzuul00000000000000--- features: - | `memcache_socket_timeout` is changed to a float, keeping the same default value of '3.0'. ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0 oslo.cache-3.7.0/releasenotes/notes/pymemcache_hashclient_configure-f6f48c5ca38bce47.yaml0000664000175000017500000000050700000000000031236 0ustar00zuulzuul00000000000000--- features: - | Add new options (``hashclient_retry_attempts``, ``hashclient_retry_delay``, ``dead_timeout``) to allow configuring pymemcache's HashClient through dogpile.cache's pymemcache backend. Those options expose pymemcache parameters that configure failover for a memcached cluster. ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0 oslo.cache-3.7.0/releasenotes/notes/pymemcache_retry_mecchanisms-fa969d1ac6f64096.yaml0000664000175000017500000000023600000000000030434 0ustar00zuulzuul00000000000000--- features: - | Add new options (``enable_retry_client``, ``retry_attempts``, ``retry_delay``) to add retry mechanisms to the pymemcache backend. ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0 oslo.cache-3.7.0/releasenotes/notes/pymemcache_socket_keepalive-f91c69770961e2b6.yaml0000664000175000017500000000034700000000000030077 0ustar00zuulzuul00000000000000--- features: - | New options (``enable_socket_keepalive``, ``socket_keepalive_idle``, ``socket_keepalive_interval``, ``socket_keepalive_count``) allow using and configuring pymemcache's socket keepalive capabilities.
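A minimal sketch of turning on the socket keepalive options named above for the dogpile.cache.pymemcache backend; the server address and interval values are illustrative assumptions only.

    from oslo_cache import core as cache
    from oslo_config import cfg

    conf = cfg.ConfigOpts()
    cache.configure(conf)
    conf([])

    conf.set_override('enabled', True, group='cache')
    conf.set_override('backend', 'dogpile.cache.pymemcache', group='cache')
    conf.set_override('memcache_servers', ['127.0.0.1:11211'], group='cache')
    conf.set_override('enable_socket_keepalive', True, group='cache')
    conf.set_override('socket_keepalive_idle', 60, group='cache')      # idle seconds before probing
    conf.set_override('socket_keepalive_interval', 15, group='cache')  # seconds between probes
    conf.set_override('socket_keepalive_count', 4, group='cache')      # failed probes before dropping

    region = cache.create_region()
    cache.configure_cache_region(conf, region)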
././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0 oslo.cache-3.7.0/releasenotes/notes/redis-backend-opts-27915f2b672512c9.yaml0000664000175000017500000000075400000000000025755 0ustar00zuulzuul00000000000000--- features: - | The following new options are added. These options are used to customize connections in the ``dogpile.cache.redis`` backend. - ``redis_server`` - ``redis_username`` - ``redis_password`` - ``redis_socket_timeout`` upgrade: - | The ``[cache] memcache_socket_timeout`` option no longer takes effect when the ``dogpile.cache.redis`` backend is used, which is the documented behavior. Use the ``[cache] redis_socket_timeout`` option instead. ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0 oslo.cache-3.7.0/releasenotes/notes/redis-sentinel-18ba4a0da83dabc7.yaml0000664000175000017500000000011400000000000025631 0ustar00zuulzuul00000000000000--- features: - | Redis Sentinel is now supported as a cache backend. ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0 oslo.cache-3.7.0/releasenotes/notes/redis-ssl-ca14b4b99c2e5a84.yaml0000664000175000017500000000047100000000000024472 0ustar00zuulzuul00000000000000--- features: - | When the ``dogpile.cache.redis`` backend is used and the ``[cache] tls_enabled`` option is set to True, the following ``[cache]`` options now set the TLS certificates and keys used for TLS communication with Redis. - ``tls_cafile`` - ``tls_certfile`` - ``tls_keyfile`` ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0 oslo.cache-3.7.0/releasenotes/notes/switch-from-python-memcached-to-pymemcache-566e70b224f92b73.yaml0000664000175000017500000000036000000000000032565 0ustar00zuulzuul00000000000000--- fixes: - | Switch from python-memcached to pymemcache for memcache_pool. This avoids issues with thread.local usage and fixes errors seen with inheritance. This is only applicable with the dogpile.cache memcached backend. ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1708932956.1783955 oslo.cache-3.7.0/releasenotes/source/0000775000175000017500000000000000000000000017553 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0 oslo.cache-3.7.0/releasenotes/source/2023.1.rst0000664000175000017500000000020200000000000021034 0ustar00zuulzuul00000000000000=========================== 2023.1 Series Release Notes =========================== .. release-notes:: :branch: stable/2023.1 ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0 oslo.cache-3.7.0/releasenotes/source/2023.2.rst0000664000175000017500000000020200000000000021035 0ustar00zuulzuul00000000000000=========================== 2023.2 Series Release Notes =========================== ..
release-notes:: :branch: stable/2023.2 ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1708932956.1783955 oslo.cache-3.7.0/releasenotes/source/_static/0000775000175000017500000000000000000000000021201 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0 oslo.cache-3.7.0/releasenotes/source/_static/.placeholder0000664000175000017500000000000000000000000023452 0ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1708932956.1783955 oslo.cache-3.7.0/releasenotes/source/_templates/0000775000175000017500000000000000000000000021710 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0 oslo.cache-3.7.0/releasenotes/source/_templates/.placeholder0000664000175000017500000000000000000000000024161 0ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0 oslo.cache-3.7.0/releasenotes/source/conf.py0000664000175000017500000002152500000000000021057 0ustar00zuulzuul00000000000000# -*- coding: utf-8 -*- # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. # This file is execfile()d with the current directory set to its # containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. # sys.path.insert(0, os.path.abspath('.')) # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. # needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ 'openstackdocstheme', 'reno.sphinxext', ] # openstackdocstheme options openstackdocs_repo_name = 'openstack/oslo.cache' openstackdocs_bug_project = 'oslo.cache' openstackdocs_bug_tag = '' openstackdocs_auto_name = False # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. # source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. project = 'oslo.cache Release Notes' copyright = '2016, oslo.cache Developers' # Release notes do not need a version in the title, they span # multiple versions. # The full version, including alpha/beta/rc tags. release = '' # The short X.Y version. 
version = '' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. # language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: # today = '' # Else, today_fmt is used as the format for a strftime call. # today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = [] # The reST default role (used for this markup: `text`) to use for all # documents. # default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. # add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). # add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. # show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'native' # A list of ignored prefixes for module index sorting. # modindex_common_prefix = [] # If true, keep warnings as "system message" paragraphs in the built documents. # keep_warnings = False # -- Options for HTML output ---------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. html_theme = 'openstackdocs' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. # html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. # html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". # html_title = None # A shorter title for the navigation bar. Default is the same as html_title. # html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. # html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. # html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied # directly to the root of the documentation. # html_extra_path = [] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. # html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. # html_use_smartypants = True # Custom sidebar templates, maps document names to template names. # html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. # html_additional_pages = {} # If false, no module index is generated. # html_domain_indices = True # If false, no index is generated. # html_use_index = True # If true, the index is split into individual pages for each letter. # html_split_index = False # If true, links to the reST sources are added to the pages. 
# html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. # html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. # html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. # html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). # html_file_suffix = None # Output file base name for HTML help builder. htmlhelp_basename = 'oslo.cacheReleaseNotesDoc' # -- Options for LaTeX output --------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). # 'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). # 'pointsize': '10pt', # Additional stuff for the LaTeX preamble. # 'preamble': '', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ ('index', 'oslo.cacheReleaseNotes.tex', 'oslo.cache Release Notes Documentation', 'oslo.cache Developers', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. # latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. # latex_use_parts = False # If true, show page references after internal links. # latex_show_pagerefs = False # If true, show URL addresses after external links. # latex_show_urls = False # Documents to append as an appendix to all manuals. # latex_appendices = [] # If false, no module index is generated. # latex_domain_indices = True # -- Options for manual page output --------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ ('index', 'oslo.cacheReleaseNotes', 'oslo.cache Release Notes Documentation', ['oslo.cache Developers'], 1) ] # If true, show URL addresses after external links. # man_show_urls = False # -- Options for Texinfo output ------------------------------------------- # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ ('index', 'oslo.cacheReleaseNotes', 'oslo.cache Release Notes Documentation', 'oslo.cache Developers', 'oslo.cacheReleaseNotes', 'One line description of project.', 'Miscellaneous'), ] # Documents to append as an appendix to all manuals. # texinfo_appendices = [] # If false, no module index is generated. # texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. # texinfo_show_urls = 'footnote' # If true, do not generate a @detailmenu in the "Top" node's menu. # texinfo_no_detailmenu = False # -- Options for Internationalization output ------------------------------ locale_dirs = ['locale/'] ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0 oslo.cache-3.7.0/releasenotes/source/index.rst0000664000175000017500000000040400000000000021412 0ustar00zuulzuul00000000000000======================== oslo.cache Release Notes ======================== .. 
toctree:: :maxdepth: 1 unreleased 2023.2 2023.1 zed yoga xena wallaby victoria ussuri train stein rocky queens pike ocata newton ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1708932956.1623945 oslo.cache-3.7.0/releasenotes/source/locale/0000775000175000017500000000000000000000000021012 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1708932956.1623945 oslo.cache-3.7.0/releasenotes/source/locale/de/0000775000175000017500000000000000000000000021402 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1708932956.1783955 oslo.cache-3.7.0/releasenotes/source/locale/de/LC_MESSAGES/0000775000175000017500000000000000000000000023167 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0 oslo.cache-3.7.0/releasenotes/source/locale/de/LC_MESSAGES/releasenotes.po0000664000175000017500000000250700000000000026224 0ustar00zuulzuul00000000000000# Andreas Jaeger , 2017. #zanata msgid "" msgstr "" "Project-Id-Version: oslo.cache Release Notes 1.25.1\n" "Report-Msgid-Bugs-To: \n" "POT-Creation-Date: 2017-07-29 12:18+0000\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: 8bit\n" "PO-Revision-Date: 2017-07-17 05:27+0000\n" "Last-Translator: Andreas Jaeger \n" "Language-Team: German\n" "Language: de\n" "X-Generator: Zanata 3.9.6\n" "Plural-Forms: nplurals=2; plural=(n != 1)\n" msgid "1.22.0" msgstr "1.22.0" msgid "1.9.0" msgstr "1.9.0" msgid "" "Added a new etcd3gw driver that uses the etcd 3.x grpc-gateway HTTP \"/" "v3alpha\" API to cache key/value pairs." msgstr "" "Ein neuer etcd3gw Treiber wurde hinzugefügt, der das etcd 3.x grpc-gateway " "HTTP \"/v3alpha\" API nutzt um key/value Paare zu cachen." msgid "New Features" msgstr "Neue Funktionen" msgid "Newton Series Release Notes" msgstr "Newton Serie Releasenotes" msgid "Ocata Series Release Notes" msgstr "Ocata Serie Releasenotes" msgid "Other Notes" msgstr "Andere Notes" msgid "Switch to reno for managing release notes." msgstr "Reno wird für die Verwaltung der Releasenotes verwendet." msgid "Unreleased Release Notes" msgstr "oslo.cache Release Notes" msgid "oslo.cache Release Notes" msgstr "oslo.cache Release Notes" ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1708932956.1623945 oslo.cache-3.7.0/releasenotes/source/locale/en_GB/0000775000175000017500000000000000000000000021764 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1708932956.1783955 oslo.cache-3.7.0/releasenotes/source/locale/en_GB/LC_MESSAGES/0000775000175000017500000000000000000000000023551 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0 oslo.cache-3.7.0/releasenotes/source/locale/en_GB/LC_MESSAGES/releasenotes.po0000664000175000017500000003230400000000000026604 0ustar00zuulzuul00000000000000# Andi Chandler , 2016. #zanata # Andi Chandler , 2017. #zanata # Andi Chandler , 2018. #zanata # Andi Chandler , 2019. #zanata # Andi Chandler , 2020. #zanata # Andi Chandler , 2022. #zanata # Andi Chandler , 2023. 
#zanata msgid "" msgstr "" "Project-Id-Version: oslo.cache Release Notes\n" "Report-Msgid-Bugs-To: \n" "POT-Creation-Date: 2023-06-27 14:34+0000\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: 8bit\n" "PO-Revision-Date: 2023-06-21 08:15+0000\n" "Last-Translator: Andi Chandler \n" "Language-Team: English (United Kingdom)\n" "Language: en_GB\n" "X-Generator: Zanata 4.3.3\n" "Plural-Forms: nplurals=2; plural=(n != 1)\n" msgid "1.22.0" msgstr "1.22.0" msgid "1.28.1-2" msgstr "1.28.1-2" msgid "1.30.0" msgstr "1.30.0" msgid "1.30.2" msgstr "1.30.2" msgid "1.30.4" msgstr "1.30.4" msgid "1.31.1" msgstr "1.31.1" msgid "1.33.3" msgstr "1.33.3" msgid "1.34.0" msgstr "1.34.0" msgid "1.37.0" msgstr "1.37.0" msgid "1.37.1" msgstr "1.37.1" msgid "1.37.1-5" msgstr "1.37.1-5" msgid "1.9.0" msgstr "1.9.0" msgid "2.0.0" msgstr "2.0.0" msgid "2.1.0" msgstr "2.1.0" msgid "2.10.0" msgstr "2.10.0" msgid "2.2.0" msgstr "2.2.0" msgid "2.3.1" msgstr "2.3.1" msgid "2.3.1-5" msgstr "2.3.1-5" msgid "2.6.2" msgstr "2.6.2" msgid "2.6.3" msgstr "2.6.3" msgid "2.7.0" msgstr "2.7.0" msgid "2.7.1" msgstr "2.7.1" msgid "2.8.2" msgstr "2.8.2" msgid "2023.1 Series Release Notes" msgstr "2023.1 Series Release Notes" msgid "3.1.0" msgstr "3.1.0" msgid "3.4.0" msgstr "3.4.0" msgid "@param flush_on_reconnect: optional flag which prevents a" msgstr "@param flush_on_reconnect: optional flag which prevents a" msgid "" "Add new options (``enable_retry_client``, ``retry_attempts``, " "``retry_delay``) to add retry mechanisms to the pymemcache backend." msgstr "" "Add new options (``enable_retry_client``, ``retry_attempts``, " "``retry_delay``) to add retry mechanisms to the pymemcache backend." msgid "" "Add new options (``hashclient_retry_attempts``, ``hashclient_retry_delay``, " "``dead_timeout``) to allow to configure pymemcache's HashClient use through " "dogpile.cache's pymemcache backend. Those options expose pymemcache params " "that allow to configure the failover for memcached cluster." msgstr "" "Add new options (``hashclient_retry_attempts``, ``hashclient_retry_delay``, " "``dead_timeout``) to allow to configure pymemcache's HashClient use through " "dogpile.cache's pymemcache backend. Those options expose pymemcache params " "that allow configuring the failover for memcached cluster." msgid "" "Add the feature to support SASL for olso.cache to improve the security of " "authority." msgstr "" "Add the feature to support SASL for olso.cache to improve the security of " "authority." msgid "" "Added a new etcd3gw driver that uses the etcd 3.x grpc-gateway HTTP \"/" "v3alpha\" API to cache key/value pairs." msgstr "" "Added a new etcd3gw driver that uses the etcd 3.x grpc-gateway HTTP \"/" "v3alpha\" API to cache key/value pairs." msgid "" "Added a new memcached driver that uses pymemcache through dogpile.cache." msgstr "" "Added a new memcached driver that uses pymemcache through dogpile.cache." msgid "Bug Fixes" msgstr "Bug Fixes" msgid "" "But unfortunatelly this option is causing another issue. If memcache server " "disappears, or client had broken connection to memcache server, clients " "start to flush server on reconnect." msgstr "" "But unfortunately, this option is causing another issue. If memcache server " "disappears, or the client had a broken connection to memcache server, the " "clients start to flush the server on reconnect." 
msgid "" "Configuration option ``memcache_pool_flush_on_reconnect`` added to control " "if flush will be sent to memcached server after reconnect." msgstr "" "Configuration option ``memcache_pool_flush_on_reconnect`` added to control " "if flush will be sent to memcached server after reconnect." msgid "" "Fix the memcache_pool backend broken in oslo.cache's version 2.1.0 by " "switching from a python-memcache based client to a pymemcache based client. " "Reintroducing the client based on python-memcached as the default client for " "the memcache_pool dogpile backend." msgstr "" "Fix the memcache_pool backend broken in oslo.cache's version 2.1.0 by " "switching from a python-memcache based client to a pymemcache based client. " "Reintroducing the client based on python-memcached as the default client for " "the memcache_pool dogpile backend." msgid "" "If willing to use this driver, make sure you have both dogpile.cache>=1.1.2 " "and pymemcache>=3.4.0 installed." msgstr "" "If willing to use this driver, make sure you have both dogpile.cache>=1.1.2 " "and pymemcache>=3.4.0 installed." msgid "New Features" msgstr "New Features" msgid "" "New options (``enable_socket_keepalive``, ``socket_keepalive_idle``, " "``socket_keepalive_interval``, ``socket_keepalive_count``) allow to use and " "configure pymemcache's socket keepalive capabilities." msgstr "" "New options (``enable_socket_keepalive``, ``socket_keepalive_idle``, " "``socket_keepalive_interval``, ``socket_keepalive_count``) allow to use and " "configure pymemcache's socket keepalive capabilities." msgid "Newton Series Release Notes" msgstr "Newton Series Release Notes" msgid "Ocata Series Release Notes" msgstr "Ocata Series Release Notes" msgid "Other Notes" msgstr "Other Notes" msgid "Pike Series Release Notes" msgstr "Pike Series Release Notes" msgid "" "Python 2.7 support has been dropped. The minimum version of Python now " "supported by oslo.cache is Python 3.6." msgstr "" "Python 2.7 support has been dropped. The minimum version of Python now " "supported by oslo.cache is Python 3.6." msgid "Queens Series Release Notes" msgstr "Queens Series Release Notes" msgid "Rocky Series Release Notes" msgstr "Rocky Series Release Notes" msgid "See the help from python-memcached:" msgstr "See the help from python-memcached:" msgid "" "Simply said this option can cause loop of flushs and overloaded memcached " "servers. This change is moving optional parameter `flush_on_reconnect` to " "oslo.cache config." msgstr "" "Simply said this option can cause loop of flushs and overloaded memcached " "servers. This change is moving the optional parameter `flush_on_reconnect` " "to oslo.cache config." msgid "Stein Series Release Notes" msgstr "Stein Series Release Notes" msgid "" "Switch from python-memcached to pymemcache for memcache_pool. This avoids " "issues with thread.local usage and fixes errors seen with inheritance. This " "is only applicable with dogpile.cache memcached backend." msgstr "" "Switch from python-memcached to pymemcache for memcache_pool. This avoids " "issues with thread.local usage and fixes errors seen with inheritance. This " "is only applicable with dogpile.cache memcached backend." msgid "Switch to reno for managing release notes." msgstr "Switch to reno for managing release notes." msgid "" "The default value for ``memcache_socket_timeout`` has been lowered from 3 " "seconds to 1 second. The positive side-effect of this can be found in " "downstream `changes `_. 
If you deployment relies explicitly on a timeout of 3 seconds, " "please set that override in your configuration." msgstr "" "The default value for ``memcache_socket_timeout`` has been lowered from 3 " "seconds to 1 second. The positive side-effect of this can be found in " "downstream `changes `_. If you deployment relies explicitly on a timeout of 3 seconds, " "please set that override in your configuration." msgid "" "This means that network connections will go UP and can cause server to be " "overloaded until memcache will be unresponsive." msgstr "" "This means that network connections will go UP and can cause the server to " "be overloaded until memcache will become unresponsive." msgid "Train Series Release Notes" msgstr "Train Series Release Notes" msgid "Unreleased Release Notes" msgstr "Unreleased Release Notes" msgid "Upgrade Notes" msgstr "Upgrade Notes" msgid "Ussuri Series Release Notes" msgstr "Ussuri Series Release Notes" msgid "Victoria Series Release Notes" msgstr "Victoria Series Release Notes" msgid "Wallaby Series Release Notes" msgstr "Wallaby Series Release Notes" msgid "Xena Series Release Notes" msgstr "Xena Series Release Notes" msgid "Yoga Series Release Notes" msgstr "Yoga Series Release Notes" msgid "Zed Series Release Notes" msgstr "Zed Series Release Notes" msgid "" "[`bug 1743036 `_] The " "`backend_argument` value(s) for `url` when configuring memcache did not " "properly handle multiple servers. This is because the URL was passed as a " "string (comma delimited) instead of a list to the memcache library/client. " "The `url` argument is now special cased and will split the string on a comma " "so that it mirrors the behavior of the ListOpt used by `memcache_servers` " "option." msgstr "" "[`bug 1743036 `_] The " "`backend_argument` value(s) for `url` when configuring memcache did not " "properly handle multiple servers. This is because the URL was passed as a " "string (comma delimited) instead of a list to the memcache library/client. " "The `url` argument is now special cased and will split the string on a comma " "so that it mirrors the behaviour of the ListOpt used by `memcache_servers` " "option." msgid "" "[`bug 1819957 `_] If a " "memcache server disappears and then reconnects when multiple memcache " "servers are used (specific to the python-memcached based backends) it is " "possible that the server will contain stale data. The default is now to " "supply the ``flush_on_reconnect`` optional argument to the backend. This " "means that when the service connects to a memcache server, it will flush all " "cached data in the server. This change only impacts the pooled backend as it " "is the most likely (with heavy use of greenlet) to be impacted by the " "problem and is the recommended production configuration." msgstr "" "[`bug 1819957 `_] If a " "memcache server disappears and then reconnects when multiple memcache " "servers are used (specific to the python-memcached based backends) it is " "possible that the server will contain stale data. The default is now to " "supply the ``flush_on_reconnect`` optional argument to the backend. This " "means that when the service connects to a memcache server, it will flush all " "cached data in the server. This change only impacts the pooled backend as it " "is the most likely (with heavy use of greenlet) to be impacted by the " "problem and is the recommended production configuration." 
msgid "" "[`bug 1888394 `_] If a " "memcache server disappears and then reconnects when multiple memcache " "servers are used (specific to the python-memcached based backends) it is " "possible that the server will contain stale data. To avoid this, param " "flush_on_reconnect was used in code." msgstr "" "[`bug 1888394 `_] If a " "memcache server disappears and then reconnects when multiple memcache " "servers are used (specific to the python-memcached based backends) it is " "possible that the server will contain stale data. To avoid this, param " "flush_on_reconnect was used in the code." msgid "" "[`bug 1991250 `_] The " "python-binary-memcached package is only required if sasl_enabled=True. When " "sasl_enabled=False (default), the memcache_pool backend can be used without " "the python-binary-memcached package installed." msgstr "" "[`bug 1991250 `_] The " "python-binary-memcached package is only required if sasl_enabled=True. When " "sasl_enabled=False (default), the memcache_pool backend can be used without " "the python-binary-memcached package installed." msgid "" "`memcache_socket_timeout` is changed to float, given the same default value " "'3.0'." msgstr "" "`memcache_socket_timeout` is changed to float, given the same default value " "'3.0'." msgid "oslo.cache Release Notes" msgstr "oslo.cache Release Notes" msgid "" "scenario that can cause stale data to be read: If there's more than one " "memcached server and the connection to one is interrupted, keys that mapped " "to that server will get reassigned to another. If the first server comes " "back, those keys will map to it again. If it still has its data, get()s can " "read stale data that was overwritten on another server. This flag is off by " "default for backwards compatibility." msgstr "" "scenario that can cause stale data to be read: If there's more than one " "memcached server and the connection to one is interrupted, keys that mapped " "to that server will get reassigned to another. If the first server comes " "back, those keys will map to it again. If it still has its data, get()s can " "read stale data that was overwritten on another server. This flag is off by " "default for backwards compatibility." ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1708932956.1623945 oslo.cache-3.7.0/releasenotes/source/locale/fr/0000775000175000017500000000000000000000000021421 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1708932956.1783955 oslo.cache-3.7.0/releasenotes/source/locale/fr/LC_MESSAGES/0000775000175000017500000000000000000000000023206 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0 oslo.cache-3.7.0/releasenotes/source/locale/fr/LC_MESSAGES/releasenotes.po0000664000175000017500000000170300000000000026240 0ustar00zuulzuul00000000000000# Gérald LONLAS , 2016. 
#zanata msgid "" msgstr "" "Project-Id-Version: oslo.cache Release Notes 1.14.1\n" "Report-Msgid-Bugs-To: \n" "POT-Creation-Date: 2016-10-14 13:22+0000\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: 8bit\n" "PO-Revision-Date: 2016-10-22 05:58+0000\n" "Last-Translator: Gérald LONLAS \n" "Language-Team: French\n" "Language: fr\n" "X-Generator: Zanata 3.7.3\n" "Plural-Forms: nplurals=2; plural=(n > 1)\n" msgid "1.9.0" msgstr "1.9.0" msgid "Newton Series Release Notes" msgstr "Note de release pour Newton" msgid "Other Notes" msgstr "Autres notes" msgid "Switch to reno for managing release notes." msgstr "Commence à utiliser reno pour la gestion des notes de release" msgid "Unreleased Release Notes" msgstr "Note de release pour les changements non déployées" msgid "oslo.cache Release Notes" msgstr "Note de release pour oslo.cache" ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1708932956.1623945 oslo.cache-3.7.0/releasenotes/source/locale/ko_KR/0000775000175000017500000000000000000000000022017 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1708932956.1783955 oslo.cache-3.7.0/releasenotes/source/locale/ko_KR/LC_MESSAGES/0000775000175000017500000000000000000000000023604 5ustar00zuulzuul00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0 oslo.cache-3.7.0/releasenotes/source/locale/ko_KR/LC_MESSAGES/releasenotes.po0000664000175000017500000000267700000000000026651 0ustar00zuulzuul00000000000000# ByungYeol Woo , 2017. #zanata msgid "" msgstr "" "Project-Id-Version: oslo.cache Release Notes\n" "Report-Msgid-Bugs-To: \n" "POT-Creation-Date: 2018-02-08 23:08+0000\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: 8bit\n" "PO-Revision-Date: 2017-10-18 02:15+0000\n" "Last-Translator: ByungYeol Woo \n" "Language-Team: Korean (South Korea)\n" "Language: ko_KR\n" "X-Generator: Zanata 4.3.3\n" "Plural-Forms: nplurals=1; plural=0\n" msgid "1.22.0" msgstr "1.22.0" msgid "1.9.0" msgstr "1.9.0" msgid "" "Added a new etcd3gw driver that uses the etcd 3.x grpc-gateway HTTP \"/" "v3alpha\" API to cache key/value pairs." msgstr "" "etcd 3.x grpc-gateway HTTP \"/v3alpha\" API to cache key/value pairs를 사용하" "는 새로운 etcd3gw 드라이버가 추가되었습니다." msgid "New Features" msgstr "새로운 기능" msgid "Newton Series Release Notes" msgstr "Newton 시리즈 릴리즈 노트" msgid "Ocata Series Release Notes" msgstr "Ocata 시리즈 릴리즈 노트" msgid "Other Notes" msgstr "기타 노트" msgid "Pike Series Release Notes" msgstr "Pike 시리즈 릴리즈 노트" msgid "Switch to reno for managing release notes." msgstr "릴리즈 노트를 관리하려면 reno로 전환하십시오." msgid "Unreleased Release Notes" msgstr "릴리즈되지 않은 릴리즈 노트" msgid "oslo.cache Release Notes" msgstr "oslo.cache 릴리즈 노트" ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0 oslo.cache-3.7.0/releasenotes/source/newton.rst0000664000175000017500000000021600000000000021616 0ustar00zuulzuul00000000000000============================= Newton Series Release Notes ============================= .. 
==> oslo.cache-3.7.0/releasenotes/source/newton.rst <==
=============================
Newton Series Release Notes
=============================

.. release-notes::
   :branch: origin/stable/newton

==> oslo.cache-3.7.0/releasenotes/source/ocata.rst <==
===================================
Ocata Series Release Notes
===================================

.. release-notes::
   :branch: origin/stable/ocata

==> oslo.cache-3.7.0/releasenotes/source/pike.rst <==
===================================
Pike Series Release Notes
===================================

.. release-notes::
   :branch: stable/pike

==> oslo.cache-3.7.0/releasenotes/source/queens.rst <==
===================================
Queens Series Release Notes
===================================

.. release-notes::
   :branch: stable/queens

==> oslo.cache-3.7.0/releasenotes/source/rocky.rst <==
===================================
Rocky Series Release Notes
===================================

.. release-notes::
   :branch: stable/rocky

==> oslo.cache-3.7.0/releasenotes/source/stein.rst <==
===================================
Stein Series Release Notes
===================================

.. release-notes::
   :branch: stable/stein

==> oslo.cache-3.7.0/releasenotes/source/train.rst <==
==========================
Train Series Release Notes
==========================

.. release-notes::
   :branch: stable/train

==> oslo.cache-3.7.0/releasenotes/source/unreleased.rst <==
==========================
Unreleased Release Notes
==========================

.. release-notes::

==> oslo.cache-3.7.0/releasenotes/source/ussuri.rst <==
===========================
Ussuri Series Release Notes
===========================

.. release-notes::
   :branch: stable/ussuri

==> oslo.cache-3.7.0/releasenotes/source/victoria.rst <==
=============================
Victoria Series Release Notes
=============================

.. release-notes::
   :branch: stable/victoria
==> oslo.cache-3.7.0/releasenotes/source/wallaby.rst <==
============================
Wallaby Series Release Notes
============================

.. release-notes::
   :branch: stable/wallaby

==> oslo.cache-3.7.0/releasenotes/source/xena.rst <==
=========================
Xena Series Release Notes
=========================

.. release-notes::
   :branch: stable/xena

==> oslo.cache-3.7.0/releasenotes/source/yoga.rst <==
=========================
Yoga Series Release Notes
=========================

.. release-notes::
   :branch: unmaintained/yoga

==> oslo.cache-3.7.0/releasenotes/source/zed.rst <==
========================
Zed Series Release Notes
========================

.. release-notes::
   :branch: stable/zed

==> oslo.cache-3.7.0/requirements.txt <==
dogpile.cache>=1.1.5 # BSD
oslo.config>=8.1.0 # Apache-2.0
oslo.i18n>=5.0.0 # Apache-2.0
oslo.log>=4.2.1 # Apache-2.0
oslo.utils>=4.2.0 # Apache-2.0
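oslo.cache is a thin layer over the dogpile.cache dependency listed in requirements.txt above, and its main consumer-facing helper is the memoization decorator. A hedged sketch (not part of this archive) follows; reusing group='cache' rather than a service-specific group with its own caching/cache_time options is an assumption made only to keep the example self-contained.

# Illustrative sketch only -- not part of this archive. Shows the
# memoization helper layered on dogpile.cache; group='cache' is reused
# here as an assumption for self-containment (services normally pass
# their own option group).
from oslo_config import cfg
from oslo_cache import core as cache

CONF = cfg.CONF
cache.configure(CONF)
CONF.set_override('enabled', True, group='cache')
CONF.set_override('backend', 'oslo_cache.dict', group='cache')

region = cache.create_region()
cache.configure_cache_region(CONF, region)

MEMOIZE = cache.get_memoization_decorator(CONF, region, group='cache')

@MEMOIZE
def expensive_lookup(name):
    # Runs once per name; later calls are served from the region.
    return name.upper()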
==> oslo.cache-3.7.0/setup.cfg <==
[metadata]
name = oslo.cache
summary = Cache storage for OpenStack projects.
description_file = README.rst
author = OpenStack
author_email = openstack-discuss@lists.openstack.org
home_page = https://docs.openstack.org/oslo.cache/latest
python_requires = >=3.8
classifier =
    Environment :: OpenStack
    Intended Audience :: Information Technology
    Intended Audience :: System Administrators
    License :: OSI Approved :: Apache Software License
    Operating System :: POSIX :: Linux
    Programming Language :: Python
    Programming Language :: Python :: 3 :: Only
    Programming Language :: Python :: 3
    Programming Language :: Python :: 3.8
    Programming Language :: Python :: 3.9
    Programming Language :: Python :: 3.10
    Programming Language :: Python :: 3.11

[files]
packages =
    oslo_cache

[entry_points]
oslo.config.opts =
    oslo.cache = oslo_cache._opts:list_opts
dogpile.cache =
    oslo_cache.mongo = oslo_cache.backends.mongo:MongoCacheBackend
    oslo_cache.memcache_pool = oslo_cache.backends.memcache_pool:PooledMemcachedBackend
    oslo_cache.dict = oslo_cache.backends.dictionary:DictCacheBackend
    oslo_cache.etcd3gw = oslo_cache.backends.etcd3gw:Etcd3gwCacheBackend

[extras]
dogpile =
    python-memcached>=1.56 # PSF
    pymemcache>=3.5.0 # Apache-2.0
    python-binary-memcached>=0.29.0 # MIT
    redis>=3.0.0 # MIT
mongo =
    pymongo!=3.1,>=3.0.2 # Apache-2.0
etcd3gw =
    etcd3gw>=0.2.0 # Apache-2.0

[egg_info]
tag_build =
tag_date = 0

==> oslo.cache-3.7.0/setup.py <==
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import setuptools

setuptools.setup(
    setup_requires=['pbr>=2.0.0'],
    pbr=True)

==> oslo.cache-3.7.0/test-requirements.txt <==
oslotest>=3.2.0 # Apache-2.0
pifpaf>=0.10.0 # Apache-2.0
stestr>=2.0.0 # Apache-2.0
pymemcache>=3.5.0 # Apache-2.0
python-binary-memcached>=0.29.0 # MIT
python-memcached>=1.56 # PSF
pymongo!=3.1,>=3.0.2 # Apache-2.0
etcd3gw>=0.2.0 # Apache-2.0
redis>=3.0.0 # MIT
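Because the [entry_points] section of setup.cfg above registers each backend under the dogpile.cache group, dogpile can resolve them by short name with no oslo.config plumbing at all. A minimal sketch (not part of this archive) using the in-memory dict backend, which is handy in unit tests:

# Illustrative sketch only -- not part of this archive. 'oslo_cache.dict'
# is resolvable by name purely through the entry-point registration above.
from dogpile.cache import make_region

region = make_region().configure('oslo_cache.dict', expiration_time=60)
region.set('answer', 42)
assert region.get('answer') == 42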
-d "$TARBALL_NAME" && curl -L https://github.com/coreos/etcd/releases/download/v${ETCD_VERSION}/${TARBALL_NAME}.${SUFFIX} | tar xz export PATH=$PATH:$TARBALL_NAME fi $* ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1708932926.0 oslo.cache-3.7.0/tox.ini0000664000175000017500000000306500000000000015101 0ustar00zuulzuul00000000000000[tox] minversion = 3.2.0 envlist = py3,pep8 [testenv] allowlist_externals = find deps = -c{env:TOX_CONSTRAINTS_FILE:https://releases.openstack.org/constraints/upper/master} -r{toxinidir}/requirements.txt -r{toxinidir}/test-requirements.txt commands = find . -type f -name "*.pyc" -delete stestr run --slowest {posargs} [testenv:functional] setenv = STESTR_TEST_PATH=./oslo_cache/tests/functional/{env:OSLO_BACKEND} commands = find . -type f -name "*.pyc" -delete pifpaf -e OSLO_CACHE_TEST run {env:PIFPAF_DAEMON} {env:PIFPAF_OPTS} -- stestr run --slowest [testenv:pep8] deps = pre-commit commands = pre-commit run -a [testenv:venv] commands = {posargs} [testenv:docs] allowlist_externals = rm deps = -c{env:TOX_CONSTRAINTS_FILE:https://releases.openstack.org/constraints/upper/master} -r{toxinidir}/doc/requirements.txt commands = rm -fr doc/build sphinx-build -W --keep-going -b html doc/source doc/build/html [testenv:cover] setenv = PYTHON=coverage run --source oslo_cache --parallel-mode commands = coverage erase stestr run {posargs} coverage combine coverage html -d cover coverage xml -o cover/coverage.xml coverage report --show-missing [flake8] show-source = True ignore = H405,W504,F405 builtins = _ exclude=.venv,.git,.tox,dist,doc,*lib/python*,*egg,build [hacking] import_exceptions = [testenv:releasenotes] allowlist_externals = rm deps = {[testenv:docs]deps} commands = rm -rf releasenotes/build sphinx-build -a -E -W -d releasenotes/build/doctrees --keep-going -b html releasenotes/source releasenotes/build/html