././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1585243094.9942646 persistent-4.6.4/0000755000076500000240000000000000000000000014453 5ustar00jmaddenstaff00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1585243094.0 persistent-4.6.4/.coveragerc0000644000076500000240000000036200000000000016575 0ustar00jmaddenstaff00000000000000[run] source = persistent omit = persistent/_ring_build.py [report] exclude_lines = pragma: no cover class I[A-Z]\w+\((Interface|I[A-Z].*)\): raise NotImplementedError raise AssertionError if __name__ == '__main__': ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1585243094.0 persistent-4.6.4/.manylinux-install.sh0000755000076500000240000000116700000000000020565 0ustar00jmaddenstaff00000000000000#!/usr/bin/env bash set -e -x # Compile wheels for PYBIN in /opt/python/*/bin; do if [[ "${PYBIN}" == *"cp27"* ]] || \ [[ "${PYBIN}" == *"cp35"* ]] || \ [[ "${PYBIN}" == *"cp36"* ]] || \ [[ "${PYBIN}" == *"cp37"* ]] || \ [[ "${PYBIN}" == *"cp38"* ]]; then "${PYBIN}/pip" install -U pip setuptools cffi "${PYBIN}/pip" install -e /io/ "${PYBIN}/pip" wheel /io/ -w wheelhouse/ rm -rf /io/build /io/*.egg-info fi done # Bundle external shared libraries into the wheels for whl in wheelhouse/persistent*.whl; do auditwheel repair "$whl" -w /io/wheelhouse/ done ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1585243094.0 persistent-4.6.4/.manylinux.sh0000755000076500000240000000016100000000000017112 0ustar00jmaddenstaff00000000000000#!/usr/bin/env bash set -e -x docker run --rm -v "$(pwd)":/io $DOCKER_IMAGE $PRE_CMD /io/.manylinux-install.sh ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1585243094.0 persistent-4.6.4/.travis.yml0000644000076500000240000000777000000000000016577 0ustar00jmaddenstaff00000000000000language: 
python env: global: - TWINE_USERNAME: zope.wheelbuilder # this sets $PYPIPASSWORD - secure: "NTWzDr5p8KRPNt+sniTot7csbzC87rzir/XfLtENE0GpQ49FlKw3lBhsDqAPoD8Ea5lwiHXmC/C/ci1UZhFvVEkAoQ2qJlMRnhqUdRJSrqcitmRt0fT6mLaTd+Lr+DxKlBxpssobrEm2G42V/G1s0Ggym04OqF8T+s6MF5ywgJM=" # We want to require the C extensions to build and function # everywhere (except where we specifically opt-out, currently just # PyPy, where they build but don't quite work). - PURE_PYTHON: 0 python: - 2.7 - 3.5 - 3.6 - 3.7 - 3.8 - 3.9-dev jobs: include: # Don't test C extensions on PyPy. - python: pypy env: PURE_PYTHON=1 - python: pypy3 env: PURE_PYTHON=1 # Special Linux builds - name: "Python: 2.7, pure (no C extensions)" python: 2.7 env: PURE_PYTHON=1 # Test for https://github.com/zopefoundation/persistent/issues/86 - name: "Python: 3.6, -fno-wrapv" python: 3.6 env: CFLAGS="-fno-wrapv" - name: "Python: 3.7, pure (no C extensions)" python: 3.7 env: PURE_PYTHON=1 - name: "Documentation" python: 3.6 install: - pip install -U -e .[docs] script: - sphinx-build -b html -d docs/_build/doctrees docs docs/_build/html - sphinx-build -b doctest -d docs/_build/doctrees docs docs/_build/doctest after_success: # manylinux wheel builds - name: 64-bit manylinux wheels (all Pythons) services: docker env: DOCKER_IMAGE=quay.io/pypa/manylinux2010_x86_64 install: docker pull $DOCKER_IMAGE script: bash .manylinux.sh - name: 32-bit manylinux wheels (all Pythons) services: docker env: DOCKER_IMAGE=quay.io/pypa/manylinux2010_i686 PRE_CMD=linux32 install: docker pull $DOCKER_IMAGE script: bash .manylinux.sh # It's important to use 'macpython' builds to get the least # restrictive wheel tag. It's also important to avoid # 'homebrew 3' because it floats instead of being a specific version. - name: Python 2.7 wheels for MacOS os: osx language: generic # We require at least 2.7.15 to upload wheels. 
# See https://github.com/zopefoundation/BTrees/issues/113 env: TERRYFY_PYTHON='macpython 2.7.17' - name: Python 3.5 wheels for MacOS os: osx language: generic env: TERRYFY_PYTHON='macpython 3.5' - name: Python 3.6 wheels for MacOS os: osx language: generic # NB: 3.6.0 causes https://github.com/nedbat/coveragepy/issues/703 # NB: 3.6.1 had that ABI regression (fixed in 3.6.2) and would be a bad # version to use env: TERRYFY_PYTHON='macpython 3.6.2' - name: Python 3.7 wheels for MacOS os: osx language: generic env: TERRYFY_PYTHON='macpython 3.7.0' - name: Python 3.8 wheels for MacOS os: osx language: generic env: TERRYFY_PYTHON='macpython 3.8.0' before_install: - | if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then git clone https://github.com/MacPython/terryfy source terryfy/travis_tools.sh get_python_environment $TERRYFY_PYTHON venv fi install: - python -m pip install -U pip setuptools cffi wheel coverage coveralls - python -m pip install -U -e .[test] script: - python --version # make sure we can build a wheel - python setup.py bdist_wheel # coverage makes PyPy run about 3x slower, but the tests only take # .4s to begin with (the whole process takes about 1.5), so that's # still only 4.5s, which is maneagable. - python -m coverage run -m zope.testrunner --test-path=. 
--auto-color --auto-progress after_success: - python -m coveralls - | if [[ $TRAVIS_TAG && "$TRAVIS_OS_NAME" == "osx" ]]; then pip install twine python setup.py bdist_wheel TWINE_PASSWORD=$PYPIPASSWORD twine upload --skip-existing dist/* fi - | if [[ $TRAVIS_TAG && -n "$DOCKER_IMAGE" ]]; then pip install twine TWINE_PASSWORD=$PYPIPASSWORD twine upload --skip-existing wheelhouse/* fi notifications: email: false cache: pip ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1585243094.0 persistent-4.6.4/CHANGES.rst0000644000076500000240000003613100000000000016261 0ustar00jmaddenstaff00000000000000``persistent`` Changelog ======================== 4.6.4 (2020-03-26) ------------------ - Fix an overly specific test failure using zope.interface 5. See `issue 144 `_. - Fix two reference leaks that could theoretically occur as the result of obscure errors. See `issue 143 `_. 4.6.3 (2020-03-18) ------------------ - Fix a crash in the test suite under a 32-bit CPython on certain 32-bit platforms. See `issue 137 `_. Fix by `Jerry James `_. 4.6.2 (2020-03-12) ------------------ - Fix an ``AssertionError`` clearing a non-empty ``PersistentMapping`` that has no connection. See `issue 139 `_. 4.6.1 (2020-03-06) ------------------ - Stop installing C header files on PyPy (which is what persistent before 4.6.0 used to do), fixes `issue 135 `_. 4.6.0 (2020-03-05) ------------------ - Fix slicing of ``PersistentList`` to always return instances of the same class. It was broken on Python 3 prior to 3.7.4. - Fix copying of ``PersistentList`` and ``PersistentMapping`` using ``copy.copy`` to also copy the underlying data object. This was broken prior to Python 3.7.4. - Update the handling of the ``PURE_PYTHON`` environment variable. Now, a value of "0" requires that the C extensions be used; any other non-empty value prevents the extensions from being used. Also, all C extensions are required together or none of them will be used. 
This prevents strange errors that arise from a mismatch of Python and C implementations. See `issue 131 `_. Note that some private implementation details such as the names of the pure-Python implementations have changed. - Fix ``PersistentList`` to mark itself as changed after calling ``clear`` (if needed). See `PR 115 `_. - Fix ``PersistentMapping.update`` to accept keyword arguments like the native ``UserDict``. Previously, most uses of keyword arguments resulted in ``TypeError``; in the undocumented and extremely unlikely event of a single keyword argument called ``b`` that happens to be a dictionary, the behaviour will change. Also adjust the signatures of ``setdefault`` and ``pop`` to match the native version. - Fix ``PersistentList.clear``, ``PersistentMapping.clear`` and ``PersistentMapping.popitem`` to no longer mark the object as changed if it was empty. - Add preliminary support for Python 3.9a3+. See `issue 124 `_. - Fix the Python implementation of the PickleCache to be able to store objects that cannot be weakly referenced. See `issue 133 `_. Note that ``ctypes`` is required to use the Python implementation (except on PyPy). 4.5.1 (2019-11-06) ------------------ - Add support for Python 3.8. - Update documentation to Python 3. 4.5.0 (2019-05-09) ------------------ - Fully test the C implementation of the PickleCache, and fix discrepancies between it and the Python implementation: - The C implementation now raises ``ValueError`` instead of ``AssertionError`` for certain types of bad inputs. - The Python implementation uses the C wording for error messages. - The C implementation properly implements ``IPickleCache``; methods unique to the Python implementation were moved to ``IExtendedPickleCache``. - The Python implementation raises ``AttributeError`` if a persistent class doesn't have a ``p_jar`` attribute. See `issue 102 `_. - Allow sweeping cache without ``cache_size``. 
``cache_size_bytes`` works with ``cache_size=0``, no need to set ``cache_size`` to a large value. - Require ``CFFI`` on CPython for pure-Python operation. This drops support for Jython (which was untested). See `issue 77 `_. - Fix DeprecationWarning about ``PY_SSIZE_T_CLEAN``. See `issue 108 `_. - Drop support for Python 3.4. 4.4.3 (2018-10-22) ------------------ - Fix the repr of the persistent objects to include the module name when using the C extension. This matches the pure-Python behaviour and the behaviour prior to 4.4.0. See `issue 92 `_. - Change the repr of persistent objects to format the OID as in integer in hexadecimal notation if it is an 8-byte byte string, as ZODB does. This eliminates some issues in doctests. See `issue 95 `_. 4.4.2 (2018-08-28) ------------------ - Explicitly use unsigned constants for packing and unpacking C timestamps, fixing an arithmetic issue for GCC when optimizations are enabled and ``-fwrapv`` is *not* enabled. See `issue 86 `_. 4.4.1 (2018-08-23) ------------------ - Fix installation of source packages on PyPy. See `issue 88 `_. 4.4.0 (2018-08-22) ------------------ - Use unsigned constants when doing arithmetic on C timestamps, possibly avoiding some overflow issues with some compilers or compiler settings. See `issue 86 `_. - Change the default representation of ``Persistent`` objects to include the representation of their OID and jar, if set. Also add the ability for subclasses to implement ``_p_repr()`` instead of overriding ``__repr__`` for better exception handling. See `issue 11 `_. - Reach and maintain 100% test coverage. - Simplify ``__init__.py``, including removal of an attempted legacy import of ``persistent.TimeStamp``. See `PR 80 `_. - Add support for Python 3.7 and drop support for Python 3.3. - Build the CFFI modules (used on PyPy or when PURE_PYTHON is set) `at installation or wheel building time `_ when CFFI is available. This replaces `the deprecated way `_ of building them at import time. 
If binary wheels are distributed, it eliminates the need to have a functioning C compiler to use PyPy. See `issue 75 `_. - Fix deleting the ``_p_oid`` of a pure-Python persistent object when it is in a cache. - Fix deleting special (``_p``) attributes of a pure-Python persistent object that overrides ``__delattr__`` and correctly calls ``_p_delattr``. - Remove some internal compatibility shims that are no longer necessary. See `PR 82 `_. - Make the return value of ``TimeStamp.second()`` consistent across C and Python implementations when the ``TimeStamp`` was created from 6 arguments with floating point seconds. Also make it match across trips through ``TimeStamp.raw()``. Previously, the C version could initially have erroneous rounding and too much false precision, while the Python version could have too much precision. The raw/repr values have not changed. See `issue 41 `_. 4.3.0 (2018-07-30) ------------------ - Fix the possibility of a rare crash in the C extension when deallocating items. See https://github.com/zopefoundation/persistent/issues/66 - Change cPickleCache's comparison of object sizes to determine whether an object can go in the cache to use ``PyObject_TypeCheck()``. This matches what the pure Python implementation does and is a stronger test that the object really is compatible with the cache. Previously, an object could potentially include ``cPersistent_HEAD`` and *not* set ``tp_base`` to ``cPersistenceCAPI->pertype`` and still be eligible for the pickle cache; that is no longer the case. See `issue 69 `_. 4.2.4.2 (2017-04-23) -------------------- - Packaging-only release: fix Python 2.7 ``manylinux`` wheels. 4.2.4.1 (2017-04-21) -------------------- - Packaging-only release: get ``manylinux`` wheel built automatically. 4.2.4 (2017-03-20) ------------------ - Avoid raising a ``SystemError: error return without exception set`` when loading an object with slots whose jar generates an exception (such as a ZODB ``POSKeyError``) in ``setstate``. 
4.2.3 (2017-03-08) ------------------ - Fix the hashcode of Python ``TimeStamp`` objects on 64-bit Python on Windows. See https://github.com/zopefoundation/persistent/pull/55 - Stop calling ``gc.collect`` every time ``PickleCache.incrgc`` is called (every transaction boundary) in pure-Python mode (PyPy). This means that the reported size of the cache may be wrong (until the next GC), but it is much faster. This should not have any observable effects for user code. - Stop clearing the dict and slots of objects added to ``PickleCache.new_ghost`` (typically these values are passed to ``__new__`` from the pickle data) in pure-Python mode (PyPy). This matches the behaviour of the C code. - Add support for Python 3.6. - Fix ``__setstate__`` interning when ``state`` parameter is not a built-in dict 4.2.2 (2016-11-29) ------------------ - Drop use of ``ctypes`` for determining maximum integer size, to increase pure-Python compatibility. See https://github.com/zopefoundation/persistent/pull/31 - Ensure that ``__slots__`` attributes are cleared when a persistent object is ghostified. (This excluses classes that override ``__new__``. See https://github.com/zopefoundation/persistent/wiki/Notes_on_state_new_and_slots if you're curious.) 4.2.1 (2016-05-26) ------------------ - Fix the hashcode of C ``TimeStamp`` objects on 64-bit Python 3 on Windows. 4.2.0 (2016-05-05) ------------------ - Fixed the Python(/PYPY) implementation ``TimeStamp.timeTime`` method to have subsecond precision. - When testing ``PURE_PYTHON`` environments under ``tox``, avoid poisoning the user's global wheel cache. - Add support for Python 3.5. - Drop support for Python 2.6 and 3.2. 4.1.1 (2015-06-02) ------------------ - Fix manifest and re-upload to fix stray files included in 4.1.0. 4.1.0 (2015-05-19) ------------------ - Make the Python implementation of ``Persistent`` and ``PickleCache`` behave more similarly to the C implementation. 
In particular, the Python version can now run the complete ZODB and ZEO test suites. - Fix the hashcode of the Python ``TimeStamp`` on 32-bit platforms. 4.0.9 (2015-04-08) ------------------ - Make the C and Python ``TimeStamp`` objects behave more alike. The Python version now produces the same ``repr`` and ``.raw()`` output as the C version, and has the same hashcode. In addition, the Python version is now supports ordering and equality like the C version. - Intern keys of object state in ``__setstate__`` to reduce memory usage when unpickling multiple objects with the same attributes. - Add support for PyPy3. - 100% branch coverage. 4.0.8 (2014-03-20) ------------------ - Add support for Python 3.4. - In pure-Python ``Persistent``, avoid loading state in ``_p_activate`` for non-ghost objects (which could corrupt their state). (PR #9) - In pure-Python, and don't throw ``POSKeyError`` if ``_p_activate`` is called on an object that has never been committed. (PR #9) - In pure-Python ``Persistent``, avoid calling a subclass's ``__setattr__`` at instance creation time. (PR #8) - Make it possible to delete ``_p_jar`` / ``_p_oid`` of a pure-Python ``Persistent`` object which has been removed from the jar's cache (fixes aborting a ZODB Connection that has added objects). (PR #7) 4.0.7 (2014-02-20) ------------------ - Avoid a KeyError from ``_p_accessed()`` on newly-created objects under pure-Python: these objects may be assigned to a jar, but not yet added to its cache. (PR #6) - Avoid a failure in ``Persistent.__setstate__`` when the state dict contains exactly two keys. (PR #5) - Fix a hang in ``picklecache`` invalidation if OIDs are manually passed out-of-order. (PR #4) - Add ``PURE_PYTHON`` environment variable support: if set, the C extensions will not be built, imported, or tested. 4.0.6 (2013-01-03) ------------------ - Updated Trove classifiers. 
4.0.5 (2012-12-14) ------------------ - Fixed the C-extensions under Py3k (previously they compiled but were not importable). 4.0.4 (2012-12-11) ------------------ - Added support for Python 3.3. - C extenstions now build under Python 3.2, passing the same tests as the pure-Python reference implementation. 4.0.3 (2012-11-19) ------------------ - Fixed: In the C implimentation, an integer was compared with a pointer, with undefined results and a compiler warning. - Fixed: the Python implementation of the ``_p_estimated_size`` propety didn't support deletion. - Simplified implementation of the ``_p_estimated_size`` property to only accept integers. A TypeError is raised if an incorrect type is provided. 4.0.2 (2012-08-27) ------------------ - Correct initialization functions in renamed ``_timestamp`` extension. 4.0.1 (2012-08-26) ------------------ - Worked around test failure due to overflow to long on 32-bit systems. - Renamed ``TimeStamp`` extension module to avoid clash with pure-Python ``timestamp`` module on case-insensitive filesystems. N.B: the canonical way to import the ``TimeStamp`` class is now:: from persistent.timestamp import TimeStamp which will yield the class from the extension module (if available), falling back to the pure-Python reference implementation. 4.0.0 (2012-08-11) ------------------ Platform Changes ################ - Added explicit support for Python 3.2 and PyPy. - Note that the C implementations of Persistent, PickleCache, and Timestamp are not built (yet) on these platforms. - Dropped support for Python < 2.6. Testing Changes ############### - 100% unit test coverage. - Removed all ``ZODB``-dependent tests: - Rewrote some to avoid the dependency - Cloned the remainder into new ``ZODB.tests`` modules. - Refactored some doctests refactored as unittests. - Completed pure-Python reference implementations of 'Persistent', 'PickleCache', and 'TimeStamp'. - All covered platforms tested under ``tox``. 
- Added support for continuous integration using ``tox`` and ``jenkins``. - Added ``setup.py dev`` alias (installs ``nose`` and ``coverage``). - Dropped dependency on ``zope.testing`` / ``zope.testrunner``: tests now run with ``setup.py test``. Documentation Changes ##################### - Refactored many Doctests as Sphinx documentation (snippets are exercised via 'tox'). - Added ``setup.py docs`` alias (installs ``Sphinx`` and ``repoze.sphinx.autointerface``). ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1585243094.0 persistent-4.6.4/COPYRIGHT.txt0000644000076500000240000000004000000000000016556 0ustar00jmaddenstaff00000000000000Zope Foundation and Contributors././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1585243094.0 persistent-4.6.4/LICENSE.txt0000644000076500000240000000402600000000000016300 0ustar00jmaddenstaff00000000000000Zope Public License (ZPL) Version 2.1 A copyright notice accompanies this license document that identifies the copyright holders. This license has been certified as open source. It has also been designated as GPL compatible by the Free Software Foundation (FSF). Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions in source code must retain the accompanying copyright notice, this list of conditions, and the following disclaimer. 2. Redistributions in binary form must reproduce the accompanying copyright notice, this list of conditions, and the following disclaimer in the documentation and/or other materials provided with the distribution. 3. Names of the copyright holders must not be used to endorse or promote products derived from this software without prior written permission from the copyright holders. 4. 
The right to distribute this software or to use it for any purpose does not give you the right to use Servicemarks (sm) or Trademarks (tm) of the copyright holders. Use of them is covered by separate agreement with the copyright holders. 5. If any files are modified, you must cause the modified files to carry prominent notices stating that you changed the files and the date of any change. Disclaimer THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1585243094.0 persistent-4.6.4/MANIFEST.in0000644000076500000240000000057300000000000016216 0ustar00jmaddenstaff00000000000000prune terryfy include *.txt include *.rst include *.sh include *.yml recursive-include docs * recursive-include persistent * global-exclude *.dll global-exclude *.pyc global-exclude *.pyo global-exclude *.so global-exclude coverage.xml prune docs/_build prune persistent/__pycache__ include .coveragerc include .travis.yml include buildout.cfg include tox.ini include *.py ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1585243094.9944527 persistent-4.6.4/PKG-INFO0000644000076500000240000005327400000000000015563 0ustar00jmaddenstaff00000000000000Metadata-Version: 2.1 Name: persistent Version: 4.6.4 Summary: Translucent persistent objects Home-page: https://github.com/zopefoundation/persistent/ Author: Zope Corporation Author-email: zodb-dev@zope.org License: ZPL 2.1 Description: ``persistent``: automatic persistence for Python objects ========================================================= .. image:: https://travis-ci.org/zopefoundation/persistent.svg?branch=master :target: https://travis-ci.org/zopefoundation/persistent .. image:: https://coveralls.io/repos/github/zopefoundation/persistent/badge.svg?branch=master :target: https://coveralls.io/github/zopefoundation/persistent?branch=master .. image:: https://readthedocs.org/projects/persistent/badge/?version=latest :target: http://persistent.readthedocs.org/en/latest/ :alt: Documentation Status .. image:: https://img.shields.io/pypi/v/persistent.svg :target: https://pypi.org/project/persistent :alt: Latest release .. image:: https://img.shields.io/pypi/pyversions/persistent.svg :target: https://pypi.org/project/persistent :alt: Python versions This package contains a generic persistence implementation for Python. 
It forms the core protocol for making objects interact "transparently" with a database such as the ZODB. Please see the Sphinx documentation (``docs/index.rst``) for further information, or view the documentation at Read The Docs, for either the latest (``http://persistent.readthedocs.io/en/latest/``) or stable release (``http://persistent.readthedocs.io/en/stable/``). .. note:: Use of this standalone ``persistent`` release is not recommended or supported with ZODB < 3.11. ZODB 3.10 and earlier bundle their own version of the ``persistent`` package. ``persistent`` Changelog ======================== 4.6.4 (2020-03-26) ------------------ - Fix an overly specific test failure using zope.interface 5. See `issue 144 `_. - Fix two reference leaks that could theoretically occur as the result of obscure errors. See `issue 143 `_. 4.6.3 (2020-03-18) ------------------ - Fix a crash in the test suite under a 32-bit CPython on certain 32-bit platforms. See `issue 137 `_. Fix by `Jerry James `_. 4.6.2 (2020-03-12) ------------------ - Fix an ``AssertionError`` clearing a non-empty ``PersistentMapping`` that has no connection. See `issue 139 `_. 4.6.1 (2020-03-06) ------------------ - Stop installing C header files on PyPy (which is what persistent before 4.6.0 used to do), fixes `issue 135 `_. 4.6.0 (2020-03-05) ------------------ - Fix slicing of ``PersistentList`` to always return instances of the same class. It was broken on Python 3 prior to 3.7.4. - Fix copying of ``PersistentList`` and ``PersistentMapping`` using ``copy.copy`` to also copy the underlying data object. This was broken prior to Python 3.7.4. - Update the handling of the ``PURE_PYTHON`` environment variable. Now, a value of "0" requires that the C extensions be used; any other non-empty value prevents the extensions from being used. Also, all C extensions are required together or none of them will be used. This prevents strange errors that arise from a mismatch of Python and C implementations. 
See `issue 131 `_. Note that some private implementation details such as the names of the pure-Python implementations have changed. - Fix ``PersistentList`` to mark itself as changed after calling ``clear`` (if needed). See `PR 115 `_. - Fix ``PersistentMapping.update`` to accept keyword arguments like the native ``UserDict``. Previously, most uses of keyword arguments resulted in ``TypeError``; in the undocumented and extremely unlikely event of a single keyword argument called ``b`` that happens to be a dictionary, the behaviour will change. Also adjust the signatures of ``setdefault`` and ``pop`` to match the native version. - Fix ``PersistentList.clear``, ``PersistentMapping.clear`` and ``PersistentMapping.popitem`` to no longer mark the object as changed if it was empty. - Add preliminary support for Python 3.9a3+. See `issue 124 `_. - Fix the Python implementation of the PickleCache to be able to store objects that cannot be weakly referenced. See `issue 133 `_. Note that ``ctypes`` is required to use the Python implementation (except on PyPy). 4.5.1 (2019-11-06) ------------------ - Add support for Python 3.8. - Update documentation to Python 3. 4.5.0 (2019-05-09) ------------------ - Fully test the C implementation of the PickleCache, and fix discrepancies between it and the Python implementation: - The C implementation now raises ``ValueError`` instead of ``AssertionError`` for certain types of bad inputs. - The Python implementation uses the C wording for error messages. - The C implementation properly implements ``IPickleCache``; methods unique to the Python implementation were moved to ``IExtendedPickleCache``. - The Python implementation raises ``AttributeError`` if a persistent class doesn't have a ``p_jar`` attribute. See `issue 102 `_. - Allow sweeping cache without ``cache_size``. ``cache_size_bytes`` works with ``cache_size=0``, no need to set ``cache_size`` to a large value. - Require ``CFFI`` on CPython for pure-Python operation. 
This drops support for Jython (which was untested). See `issue 77 `_. - Fix DeprecationWarning about ``PY_SSIZE_T_CLEAN``. See `issue 108 `_. - Drop support for Python 3.4. 4.4.3 (2018-10-22) ------------------ - Fix the repr of the persistent objects to include the module name when using the C extension. This matches the pure-Python behaviour and the behaviour prior to 4.4.0. See `issue 92 `_. - Change the repr of persistent objects to format the OID as in integer in hexadecimal notation if it is an 8-byte byte string, as ZODB does. This eliminates some issues in doctests. See `issue 95 `_. 4.4.2 (2018-08-28) ------------------ - Explicitly use unsigned constants for packing and unpacking C timestamps, fixing an arithmetic issue for GCC when optimizations are enabled and ``-fwrapv`` is *not* enabled. See `issue 86 `_. 4.4.1 (2018-08-23) ------------------ - Fix installation of source packages on PyPy. See `issue 88 `_. 4.4.0 (2018-08-22) ------------------ - Use unsigned constants when doing arithmetic on C timestamps, possibly avoiding some overflow issues with some compilers or compiler settings. See `issue 86 `_. - Change the default representation of ``Persistent`` objects to include the representation of their OID and jar, if set. Also add the ability for subclasses to implement ``_p_repr()`` instead of overriding ``__repr__`` for better exception handling. See `issue 11 `_. - Reach and maintain 100% test coverage. - Simplify ``__init__.py``, including removal of an attempted legacy import of ``persistent.TimeStamp``. See `PR 80 `_. - Add support for Python 3.7 and drop support for Python 3.3. - Build the CFFI modules (used on PyPy or when PURE_PYTHON is set) `at installation or wheel building time `_ when CFFI is available. This replaces `the deprecated way `_ of building them at import time. If binary wheels are distributed, it eliminates the need to have a functioning C compiler to use PyPy. See `issue 75 `_. 
- Fix deleting the ``_p_oid`` of a pure-Python persistent object when it is in a cache. - Fix deleting special (``_p``) attributes of a pure-Python persistent object that overrides ``__delattr__`` and correctly calls ``_p_delattr``. - Remove some internal compatibility shims that are no longer necessary. See `PR 82 `_. - Make the return value of ``TimeStamp.second()`` consistent across C and Python implementations when the ``TimeStamp`` was created from 6 arguments with floating point seconds. Also make it match across trips through ``TimeStamp.raw()``. Previously, the C version could initially have erroneous rounding and too much false precision, while the Python version could have too much precision. The raw/repr values have not changed. See `issue 41 `_. 4.3.0 (2018-07-30) ------------------ - Fix the possibility of a rare crash in the C extension when deallocating items. See https://github.com/zopefoundation/persistent/issues/66 - Change cPickleCache's comparison of object sizes to determine whether an object can go in the cache to use ``PyObject_TypeCheck()``. This matches what the pure Python implementation does and is a stronger test that the object really is compatible with the cache. Previously, an object could potentially include ``cPersistent_HEAD`` and *not* set ``tp_base`` to ``cPersistenceCAPI->pertype`` and still be eligible for the pickle cache; that is no longer the case. See `issue 69 `_. 4.2.4.2 (2017-04-23) -------------------- - Packaging-only release: fix Python 2.7 ``manylinux`` wheels. 4.2.4.1 (2017-04-21) -------------------- - Packaging-only release: get ``manylinux`` wheel built automatically. 4.2.4 (2017-03-20) ------------------ - Avoid raising a ``SystemError: error return without exception set`` when loading an object with slots whose jar generates an exception (such as a ZODB ``POSKeyError``) in ``setstate``. 4.2.3 (2017-03-08) ------------------ - Fix the hashcode of Python ``TimeStamp`` objects on 64-bit Python on Windows. 
See https://github.com/zopefoundation/persistent/pull/55 - Stop calling ``gc.collect`` every time ``PickleCache.incrgc`` is called (every transaction boundary) in pure-Python mode (PyPy). This means that the reported size of the cache may be wrong (until the next GC), but it is much faster. This should not have any observable effects for user code. - Stop clearing the dict and slots of objects added to ``PickleCache.new_ghost`` (typically these values are passed to ``__new__`` from the pickle data) in pure-Python mode (PyPy). This matches the behaviour of the C code. - Add support for Python 3.6. - Fix ``__setstate__`` interning when ``state`` parameter is not a built-in dict 4.2.2 (2016-11-29) ------------------ - Drop use of ``ctypes`` for determining maximum integer size, to increase pure-Python compatibility. See https://github.com/zopefoundation/persistent/pull/31 - Ensure that ``__slots__`` attributes are cleared when a persistent object is ghostified. (This excluses classes that override ``__new__``. See https://github.com/zopefoundation/persistent/wiki/Notes_on_state_new_and_slots if you're curious.) 4.2.1 (2016-05-26) ------------------ - Fix the hashcode of C ``TimeStamp`` objects on 64-bit Python 3 on Windows. 4.2.0 (2016-05-05) ------------------ - Fixed the Python(/PYPY) implementation ``TimeStamp.timeTime`` method to have subsecond precision. - When testing ``PURE_PYTHON`` environments under ``tox``, avoid poisoning the user's global wheel cache. - Add support for Python 3.5. - Drop support for Python 2.6 and 3.2. 4.1.1 (2015-06-02) ------------------ - Fix manifest and re-upload to fix stray files included in 4.1.0. 4.1.0 (2015-05-19) ------------------ - Make the Python implementation of ``Persistent`` and ``PickleCache`` behave more similarly to the C implementation. In particular, the Python version can now run the complete ZODB and ZEO test suites. - Fix the hashcode of the Python ``TimeStamp`` on 32-bit platforms. 
4.0.9 (2015-04-08) ------------------ - Make the C and Python ``TimeStamp`` objects behave more alike. The Python version now produces the same ``repr`` and ``.raw()`` output as the C version, and has the same hashcode. In addition, the Python version is now supports ordering and equality like the C version. - Intern keys of object state in ``__setstate__`` to reduce memory usage when unpickling multiple objects with the same attributes. - Add support for PyPy3. - 100% branch coverage. 4.0.8 (2014-03-20) ------------------ - Add support for Python 3.4. - In pure-Python ``Persistent``, avoid loading state in ``_p_activate`` for non-ghost objects (which could corrupt their state). (PR #9) - In pure-Python, and don't throw ``POSKeyError`` if ``_p_activate`` is called on an object that has never been committed. (PR #9) - In pure-Python ``Persistent``, avoid calling a subclass's ``__setattr__`` at instance creation time. (PR #8) - Make it possible to delete ``_p_jar`` / ``_p_oid`` of a pure-Python ``Persistent`` object which has been removed from the jar's cache (fixes aborting a ZODB Connection that has added objects). (PR #7) 4.0.7 (2014-02-20) ------------------ - Avoid a KeyError from ``_p_accessed()`` on newly-created objects under pure-Python: these objects may be assigned to a jar, but not yet added to its cache. (PR #6) - Avoid a failure in ``Persistent.__setstate__`` when the state dict contains exactly two keys. (PR #5) - Fix a hang in ``picklecache`` invalidation if OIDs are manually passed out-of-order. (PR #4) - Add ``PURE_PYTHON`` environment variable support: if set, the C extensions will not be built, imported, or tested. 4.0.6 (2013-01-03) ------------------ - Updated Trove classifiers. 4.0.5 (2012-12-14) ------------------ - Fixed the C-extensions under Py3k (previously they compiled but were not importable). 4.0.4 (2012-12-11) ------------------ - Added support for Python 3.3. 
- C extenstions now build under Python 3.2, passing the same tests as the pure-Python reference implementation. 4.0.3 (2012-11-19) ------------------ - Fixed: In the C implimentation, an integer was compared with a pointer, with undefined results and a compiler warning. - Fixed: the Python implementation of the ``_p_estimated_size`` propety didn't support deletion. - Simplified implementation of the ``_p_estimated_size`` property to only accept integers. A TypeError is raised if an incorrect type is provided. 4.0.2 (2012-08-27) ------------------ - Correct initialization functions in renamed ``_timestamp`` extension. 4.0.1 (2012-08-26) ------------------ - Worked around test failure due to overflow to long on 32-bit systems. - Renamed ``TimeStamp`` extension module to avoid clash with pure-Python ``timestamp`` module on case-insensitive filesystems. N.B: the canonical way to import the ``TimeStamp`` class is now:: from persistent.timestamp import TimeStamp which will yield the class from the extension module (if available), falling back to the pure-Python reference implementation. 4.0.0 (2012-08-11) ------------------ Platform Changes ################ - Added explicit support for Python 3.2 and PyPy. - Note that the C implementations of Persistent, PickleCache, and Timestamp are not built (yet) on these platforms. - Dropped support for Python < 2.6. Testing Changes ############### - 100% unit test coverage. - Removed all ``ZODB``-dependent tests: - Rewrote some to avoid the dependency - Cloned the remainder into new ``ZODB.tests`` modules. - Refactored some doctests refactored as unittests. - Completed pure-Python reference implementations of 'Persistent', 'PickleCache', and 'TimeStamp'. - All covered platforms tested under ``tox``. - Added support for continuous integration using ``tox`` and ``jenkins``. - Added ``setup.py dev`` alias (installs ``nose`` and ``coverage``). 
- Dropped dependency on ``zope.testing`` / ``zope.testrunner``: tests now run with ``setup.py test``. Documentation Changes ##################### - Refactored many Doctests as Sphinx documentation (snippets are exercised via 'tox'). - Added ``setup.py docs`` alias (installs ``Sphinx`` and ``repoze.sphinx.autointerface``). Platform: any Classifier: Development Status :: 6 - Mature Classifier: License :: OSI Approved :: Zope Public License Classifier: Programming Language :: Python Classifier: Programming Language :: Python :: 2 Classifier: Programming Language :: Python :: 2.7 Classifier: Programming Language :: Python :: 3 Classifier: Programming Language :: Python :: 3.5 Classifier: Programming Language :: Python :: 3.6 Classifier: Programming Language :: Python :: 3.7 Classifier: Programming Language :: Python :: 3.8 Classifier: Programming Language :: Python :: Implementation :: CPython Classifier: Programming Language :: Python :: Implementation :: PyPy Classifier: Framework :: ZODB Classifier: Topic :: Database Classifier: Topic :: Software Development :: Libraries :: Python Modules Classifier: Operating System :: Microsoft :: Windows Classifier: Operating System :: Unix Provides-Extra: test Provides-Extra: testing Provides-Extra: docs ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1585243094.0 persistent-4.6.4/README.rst0000644000076500000240000000301100000000000016135 0ustar00jmaddenstaff00000000000000``persistent``: automatic persistence for Python objects ========================================================= .. image:: https://travis-ci.org/zopefoundation/persistent.svg?branch=master :target: https://travis-ci.org/zopefoundation/persistent .. image:: https://coveralls.io/repos/github/zopefoundation/persistent/badge.svg?branch=master :target: https://coveralls.io/github/zopefoundation/persistent?branch=master .. 
image:: https://readthedocs.org/projects/persistent/badge/?version=latest :target: http://persistent.readthedocs.org/en/latest/ :alt: Documentation Status .. image:: https://img.shields.io/pypi/v/persistent.svg :target: https://pypi.org/project/persistent :alt: Latest release .. image:: https://img.shields.io/pypi/pyversions/persistent.svg :target: https://pypi.org/project/persistent :alt: Python versions This package contains a generic persistence implementation for Python. It forms the core protocol for making objects interact "transparently" with a database such as the ZODB. Please see the Sphinx documentation (``docs/index.rst``) for further information, or view the documentation at Read The Docs, for either the latest (``http://persistent.readthedocs.io/en/latest/``) or stable release (``http://persistent.readthedocs.io/en/stable/``). .. note:: Use of this standalone ``persistent`` release is not recommended or supported with ZODB < 3.11. ZODB 3.10 and earlier bundle their own version of the ``persistent`` package. 
././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1585243094.0 persistent-4.6.4/appveyor.yml0000644000076500000240000000242700000000000017050 0ustar00jmaddenstaff00000000000000environment: global: TWINE_USERNAME: zope.wheelbuilder TWINE_PASSWORD: secure: UcdTh6W78cRLVGfKRFoa5A== PURE_PYTHON: 0 matrix: - python: 27 - python: 27-x64 - python: 35 - python: 35-x64 - python: 36 - python: 36-x64 - python: 37 - python: 37-x64 - python: 38 - python: 38-x64 install: - "SET PATH=C:\\Python%PYTHON%;c:\\Python%PYTHON%\\scripts;%PATH%" - ps: | $env:PYTHON = "C:\\Python${env:PYTHON}" if (-not (Test-Path $env:PYTHON)) { curl -o install_python.ps1 https://raw.githubusercontent.com/matthew-brett/multibuild/11a389d78892cf90addac8f69433d5e22bfa422a/install_python.ps1 .\install_python.ps1 } - ps: if (-not (Test-Path $env:PYTHON)) { throw "No $env:PYTHON" } - echo "C:\Program Files\Microsoft SDKs\Windows\v7.1\Bin\SetEnv.cmd" /x64 > "C:\Program Files (x86)\Microsoft Visual Studio 10.0\VC\bin\amd64\vcvars64.bat" - python -m pip install -U pip setuptools wheel cffi - pip install -e .[test] build_script: - python -W ignore setup.py -q bdist_wheel test_script: - python -m zope.testrunner --test-path=. artifacts: - path: 'dist\*.whl' name: wheel deploy_script: - ps: if ($env:APPVEYOR_REPO_TAG -eq $TRUE) { pip install twine; twine upload dist/* } deploy: on ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1585243094.0 persistent-4.6.4/bootstrap.py0000644000076500000240000001644200000000000017051 0ustar00jmaddenstaff00000000000000############################################################################## # # Copyright (c) 2006 Zope Foundation and Contributors. # All Rights Reserved. # # This software is subject to the provisions of the Zope Public License, # Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED # WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS # FOR A PARTICULAR PURPOSE. # ############################################################################## """Bootstrap a buildout-based project Simply run this script in a directory containing a buildout.cfg. The script accepts buildout command-line options, so you can use the -c option to specify an alternate configuration file. """ import os import shutil import sys import tempfile from optparse import OptionParser __version__ = '2015-07-01' # See zc.buildout's changelog if this version is up to date. tmpeggs = tempfile.mkdtemp(prefix='bootstrap-') usage = '''\ [DESIRED PYTHON FOR BUILDOUT] bootstrap.py [options] Bootstraps a buildout-based project. Simply run this script in a directory containing a buildout.cfg, using the Python that you want bin/buildout to use. Note that by using --find-links to point to local resources, you can keep this script from going over the network. ''' parser = OptionParser(usage=usage) parser.add_option("--version", action="store_true", default=False, help=("Return bootstrap.py version.")) parser.add_option("-t", "--accept-buildout-test-releases", dest='accept_buildout_test_releases', action="store_true", default=False, help=("Normally, if you do not specify a --version, the " "bootstrap script and buildout gets the newest " "*final* versions of zc.buildout and its recipes and " "extensions for you. 
If you use this flag, " "bootstrap and buildout will get the newest releases " "even if they are alphas or betas.")) parser.add_option("-c", "--config-file", help=("Specify the path to the buildout configuration " "file to be used.")) parser.add_option("-f", "--find-links", help=("Specify a URL to search for buildout releases")) parser.add_option("--allow-site-packages", action="store_true", default=False, help=("Let bootstrap.py use existing site packages")) parser.add_option("--buildout-version", help="Use a specific zc.buildout version") parser.add_option("--setuptools-version", help="Use a specific setuptools version") parser.add_option("--setuptools-to-dir", help=("Allow for re-use of existing directory of " "setuptools versions")) options, args = parser.parse_args() if options.version: print("bootstrap.py version %s" % __version__) sys.exit(0) ###################################################################### # load/install setuptools try: from urllib.request import urlopen except ImportError: from urllib2 import urlopen ez = {} if os.path.exists('ez_setup.py'): exec(open('ez_setup.py').read(), ez) else: exec(urlopen('https://bootstrap.pypa.io/ez_setup.py').read(), ez) if not options.allow_site_packages: # ez_setup imports site, which adds site packages # this will remove them from the path to ensure that incompatible versions # of setuptools are not in the path import site # inside a virtualenv, there is no 'getsitepackages'. # We can't remove these reliably if hasattr(site, 'getsitepackages'): for sitepackage_path in site.getsitepackages(): # Strip all site-packages directories from sys.path that # are not sys.prefix; this is because on Windows # sys.prefix is a site-package directory. 
if sitepackage_path != sys.prefix: sys.path[:] = [x for x in sys.path if sitepackage_path not in x] setup_args = dict(to_dir=tmpeggs, download_delay=0) if options.setuptools_version is not None: setup_args['version'] = options.setuptools_version if options.setuptools_to_dir is not None: setup_args['to_dir'] = options.setuptools_to_dir ez['use_setuptools'](**setup_args) import setuptools import pkg_resources # This does not (always?) update the default working set. We will # do it. for path in sys.path: if path not in pkg_resources.working_set.entries: pkg_resources.working_set.add_entry(path) ###################################################################### # Install buildout ws = pkg_resources.working_set setuptools_path = ws.find( pkg_resources.Requirement.parse('setuptools')).location # Fix sys.path here as easy_install.pth added before PYTHONPATH cmd = [sys.executable, '-c', 'import sys; sys.path[0:0] = [%r]; ' % setuptools_path + 'from setuptools.command.easy_install import main; main()', '-mZqNxd', tmpeggs] find_links = os.environ.get( 'bootstrap-testing-find-links', options.find_links or ('http://downloads.buildout.org/' if options.accept_buildout_test_releases else None) ) if find_links: cmd.extend(['-f', find_links]) requirement = 'zc.buildout' version = options.buildout_version if version is None and not options.accept_buildout_test_releases: # Figure out the most recent final version of zc.buildout. 
import setuptools.package_index _final_parts = '*final-', '*final' def _final_version(parsed_version): try: return not parsed_version.is_prerelease except AttributeError: # Older setuptools for part in parsed_version: if (part[:1] == '*') and (part not in _final_parts): return False return True index = setuptools.package_index.PackageIndex( search_path=[setuptools_path]) if find_links: index.add_find_links((find_links,)) req = pkg_resources.Requirement.parse(requirement) if index.obtain(req) is not None: best = [] bestv = None for dist in index[req.project_name]: distv = dist.parsed_version if _final_version(distv): if bestv is None or distv > bestv: best = [dist] bestv = distv elif distv == bestv: best.append(dist) if best: best.sort() version = best[-1].version if version: requirement = '=='.join((requirement, version)) cmd.append(requirement) import subprocess if subprocess.call(cmd) != 0: raise Exception( "Failed to execute command:\n%s" % repr(cmd)[1:-1]) ###################################################################### # Import and run buildout ws.add_entry(tmpeggs) ws.require(requirement) import zc.buildout.buildout if not [a for a in args if '=' not in a]: args.append('bootstrap') # if -c was provided, we push it back into args for buildout' main function if options.config_file is not None: args[0:0] = ['-c', options.config_file] zc.buildout.buildout.main(args) shutil.rmtree(tmpeggs) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1585243094.0 persistent-4.6.4/buildout.cfg0000644000076500000240000000030400000000000016760 0ustar00jmaddenstaff00000000000000[buildout] develop = . 
parts = test scripts [test] recipe = zc.recipe.testrunner eggs = persistent [test] [scripts] recipe = zc.recipe.egg eggs = persistent [test] interpreter = py ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1585243094.9823337 persistent-4.6.4/docs/0000755000076500000240000000000000000000000015403 5ustar00jmaddenstaff00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1585243094.0 persistent-4.6.4/docs/Makefile0000644000076500000240000001077600000000000017056 0ustar00jmaddenstaff00000000000000# Makefile for Sphinx documentation # # You can set these variables from the command line. SPHINXOPTS = SPHINXBUILD = sphinx-build PAPER = BUILDDIR = _build # Internal variables. PAPEROPT_a4 = -D latex_paper_size=a4 PAPEROPT_letter = -D latex_paper_size=letter ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest help: @echo "Please use \`make ' where is one of" @echo " html to make standalone HTML files" @echo " dirhtml to make HTML files named index.html in directories" @echo " singlehtml to make a single large HTML file" @echo " pickle to make pickle files" @echo " json to make JSON files" @echo " htmlhelp to make HTML files and a HTML help project" @echo " qthelp to make HTML files and a qthelp project" @echo " devhelp to make HTML files and a Devhelp project" @echo " epub to make an epub" @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" @echo " latexpdf to make LaTeX files and run them through pdflatex" @echo " text to make text files" @echo " man to make manual pages" @echo " changes to make an overview of all changed/added/deprecated items" @echo " linkcheck to check all external links for integrity" @echo " doctest to run all doctests embedded in the documentation (if enabled)" clean: -rm 
-rf $(BUILDDIR)/* html: $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." dirhtml: $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." singlehtml: $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml @echo @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." pickle: $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle @echo @echo "Build finished; now you can process the pickle files." json: $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json @echo @echo "Build finished; now you can process the JSON files." htmlhelp: $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp @echo @echo "Build finished; now you can run HTML Help Workshop with the" \ ".hhp project file in $(BUILDDIR)/htmlhelp." qthelp: $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp @echo @echo "Build finished; now you can run "qcollectiongenerator" with the" \ ".qhcp project file in $(BUILDDIR)/qthelp, like this:" @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/persistent.qhcp" @echo "To view the help file:" @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/persistent.qhc" devhelp: $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp @echo @echo "Build finished." @echo "To view the help file:" @echo "# mkdir -p $$HOME/.local/share/devhelp/persistent" @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/persistent" @echo "# devhelp" epub: $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub @echo @echo "Build finished. The epub file is in $(BUILDDIR)/epub." latex: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." @echo "Run \`make' in that directory to run these through (pdf)latex" \ "(use \`make latexpdf' here to do that automatically)." 
latexpdf: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through pdflatex..." make -C $(BUILDDIR)/latex all-pdf @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." text: $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text @echo @echo "Build finished. The text files are in $(BUILDDIR)/text." man: $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man @echo @echo "Build finished. The manual pages are in $(BUILDDIR)/man." changes: $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes @echo @echo "The overview file is in $(BUILDDIR)/changes." linkcheck: $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck @echo @echo "Link check complete; look for any errors in the above output " \ "or in $(BUILDDIR)/linkcheck/output.txt." doctest: $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest @echo "Testing of doctests in the sources finished, look at the " \ "results in $(BUILDDIR)/doctest/output.txt." ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1585243094.9835837 persistent-4.6.4/docs/api/0000755000076500000240000000000000000000000016154 5ustar00jmaddenstaff00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1585243094.0 persistent-4.6.4/docs/api/attributes.rst0000644000076500000240000001465100000000000021103 0ustar00jmaddenstaff00000000000000Customizing Attribute Access ============================ Hooking :meth:`__getattr__` --------------------------- The __getattr__ method works pretty much the same for persistent classes as it does for other classes. No special handling is needed. If an object is a ghost, then it will be activated before __getattr__ is called. In this example, our objects returns a tuple with the attribute name, converted to upper case and the value of _p_changed, for any attribute that isn't handled by the default machinery. .. 
doctest:: >>> from persistent.tests.attrhooks import OverridesGetattr >>> o = OverridesGetattr() >>> o._p_changed False >>> o._p_oid >>> o._p_jar >>> o.spam ('SPAM', False) >>> o.spam = 1 >>> o.spam 1 We'll save the object, so it can be deactivated: .. doctest:: >>> from persistent.tests.attrhooks import _resettingJar >>> jar = _resettingJar() >>> jar.add(o) >>> o._p_deactivate() >>> o._p_changed And now, if we ask for an attribute it doesn't have, .. doctest:: >>> o.eggs ('EGGS', False) And we see that the object was activated before calling the :meth:`__getattr__` method. Hooking All Access ------------------ In this example, we'll provide an example that shows how to override the :meth:`__getattribute__`, :meth:`__setattr__`, and :meth:`__delattr__` methods. We'll create a class that stores it's attributes in a secret dictionary within the instance dictionary. The class will have the policy that variables with names starting with ``tmp_`` will be volatile. Our sample class takes initial values as keyword arguments to the constructor: .. doctest:: >>> from persistent.tests.attrhooks import VeryPrivate >>> o = VeryPrivate(x=1) Hooking :meth:`__getattribute__`` ################################# The :meth:`__getattribute__` method is called for all attribute accesses. It overrides the attribute access support inherited from Persistent. .. doctest:: >>> o._p_changed False >>> o._p_oid >>> o._p_jar >>> o.x 1 >>> o.y Traceback (most recent call last): ... AttributeError: y Next, we'll save the object in a database so that we can deactivate it: .. doctest:: >>> from persistent.tests.attrhooks import _rememberingJar >>> jar = _rememberingJar() >>> jar.add(o) >>> o._p_deactivate() >>> o._p_changed And we'll get some data: .. doctest:: >>> o.x 1 which activates the object: .. doctest:: >>> o._p_changed False It works for missing attributes too: .. doctest:: >>> o._p_deactivate() >>> o._p_changed >>> o.y Traceback (most recent call last): ... 
AttributeError: y >>> o._p_changed False Hooking :meth:`__setattr__`` ############################ The :meth:`__setattr__` method is called for all attribute assignments. It overrides the attribute assignment support inherited from Persistent. Implementors of :meth:`__setattr__` methods: 1. Must call Persistent._p_setattr first to allow it to handle some attributes and to make sure that the object is activated if necessary, and 2. Must set _p_changed to mark objects as changed. .. doctest:: >>> o = VeryPrivate() >>> o._p_changed False >>> o._p_oid >>> o._p_jar >>> o.x Traceback (most recent call last): ... AttributeError: x >>> o.x = 1 >>> o.x 1 Because the implementation doesn't store attributes directly in the instance dictionary, we don't have a key for the attribute: .. doctest:: >>> 'x' in o.__dict__ False Next, we'll give the object a "remembering" jar so we can deactivate it: .. doctest:: >>> jar = _rememberingJar() >>> jar.add(o) >>> o._p_deactivate() >>> o._p_changed We'll modify an attribute .. doctest:: >>> o.y = 2 >>> o.y 2 which reactivates it, and marks it as modified, because our implementation marked it as modified: .. doctest:: >>> o._p_changed True Now, if fake a commit: .. doctest:: >>> jar.fake_commit() >>> o._p_changed False And deactivate the object: .. doctest:: >>> o._p_deactivate() >>> o._p_changed and then set a variable with a name starting with ``tmp_``, The object will be activated, but not marked as modified, because our :meth:`__setattr__` implementation doesn't mark the object as changed if the name starts with ``tmp_``: .. doctest:: >>> o.tmp_foo = 3 >>> o._p_changed False >>> o.tmp_foo 3 Hooking :meth:`__delattr__`` ############################ The __delattr__ method is called for all attribute deletions. It overrides the attribute deletion support inherited from Persistent. Implementors of :meth:`__delattr__` methods: 1. 
Must call Persistent._p_delattr first to allow it to handle some attributes and to make sure that the object is activated if necessary, and 2. Must set _p_changed to mark objects as changed. .. doctest:: >>> o = VeryPrivate(x=1, y=2, tmp_z=3) >>> o._p_changed False >>> o._p_oid >>> o._p_jar >>> o.x 1 >>> del o.x >>> o.x Traceback (most recent call last): ... AttributeError: x Next, we'll save the object in a jar so that we can deactivate it: .. doctest:: >>> jar = _rememberingJar() >>> jar.add(o) >>> o._p_deactivate() >>> o._p_changed If we delete an attribute: .. doctest:: >>> del o.y The object is activated. It is also marked as changed because our implementation marked it as changed. .. doctest:: >>> o._p_changed True >>> o.y Traceback (most recent call last): ... AttributeError: y >>> o.tmp_z 3 Now, if fake a commit: .. doctest:: >>> jar.fake_commit() >>> o._p_changed False And deactivate the object: .. doctest:: >>> o._p_deactivate() >>> o._p_changed and then delete a variable with a name starting with ``tmp_``, The object will be activated, but not marked as modified, because our :meth:`__delattr__` implementation doesn't mark the object as changed if the name starts with ``tmp_``: .. doctest:: >>> del o.tmp_z >>> o._p_changed False >>> o.tmp_z Traceback (most recent call last): ... AttributeError: tmp_z If we attempt to delete ``_p_oid``, we find that we can't, and the object is also not activated or changed: .. doctest:: >>> del o._p_oid Traceback (most recent call last): ... 
ValueError: can't delete _p_oid of cached object >>> o._p_changed False We are allowed to delete ``_p_changed``, which sets it to ``None``: >>> del o._p_changed >>> o._p_changed is None True ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1585243094.0 persistent-4.6.4/docs/api/cache.rst0000644000076500000240000000344700000000000017761 0ustar00jmaddenstaff00000000000000Caching Persistent Objects ========================== Creating Objects ``de novo`` ---------------------------- Creating ghosts from scratch, as opposed to ghostifying a non-ghost is rather tricky. :class:`~persistent.interfaces.IPeristent` doesn't really provide the right interface given that: - :meth:`_p_deactivate` and :meth:`_p_invalidate` are overridable, and could assume that the object's state is properly initialized. - Assigning :attr:`_p_changed` to None just calls :meth:`_p_deactivate`. - Deleting :attr:`_p_changed` just calls :meth:`_p_invalidate`. .. note:: The current cache implementation is intimately tied up with the persistence implementation and has internal access to the persistence state. The cache implementation can update the persistence state for newly created and uninitialized objects directly. The future persistence and cache implementations will be far more decoupled. The persistence implementation will only manage object state and generate object-usage events. The cache implementation(s) will be responsible for managing persistence-related (meta-)state, such as _p_state, _p_changed, _p_oid, etc. So in that future implementation, the cache will be more central to managing object persistence information. Caches have a :meth:`new_ghost` method that: - adds an object to the cache, and - initializes its persistence data. .. doctest:: >>> import persistent >>> from persistent.tests.utils import ResettingJar >>> class C(persistent.Persistent): ... 
pass >>> jar = ResettingJar() >>> cache = persistent.PickleCache(jar, 10, 100) >>> ob = C.__new__(C) >>> cache.new_ghost(b'1', ob) >>> ob._p_changed >>> ob._p_jar is jar True >>> ob._p_oid == b'1' True >>> cache.cache_non_ghost_count 0 ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1585243094.0 persistent-4.6.4/docs/api/collections.rst0000644000076500000240000000060200000000000021222 0ustar00jmaddenstaff00000000000000======================== Persistent Collections ======================== The ``persistent`` package provides two simple collections that are persistent and keep track of when they are mutated in place. .. autoclass:: persistent.mapping.PersistentMapping :members: :show-inheritance: .. autoclass:: persistent.list.PersistentList :members: :show-inheritance: ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1585243094.0 persistent-4.6.4/docs/api/interfaces.rst0000644000076500000240000000105100000000000021026 0ustar00jmaddenstaff00000000000000:mod:`persistent.interfaces` =================================== .. automodule:: persistent.interfaces .. autointerface:: IPersistent :members: :member-order: bysource .. autointerface:: IPersistentDataManager :members: :member-order: bysource .. autointerface:: IPickleCache :members: :member-order: bysource Implementations =============== This package provides one implementation of :class:`IPersistent` that should be extended. .. autoclass:: persistent.Persistent :members: :show-inheritance: ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1585243094.0 persistent-4.6.4/docs/api/pickling.rst0000644000076500000240000000742600000000000020517 0ustar00jmaddenstaff00000000000000Pickling Persistent Objects =========================== Persistent objects are designed to make the standard Python pickling machinery happy: .. 
doctest:: >>> import pickle >>> from persistent.tests.cucumbers import Simple >>> from persistent.tests.cucumbers import print_dict >>> x = Simple('x', aaa=1, bbb='foo') >>> print_dict(x.__getstate__()) {'__name__': 'x', 'aaa': 1, 'bbb': 'foo'} >>> f, (c,), state = x.__reduce__() >>> f.__name__ '__newobj__' >>> f.__module__.replace('_', '') # Normalize Python2/3 'copyreg' >>> c.__name__ 'Simple' >>> print_dict(state) {'__name__': 'x', 'aaa': 1, 'bbb': 'foo'} >>> import pickle >>> pickle.loads(pickle.dumps(x)) == x True >>> pickle.loads(pickle.dumps(x, 0)) == x True >>> pickle.loads(pickle.dumps(x, 1)) == x True >>> pickle.loads(pickle.dumps(x, 2)) == x True >>> x.__setstate__({'z': 1}) >>> x.__dict__ {'z': 1} This support even works well for derived classes which customize pickling by overriding :meth:`__getnewargs__`, :meth:`__getstate__` and :meth:`__setstate__`. .. doctest:: >>> from persistent.tests.cucumbers import Custom >>> x = Custom('x', 'y') >>> x.__getnewargs__() ('x', 'y') >>> x.a = 99 >>> (f, (c, ax, ay), a) = x.__reduce__() >>> f.__name__ '__newobj__' >>> f.__module__.replace('_', '') # Normalize Python2/3 'copyreg' >>> c.__name__ 'Custom' >>> ax, ay, a ('x', 'y', 99) >>> pickle.loads(pickle.dumps(x)) == x True >>> pickle.loads(pickle.dumps(x, 0)) == x True >>> pickle.loads(pickle.dumps(x, 1)) == x True >>> pickle.loads(pickle.dumps(x, 2)) == x True The support works for derived classes which define :attr:`__slots__`. It ignores any slots which map onto the "persistent" namespace (prefixed with ``_p_``) or the "volatile" namespace (prefixed with ``_v_``): .. doctest:: >>> from persistent.tests.cucumbers import SubSlotted >>> x = SubSlotted('x', 'y', 'z') Note that we haven't yet assigned a value to the ``s4`` attribute: .. 
doctest:: >>> d, s = x.__getstate__() >>> d >>> print_dict(s) {'s1': 'x', 's2': 'y', 's3': 'z'} >>> import pickle >>> pickle.loads(pickle.dumps(x)) == x True >>> pickle.loads(pickle.dumps(x, 0)) == x True >>> pickle.loads(pickle.dumps(x, 1)) == x True >>> pickle.loads(pickle.dumps(x, 2)) == x True After assigning it: .. doctest:: >>> x.s4 = 'spam' >>> d, s = x.__getstate__() >>> d >>> print_dict(s) {'s1': 'x', 's2': 'y', 's3': 'z', 's4': 'spam'} >>> pickle.loads(pickle.dumps(x)) == x True >>> pickle.loads(pickle.dumps(x, 0)) == x True >>> pickle.loads(pickle.dumps(x, 1)) == x True >>> pickle.loads(pickle.dumps(x, 2)) == x True :class:`persistent.Persistent` supports derived classes which have base classes defining :attr:`__slots`, but which do not define attr:`__slots__` themselves: .. doctest:: >>> from persistent.tests.cucumbers import SubSubSlotted >>> x = SubSubSlotted('x', 'y', 'z') >>> d, s = x.__getstate__() >>> print_dict(d) {} >>> print_dict(s) {'s1': 'x', 's2': 'y', 's3': 'z'} >>> import pickle >>> pickle.loads(pickle.dumps(x)) == x True >>> pickle.loads(pickle.dumps(x, 0)) == x True >>> pickle.loads(pickle.dumps(x, 1)) == x True >>> pickle.loads(pickle.dumps(x, 2)) == x True >>> x.s4 = 'spam' >>> x.foo = 'bar' >>> x.baz = 'bam' >>> d, s = x.__getstate__() >>> print_dict(d) {'baz': 'bam', 'foo': 'bar'} >>> print_dict(s) {'s1': 'x', 's2': 'y', 's3': 'z', 's4': 'spam'} >>> pickle.loads(pickle.dumps(x)) == x True >>> pickle.loads(pickle.dumps(x, 0)) == x True >>> pickle.loads(pickle.dumps(x, 1)) == x True >>> pickle.loads(pickle.dumps(x, 2)) == x True ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1585243094.0 persistent-4.6.4/docs/api.rst0000644000076500000240000000027300000000000016710 0ustar00jmaddenstaff00000000000000:mod:`persistent` API documentation =================================== .. 
toctree:: :maxdepth: 2 api/interfaces api/collections api/attributes api/pickling api/cache ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1585243094.0 persistent-4.6.4/docs/conf.py0000644000076500000240000002074000000000000016705 0ustar00jmaddenstaff00000000000000# -*- coding: utf-8 -*- # # persistent documentation build configuration file, created by # sphinx-quickstart on Wed Feb 16 20:50:32 2011. # # This file is execfile()d with the current directory set to its containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. import sys, os # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. #sys.path.insert(0, os.path.abspath('.')) # -- General configuration ----------------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. #needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. extensions = [ 'sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.intersphinx', 'sphinx.ext.todo', 'sphinx.ext.coverage', 'sphinx.ext.ifconfig', 'sphinx.ext.viewcode', 'repoze.sphinx.autointerface', ] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. #source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. 
project = u'persistent' copyright = u'2011,2016 ZODB Developers ' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. version = '4.2' # The full version, including alpha/beta/rc tags. release = '4.2.2' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. #language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. #today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = ['_build'] # The reST default role (used for this markup: `text`) to use for all documents. #default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. #add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). #add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. #show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. #modindex_common_prefix = [] # -- Options for HTML output --------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. html_theme = 'default' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. #html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. 
#html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". #html_title = None # A shorter title for the navigation bar. Default is the same as html_title. #html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. #html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. #html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. #html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. #html_use_smartypants = True # Custom sidebar templates, maps document names to template names. #html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. #html_additional_pages = {} # If false, no module index is generated. #html_domain_indices = True # If false, no index is generated. #html_use_index = True # If true, the index is split into individual pages for each letter. #html_split_index = False # If true, links to the reST sources are added to the pages. #html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. #html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. #html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. 
The value of this option must be the # base URL from which the finished HTML is served. #html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). #html_file_suffix = None # Output file base name for HTML help builder. htmlhelp_basename = 'persistentdoc' # -- Options for LaTeX output -------------------------------------------------- # The paper size ('letter' or 'a4'). #latex_paper_size = 'letter' # The font size ('10pt', '11pt' or '12pt'). #latex_font_size = '10pt' # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). latex_documents = [ ('index', 'persistent.tex', u'persistent Documentation', u'ZODB Developers \\textless{}zope-dev@zope.org\\textgreater{}', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. #latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. #latex_use_parts = False # If true, show page references after internal links. #latex_show_pagerefs = False # If true, show URL addresses after external links. #latex_show_urls = False # Additional stuff for the LaTeX preamble. #latex_preamble = '' # Documents to append as an appendix to all manuals. #latex_appendices = [] # If false, no module index is generated. #latex_domain_indices = True # -- Options for manual page output -------------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ ('index', 'persistent', u'persistent Documentation', [u'ZODB Developers '], 1) ] # -- Options for Epub output --------------------------------------------------- # Bibliographic Dublin Core info. epub_title = u'persistent' epub_author = u'ZODB Developers ' epub_publisher = u'ZODB Developers ' epub_copyright = u'2011, ZODB Developers ' # The language of the text. 
It defaults to the language option # or en if the language is not set. #epub_language = '' # The scheme of the identifier. Typical schemes are ISBN or URL. #epub_scheme = '' # The unique identifier of the text. This can be a ISBN number # or the project homepage. #epub_identifier = '' # A unique identification for the text. #epub_uid = '' # HTML files that should be inserted before the pages created by sphinx. # The format is a list of tuples containing the path and title. #epub_pre_files = [] # HTML files shat should be inserted after the pages created by sphinx. # The format is a list of tuples containing the path and title. #epub_post_files = [] # A list of files that should not be packed into the epub file. #epub_exclude_files = [] # The depth of the table of contents in toc.ncx. #epub_tocdepth = 3 # Allow duplicate toc entries. #epub_tocdup = True # Example configuration for intersphinx: refer to the Python standard library. intersphinx_mapping = {'https://docs.python.org/': None} ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1585243094.0 persistent-4.6.4/docs/glossary.rst0000644000076500000240000000273100000000000020003 0ustar00jmaddenstaff00000000000000.. _glossary: Glossary ======== .. glossary:: :sorted: data manager The object responsible for storing and loading an object's :term:`pickled data` in a backing store. Also called a :term:`jar`. jar Alias for :term:`data manager`: short for "pickle jar", because it traditionally holds the :term:`pickled data` of persistent objects. object cache An MRU cache for objects associated with a given :term:`data manager`. ghost An object whose :term:`pickled data` has not yet been loaded from its :term:`jar`. Accessing or mutating any of its attributes causes that data to be loaded, which is referred to as :term:`activation`. volatile attribute Attributes of a persistent object which are *not* captured as part of its :term:`pickled data`. 
These attributes thus disappear during :term:`deactivation` or :term:`invalidation`. pickled data The serialized data of a persistent object, stored in and retrieved from a backing store by a :term:`data manager`. activation Moving an object from the ``GHOST`` state to the ``UPTODATE`` state, load its :term:`pickled data` from its :term:`jar`. deactivation Moving an object from the ``UPTODATE`` state to the ``GHOST`` state, discarding its :term:`pickled data`. invalidation Moving an object from either the ``UPTODATE`` state or the ``CHANGED`` state to the ``GHOST`` state, discarding its :term:`pickled data`. ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1585243094.0 persistent-4.6.4/docs/index.rst0000644000076500000240000000071600000000000017250 0ustar00jmaddenstaff00000000000000:mod:`persistent`: automatic persistence for Python objects ============================================================ This package contains a generic persistence implementation for Python. It forms the core protocol for making objects interact "transparently" with a database such as the ZODB. Contents: .. toctree:: :maxdepth: 2 using api glossary Indices and tables ================== * :ref:`genindex` * :ref:`modindex` * :ref:`search` ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1585243094.0 persistent-4.6.4/docs/make.bat0000644000076500000240000001064700000000000017020 0ustar00jmaddenstaff00000000000000@ECHO OFF REM Command file for Sphinx documentation if "%SPHINXBUILD%" == "" ( set SPHINXBUILD=sphinx-build ) set BUILDDIR=_build set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . if NOT "%PAPER%" == "" ( set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% ) if "%1" == "" goto help if "%1" == "help" ( :help echo.Please use `make ^` where ^ is one of echo. html to make standalone HTML files echo. dirhtml to make HTML files named index.html in directories echo. 
singlehtml to make a single large HTML file echo. pickle to make pickle files echo. json to make JSON files echo. htmlhelp to make HTML files and a HTML help project echo. qthelp to make HTML files and a qthelp project echo. devhelp to make HTML files and a Devhelp project echo. epub to make an epub echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter echo. text to make text files echo. man to make manual pages echo. changes to make an overview over all changed/added/deprecated items echo. linkcheck to check all external links for integrity echo. doctest to run all doctests embedded in the documentation if enabled goto end ) if "%1" == "clean" ( for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i del /q /s %BUILDDIR%\* goto end ) if "%1" == "html" ( %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/html. goto end ) if "%1" == "dirhtml" ( %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. goto end ) if "%1" == "singlehtml" ( %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. goto end ) if "%1" == "pickle" ( %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can process the pickle files. goto end ) if "%1" == "json" ( %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can process the JSON files. goto end ) if "%1" == "htmlhelp" ( %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can run HTML Help Workshop with the ^ .hhp project file in %BUILDDIR%/htmlhelp. 
goto end ) if "%1" == "qthelp" ( %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can run "qcollectiongenerator" with the ^ .qhcp project file in %BUILDDIR%/qthelp, like this: echo.^> qcollectiongenerator %BUILDDIR%\qthelp\persistent.qhcp echo.To view the help file: echo.^> assistant -collectionFile %BUILDDIR%\qthelp\persistent.ghc goto end ) if "%1" == "devhelp" ( %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp if errorlevel 1 exit /b 1 echo. echo.Build finished. goto end ) if "%1" == "epub" ( %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub if errorlevel 1 exit /b 1 echo. echo.Build finished. The epub file is in %BUILDDIR%/epub. goto end ) if "%1" == "latex" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex if errorlevel 1 exit /b 1 echo. echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. goto end ) if "%1" == "text" ( %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text if errorlevel 1 exit /b 1 echo. echo.Build finished. The text files are in %BUILDDIR%/text. goto end ) if "%1" == "man" ( %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man if errorlevel 1 exit /b 1 echo. echo.Build finished. The manual pages are in %BUILDDIR%/man. goto end ) if "%1" == "changes" ( %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes if errorlevel 1 exit /b 1 echo. echo.The overview file is in %BUILDDIR%/changes. goto end ) if "%1" == "linkcheck" ( %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck if errorlevel 1 exit /b 1 echo. echo.Link check complete; look for any errors in the above output ^ or in %BUILDDIR%/linkcheck/output.txt. goto end ) if "%1" == "doctest" ( %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest if errorlevel 1 exit /b 1 echo. echo.Testing of doctests in the sources finished, look at the ^ results in %BUILDDIR%/doctest/output.txt. 
goto end ) :end ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1585243094.0 persistent-4.6.4/docs/using.rst0000644000076500000240000003055000000000000017265 0ustar00jmaddenstaff00000000000000============================================= Using :mod:`persistent` in your application ============================================= Inheriting from :class:`persistent.Persistent` ============================================== The basic mechanism for making your application's objects persistent is mix-in inheritance. Instances whose classes derive from :class:`persistent.Persistent` are automatically capable of being created as :term:`ghost` instances, being associated with a database connection (called the :term:`jar`), and notifying the connection when they have been changed. Relationship to a Data Manager and its Cache ============================================ Except immediately after their creation, persistent objects are normally associated with a :term:`data manager` (also referred to as a :term:`jar`). An object's data manager is stored in its ``_p_jar`` attribute. The data manager is responsible for loading and saving the state of the persistent object to some sort of backing store, including managing any interactions with transaction machinery. Each data manager maintains an :term:`object cache`, which keeps track of the currently loaded objects, as well as any objects they reference which have not yet been loaded: such an object is called a :term:`ghost`. The cache is stored on the data manager in its ``_cache`` attribute. A persistent object remains in the ghost state until the application attempts to access or mutate one of its attributes: at that point, the object requests that its data manager load its state. The persistent object also notifies the cache that it has been loaded, as well as on each subsequent attribute access. 
The cache keeps a "most-recently-used" list of its objects, and removes objects in least-recently-used order when it is asked to reduce its working set. The examples below use a stub data manager class, and its stub cache class: .. doctest:: >>> class Cache(object): ... def __init__(self): ... self._mru = [] ... def mru(self, oid): ... self._mru.append(oid) >>> from zope.interface import implementer >>> from persistent.interfaces import IPersistentDataManager >>> @implementer(IPersistentDataManager) ... class DM(object): ... def __init__(self): ... self._cache = Cache() ... self.registered = 0 ... def register(self, ob): ... self.registered += 1 ... def setstate(self, ob): ... ob.__setstate__({'x': 42}) .. note:: Notice that the ``DM`` class always sets the ``x`` attribute to the value ``42`` when activating an object. Persistent objects without a Data Manager ========================================= Before persistent instance has been associated with a a data manager ( i.e., its ``_p_jar`` is still ``None``). The examples below use a class, ``P``, defined as: .. doctest:: >>> from persistent import Persistent >>> from persistent.interfaces import GHOST, UPTODATE, CHANGED >>> class P(Persistent): ... def __init__(self): ... self.x = 0 ... def inc(self): ... self.x += 1 Instances of the derived ``P`` class which are not (yet) assigned to a :term:`data manager` behave as other Python instances, except that they have some extra attributes: .. doctest:: >>> p = P() >>> p.x 0 The :attr:`_p_changed` attribute is a three-state flag: it can be one of ``None`` (the object is not loaded), ``False`` (the object has not been changed since it was loaded) or ``True`` (the object has been changed). Until the object is assigned a :term:`jar`, this attribute will always be ``False``. .. doctest:: >>> p._p_changed False The :attr:`_p_state` attribute is an integer, representing which of the "persistent lifecycle" states the object is in. 
Until the object is assigned a :term:`jar`, this attribute will always be ``0`` (the ``UPTODATE`` constant): .. doctest:: >>> p._p_state == UPTODATE True The :attr:`_p_jar` attribute is the object's :term:`data manager`. Since it has not yet been assigned, its value is ``None``: .. doctest:: >>> print(p._p_jar) None The :attr:`_p_oid` attribute is the :term:`object id`, a unique value normally assigned by the object's :term:`data manager`. Since the object has not yet been associated with its :term:`jar`, its value is ``None``: .. doctest:: >>> print(p._p_oid) None Without a data manager, modifying a persistent object has no effect on its ``_p_state`` or ``_p_changed``. .. doctest:: >>> p.inc() >>> p.inc() >>> p.x 2 >>> p._p_changed False >>> p._p_state 0 Try all sorts of different ways to change the object's state: .. doctest:: >>> p._p_deactivate() >>> p._p_state 0 >>> p._p_changed False >>> p._p_changed = True >>> p._p_changed False >>> p._p_state 0 >>> del p._p_changed >>> p._p_changed False >>> p._p_state 0 >>> p.x 2 Associating an Object with a Data Manager ========================================= Once associated with a data manager, a persistent object's behavior changes: .. doctest:: >>> p = P() >>> dm = DM() >>> p._p_oid = "00000012" >>> p._p_jar = dm >>> p._p_changed False >>> p._p_state 0 >>> p.__dict__ {'x': 0} >>> dm.registered 0 Modifying the object marks it as changed and registers it with the data manager. Subsequent modifications don't have additional side-effects. .. doctest:: >>> p.inc() >>> p.x 1 >>> p.__dict__ {'x': 1} >>> p._p_changed True >>> p._p_state 1 >>> dm.registered 1 >>> p.inc() >>> p._p_changed True >>> p._p_state 1 >>> dm.registered 1 Object which register themselves with the data manager are candidates for storage to the backing store at a later point in time. Note that mutating a non-persistent attribute of a persistent object such as a :class:`dict` or :class:`list` will *not* cause the containing object to be changed. 
Instead you can either explicitly control the state as described below, or use a :class:`~.PersistentList` or :class:`~.PersistentMapping`. Explicitly controlling ``_p_state`` =================================== Persistent objects expose three methods for moving an object into and out of the "ghost" state:: :meth:`persistent.Persistent._p_activate`, :meth:`persistent.Persistent._p_activate_p_deactivate`, and :meth:`persistent.Persistent._p_invalidate`: .. doctest:: >>> p = P() >>> p._p_oid = '00000012' >>> p._p_jar = DM() After being assigned a jar, the object is initially in the ``UPTODATE`` state: .. doctest:: >>> p._p_state 0 From that state, ``_p_deactivate`` rests the object to the ``GHOST`` state: .. doctest:: >>> p._p_deactivate() >>> p._p_state -1 From the ``GHOST`` state, ``_p_activate`` reloads the object's data and moves it to the ``UPTODATE`` state: .. doctest:: >>> p._p_activate() >>> p._p_state 0 >>> p.x 42 Changing the object puts it in the ``CHANGED`` state: .. doctest:: >>> p.inc() >>> p.x 43 >>> p._p_state 1 Attempting to deactivate in the ``CHANGED`` state is a no-op: .. doctest:: >>> p._p_deactivate() >>> p.__dict__ {'x': 43} >>> p._p_changed True >>> p._p_state 1 ``_p_invalidate`` forces objects into the ``GHOST`` state; it works even on objects in the ``CHANGED`` state, which is the key difference between deactivation and invalidation: .. doctest:: >>> p._p_invalidate() >>> p.__dict__ {} >>> p._p_state -1 You can manually reset the ``_p_changed`` field to ``False``: in this case, the object changes to the ``UPTODATE`` state but retains its modifications: .. doctest:: >>> p.inc() >>> p.x 43 >>> p._p_changed = False >>> p._p_state 0 >>> p._p_changed False >>> p.x 43 For an object in the "ghost" state, assigning ``True`` (or any value which is coercible to ``True``) to its ``_p_changed`` attributes activates the object, which is exactly the same as calling ``_p_activate``: .. 
doctest:: >>> p._p_invalidate() >>> p._p_state -1 >>> p._p_changed = True >>> p._p_changed True >>> p._p_state 1 >>> p.x 42 The pickling protocol ===================== Because persistent objects need to control how they are pickled and unpickled, the :class:`persistent.Persistent` base class overrides the implementations of ``__getstate__()`` and ``__setstate__()``: .. doctest:: >>> p = P() >>> dm = DM() >>> p._p_oid = "00000012" >>> p._p_jar = dm >>> p.__getstate__() {'x': 0} >>> p._p_state 0 Calling ``__setstate__`` always leaves the object in the uptodate state. .. doctest:: >>> p.__setstate__({'x': 5}) >>> p._p_state 0 A :term:`volatile attribute` is an attribute those whose name begins with a special prefix (``_v__``). Unlike normal attributes, volatile attributes do not get stored in the object's :term:`pickled data`. .. doctest:: >>> p._v_foo = 2 >>> p.__getstate__() {'x': 5} Assigning to volatile attributes doesn't cause the object to be marked as changed: .. doctest:: >>> p._p_state 0 The ``_p_serial`` attribute is not affected by calling setstate. .. doctest:: >>> p._p_serial = b"00000012" >>> p.__setstate__(p.__getstate__()) >>> p._p_serial b'00000012' Estimated Object Size ===================== We can store a size estimation in ``_p_estimated_size``. Its default is 0. The size estimation can be used by a cache associated with the data manager to help in the implementation of its replacement strategy or its size bounds. .. doctest:: >>> p._p_estimated_size 0 >>> p._p_estimated_size = 1000 >>> p._p_estimated_size 1024 Huh? Why is the estimated size coming out different than what we put in? The reason is that the size isn't stored exactly. For backward compatibility reasons, the size needs to fit in 24 bits, so, internally, it is adjusted somewhat. Of course, the estimated size must not be negative. .. doctest:: >>> p._p_estimated_size = -1 Traceback (most recent call last): .... 
ValueError: _p_estimated_size must not be negative Overriding the attribute protocol ================================= Subclasses which override the attribute-management methods provided by :class:`persistent.Persistent`, but must obey some constraints: :meth:`__getattribute__` When overriding ``__getattribute__``, the derived class implementation **must** first call :meth:`persistent.IPersistent._p_getattr`, passing the name being accessed. This method ensures that the object is activated, if needed, and handles the "special" attributes which do not require activation (e.g., ``_p_oid``, ``__class__``, ``__dict__``, etc.) If ``_p_getattr`` returns ``True``, the derived class implementation **must** delegate to the base class implementation for the attribute. :meth:`__setattr__` When overriding ``__setattr__``, the derived class implementation **must** first call :meth:`persistent.IPersistent._p_setattr`, passing the name being accessed and the value. This method ensures that the object is activated, if needed, and handles the "special" attributes which do not require activation (``_p_*``). If ``_p_setattr`` returns ``True``, the derived implementation must assume that the attribute value has been set by the base class. :meth:`__delattr__` When overriding ``__delattr__``, the derived class implementation **must** first call :meth:`persistent.IPersistent._p_delattr`, passing the name being accessed. This method ensures that the object is activated, if needed, and handles the "special" attributes which do not require activation (``_p_*``). If ``_p_delattr`` returns ``True``, the derived implementation must assume that the attribute has been deleted base class. :meth:`__getattr__` For the ``__getattr__`` method, the behavior is like that for regular Python classes and for earlier versions of ZODB 3. Implementing ``_p_repr`` ======================== Subclasses can implement ``_p_repr`` to provide a custom representation. 
If this method raises an exception, the default representation will be used. The benefit of implementing ``_p_repr`` instead of overriding ``__repr__`` is that it provides safer handling for objects that can't be activated because their persistent data is missing or their jar is closed. .. doctest:: >>> class P(Persistent): ... def _p_repr(self): ... return "Custom repr" >>> p = P() >>> print(repr(p)) Custom repr ././@PaxHeader0000000000000000000000000000003300000000000011451 xustar000000000000000027 mtime=1585243094.988869 persistent-4.6.4/persistent/0000755000076500000240000000000000000000000016653 5ustar00jmaddenstaff00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1585243094.0 persistent-4.6.4/persistent/__init__.py0000644000076500000240000000274400000000000020773 0ustar00jmaddenstaff00000000000000############################################################################## # # Copyright (c) 2001, 2002 Zope Foundation and Contributors. # All Rights Reserved. # # This software is subject to the provisions of the Zope Public License, # Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. # THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED # WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS # FOR A PARTICULAR PURPOSE # ############################################################################## """Prefer C implementations of Persistent / PickleCache / TimeStamp. Fall back to pure Python implementations. 
""" import sys __all__ = [ 'IPersistent', 'Persistent', 'GHOST', 'UPTODATE', 'CHANGED', 'STICKY', 'PickleCache', 'TimeStamp', ] # Take care not to shadow the module names from persistent import interfaces as _interfaces from persistent import timestamp as _timestamp from persistent import persistence as _persistence from persistent import picklecache as _picklecache IPersistent = _interfaces.IPersistent Persistent = _persistence.Persistent GHOST = _interfaces.GHOST UPTODATE = _interfaces.UPTODATE CHANGED = _interfaces.CHANGED STICKY = _interfaces.STICKY PickleCache = _picklecache.PickleCache # BWC for TimeStamp. TimeStamp = _timestamp sys.modules['persistent.TimeStamp'] = sys.modules['persistent.timestamp'] ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1585243094.0 persistent-4.6.4/persistent/_compat.h0000644000076500000240000000277200000000000020456 0ustar00jmaddenstaff00000000000000/***************************************************************************** Copyright (c) 2012 Zope Foundation and Contributors. All Rights Reserved. This software is subject to the provisions of the Zope Public License, Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS FOR A PARTICULAR PURPOSE ****************************************************************************/ #ifndef PERSISTENT__COMPAT_H #define PERSISTENT__COMPAT_H #include "Python.h" #if PY_MAJOR_VERSION >= 3 #define PY3K #endif #ifdef PY3K #define INTERN PyUnicode_InternFromString #define INTERN_INPLACE PyUnicode_InternInPlace #define NATIVE_CHECK_EXACT PyUnicode_CheckExact #define NATIVE_FROM_STRING_AND_SIZE PyUnicode_FromStringAndSize #define Py_TPFLAGS_HAVE_RICHCOMPARE 0 #define INT_FROM_LONG(x) PyLong_FromLong(x) #define INT_CHECK(x) PyLong_Check(x) #define INT_AS_LONG(x) PyLong_AsLong(x) #define CAPI_CAPSULE_NAME "persistent.cPersistence.CAPI" #else #define INTERN PyString_InternFromString #define INTERN_INPLACE PyString_InternInPlace #define NATIVE_CHECK_EXACT PyString_CheckExact #define NATIVE_FROM_STRING_AND_SIZE PyString_FromStringAndSize #define INT_FROM_LONG(x) PyInt_FromLong(x) #define INT_CHECK(x) PyInt_Check(x) #define INT_AS_LONG(x) PyInt_AS_LONG(x) #endif #endif ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1585243094.0 persistent-4.6.4/persistent/_compat.py0000644000076500000240000001750300000000000020655 0ustar00jmaddenstaff00000000000000############################################################################## # # Copyright (c) 2012 Zope Foundation and Contributors. # All Rights Reserved. # # This software is subject to the provisions of the Zope Public License, # Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. # THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED # WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS # FOR A PARTICULAR PURPOSE. 
# ############################################################################## import sys import os import types from zope.interface import implementedBy from zope.interface import classImplements __all__ = [ 'use_c_impl', 'copy_reg', 'IterableUserDict', 'UserList', 'intern', 'PYPY', 'PYTHON3', 'PYTHON2', ] # pylint:disable=import-error,self-assigning-variable if sys.version_info[0] > 2: import copyreg as copy_reg from collections import UserDict as IterableUserDict from collections import UserList from sys import intern PYTHON3 = True PYTHON2 = False else: # pragma: no cover import copy_reg from UserDict import IterableUserDict from UserList import UserList PYTHON3 = False PYTHON2 = True intern = intern PYPY = hasattr(sys, 'pypy_version_info') def _c_optimizations_required(): """ Return a true value if the C optimizations are required. This uses the ``PURE_PYTHON`` variable as documented in `use_c_impl`. """ pure_env = os.environ.get('PURE_PYTHON') require_c = pure_env == "0" return require_c def _c_optimizations_available(): """ Return the C optimization modules, if available, otherwise a false value. If the optimizations are required but not available, this raises the ImportError. Either all optimization modules are available or none are. This does not say whether they should be used or not. """ catch = () if _c_optimizations_required() else (ImportError,) try: from persistent import cPersistence from persistent import cPickleCache from persistent import _timestamp return { 'persistent.persistence': cPersistence, 'persistent.picklecache': cPickleCache, 'persistent.timestamp': _timestamp, } except catch: # pragma: no cover (only Jython doesn't build extensions) return {} def _c_optimizations_ignored(): """ The opposite of `_c_optimizations_required`. On PyPy, this always returns True. Otherwise, if ``$PURE_PYTHON`` is set to any non-empty value besides "0", optimizations are ignored. Setting ``$PURE_PYTHON`` to "1", for example, ignores optimizations. 
Setting ``$PURE_PYTHON`` to an empty value *does not* ignore optimizations. """ pure_env = os.environ.get('PURE_PYTHON') # The extensions can be compiled with PyPy 7.3, but they don't work. return PYPY or (pure_env and pure_env != "0") def _should_attempt_c_optimizations(): """ Return a true value if we should attempt to use the C optimizations. This takes into account whether we're on PyPy and the value of the ``PURE_PYTHON`` environment variable, as defined in `use_c_impl`. Note that setting ``PURE_PYTHON=0`` forces the use of C optimizations, even on PyPy. """ if _c_optimizations_required(): return True if PYPY: # pragma: no cover return False return not _c_optimizations_ignored() def use_c_impl(py_impl, name=None, globs=None, mod_name=None): """ Decorator. Given an object implemented in Python, with a name like ``Foo``, import the corresponding C implementation from ``persistent.c`` with the name ``Foo`` and use it instead (where ``NAME`` is the module name). This can also be used for constants and other things that do not have a name by passing the name as the second argument. Example:: @use_c_impl class Foo(object): ... GHOST = use_c_impl(12, 'GHOST') If the ``PURE_PYTHON`` environment variable is set to any value other than ``"0"``, or we're on PyPy, ignore the C implementation and return the Python version. If the C implementation cannot be imported, return the Python version. If ``PURE_PYTHON`` is set to 0, *require* the C implementation (let the ImportError propagate); note that PyPy can import the C implementation in this case (and all tests pass). In all cases, the Python version is kept available in the module globals with the name ``FooPy``. If the Python version is a class that implements interfaces, then the C version will be declared to also implement those interfaces. 
If the Python version is a class, then each function defined directly in that class will be replaced with a new version using globals that still use the original name of the class for the Python implementation. This lets the function bodies refer to the class using the name the class is defined with, as it would expect. (Only regular functions and static methods are handled.) However, it also means that mutating the module globals later on will not be visible to the methods of the class. In this example, ``Foo().method()`` will always return 1:: GLOBAL_OBJECT = 1 @use_c_impl class Foo(object): def method(self): super(Foo, self).method() return GLOBAL_OBJECT GLOBAL_OBJECT = 2 """ name = name or py_impl.__name__ globs = globs or sys._getframe(1).f_globals mod_name = mod_name or globs['__name__'] def find_impl(): if not _should_attempt_c_optimizations(): return py_impl c_opts = _c_optimizations_available() if not c_opts: # pragma: no cover (only Jython doesn't build extensions) return py_impl __traceback_info__ = c_opts c_opt = c_opts[mod_name] return getattr(c_opt, name) c_impl = find_impl() # Always make available by the FooPy name globs[name + 'Py'] = py_impl if c_impl is not py_impl and isinstance(py_impl, type): # Rebind the globals of all the functions to still see the # object under its original class name, so that references # in function bodies work as expected. py_attrs = vars(py_impl) new_globals = None for k, v in list(py_attrs.items()): static = isinstance(v, staticmethod) if static: # Often this is __new__ v = v.__func__ if not isinstance(v, types.FunctionType): continue # Somewhat surprisingly, on Python 2, while # ``Class.function`` results in a # ``types.UnboundMethodType`` (``instancemethed``) object, # ``Class.__dict__["function"]`` returns a # ``types.FunctionType``, just like ``Class.function`` # (and the dictionary access, of course) does on Python 3. # The upshot is, we don't need different version-dependent # code. Hooray! 
if new_globals is None: new_globals = v.__globals__.copy() new_globals[py_impl.__name__] = py_impl # On Python 2, all arguments are optional, but an Python 3, all # are required. v = types.FunctionType( v.__code__, new_globals, k, # name v.__defaults__, v.__closure__, ) if static: v = staticmethod(v) setattr(py_impl, k, v) # copy the interface declarations. implements = list(implementedBy(py_impl)) if implements: classImplements(c_impl, *implements) return c_impl ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1585243094.0 persistent-4.6.4/persistent/_ring_build.py0000644000076500000240000000400000000000000021474 0ustar00jmaddenstaff00000000000000# -*- coding: utf-8 -*- ############################################################################## # # Copyright (c) 2018 Zope Foundation and Contributors. # All Rights Reserved. # # This software is subject to the provisions of the Zope Public License, # Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. # THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED # WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS # FOR A PARTICULAR PURPOSE. # ############################################################################## from __future__ import absolute_import, print_function, division import os from cffi import FFI this_dir = os.path.dirname(os.path.abspath(__file__)) ffi = FFI() with open(os.path.join(this_dir, 'ring.h')) as f: cdefs = f.read() # Define a structure with the same layout as CPersistentRing, # and an extra member. We'll cast between them to reuse the # existing functions. 
struct_def = """ typedef struct CPersistentRingCFFI_struct { struct CPersistentRing_struct *r_prev; struct CPersistentRing_struct *r_next; uintptr_t pobj_id; /* The id(PersistentPy object) */ } CPersistentRingCFFI; """ cdefs += struct_def + """ void cffi_ring_add(CPersistentRing* ring, void* elt); void cffi_ring_del(void* elt); void cffi_ring_move_to_head(CPersistentRing* ring, void* elt); """ ffi.cdef(cdefs) source = """ #include "ring.c" """ + struct_def + """ /* Like the other functions, but taking the CFFI version of the struct. This * saves casting at runtime in Python. */ #define cffi_ring_add(ring, elt) ring_add((CPersistentRing*)ring, (CPersistentRing*)elt) #define cffi_ring_del(elt) ring_del((CPersistentRing*)elt) #define cffi_ring_move_to_head(ring, elt) ring_move_to_head((CPersistentRing*)ring, (CPersistentRing*)elt) """ ffi.set_source('persistent._ring', source, include_dirs=[this_dir]) if __name__ == '__main__': ffi.compile() ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1585243094.0 persistent-4.6.4/persistent/_timestamp.c0000644000076500000240000004132700000000000021170 0ustar00jmaddenstaff00000000000000/***************************************************************************** Copyright (c) 2001, 2004 Zope Foundation and Contributors. All Rights Reserved. This software is subject to the provisions of the Zope Public License, Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS FOR A PARTICULAR PURPOSE ****************************************************************************/ #define PY_SSIZE_T_CLEAN #include "Python.h" #include "bytesobject.h" #include #include "_compat.h" PyObject *TimeStamp_FromDate(int, int, int, int, int, double); PyObject *TimeStamp_FromString(const char *); static char TimeStampModule_doc[] = "A 64-bit TimeStamp used as a ZODB serial number.\n" "\n" "$Id$\n"; /* A magic constant having the value 0.000000013969839. When an number of seconds between 0 and 59 is *divided* by this number, we get a number between 0 (for 0), 71582786 (for 1) and 4223384393 (for 59), all of which can be represented in a 32-bit unsigned integer, suitable for packing into 4 bytes using `TS_PACK_UINT32_INTO_BYTES`. To get (close to) the original seconds back, use `TS_UNPACK_UINT32_FROM_BYTES` and *multiply* by this number. */ #define TS_SECOND_BYTES_BIAS ((double)((double)60) / ((double)(0x10000)) / ((double)(0x10000))) #define TS_BASE_YEAR 1900 #define TS_MINUTES_PER_DAY 1440 /* We pretend there are always 31 days in a month; this has us using 372 days in a year in some calculations */ #define TS_DAYS_PER_MONTH 31 #define TS_MONTHS_PER_YEAR 12 #define TS_MINUTES_PER_MONTH (TS_DAYS_PER_MONTH * TS_MINUTES_PER_DAY) #define TS_MINUTES_PER_YEAR (TS_MINUTES_PER_MONTH * TS_MONTHS_PER_YEAR) /* The U suffixes matter on these constants to be sure the compiler generates the appropriate instructions when optimizations are enabled. On x86_64 GCC, if -fno-wrapv is given and -O is used, the compiler might choose to treat these as 32 bit signed quantities otherwise, producing incorrect results on some corner cases. 
See https://github.com/zopefoundation/persistent/issues/86 */ /** * Given an unsigned int *v*, pack it into the four * unsigned char bytes beginning at *bytes*. If *v* is larger * than 2^31 (i.e., it doesn't fit in 32 bits), the results will * be invalid (the first byte will be 0.) * * The inverse is `TS_UNPACK_UINT32_FROM_BYTES`. This is a * lossy operation and may lose some lower-order precision. * */ #define TS_PACK_UINT32_INTO_BYTES(v, bytes) do { \ *(bytes) = v / 0x1000000U; \ *(bytes + 1) = (v % 0x1000000U) / 0x10000U; \ *(bytes + 2) = (v % 0x10000U) / 0x100U; \ *(bytes + 3) = v % 0x100U; \ } while (0) /** * Given a sequence of four unsigned chars beginning at *bytes* * as produced by `TS_PACK_UINT32_INTO_BYTES`, return the * original unsigned int. * * Remember this is a lossy operation, and the value you get back * may not exactly match the original value. If the original value * was greater than 2^31 it will definitely not match. */ #define TS_UNPACK_UINT32_FROM_BYTES(bytes) (*(bytes) * 0x1000000U + *(bytes + 1) * 0x10000U + *(bytes + 2) * 0x100U + *(bytes + 3)) typedef struct { PyObject_HEAD /* The first four bytes of data store the year, month, day, hour, and minute as the number of minutes since Jan 1 00:00. The final four bytes store the seconds since 00:00 as the number of microseconds. Both are normalized into those four bytes the same way with TS_[UN]PACK_UINT32_INTO|FROM_BYTES. 
*/ unsigned char data[8]; } TimeStamp; /* The first dimension of the arrays below is non-leapyear / leapyear */ static char month_len[2][12] = { {31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31}, {31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31} }; static short joff[2][12] = { {0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334}, {0, 31, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335} }; static double gmoff=0; static int leap(int year) { return year % 4 == 0 && (year % 100 != 0 || year % 400 == 0); } static int days_in_month(int year, int month) { return month_len[leap(year)][month]; } static double TimeStamp_yad(int y) { double d, s; y -= TS_BASE_YEAR; d = (y - 1) * 365; if (y > 0) { s = 1.0; y -= 1; } else { s = -1.0; y = -y; } return d + s * (y / 4 - y / 100 + (y + 300) / 400); } static double TimeStamp_abst(int y, int mo, int d, int m, int s) { return (TimeStamp_yad(y) + joff[leap(y)][mo] + d) * 86400 + m * 60 + s; } static int TimeStamp_init_gmoff(void) { struct tm *t; time_t z=0; t = gmtime(&z); if (t == NULL) { PyErr_SetString(PyExc_SystemError, "gmtime failed"); return -1; } gmoff = TimeStamp_abst(t->tm_year + TS_BASE_YEAR, t->tm_mon, t->tm_mday - 1, t->tm_hour * 60 + t->tm_min, t->tm_sec); return 0; } static void TimeStamp_dealloc(TimeStamp *ts) { PyObject_Del(ts); } static PyObject* TimeStamp_richcompare(TimeStamp *self, TimeStamp *other, int op) { PyObject *result = NULL; int cmp; if (Py_TYPE(other) != Py_TYPE(self)) { result = Py_NotImplemented; } else { cmp = memcmp(self->data, other->data, 8); switch (op) { case Py_LT: result = (cmp < 0) ? Py_True : Py_False; break; case Py_LE: result = (cmp <= 0) ? Py_True : Py_False; break; case Py_EQ: result = (cmp == 0) ? Py_True : Py_False; break; case Py_NE: result = (cmp != 0) ? Py_True : Py_False; break; case Py_GT: result = (cmp > 0) ? Py_True : Py_False; break; case Py_GE: result = (cmp >= 0) ? 
Py_True : Py_False; break; } } Py_XINCREF(result); return result; } #ifdef PY3K static Py_hash_t #else static long #endif TimeStamp_hash(TimeStamp *self) { register unsigned char *p = (unsigned char *)self->data; register int len = 8; register long x = *p << 7; while (--len >= 0) x = (1000003*x) ^ *p++; x ^= 8; if (x == -1) x = -2; return x; } typedef struct { /* TODO: reverse-engineer what's in these things and comment them */ int y; int m; int d; int mi; } TimeStampParts; static void TimeStamp_unpack(TimeStamp *self, TimeStampParts *p) { unsigned int minutes_since_base; minutes_since_base = TS_UNPACK_UINT32_FROM_BYTES(self->data); p->y = minutes_since_base / TS_MINUTES_PER_YEAR + TS_BASE_YEAR; p->m = (minutes_since_base % TS_MINUTES_PER_YEAR) / TS_MINUTES_PER_MONTH + 1; p->d = (minutes_since_base % TS_MINUTES_PER_MONTH) / TS_MINUTES_PER_DAY + 1; p->mi = minutes_since_base % TS_MINUTES_PER_DAY; } static double TimeStamp_sec(TimeStamp *self) { unsigned int v; v = TS_UNPACK_UINT32_FROM_BYTES(self->data +4); return TS_SECOND_BYTES_BIAS * v; } static PyObject * TimeStamp_year(TimeStamp *self) { TimeStampParts p; TimeStamp_unpack(self, &p); return INT_FROM_LONG(p.y); } static PyObject * TimeStamp_month(TimeStamp *self) { TimeStampParts p; TimeStamp_unpack(self, &p); return INT_FROM_LONG(p.m); } static PyObject * TimeStamp_day(TimeStamp *self) { TimeStampParts p; TimeStamp_unpack(self, &p); return INT_FROM_LONG(p.d); } static PyObject * TimeStamp_hour(TimeStamp *self) { TimeStampParts p; TimeStamp_unpack(self, &p); return INT_FROM_LONG(p.mi / 60); } static PyObject * TimeStamp_minute(TimeStamp *self) { TimeStampParts p; TimeStamp_unpack(self, &p); return INT_FROM_LONG(p.mi % 60); } static PyObject * TimeStamp_second(TimeStamp *self) { return PyFloat_FromDouble(TimeStamp_sec(self)); } static PyObject * TimeStamp_timeTime(TimeStamp *self) { TimeStampParts p; TimeStamp_unpack(self, &p); return PyFloat_FromDouble(TimeStamp_abst(p.y, p.m - 1, p.d - 1, p.mi, 0) + 
TimeStamp_sec(self) - gmoff); } static PyObject * TimeStamp_raw(TimeStamp *self) { return PyBytes_FromStringAndSize((const char*)self->data, 8); } static PyObject * TimeStamp_repr(TimeStamp *self) { PyObject *raw, *result; raw = TimeStamp_raw(self); result = PyObject_Repr(raw); Py_DECREF(raw); return result; } static PyObject * TimeStamp_str(TimeStamp *self) { char buf[128]; TimeStampParts p; int len; TimeStamp_unpack(self, &p); len =sprintf(buf, "%4.4d-%2.2d-%2.2d %2.2d:%2.2d:%09.6f", p.y, p.m, p.d, p.mi / 60, p.mi % 60, TimeStamp_sec(self)); return NATIVE_FROM_STRING_AND_SIZE(buf, len); } static PyObject * TimeStamp_laterThan(TimeStamp *self, PyObject *obj) { TimeStamp *o = NULL; TimeStampParts p; unsigned char new[8]; int i; if (Py_TYPE(obj) != Py_TYPE(self)) { PyErr_SetString(PyExc_TypeError, "expected TimeStamp object"); return NULL; } o = (TimeStamp *)obj; if (memcmp(self->data, o->data, 8) > 0) { Py_INCREF(self); return (PyObject *)self; } memcpy(new, o->data, 8); for (i = 7; i > 3; i--) { if (new[i] == 255) new[i] = 0; else { new[i]++; return TimeStamp_FromString((const char*)new); } } /* All but the first two bytes are the same. Need to increment the year, month, and day explicitly. 
*/ TimeStamp_unpack(o, &p); if (p.mi >= 1439) { p.mi = 0; if (p.d == month_len[leap(p.y)][p.m - 1]) { p.d = 1; if (p.m == 12) { p.m = 1; p.y++; } else p.m++; } else p.d++; } else p.mi++; return TimeStamp_FromDate(p.y, p.m, p.d, p.mi / 60, p.mi % 60, 0); } static struct PyMethodDef TimeStamp_methods[] = { {"year", (PyCFunction)TimeStamp_year, METH_NOARGS}, {"minute", (PyCFunction)TimeStamp_minute, METH_NOARGS}, {"month", (PyCFunction)TimeStamp_month, METH_NOARGS}, {"day", (PyCFunction)TimeStamp_day, METH_NOARGS}, {"hour", (PyCFunction)TimeStamp_hour, METH_NOARGS}, {"second", (PyCFunction)TimeStamp_second, METH_NOARGS}, {"timeTime", (PyCFunction)TimeStamp_timeTime, METH_NOARGS}, {"laterThan", (PyCFunction)TimeStamp_laterThan, METH_O}, {"raw", (PyCFunction)TimeStamp_raw, METH_NOARGS}, {NULL, NULL}, }; #define DEFERRED_ADDRESS(ADDR) 0 static PyTypeObject TimeStamp_type = { PyVarObject_HEAD_INIT(DEFERRED_ADDRESS(NULL), 0) "persistent.TimeStamp", sizeof(TimeStamp), /* tp_basicsize */ 0, /* tp_itemsize */ (destructor)TimeStamp_dealloc, /* tp_dealloc */ 0, /* tp_print */ 0, /* tp_getattr */ 0, /* tp_setattr */ 0, /* tp_compare */ (reprfunc)TimeStamp_repr, /* tp_repr */ 0, /* tp_as_number */ 0, /* tp_as_sequence */ 0, /* tp_as_mapping */ (hashfunc)TimeStamp_hash, /* tp_hash */ 0, /* tp_call */ (reprfunc)TimeStamp_str, /* tp_str */ 0, /* tp_getattro */ 0, /* tp_setattro */ 0, /* tp_as_buffer */ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_RICHCOMPARE, /* tp_flags */ 0, /* tp_doc */ 0, /* tp_traverse */ 0, /* tp_clear */ (richcmpfunc)&TimeStamp_richcompare, /* tp_richcompare */ 0, /* tp_weaklistoffset */ 0, /* tp_iter */ 0, /* tp_iternext */ TimeStamp_methods, /* tp_methods */ 0, /* tp_members */ 0, /* tp_getset */ 0, /* tp_base */ 0, /* tp_dict */ 0, /* tp_descr_get */ 0, /* tp_descr_set */ }; PyObject * TimeStamp_FromString(const char *buf) { /* buf must be exactly 8 characters */ TimeStamp *ts = (TimeStamp *)PyObject_New(TimeStamp, &TimeStamp_type); 
memcpy(ts->data, buf, 8); return (PyObject *)ts; } #define CHECK_RANGE(VAR, LO, HI) if ((VAR) < (LO) || (VAR) > (HI)) { \ return PyErr_Format(PyExc_ValueError, \ # VAR " must be between %d and %d: %d", \ (LO), (HI), (VAR)); \ } PyObject * TimeStamp_FromDate(int year, int month, int day, int hour, int min, double sec) { TimeStamp *ts = NULL; int d; unsigned int years_since_base; unsigned int months_since_base; unsigned int days_since_base; unsigned int hours_since_base; unsigned int minutes_since_base; unsigned int v; if (year < TS_BASE_YEAR) return PyErr_Format(PyExc_ValueError, "year must be greater than %d: %d", TS_BASE_YEAR, year); CHECK_RANGE(month, 1, 12); d = days_in_month(year, month - 1); if (day < 1 || day > d) return PyErr_Format(PyExc_ValueError, "day must be between 1 and %d: %d", d, day); CHECK_RANGE(hour, 0, 23); CHECK_RANGE(min, 0, 59); /* Seconds are allowed to be anything, so chill If we did want to be pickly, 60 would be a better choice. if (sec < 0 || sec > 59) return PyErr_Format(PyExc_ValueError, "second must be between 0 and 59: %f", sec); */ ts = (TimeStamp *)PyObject_New(TimeStamp, &TimeStamp_type); /* months come in 1-based, hours and minutes come in 0-based */ /* The base time is Jan 1, 00:00 of TS_BASE_YEAR */ years_since_base = year - TS_BASE_YEAR; months_since_base = years_since_base * TS_MONTHS_PER_YEAR + (month - 1); days_since_base = months_since_base * TS_DAYS_PER_MONTH + (day - 1); hours_since_base = days_since_base * 24 + hour; minutes_since_base = hours_since_base * 60 + min; TS_PACK_UINT32_INTO_BYTES(minutes_since_base, ts->data); sec /= TS_SECOND_BYTES_BIAS; v = (unsigned int)sec; TS_PACK_UINT32_INTO_BYTES(v, ts->data + 4); return (PyObject *)ts; } PyObject * TimeStamp_TimeStamp(PyObject *obj, PyObject *args) { char *buf = NULL; Py_ssize_t len = 0; int y, mo, d, h = 0, m = 0; double sec = 0; #ifdef PY3K if (PyArg_ParseTuple(args, "y#", &buf, &len)) #else if (PyArg_ParseTuple(args, "s#", &buf, &len)) #endif { if (len != 8) { 
PyErr_SetString(PyExc_ValueError, "8-byte array expected"); return NULL; } return TimeStamp_FromString(buf); } PyErr_Clear(); if (!PyArg_ParseTuple(args, "iii|iid", &y, &mo, &d, &h, &m, &sec)) return NULL; return TimeStamp_FromDate(y, mo, d, h, m, sec); } static PyMethodDef TimeStampModule_functions[] = { {"TimeStamp", TimeStamp_TimeStamp, METH_VARARGS}, {NULL, NULL}, }; #ifdef PY3K static struct PyModuleDef moduledef = { PyModuleDef_HEAD_INIT, "_timestamp", /* m_name */ TimeStampModule_doc, /* m_doc */ -1, /* m_size */ TimeStampModule_functions, /* m_methods */ NULL, /* m_reload */ NULL, /* m_traverse */ NULL, /* m_clear */ NULL, /* m_free */ }; #endif static PyObject* module_init(void) { PyObject *module; if (TimeStamp_init_gmoff() < 0) return NULL; #ifdef PY3K module = PyModule_Create(&moduledef); #else module = Py_InitModule4("_timestamp", TimeStampModule_functions, TimeStampModule_doc, NULL, PYTHON_API_VERSION); #endif if (module == NULL) return NULL; #ifdef PY3K ((PyObject*)&TimeStamp_type)->ob_type = &PyType_Type; #else TimeStamp_type.ob_type = &PyType_Type; #endif TimeStamp_type.tp_getattro = PyObject_GenericGetAttr; return module; } #ifdef PY3K PyMODINIT_FUNC PyInit__timestamp(void) { return module_init(); } #else PyMODINIT_FUNC init_timestamp(void) { module_init(); } #endif ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1585243094.0 persistent-4.6.4/persistent/cPersistence.c0000644000076500000240000013714700000000000021463 0ustar00jmaddenstaff00000000000000/***************************************************************************** Copyright (c) 2001, 2002 Zope Foundation and Contributors. All Rights Reserved. This software is subject to the provisions of the Zope Public License, Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS FOR A PARTICULAR PURPOSE ****************************************************************************/ static char cPersistence_doc_string[] = "Defines Persistent mixin class for persistent objects.\n" "\n" "$Id$\n"; #define PY_SSIZE_T_CLEAN #include "cPersistence.h" #include "structmember.h" struct ccobject_head_struct { CACHE_HEAD }; /* The compiler on Windows used for Python 2.7 doesn't include stdint.h. */ #if !defined(PY3K) && defined(_WIN32) typedef unsigned long long uint64_t; # define PRIx64 "llx" #else # include # include #endif /* These objects are initialized when the module is loaded */ static PyObject *py_simple_new; /* Strings initialized by init_strings() below. */ static PyObject *py_keys, *py_setstate, *py___dict__, *py_timeTime; static PyObject *py__p_changed, *py__p_deactivate; static PyObject *py___getattr__, *py___setattr__, *py___delattr__; static PyObject *py___slotnames__, *copy_reg_slotnames, *__newobj__; static PyObject *py___getnewargs__, *py___getstate__; static PyObject *py_unsaved, *py_ghost, *py_saved, *py_changed, *py_sticky; static int init_strings(void) { #define INIT_STRING(S) \ if (!(py_ ## S = INTERN(#S))) \ return -1; INIT_STRING(keys); INIT_STRING(setstate); INIT_STRING(timeTime); INIT_STRING(__dict__); INIT_STRING(_p_changed); INIT_STRING(_p_deactivate); INIT_STRING(__getattr__); INIT_STRING(__setattr__); INIT_STRING(__delattr__); INIT_STRING(__slotnames__); INIT_STRING(__getnewargs__); INIT_STRING(__getstate__); INIT_STRING(unsaved); INIT_STRING(ghost); INIT_STRING(saved); INIT_STRING(changed); INIT_STRING(sticky); #undef INIT_STRING return 0; } #ifdef Py_DEBUG static void fatal_1350(cPersistentObject *self, const char *caller, const char *detail) { char buf[1000]; PyOS_snprintf(buf, sizeof(buf), "cPersistence.c %s(): 
object at %p with type %.200s\n" "%s.\n" "The only known cause is multiple threads trying to ghost and\n" "unghost the object simultaneously.\n" "That's not legal, but ZODB can't stop it.\n" "See Collector #1350.\n", caller, self, Py_TYPE(self)->tp_name, detail); Py_FatalError(buf); } #endif static void ghostify(cPersistentObject*); static PyObject * pickle_slotnames(PyTypeObject *cls); static PyObject * convert_name(PyObject *name); /* Load the state of the object, unghostifying it. Upon success, return 1. * If an error occurred, re-ghostify the object and return -1. */ static int unghostify(cPersistentObject *self) { if (self->state < 0 && self->jar) { PyObject *r; /* Is it ever possible to not have a cache? */ if (self->cache) { /* Create a node in the ring for this unghostified object. */ self->cache->non_ghost_count++; self->cache->total_estimated_size += _estimated_size_in_bytes(self->estimated_size); ring_add(&self->cache->ring_home, &self->ring); Py_INCREF(self); } /* set state to CHANGED while setstate() call is in progress to prevent a recursive call to _PyPersist_Load(). */ self->state = cPersistent_CHANGED_STATE; /* Call the object's __setstate__() */ r = PyObject_CallMethod(self->jar, "setstate", "O", (PyObject *)self); if (r == NULL) { ghostify(self); return -1; } self->state = cPersistent_UPTODATE_STATE; Py_DECREF(r); if (self->cache && self->ring.r_next == NULL) { #ifdef Py_DEBUG fatal_1350(self, "unghostify", "is not in the cache despite that we just " "unghostified it"); #else PyErr_Format(PyExc_SystemError, "object at %p with type " "%.200s not in the cache despite that we just " "unghostified it", self, Py_TYPE(self)->tp_name); return -1; #endif } } return 1; } /****************************************************************************/ static PyTypeObject Pertype; static void accessed(cPersistentObject *self) { /* Do nothing unless the object is in a cache and not a ghost. 
*/ if (self->cache && self->state >= 0 && self->ring.r_next) ring_move_to_head(&self->cache->ring_home, &self->ring); } static void ghostify(cPersistentObject *self) { PyObject **dictptr, *slotnames; PyObject *errtype, *errvalue, *errtb; /* are we already a ghost? */ if (self->state == cPersistent_GHOST_STATE) return; /* Is it ever possible to not have a cache? */ if (self->cache == NULL) { self->state = cPersistent_GHOST_STATE; return; } if (self->ring.r_next == NULL) { /* There's no way to raise an error in this routine. */ #ifdef Py_DEBUG fatal_1350(self, "ghostify", "claims to be in a cache but isn't"); #else return; #endif } /* If we're ghostifying an object, we better have some non-ghosts. */ assert(self->cache->non_ghost_count > 0); self->cache->non_ghost_count--; self->cache->total_estimated_size -= _estimated_size_in_bytes(self->estimated_size); ring_del(&self->ring); self->state = cPersistent_GHOST_STATE; /* clear __dict__ */ dictptr = _PyObject_GetDictPtr((PyObject *)self); if (dictptr && *dictptr) { Py_DECREF(*dictptr); *dictptr = NULL; } /* clear all slots besides _p_* * ( for backward-compatibility reason we do this only if class does not * override __new__ ) */ if (Py_TYPE(self)->tp_new == Pertype.tp_new) { /* later we might clear an AttributeError but * if we have a pending exception that still needs to be * raised so that we don't generate a SystemError. 
*/ PyErr_Fetch(&errtype, &errvalue, &errtb); slotnames = pickle_slotnames(Py_TYPE(self)); if (slotnames && slotnames != Py_None) { int i; for (i = 0; i < PyList_GET_SIZE(slotnames); i++) { PyObject *name; char *cname; int is_special; name = PyList_GET_ITEM(slotnames, i); #ifdef PY3K if (PyUnicode_Check(name)) { PyObject *converted = convert_name(name); cname = PyBytes_AS_STRING(converted); #else if (PyBytes_Check(name)) { cname = PyBytes_AS_STRING(name); #endif is_special = !strncmp(cname, "_p_", 3); #ifdef PY3K Py_DECREF(converted); #endif if (is_special) /* skip persistent */ { continue; } } /* NOTE: this skips our delattr hook */ if (PyObject_GenericSetAttr((PyObject *)self, name, NULL) < 0) /* delattr of non-set slot will raise AttributeError - we * simply ignore. */ PyErr_Clear(); } } Py_XDECREF(slotnames); PyErr_Restore(errtype, errvalue, errtb); } /* We remove the reference to the just ghosted object that the ring * holds. Note that the dictionary of oids->objects has an uncounted * reference, so if the ring's reference was the only one, this frees * the ghost object. Note further that the object's dealloc knows to * inform the dictionary that it is going away. 
*/ Py_DECREF(self); } static int changed(cPersistentObject *self) { if ((self->state == cPersistent_UPTODATE_STATE || self->state == cPersistent_STICKY_STATE) && self->jar) { PyObject *meth, *arg, *result; static PyObject *s_register; if (s_register == NULL) s_register = INTERN("register"); meth = PyObject_GetAttr((PyObject *)self->jar, s_register); if (meth == NULL) return -1; arg = PyTuple_New(1); if (arg == NULL) { Py_DECREF(meth); return -1; } Py_INCREF(self); PyTuple_SET_ITEM(arg, 0, (PyObject *)self); result = PyObject_CallObject(meth, arg); Py_DECREF(arg); Py_DECREF(meth); if (result == NULL) return -1; Py_DECREF(result); self->state = cPersistent_CHANGED_STATE; } return 0; } static int readCurrent(cPersistentObject *self) { if ((self->state == cPersistent_UPTODATE_STATE || self->state == cPersistent_STICKY_STATE) && self->jar && self->oid) { static PyObject *s_readCurrent=NULL; PyObject *r; if (s_readCurrent == NULL) s_readCurrent = INTERN("readCurrent"); r = PyObject_CallMethodObjArgs(self->jar, s_readCurrent, self, NULL); if (r == NULL) return -1; Py_DECREF(r); } return 0; } static PyObject * Per__p_deactivate(cPersistentObject *self) { if (self->state == cPersistent_UPTODATE_STATE && self->jar) { PyObject **dictptr = _PyObject_GetDictPtr((PyObject *)self); if (dictptr && *dictptr) { Py_DECREF(*dictptr); *dictptr = NULL; } /* Note that we need to set to ghost state unless we are called directly. Methods that override this need to do the same! 
*/ ghostify(self); if (PyErr_Occurred()) return NULL; } Py_INCREF(Py_None); return Py_None; } static PyObject * Per__p_activate(cPersistentObject *self) { if (unghostify(self) < 0) return NULL; Py_INCREF(Py_None); return Py_None; } static int Per_set_changed(cPersistentObject *self, PyObject *v); static PyObject * Per__p_invalidate(cPersistentObject *self) { signed char old_state = self->state; if (old_state != cPersistent_GHOST_STATE) { if (Per_set_changed(self, NULL) < 0) return NULL; ghostify(self); if (PyErr_Occurred()) return NULL; } Py_INCREF(Py_None); return Py_None; } static PyObject * pickle_slotnames(PyTypeObject *cls) { PyObject *slotnames; slotnames = PyDict_GetItem(cls->tp_dict, py___slotnames__); if (slotnames) { int n = PyObject_Not(slotnames); if (n < 0) return NULL; if (n) slotnames = Py_None; Py_INCREF(slotnames); return slotnames; } slotnames = PyObject_CallFunctionObjArgs(copy_reg_slotnames, (PyObject*)cls, NULL); if (slotnames && !(slotnames == Py_None || PyList_Check(slotnames))) { PyErr_SetString(PyExc_TypeError, "copy_reg._slotnames didn't return a list or None"); Py_DECREF(slotnames); return NULL; } return slotnames; } static PyObject * pickle_copy_dict(PyObject *state) { PyObject *copy, *key, *value; char *ckey; Py_ssize_t pos = 0; copy = PyDict_New(); if (!copy) return NULL; if (!state) return copy; while (PyDict_Next(state, &pos, &key, &value)) { int is_special; #ifdef PY3K if (key && PyUnicode_Check(key)) { PyObject *converted = convert_name(key); ckey = PyBytes_AS_STRING(converted); #else if (key && PyBytes_Check(key)) { ckey = PyBytes_AS_STRING(key); #endif is_special = (*ckey == '_' && (ckey[1] == 'v' || ckey[1] == 'p') && ckey[2] == '_'); #ifdef PY3K Py_DECREF(converted); #endif if (is_special) /* skip volatile and persistent */ continue; } if (PyObject_SetItem(copy, key, value) < 0) goto err; } return copy; err: Py_DECREF(copy); return NULL; } static char pickle___getstate__doc[] = "Get the object serialization state\n" "\n" "If 
the object has no assigned slots and has no instance dictionary, then \n" "None is returned.\n" "\n" "If the object has no assigned slots and has an instance dictionary, then \n" "the a copy of the instance dictionary is returned. The copy has any items \n" "with names starting with '_v_' or '_p_' ommitted.\n" "\n" "If the object has assigned slots, then a two-element tuple is returned. \n" "The first element is either None or a copy of the instance dictionary, \n" "as described above. The second element is a dictionary with items \n" "for each of the assigned slots.\n" ; static PyObject * pickle___getstate__(PyObject *self) { PyObject *slotnames=NULL, *slots=NULL, *state=NULL; PyObject **dictp; int n=0; slotnames = pickle_slotnames(Py_TYPE(self)); if (!slotnames) return NULL; dictp = _PyObject_GetDictPtr(self); if (dictp) state = pickle_copy_dict(*dictp); else { state = Py_None; Py_INCREF(state); } if (slotnames != Py_None) { int i; slots = PyDict_New(); if (!slots) goto end; for (i = 0; i < PyList_GET_SIZE(slotnames); i++) { PyObject *name, *value; char *cname; int is_special; name = PyList_GET_ITEM(slotnames, i); #ifdef PY3K if (PyUnicode_Check(name)) { PyObject *converted = convert_name(name); cname = PyBytes_AS_STRING(converted); #else if (PyBytes_Check(name)) { cname = PyBytes_AS_STRING(name); #endif is_special = (*cname == '_' && (cname[1] == 'v' || cname[1] == 'p') && cname[2] == '_'); #ifdef PY3K Py_DECREF(converted); #endif if (is_special) /* skip volatile and persistent */ { continue; } } /* Unclear: Will this go through our getattr hook? 
*/ value = PyObject_GetAttr(self, name); if (value == NULL) PyErr_Clear(); else { int err = PyDict_SetItem(slots, name, value); Py_DECREF(value); if (err < 0) goto end; n++; } } } if (n) state = Py_BuildValue("(NO)", state, slots); end: Py_XDECREF(slotnames); Py_XDECREF(slots); return state; } static int pickle_setattrs_from_dict(PyObject *self, PyObject *dict) { PyObject *key, *value; Py_ssize_t pos = 0; if (!PyDict_Check(dict)) { PyErr_SetString(PyExc_TypeError, "Expected dictionary"); return -1; } while (PyDict_Next(dict, &pos, &key, &value)) { if (PyObject_SetAttr(self, key, value) < 0) return -1; } return 0; } static char pickle___setstate__doc[] = "Set the object serialization state\n\n" "The state should be in one of 3 forms:\n\n" "- None\n\n" " Ignored\n\n" "- A dictionary\n\n" " In this case, the object's instance dictionary will be cleared and \n" " updated with the new state.\n\n" "- A two-tuple with a string as the first element. \n\n" " In this case, the method named by the string in the first element will\n" " be called with the second element.\n\n" " This form supports migration of data formats.\n\n" "- A two-tuple with None or a Dictionary as the first element and\n" " with a dictionary as the second element.\n\n" " If the first element is not None, then the object's instance dictionary \n" " will be cleared and updated with the value.\n\n" " The items in the second element will be assigned as attributes.\n" ; static PyObject * pickle___setstate__(PyObject *self, PyObject *state) { PyObject *slots=NULL; if (PyTuple_Check(state)) { if (!PyArg_ParseTuple(state, "OO:__setstate__", &state, &slots)) return NULL; } if (state != Py_None) { PyObject **dict; PyObject *items; PyObject *d_key, *d_value; Py_ssize_t i; int len; dict = _PyObject_GetDictPtr(self); if (!dict) { PyErr_SetString(PyExc_TypeError, "this object has no instance dictionary"); return NULL; } if (!*dict) { *dict = PyDict_New(); if (!*dict) return NULL; } PyDict_Clear(*dict); if 
(PyDict_CheckExact(state)) { i = 0; while (PyDict_Next(state, &i, &d_key, &d_value)) { /* normally the keys for instance attributes are interned. we should try to do that here. */ if (NATIVE_CHECK_EXACT(d_key)) { Py_INCREF(d_key); INTERN_INPLACE(&d_key); Py_DECREF(d_key); } if (PyObject_SetItem(*dict, d_key, d_value) < 0) return NULL; } } else { /* can happen that not a built-in dict is passed as state fall back to iterating over items, instead of silently failing with PyDict_Next */ items = PyMapping_Items(state); if (items == NULL) return NULL; len = PySequence_Size(items); if (len < 0) { Py_DECREF(items); return NULL; } for ( i=0; istate >= 0) { /* If the cache has been cleared, then a non-ghost object isn't in the ring any longer. */ if (self->ring.r_next != NULL) { /* if we're ghostifying an object, we better have some non-ghosts */ assert(self->cache->non_ghost_count > 0); self->cache->non_ghost_count--; self->cache->total_estimated_size -= _estimated_size_in_bytes(self->estimated_size); ring_del(&self->ring); } } if (self->cache) cPersistenceCAPI->percachedel(self->cache, self->oid); Py_XDECREF(self->cache); Py_XDECREF(self->jar); Py_XDECREF(self->oid); Py_TYPE(self)->tp_free(self); } static int Per_traverse(cPersistentObject *self, visitproc visit, void *arg) { int err; #define VISIT(SLOT) \ if (SLOT) { \ err = visit((PyObject *)(SLOT), arg); \ if (err) \ return err; \ } VISIT(self->jar); VISIT(self->oid); VISIT(self->cache); #undef VISIT return 0; } /* convert_name() returns a new reference to a string name or sets an exception and returns NULL. */ static PyObject * convert_name(PyObject *name) { #ifdef Py_USING_UNICODE /* The Unicode to string conversion is done here because the existing tp_setattro slots expect a string object as name and we wouldn't want to break those. 
*/ if (PyUnicode_Check(name)) { name = PyUnicode_AsEncodedString(name, NULL, NULL); } else #endif if (!PyBytes_Check(name)) { PyErr_SetString(PyExc_TypeError, "attribute name must be a string"); return NULL; } else Py_INCREF(name); return name; } /* Returns true if the object requires unghostification. There are several special attributes that we allow access to without requiring that the object be unghostified: __class__ __del__ __dict__ __of__ __setstate__ */ static int unghost_getattr(const char *s) { if (*s++ != '_') return 1; if (*s == 'p') { s++; if (*s == '_') return 0; /* _p_ */ else return 1; } else if (*s == '_') { s++; switch (*s) { case 'c': return strcmp(s, "class__"); case 'd': s++; if (!strcmp(s, "el__")) return 0; /* __del__ */ if (!strcmp(s, "ict__")) return 0; /* __dict__ */ return 1; case 'o': return strcmp(s, "of__"); case 's': return strcmp(s, "setstate__"); default: return 1; } } return 1; } static PyObject* Per_getattro(cPersistentObject *self, PyObject *name) { PyObject *result = NULL; /* guilty until proved innocent */ PyObject *converted; char *s; converted = convert_name(name); if (!converted) goto Done; s = PyBytes_AS_STRING(converted); if (unghost_getattr(s)) { if (unghostify(self) < 0) goto Done; accessed(self); } result = PyObject_GenericGetAttr((PyObject *)self, name); Done: Py_XDECREF(converted); return result; } /* Exposed as _p_getattr method. Test whether base getattr should be used */ static PyObject * Per__p_getattr(cPersistentObject *self, PyObject *name) { PyObject *result = NULL; /* guilty until proved innocent */ PyObject *converted; char *s; converted = convert_name(name); if (!converted) goto Done; s = PyBytes_AS_STRING(converted); if (*s != '_' || unghost_getattr(s)) { if (unghostify(self) < 0) goto Done; accessed(self); result = Py_False; } else result = Py_True; Py_INCREF(result); Done: Py_XDECREF(converted); return result; } /* TODO: we should probably not allow assignment of __class__ and __dict__. 
*/ static int Per_setattro(cPersistentObject *self, PyObject *name, PyObject *v) { int result = -1; /* guilty until proved innocent */ PyObject *converted; char *s; converted = convert_name(name); if (!converted) goto Done; s = PyBytes_AS_STRING(converted); if (strncmp(s, "_p_", 3) != 0) { if (unghostify(self) < 0) goto Done; accessed(self); if (strncmp(s, "_v_", 3) != 0 && self->state != cPersistent_CHANGED_STATE) { if (changed(self) < 0) goto Done; } } result = PyObject_GenericSetAttr((PyObject *)self, name, v); Done: Py_XDECREF(converted); return result; } static int Per_p_set_or_delattro(cPersistentObject *self, PyObject *name, PyObject *v) { int result = -1; /* guilty until proved innocent */ PyObject *converted; char *s; converted = convert_name(name); if (!converted) goto Done; s = PyBytes_AS_STRING(converted); if (strncmp(s, "_p_", 3)) { if (unghostify(self) < 0) goto Done; accessed(self); result = 0; } else { if (PyObject_GenericSetAttr((PyObject *)self, name, v) < 0) goto Done; result = 1; } Done: Py_XDECREF(converted); return result; } static PyObject * Per__p_setattr(cPersistentObject *self, PyObject *args) { PyObject *name, *v, *result; int r; if (!PyArg_ParseTuple(args, "OO:_p_setattr", &name, &v)) return NULL; r = Per_p_set_or_delattro(self, name, v); if (r < 0) return NULL; result = r ? Py_True : Py_False; Py_INCREF(result); return result; } static PyObject * Per__p_delattr(cPersistentObject *self, PyObject *name) { int r; PyObject *result; r = Per_p_set_or_delattro(self, name, NULL); if (r < 0) return NULL; result = r ? Py_True : Py_False; Py_INCREF(result); return result; } static PyObject * Per_get_changed(cPersistentObject *self) { if (self->state < 0) { Py_INCREF(Py_None); return Py_None; } return PyBool_FromLong(self->state == cPersistent_CHANGED_STATE); } static int Per_set_changed(cPersistentObject *self, PyObject *v) { int deactivate = 0; int true; if (!v) { /* delattr is used to invalidate an object even if it has changed. 
*/ if (self->state != cPersistent_GHOST_STATE) self->state = cPersistent_UPTODATE_STATE; deactivate = 1; } else if (v == Py_None) deactivate = 1; if (deactivate) { PyObject *res, *meth; meth = PyObject_GetAttr((PyObject *)self, py__p_deactivate); if (meth == NULL) return -1; res = PyObject_CallObject(meth, NULL); if (res) Py_DECREF(res); else { /* an error occured in _p_deactivate(). It's not clear what we should do here. The code is obviously ignoring the exception, but it shouldn't return 0 for a getattr and set an exception. The simplest change is to clear the exception, but that simply masks the error. This prints an error to stderr just like exceptions in __del__(). It would probably be better to log it but that would be painful from C. */ PyErr_WriteUnraisable(meth); } Py_DECREF(meth); return 0; } /* !deactivate. If passed a true argument, mark self as changed (starting * with ZODB 3.6, that includes activating the object if it's a ghost). * If passed a false argument, and the object isn't a ghost, set the * state as up-to-date. */ true = PyObject_IsTrue(v); if (true == -1) return -1; if (true) { if (self->state < 0) { if (unghostify(self) < 0) return -1; } return changed(self); } /* We were passed a false, non-None argument. If we're not a ghost, * mark self as up-to-date. */ if (self->state >= 0) self->state = cPersistent_UPTODATE_STATE; return 0; } static PyObject * Per_get_oid(cPersistentObject *self) { PyObject *oid = self->oid ? 
self->oid : Py_None; Py_INCREF(oid); return oid; } static int Per_set_oid(cPersistentObject *self, PyObject *v) { if (self->cache) { int result; if (v == NULL) { PyErr_SetString(PyExc_ValueError, "can't delete _p_oid of cached object"); return -1; } result = PyObject_RichCompareBool(self->oid, v, Py_NE); if (result < 0) return -1; if (result) { PyErr_SetString(PyExc_ValueError, "can not change _p_oid of cached object"); return -1; } } Py_XDECREF(self->oid); Py_XINCREF(v); self->oid = v; return 0; } static PyObject * Per_get_jar(cPersistentObject *self) { PyObject *jar = self->jar ? self->jar : Py_None; Py_INCREF(jar); return jar; } static int Per_set_jar(cPersistentObject *self, PyObject *v) { if (self->cache) { int result; if (v == NULL) { PyErr_SetString(PyExc_ValueError, "can't delete _p_jar of cached object"); return -1; } result = PyObject_RichCompareBool(self->jar, v, Py_NE); if (result < 0) return -1; if (result) { PyErr_SetString(PyExc_ValueError, "can not change _p_jar of cached object"); return -1; } } Py_XDECREF(self->jar); Py_XINCREF(v); self->jar = v; return 0; } static PyObject * Per_get_serial(cPersistentObject *self) { return PyBytes_FromStringAndSize(self->serial, 8); } static int Per_set_serial(cPersistentObject *self, PyObject *v) { if (v) { if (PyBytes_Check(v) && PyBytes_GET_SIZE(v) == 8) memcpy(self->serial, PyBytes_AS_STRING(v), 8); else { PyErr_SetString(PyExc_ValueError, "_p_serial must be an 8-character bytes array"); return -1; } } else memset(self->serial, 0, 8); return 0; } static PyObject * Per_get_mtime(cPersistentObject *self) { static PyObject* TimeStamp; PyObject *t, *v; if (unghostify(self) < 0) return NULL; accessed(self); if (memcmp(self->serial, "\0\0\0\0\0\0\0\0", 8) == 0) { Py_INCREF(Py_None); return Py_None; } if (!TimeStamp) { PyObject* ts_module; ts_module = PyImport_ImportModule("persistent._timestamp"); if (!ts_module) return NULL; TimeStamp = PyObject_GetAttrString(ts_module, "TimeStamp"); Py_DECREF(ts_module); if 
(!TimeStamp) return NULL; } #ifdef PY3K t = PyObject_CallFunction(TimeStamp, "y#", self->serial, (Py_ssize_t)8); #else t = PyObject_CallFunction(TimeStamp, "s#", self->serial, (Py_ssize_t)8); #endif if (!t) { return NULL; } v = PyObject_CallMethod(t, "timeTime", ""); Py_DECREF(t); return v; } static PyObject * Per_get_state(cPersistentObject *self) { return INT_FROM_LONG(self->state); } static PyObject * Per_get_estimated_size(cPersistentObject *self) { return INT_FROM_LONG(_estimated_size_in_bytes(self->estimated_size)); } static int Per_set_estimated_size(cPersistentObject *self, PyObject *v) { if (v) { if (INT_CHECK(v)) { long lv = INT_AS_LONG(v); if (lv < 0) { PyErr_SetString(PyExc_ValueError, "_p_estimated_size must not be negative"); return -1; } self->estimated_size = _estimated_size_in_24_bits(lv); } else { PyErr_SetString(PyExc_TypeError, "_p_estimated_size must be an integer"); return -1; } } else self->estimated_size = 0; return 0; } static PyObject * Per_get_status(cPersistentObject *self) { PyObject *result = NULL; if (!self->jar) { result = py_unsaved; } else { switch (self->state) { case cPersistent_GHOST_STATE: result = py_ghost; break; case cPersistent_STICKY_STATE: result = py_sticky; break; case cPersistent_UPTODATE_STATE: result = py_saved; break; case cPersistent_CHANGED_STATE: result = py_changed; break; } } if (result) { Py_INCREF(result); } return result; } static PyObject* Per_get_sticky(cPersistentObject *self) { return PyBool_FromLong(self->state == cPersistent_STICKY_STATE); } static int Per_set_sticky(cPersistentObject *self, PyObject* value) { if (self->state < 0) { PyErr_SetString(PyExc_ValueError, "can't set sticky flag on a ghost"); return -1; } if (self->jar) { if (PyObject_IsTrue(value)) { self->state = cPersistent_STICKY_STATE; } else { self->state = cPersistent_UPTODATE_STATE; } } return 0; } static PyObject* repr_format_exception(char* format) { /* If an exception we should catch occurred, return a new string of its repr. 
Otherwise, return NULL. */ PyObject *exc_t; PyObject *exc_v; PyObject *exc_tb; PyObject *result = NULL; if (PyErr_Occurred() && PyErr_ExceptionMatches(PyExc_Exception)) { PyErr_Fetch(&exc_t, &exc_v, &exc_tb); PyErr_NormalizeException(&exc_t, &exc_v, &exc_tb); PyErr_Clear(); result = PyUnicode_FromFormat(format, exc_v); Py_DECREF(exc_t); Py_DECREF(exc_v); Py_DECREF(exc_tb); } return result; } static PyObject* repr_helper(PyObject *o, char* format) { /* Returns a new reference, or NULL on error */ PyObject *result; if (o) { result = PyUnicode_FromFormat(format, o); if (!result) result = repr_format_exception(format); } else { result = PyUnicode_FromString(""); } return result; } static PyObject* Per_repr(cPersistentObject *self) { PyObject *prepr = NULL; PyObject *prepr_exc_str = NULL; PyObject *module = NULL; PyObject *name = NULL; PyObject *oid_str = NULL; PyObject *jar_str = NULL; PyObject *result = NULL; unsigned char* oid_bytes; char buf[20]; uint64_t oid_value; prepr = PyObject_GetAttrString((PyObject*)Py_TYPE(self), "_p_repr"); if (prepr) { result = PyObject_CallFunctionObjArgs(prepr, self, NULL); if (result) goto cleanup; else { prepr_exc_str = repr_format_exception(" _p_repr %R"); if (!prepr_exc_str) goto cleanup; } } else { PyErr_Clear(); prepr_exc_str = PyUnicode_FromString(""); } if (self->oid && PyBytes_Check(self->oid) && PyBytes_GET_SIZE(self->oid) == 8) { oid_bytes = (unsigned char*)PyBytes_AS_STRING(self->oid); oid_value = ((uint64_t)oid_bytes[0] << 56) | ((uint64_t)oid_bytes[1] << 48) | ((uint64_t)oid_bytes[2] << 40) | ((uint64_t)oid_bytes[3] << 32) | ((uint64_t)oid_bytes[4] << 24) | ((uint64_t)oid_bytes[5] << 16) | ((uint64_t)oid_bytes[6] << 8) | ((uint64_t)oid_bytes[7]); /* Python's PyUnicode_FromFormat doesn't understand the ll length modifier for %x, so to format a 64-bit value we need to use stdio. 
*/ snprintf(buf, sizeof(buf) - 1, "%" PRIx64, oid_value); oid_str = PyUnicode_FromFormat(" oid 0x%s", buf); } if (!oid_str) { oid_str = repr_helper(self->oid, " oid %R"); if (!oid_str) goto cleanup; } jar_str = repr_helper(self->jar, " in %R"); if (!jar_str) goto cleanup; module = PyObject_GetAttrString((PyObject*)Py_TYPE(self), "__module__"); name = PyObject_GetAttrString((PyObject*)Py_TYPE(self), "__name__"); if (!module || !name) { /* Some error retrieving __module__ or __name__. Ignore it, use the C data. */ PyErr_Clear(); result = PyUnicode_FromFormat("<%s object at %p%S%S%S>", Py_TYPE(self)->tp_name, self, oid_str, jar_str, prepr_exc_str); } else { result = PyUnicode_FromFormat("<%S.%S object at %p%S%S%S>", module, name, self, oid_str, jar_str, prepr_exc_str); } cleanup: Py_XDECREF(prepr); Py_XDECREF(prepr_exc_str); Py_XDECREF(oid_str); Py_XDECREF(jar_str); Py_XDECREF(name); Py_XDECREF(module); return result; } static PyGetSetDef Per_getsets[] = { {"_p_changed", (getter)Per_get_changed, (setter)Per_set_changed}, {"_p_jar", (getter)Per_get_jar, (setter)Per_set_jar}, {"_p_mtime", (getter)Per_get_mtime}, {"_p_oid", (getter)Per_get_oid, (setter)Per_set_oid}, {"_p_serial", (getter)Per_get_serial, (setter)Per_set_serial}, {"_p_state", (getter)Per_get_state}, {"_p_estimated_size", (getter)Per_get_estimated_size, (setter)Per_set_estimated_size }, {"_p_status", (getter)Per_get_status}, {"_p_sticky", (getter)Per_get_sticky, (setter)Per_set_sticky}, {NULL} }; static struct PyMethodDef Per_methods[] = { {"_p_deactivate", (PyCFunction)Per__p_deactivate, METH_NOARGS, "_p_deactivate() -- Deactivate the object"}, {"_p_activate", (PyCFunction)Per__p_activate, METH_NOARGS, "_p_activate() -- Activate the object"}, {"_p_invalidate", (PyCFunction)Per__p_invalidate, METH_NOARGS, "_p_invalidate() -- Invalidate the object"}, {"_p_getattr", (PyCFunction)Per__p_getattr, METH_O, "_p_getattr(name) -- Test whether the base class must handle the name\n" "\n" "The method unghostifies the 
object, if necessary.\n" "The method records the object access, if necessary.\n" "\n" "This method should be called by subclass __getattribute__\n" "implementations before doing anything else. If the method\n" "returns True, then __getattribute__ implementations must delegate\n" "to the base class, Persistent.\n" }, {"_p_setattr", (PyCFunction)Per__p_setattr, METH_VARARGS, "_p_setattr(name, value) -- Save persistent meta data\n" "\n" "This method should be called by subclass __setattr__ implementations\n" "before doing anything else. If it returns true, then the attribute\n" "was handled by the base class.\n" "\n" "The method unghostifies the object, if necessary.\n" "The method records the object access, if necessary.\n" }, {"_p_delattr", (PyCFunction)Per__p_delattr, METH_O, "_p_delattr(name) -- Delete persistent meta data\n" "\n" "This method should be called by subclass __delattr__ implementations\n" "before doing anything else. If it returns true, then the attribute\n" "was handled by the base class.\n" "\n" "The method unghostifies the object, if necessary.\n" "The method records the object access, if necessary.\n" }, {"__getstate__", (PyCFunction)Per__getstate__, METH_NOARGS, pickle___getstate__doc }, {"__setstate__", (PyCFunction)pickle___setstate__, METH_O, pickle___setstate__doc}, {"__reduce__", (PyCFunction)pickle___reduce__, METH_NOARGS, pickle___reduce__doc}, {NULL, NULL} /* sentinel */ }; /* This module is compiled as a shared library. Some compilers don't allow addresses of Python objects defined in other libraries to be used in static initializers here. The DEFERRED_ADDRESS macro is used to tag the slots where such addresses appear; the module init function must fill in the tagged slots at runtime. The argument is for documentation -- the macro ignores it. 
*/ #define DEFERRED_ADDRESS(ADDR) 0 static PyTypeObject Pertype = { PyVarObject_HEAD_INIT(DEFERRED_ADDRESS(&PyType_Type), 0) "persistent.Persistent", /* tp_name */ sizeof(cPersistentObject), /* tp_basicsize */ 0, /* tp_itemsize */ (destructor)Per_dealloc, /* tp_dealloc */ 0, /* tp_print */ 0, /* tp_getattr */ 0, /* tp_setattr */ 0, /* tp_compare */ (reprfunc)Per_repr, /* tp_repr */ 0, /* tp_as_number */ 0, /* tp_as_sequence */ 0, /* tp_as_mapping */ 0, /* tp_hash */ 0, /* tp_call */ 0, /* tp_str */ (getattrofunc)Per_getattro, /* tp_getattro */ (setattrofunc)Per_setattro, /* tp_setattro */ 0, /* tp_as_buffer */ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_GC, /* tp_flags */ 0, /* tp_doc */ (traverseproc)Per_traverse, /* tp_traverse */ 0, /* tp_clear */ 0, /* tp_richcompare */ 0, /* tp_weaklistoffset */ 0, /* tp_iter */ 0, /* tp_iternext */ Per_methods, /* tp_methods */ 0, /* tp_members */ Per_getsets, /* tp_getset */ }; /* End of code for Persistent objects */ /* -------------------------------------------------------- */ typedef int (*intfunctionwithpythonarg)(PyObject*); /* Load the object's state if necessary and become sticky */ static int Per_setstate(cPersistentObject *self) { if (unghostify(self) < 0) return -1; self->state = cPersistent_STICKY_STATE; return 0; } static PyObject * simple_new(PyObject *self, PyObject *type_object) { if (!PyType_Check(type_object)) { PyErr_SetString(PyExc_TypeError, "simple_new argument must be a type object."); return NULL; } return PyType_GenericNew((PyTypeObject *)type_object, NULL, NULL); } static PyMethodDef cPersistence_methods[] = { {"simple_new", simple_new, METH_O, "Create an object by simply calling a class's __new__ method without " "arguments."}, {NULL, NULL} }; static cPersistenceCAPIstruct truecPersistenceCAPI = { &Pertype, (getattrofunc)Per_getattro, /*tp_getattr with object key*/ (setattrofunc)Per_setattro, /*tp_setattr with object key*/ changed, accessed, ghostify, 
(intfunctionwithpythonarg)Per_setstate, NULL, /* The percachedel slot is initialized in cPickleCache.c when the module is loaded. It uses a function in a different shared library. */ readCurrent }; #ifdef PY3K static struct PyModuleDef moduledef = { PyModuleDef_HEAD_INIT, "cPersistence", /* m_name */ cPersistence_doc_string, /* m_doc */ -1, /* m_size */ cPersistence_methods, /* m_methods */ NULL, /* m_reload */ NULL, /* m_traverse */ NULL, /* m_clear */ NULL, /* m_free */ }; #endif static PyObject* module_init(void) { PyObject *module, *capi; PyObject *copy_reg; if (init_strings() < 0) return NULL; #ifdef PY3K module = PyModule_Create(&moduledef); #else module = Py_InitModule3("cPersistence", cPersistence_methods, cPersistence_doc_string); #endif #ifdef PY3K ((PyObject*)&Pertype)->ob_type = &PyType_Type; #else Pertype.ob_type = &PyType_Type; #endif Pertype.tp_new = PyType_GenericNew; if (PyType_Ready(&Pertype) < 0) return NULL; if (PyModule_AddObject(module, "Persistent", (PyObject *)&Pertype) < 0) return NULL; cPersistenceCAPI = &truecPersistenceCAPI; #ifdef PY3K capi = PyCapsule_New(cPersistenceCAPI, CAPI_CAPSULE_NAME, NULL); #else capi = PyCObject_FromVoidPtr(cPersistenceCAPI, NULL); #endif if (!capi) return NULL; if (PyModule_AddObject(module, "CAPI", capi) < 0) return NULL; if (PyModule_AddIntConstant(module, "GHOST", cPersistent_GHOST_STATE) < 0) return NULL; if (PyModule_AddIntConstant(module, "UPTODATE", cPersistent_UPTODATE_STATE) < 0) return NULL; if (PyModule_AddIntConstant(module, "CHANGED", cPersistent_CHANGED_STATE) < 0) return NULL; if (PyModule_AddIntConstant(module, "STICKY", cPersistent_STICKY_STATE) < 0) return NULL; py_simple_new = PyObject_GetAttrString(module, "simple_new"); if (!py_simple_new) return NULL; #ifdef PY3K copy_reg = PyImport_ImportModule("copyreg"); #else copy_reg = PyImport_ImportModule("copy_reg"); #endif if (!copy_reg) return NULL; copy_reg_slotnames = PyObject_GetAttrString(copy_reg, "_slotnames"); if (!copy_reg_slotnames) { 
Py_DECREF(copy_reg); return NULL; } __newobj__ = PyObject_GetAttrString(copy_reg, "__newobj__"); if (!__newobj__) { Py_DECREF(copy_reg); return NULL; } return module; } #ifdef PY3K PyMODINIT_FUNC PyInit_cPersistence(void) { return module_init(); } #else PyMODINIT_FUNC initcPersistence(void) { module_init(); } #endif ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1585243094.0 persistent-4.6.4/persistent/cPersistence.h0000644000076500000240000001176100000000000021461 0ustar00jmaddenstaff00000000000000/***************************************************************************** Copyright (c) 2001, 2002 Zope Foundation and Contributors. All Rights Reserved. This software is subject to the provisions of the Zope Public License, Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS FOR A PARTICULAR PURPOSE ****************************************************************************/ #ifndef CPERSISTENCE_H #define CPERSISTENCE_H #include "_compat.h" #include "bytesobject.h" #include "ring.h" #define CACHE_HEAD \ PyObject_HEAD \ CPersistentRing ring_home; \ int non_ghost_count; \ Py_ssize_t total_estimated_size; struct ccobject_head_struct; typedef struct ccobject_head_struct PerCache; /* How big is a persistent object? 12 PyGC_Head is two pointers and an int 8 PyObject_HEAD is an int and a pointer 12 jar, oid, cache pointers 8 ring struct 8 serialno 4 state + extra 4 size info (56) so far 4 dict ptr 4 weaklist ptr ------------------------- 68 only need 62, but obmalloc rounds up to multiple of eight Even a ghost requires 64 bytes. It's possible to make a persistent instance with slots and no dict, which changes the storage needed. 
*/ #define cPersistent_HEAD \ PyObject_HEAD \ PyObject *jar; \ PyObject *oid; \ PerCache *cache; \ CPersistentRing ring; \ char serial[8]; \ signed state:8; \ unsigned estimated_size:24; /* We recently added estimated_size. We originally added it as a new unsigned long field after a signed char state field and a 3-character reserved field. This didn't work because there are packages in the wild that have their own copies of cPersistence.h that didn't see the update. To get around this, we used the reserved space by making estimated_size a 24-bit bit field in the space occupied by the old 3-character reserved field. To fit in 24 bits, we made the units of estimated_size 64-character blocks. This allows is to handle up to a GB. We should never see that, but to be paranoid, we also truncate sizes greater than 1GB. We also set the minimum size to 64 bytes. We use the _estimated_size_in_24_bits and _estimated_size_in_bytes macros both to avoid repetition and to make intent a little clearer. */ #define _estimated_size_in_24_bits(I) ((I) > 1073741696 ? 
16777215 : (I)/64+1) #define _estimated_size_in_bytes(I) ((I)*64) #define cPersistent_GHOST_STATE -1 #define cPersistent_UPTODATE_STATE 0 #define cPersistent_CHANGED_STATE 1 #define cPersistent_STICKY_STATE 2 typedef struct { cPersistent_HEAD } cPersistentObject; typedef void (*percachedelfunc)(PerCache *, PyObject *); typedef struct { PyTypeObject *pertype; getattrofunc getattro; setattrofunc setattro; int (*changed)(cPersistentObject*); void (*accessed)(cPersistentObject*); void (*ghostify)(cPersistentObject*); int (*setstate)(PyObject*); percachedelfunc percachedel; int (*readCurrent)(cPersistentObject*); } cPersistenceCAPIstruct; #define cPersistenceType cPersistenceCAPI->pertype #ifndef DONT_USE_CPERSISTENCECAPI static cPersistenceCAPIstruct *cPersistenceCAPI; #endif #define cPersistanceModuleName "cPersistence" #define PER_TypeCheck(O) PyObject_TypeCheck((O), cPersistenceCAPI->pertype) #define PER_USE_OR_RETURN(O,R) {if((O)->state==cPersistent_GHOST_STATE && cPersistenceCAPI->setstate((PyObject*)(O)) < 0) return (R); else if ((O)->state==cPersistent_UPTODATE_STATE) (O)->state=cPersistent_STICKY_STATE;} #define PER_CHANGED(O) (cPersistenceCAPI->changed((cPersistentObject*)(O))) #define PER_READCURRENT(O, E) \ if (cPersistenceCAPI->readCurrent((cPersistentObject*)(O)) < 0) { E; } #define PER_GHOSTIFY(O) (cPersistenceCAPI->ghostify((cPersistentObject*)(O))) /* If the object is sticky, make it non-sticky, so that it can be ghostified. The value is not meaningful */ #define PER_ALLOW_DEACTIVATION(O) ((O)->state==cPersistent_STICKY_STATE && ((O)->state=cPersistent_UPTODATE_STATE)) #define PER_PREVENT_DEACTIVATION(O) ((O)->state==cPersistent_UPTODATE_STATE && ((O)->state=cPersistent_STICKY_STATE)) /* Make a persistent object usable from C by: - Making sure it is not a ghost - Making it sticky. IMPORTANT: If you call this and don't call PER_ALLOW_DEACTIVATION, your object will not be ghostified. PER_USE returns a 1 on success and 0 failure, where failure means error. 
*/ #define PER_USE(O) \ (((O)->state != cPersistent_GHOST_STATE \ || (cPersistenceCAPI->setstate((PyObject*)(O)) >= 0)) \ ? (((O)->state==cPersistent_UPTODATE_STATE) \ ? ((O)->state=cPersistent_STICKY_STATE) : 1) : 0) #define PER_ACCESSED(O) (cPersistenceCAPI->accessed((cPersistentObject*)(O))) #endif ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1585243094.0 persistent-4.6.4/persistent/cPickleCache.c0000644000076500000240000012311600000000000021321 0ustar00jmaddenstaff00000000000000/***************************************************************************** Copyright (c) 2001, 2002 Zope Foundation and Contributors. All Rights Reserved. This software is subject to the provisions of the Zope Public License, Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS FOR A PARTICULAR PURPOSE ****************************************************************************/ /* Objects are stored under three different regimes: Regime 1: Persistent Classes Persistent Classes are part of ZClasses. They are stored in the self->data dictionary, and are never garbage collected. The klass_items() method returns a sequence of (oid,object) tuples for every Persistent Class, which should make it possible to implement garbage collection in Python if necessary. Regime 2: Ghost Objects There is no benefit to keeping a ghost object which has no external references, therefore a weak reference scheme is used to ensure that ghost objects are removed from memory as soon as possible, when the last external reference is lost. Ghost objects are stored in the self->data dictionary. Normally a dictionary keeps a strong reference on its values, however this reference count is 'stolen'. 
This weak reference scheme leaves a dangling reference, in the dictionary, when the last external reference is lost. To clean up this dangling reference the persistent object dealloc function calls self->cache->_oid_unreferenced(self->oid). The cache looks up the oid in the dictionary, ensures it points to an object whose reference count is zero, then removes it from the dictionary. Before removing the object from the dictionary it must temporarily resurrect the object in much the same way that class instances are resurrected before their __del__ is called. Since ghost objects are stored under a different regime to non-ghost objects, an extra ghostify function in cPersistenceAPI replaces self->state=GHOST_STATE assignments that were common in other persistent classes (such as BTrees). Regime 3: Non-Ghost Objects Non-ghost objects are stored in two data structures: the dictionary mapping oids to objects and a doubly-linked list that encodes the order in which the objects were accessed. The dictionary reference is borrowed, as it is for ghosts. The list reference is a new reference; the list stores recently used objects, even if they are otherwise unreferenced, to avoid loading the object from the database again. The doubly-link-list nodes contain next and previous pointers linking together the cache and all non-ghost persistent objects. The node embedded in the cache is the home position. On every attribute access a non-ghost object will relink itself just behind the home position in the ring. Objects accessed least recently will eventually find themselves positioned after the home position. Occasionally other nodes are temporarily inserted in the ring as position markers. The cache contains a ring_lock flag which must be set and unset before and after doing so. Only if the flag is unset can the cache assume that all nodes are either his own home node, or nodes from persistent objects. This assumption is useful during the garbage collection process. 
The number of non-ghost objects is counted in self->non_ghost_count. The garbage collection process consists of traversing the ring, and deactivating (that is, turning into a ghost) every object until self->non_ghost_count is down to the target size, or until it reaches the home position again. Note that objects in the sticky or changed states are still kept in the ring, however they can not be deactivated. The garbage collection process must skip such objects, rather than deactivating them. */ static char cPickleCache_doc_string[] = "Defines the PickleCache used by ZODB Connection objects.\n" "\n" "$Id$\n"; #define DONT_USE_CPERSISTENCECAPI #include "cPersistence.h" #include "structmember.h" #include #include #undef Py_FindMethod /* Python string objects to speed lookups; set by module init. */ static PyObject *py__p_changed; static PyObject *py__p_deactivate; static PyObject *py__p_jar; static PyObject *py__p_oid; static cPersistenceCAPIstruct *cPersistenceCAPI; /* This object is the pickle cache. The CACHE_HEAD macro guarantees that layout of this struct is the same as the start of ccobject_head in cPersistence.c */ typedef struct { CACHE_HEAD int klass_count; /* count of persistent classes */ PyObject *data; /* oid -> object dict */ PyObject *jar; /* Connection object */ int cache_size; /* target number of items in cache */ Py_ssize_t cache_size_bytes; /* target total estimated size of items in cache */ /* Most of the time the ring contains only: * many nodes corresponding to persistent objects * one 'home' node from the cache. In some cases it is handy to temporarily add other types of node into the ring as placeholders. 'ring_lock' is a boolean indicating that someone has already done this. Currently this is only used by the garbage collection code. */ int ring_lock; /* 'cache_drain_resistance' controls how quickly the cache size will drop when it is smaller than the configured size. 
A value of zero means it will not drop below the configured size (suitable for most caches). Otherwise, it will remove cache_non_ghost_count/cache_drain_resistance items from the cache every time (suitable for rarely used caches, such as those associated with Zope versions. */ int cache_drain_resistance; } ccobject; static int cc_ass_sub(ccobject *self, PyObject *key, PyObject *v); /* ---------------------------------------------------------------- */ #define OBJECT_FROM_RING(SELF, HERE) \ ((cPersistentObject *)(((char *)here) - offsetof(cPersistentObject, ring))) /* Insert self into the ring, following after. */ static void insert_after(CPersistentRing *self, CPersistentRing *after) { assert(self != NULL); assert(after != NULL); self->r_prev = after; self->r_next = after->r_next; after->r_next->r_prev = self; after->r_next = self; } /* Remove self from the ring. */ static void unlink_from_ring(CPersistentRing *self) { assert(self != NULL); self->r_prev->r_next = self->r_next; self->r_next->r_prev = self->r_prev; } static int scan_gc_items(ccobject *self, int target, Py_ssize_t target_bytes) { /* This function must only be called with the ring lock held, because it places non-object placeholders in the ring. */ cPersistentObject *object; CPersistentRing *here; CPersistentRing before_original_home; int result = -1; /* guilty until proved innocent */ /* Scan the ring, from least to most recently used, deactivating * up-to-date objects, until we either find the ring_home again or * or we've ghosted enough objects to reach the target size. * Tricky: __getattr__ and __del__ methods can do anything, and in * particular if we ghostify an object with a __del__ method, that method * can load the object again, putting it back into the MRU part of the * ring. Waiting to find ring_home again can thus cause an infinite * loop (Collector #1208). So before_original_home records the MRU * position we start with, and we stop the scan when we reach that. 
*/ insert_after(&before_original_home, self->ring_home.r_prev); here = self->ring_home.r_next; /* least recently used object */ /* All objects should be deactivated when the objects count parameter * (target) is zero and the size limit parameter in bytes(target_bytes) * is also zero. * * Otherwise the objects should be collect while one of the following * conditions are True: * - the ghost count is bigger than the number of objects limit(target). * - the estimated size in bytes is bigger than the size limit in * bytes(target_bytes). */ while (here != &before_original_home && ( (!target && !target_bytes) || ( (target && self->non_ghost_count > target) || (target_bytes && self->total_estimated_size > target_bytes) ) ) ) { assert(self->ring_lock); assert(here != &self->ring_home); /* At this point we know that the ring only contains nodes from persistent objects, plus our own home node. We know this because the ring lock is held. We can safely assume the current ring node is a persistent object now we know it is not the home */ object = OBJECT_FROM_RING(self, here); if (object->state == cPersistent_UPTODATE_STATE) { CPersistentRing placeholder; PyObject *method; PyObject *temp; int error_occurred = 0; /* deactivate it. This is the main memory saver. */ /* Add a placeholder, a dummy node in the ring. We need to do this to mark our position in the ring. It is possible that the PyObject_GetAttr() call below will invoke a __getattr__() hook in Python. Also possible that deactivation will lead to a __del__ method call. So another thread might run, and mutate the ring as a side effect of object accesses. There's no predicting then where in the ring here->next will point after that. The placeholder won't move as a side effect of calling Python code. 
*/ insert_after(&placeholder, here); method = PyObject_GetAttr((PyObject *)object, py__p_deactivate); if (method == NULL) error_occurred = 1; else { temp = PyObject_CallObject(method, NULL); Py_DECREF(method); if (temp == NULL) error_occurred = 1; else Py_DECREF(temp); } here = placeholder.r_next; unlink_from_ring(&placeholder); if (error_occurred) goto Done; } else here = here->r_next; } result = 0; Done: unlink_from_ring(&before_original_home); return result; } static PyObject * lockgc(ccobject *self, int target_size, Py_ssize_t target_size_bytes) { /* This is thread-safe because of the GIL, and there's nothing * in between checking the ring_lock and acquiring it that calls back * into Python. */ if (self->ring_lock) { Py_INCREF(Py_None); return Py_None; } self->ring_lock = 1; if (scan_gc_items(self, target_size, target_size_bytes) < 0) { self->ring_lock = 0; return NULL; } self->ring_lock = 0; Py_INCREF(Py_None); return Py_None; } static PyObject * cc_incrgc(ccobject *self, PyObject *args) { int obsolete_arg = -999; int starting_size = self->non_ghost_count; int target_size = self->cache_size; Py_ssize_t target_size_bytes = self->cache_size_bytes; if (self->cache_drain_resistance >= 1) { /* This cache will gradually drain down to a small size. 
Check a (small) number of objects proportional to the current size */ int target_size_2 = (starting_size - 1 - starting_size / self->cache_drain_resistance); if (target_size_2 < target_size) target_size = target_size_2; } if (!PyArg_ParseTuple(args, "|i:incrgc", &obsolete_arg)) return NULL; if (obsolete_arg != -999 && (PyErr_Warn(PyExc_DeprecationWarning, "No argument expected") < 0)) return NULL; return lockgc(self, target_size, target_size_bytes); } static PyObject * cc_full_sweep(ccobject *self, PyObject *args) { int dt = -999; /* TODO: This should be deprecated; */ if (!PyArg_ParseTuple(args, "|i:full_sweep", &dt)) return NULL; if (dt == -999) return lockgc(self, 0, 0); else return cc_incrgc(self, args); } static PyObject * cc_minimize(ccobject *self, PyObject *args) { int ignored = -999; if (!PyArg_ParseTuple(args, "|i:minimize", &ignored)) return NULL; if (ignored != -999 && (PyErr_Warn(PyExc_DeprecationWarning, "No argument expected") < 0)) return NULL; return lockgc(self, 0, 0); } static int _invalidate(ccobject *self, PyObject *key) { static PyObject *_p_invalidate = NULL; PyObject *meth, *v; v = PyDict_GetItem(self->data, key); if (v == NULL) return 0; if (_p_invalidate == NULL) { _p_invalidate = INTERN("_p_invalidate"); if (_p_invalidate == NULL) { /* It doesn't make any sense to ignore this error, but the caller ignores all errors. TODO: and why does it do that? This should be fixed */ return -1; } } if (v->ob_refcnt <= 1 && PyType_Check(v)) { /* This looks wrong, but it isn't. We use strong references to types because they don't have the ring members. The result is that we *never* remove classes unless they are modified. We can fix this by using wekrefs uniformly. 
*/ self->klass_count--; return PyDict_DelItem(self->data, key); } meth = PyObject_GetAttr(v, _p_invalidate); if (meth == NULL) return -1; v = PyObject_CallObject(meth, NULL); Py_DECREF(meth); if (v == NULL) return -1; Py_DECREF(v); return 0; } static PyObject * cc_invalidate(ccobject *self, PyObject *inv) { PyObject *key, *v; Py_ssize_t i = 0; if (PyDict_Check(inv)) { while (PyDict_Next(inv, &i, &key, &v)) { if (_invalidate(self, key) < 0) return NULL; } PyDict_Clear(inv); } else { if (PyBytes_Check(inv)) { if (_invalidate(self, inv) < 0) return NULL; } else { int l, r; l = PyObject_Length(inv); if (l < 0) return NULL; for (i=l; --i >= 0; ) { key = PySequence_GetItem(inv, i); if (!key) return NULL; r = _invalidate(self, key); Py_DECREF(key); if (r < 0) return NULL; } /* Dubious: modifying the input may be an unexpected side effect. */ PySequence_DelSlice(inv, 0, l); } } Py_INCREF(Py_None); return Py_None; } static PyObject * cc_get(ccobject *self, PyObject *args) { PyObject *r, *key, *d = NULL; if (!PyArg_ParseTuple(args, "O|O:get", &key, &d)) return NULL; r = PyDict_GetItem(self->data, key); if (!r) { if (d) r = d; else r = Py_None; } Py_INCREF(r); return r; } static PyObject * cc_items(ccobject *self) { return PyObject_CallMethod(self->data, "items", ""); } static PyObject * cc_klass_items(ccobject *self) { PyObject *l,*k,*v; Py_ssize_t p = 0; l = PyList_New(0); if (l == NULL) return NULL; while (PyDict_Next(self->data, &p, &k, &v)) { if(PyType_Check(v)) { v = Py_BuildValue("OO", k, v); if (v == NULL) { Py_DECREF(l); return NULL; } if (PyList_Append(l, v) < 0) { Py_DECREF(v); Py_DECREF(l); return NULL; } Py_DECREF(v); } } return l; } static PyObject * cc_debug_info(ccobject *self) { PyObject *l,*k,*v; Py_ssize_t p = 0; l = PyList_New(0); if (l == NULL) return NULL; while (PyDict_Next(self->data, &p, &k, &v)) { if (v->ob_refcnt <= 0) v = Py_BuildValue("Oi", k, v->ob_refcnt); else if (! 
PyType_Check(v) && PER_TypeCheck(v) ) v = Py_BuildValue("Oisi", k, v->ob_refcnt, v->ob_type->tp_name, ((cPersistentObject*)v)->state); else v = Py_BuildValue("Ois", k, v->ob_refcnt, v->ob_type->tp_name); if (v == NULL) goto err; if (PyList_Append(l, v) < 0) goto err; } return l; err: Py_DECREF(l); return NULL; } static PyObject * cc_lru_items(ccobject *self) { PyObject *l; CPersistentRing *here; if (self->ring_lock) { /* When the ring lock is held, we have no way of know which ring nodes belong to persistent objects, and which a placeholders. */ PyErr_SetString(PyExc_ValueError, ".lru_items() is unavailable during garbage collection"); return NULL; } l = PyList_New(0); if (l == NULL) return NULL; here = self->ring_home.r_next; while (here != &self->ring_home) { PyObject *v; cPersistentObject *object = OBJECT_FROM_RING(self, here); if (object == NULL) { Py_DECREF(l); return NULL; } v = Py_BuildValue("OO", object->oid, object); if (v == NULL) { Py_DECREF(l); return NULL; } if (PyList_Append(l, v) < 0) { Py_DECREF(v); Py_DECREF(l); return NULL; } Py_DECREF(v); here = here->r_next; } return l; } static void cc_oid_unreferenced(ccobject *self, PyObject *oid) { /* This is called by the persistent object deallocation function when the reference count on a persistent object reaches zero. We need to fix up our dictionary; its reference is now dangling because we stole its reference count. Be careful to not release the global interpreter lock until this is complete. */ cPersistentObject *dead_pers_obj; ccobject* dead_pers_obj_ref_to_self; /* If the cache has been cleared by GC, data will be NULL. */ if (!self->data) return; dead_pers_obj = (cPersistentObject*)PyDict_GetItem(self->data, oid); assert(dead_pers_obj); assert(dead_pers_obj->ob_refcnt == 0); dead_pers_obj_ref_to_self = (ccobject*)dead_pers_obj->cache; assert(dead_pers_obj_ref_to_self == self); /* Need to be very hairy here because a dictionary is about to decref an already deleted object. 
*/ Py_INCREF(dead_pers_obj); assert(dead_pers_obj->ob_refcnt == 1); /* Incremement the refcount again, because delitem is going to DECREF it. If its refcount reached zero again, we'd call back to the dealloc function that called us. */ Py_INCREF(dead_pers_obj); if (PyDict_DelItem(self->data, oid) < 0) { /* Almost ignore errors if it wasn't already present (somehow; that shouldn't be possible since we literally just got it out of this dict and we're holding the GIL and not making any calls that could cause a greenlet switch so the state of the dictionary should not change). We still need to finish the cleanup. Just write an unraisable error (like an exception from __del__, because that's basically what this is). */ PyErr_WriteUnraisable((PyObject*)dead_pers_obj); PyErr_Clear(); /* Have the same side effect of clearing a ref count as the dict would have.*/ Py_DECREF(dead_pers_obj); } /* Now remove the dead object's reference to self. Note that this could cause self to be dealloced. */ Py_DECREF(dead_pers_obj_ref_to_self); dead_pers_obj->cache = NULL; assert(dead_pers_obj->ob_refcnt == 1); /* Don't DECREF the object, because this function is called from the object's dealloc function. If the refcnt reaches zero (again), it will all be invoked recursively. 
*/ } static PyObject * cc_ringlen(ccobject *self) { CPersistentRing *here; int c = 0; for (here = self->ring_home.r_next; here != &self->ring_home; here = here->r_next) c++; return INT_FROM_LONG(c); } static PyObject * cc_update_object_size_estimation(ccobject *self, PyObject *args) { PyObject *oid; cPersistentObject *v; unsigned int new_size; if (!PyArg_ParseTuple(args, "OI:updateObjectSizeEstimation", &oid, &new_size)) return NULL; /* Note: reference borrowed */ v = (cPersistentObject *)PyDict_GetItem(self->data, oid); if (v) { /* we know this object -- update our "total_size_estimation" we must only update when the object is in the ring */ if (v->ring.r_next) { self->total_estimated_size += _estimated_size_in_bytes( (int)(_estimated_size_in_24_bits(new_size)) - (int)(v->estimated_size) ); /* we do this in "Connection" as we need it even when the object is not in the cache (or not the ring) */ /* v->estimated_size = new_size; */ } } Py_RETURN_NONE; } static PyObject* cc_new_ghost(ccobject *self, PyObject *args) { PyObject *tmp, *key, *v; if (!PyArg_ParseTuple(args, "OO:new_ghost", &key, &v)) return NULL; /* Sanity check the value given to make sure it is allowed in the cache */ if (PyType_Check(v)) { /* Its a persistent class, such as a ZClass. Thats ok. */ } else if (! PER_TypeCheck(v)) { /* If it's not an instance of a persistent class, (ie Python classes that derive from persistent.Persistent, BTrees, etc), report an error. */ PyErr_SetString(PyExc_TypeError, "Cache values must be persistent objects."); return NULL; } /* Can't access v->oid directly because the object might be a * persistent class. 
*/ tmp = PyObject_GetAttr(v, py__p_oid); if (tmp == NULL) return NULL; Py_DECREF(tmp); if (tmp != Py_None) { PyErr_SetString(PyExc_ValueError, "New ghost object must not have an oid"); return NULL; } /* useful sanity check, but not strictly an invariant of this class */ tmp = PyObject_GetAttr(v, py__p_jar); if (tmp == NULL) return NULL; Py_DECREF(tmp); if (tmp != Py_None) { PyErr_SetString(PyExc_ValueError, "New ghost object must not have a jar"); return NULL; } tmp = PyDict_GetItem(self->data, key); if (tmp) { Py_DECREF(tmp); PyErr_SetString(PyExc_ValueError, "The given oid is already in the cache"); return NULL; } if (PyType_Check(v)) { if (PyObject_SetAttr(v, py__p_jar, self->jar) < 0) return NULL; if (PyObject_SetAttr(v, py__p_oid, key) < 0) return NULL; if (PyDict_SetItem(self->data, key, v) < 0) return NULL; PyObject_GC_UnTrack((void *)self->data); self->klass_count++; } else { cPersistentObject *p = (cPersistentObject *)v; if(p->cache != NULL) { PyErr_SetString(PyExc_AssertionError, "Already in a cache"); return NULL; } if (PyDict_SetItem(self->data, key, v) < 0) return NULL; /* the dict should have a borrowed reference */ PyObject_GC_UnTrack((void *)self->data); Py_DECREF(v); Py_INCREF(self); p->cache = (PerCache *)self; Py_INCREF(self->jar); p->jar = self->jar; Py_INCREF(key); p->oid = key; p->state = cPersistent_GHOST_STATE; } Py_RETURN_NONE; } static struct PyMethodDef cc_methods[] = { {"items", (PyCFunction)cc_items, METH_NOARGS, "Return list of oid, object pairs for all items in cache."}, {"lru_items", (PyCFunction)cc_lru_items, METH_NOARGS, "List (oid, object) pairs from the lru list, as 2-tuples."}, {"klass_items", (PyCFunction)cc_klass_items, METH_NOARGS, "List (oid, object) pairs of cached persistent classes."}, {"full_sweep", (PyCFunction)cc_full_sweep, METH_VARARGS, "full_sweep() -- Perform a full sweep of the cache."}, {"minimize", (PyCFunction)cc_minimize, METH_VARARGS, "minimize([ignored]) -- Remove as many objects as possible\n\n" "Ghostify 
all objects that are not modified. Takes an optional\n" "argument, but ignores it."}, {"incrgc", (PyCFunction)cc_incrgc, METH_VARARGS, "incrgc() -- Perform incremental garbage collection\n\n" "This method had been depricated!" "Some other implementations support an optional parameter 'n' which\n" "indicates a repetition count; this value is ignored."}, {"invalidate", (PyCFunction)cc_invalidate, METH_O, "invalidate(oids) -- invalidate one, many, or all ids"}, {"get", (PyCFunction)cc_get, METH_VARARGS, "get(key [, default]) -- get an item, or a default"}, {"ringlen", (PyCFunction)cc_ringlen, METH_NOARGS, "ringlen() -- Returns number of non-ghost items in cache."}, {"debug_info", (PyCFunction)cc_debug_info, METH_NOARGS, "debug_info() -- Returns debugging data about objects in the cache."}, {"update_object_size_estimation", (PyCFunction)cc_update_object_size_estimation, METH_VARARGS, "update_object_size_estimation(oid, new_size) -- " "update the caches size estimation for *oid* " "(if this is known to the cache)."}, {"new_ghost", (PyCFunction)cc_new_ghost, METH_VARARGS, "new_ghost() -- Initialize a ghost and add it to the cache."}, {NULL, NULL} /* sentinel */ }; static int cc_init(ccobject *self, PyObject *args, PyObject *kwds) { int cache_size = 100; Py_ssize_t cache_size_bytes = 0; PyObject *jar; if (!PyArg_ParseTuple(args, "O|in", &jar, &cache_size, &cache_size_bytes)) return -1; self->jar = NULL; self->data = PyDict_New(); if (self->data == NULL) { Py_DECREF(self); return -1; } /* Untrack the dict mapping oids to objects. The dict contains uncounted references to ghost objects, so it isn't safe for GC to visit it. If GC finds an object with more referents that refcounts, it will die with an assertion failure. When the cache participates in GC, it will need to traverse the objects in the doubly-linked list, which will account for all the non-ghost objects. 
*/ PyObject_GC_UnTrack((void *)self->data); self->jar = jar; Py_INCREF(jar); self->cache_size = cache_size; self->cache_size_bytes = cache_size_bytes; self->non_ghost_count = 0; self->total_estimated_size = 0; self->klass_count = 0; self->cache_drain_resistance = 0; self->ring_lock = 0; self->ring_home.r_next = &self->ring_home; self->ring_home.r_prev = &self->ring_home; return 0; } static void cc_dealloc(ccobject *self) { PyObject_GC_UnTrack((PyObject *)self); Py_XDECREF(self->data); Py_XDECREF(self->jar); PyObject_GC_Del(self); } static int cc_clear(ccobject *self) { Py_ssize_t pos = 0; PyObject *k, *v; /* Clearing the cache is delicate. A non-ghost object will show up in the ring and in the dict. If we deallocating the dict before clearing the ring, the GC will decref each object in the dict. Since the dict references are uncounted, this will lead to objects having negative refcounts. Freeing the non-ghost objects should eliminate many objects from the cache, but there may still be ghost objects left. It's not safe to decref the dict until it's empty, so we need to manually clear those out of the dict, too. We accomplish that by replacing all the ghost objects with None. */ /* We don't need to lock the ring, because the cache is unreachable. It should be impossible for anyone to be modifying the cache. */ assert(! 
self->ring_lock); while (self->ring_home.r_next != &self->ring_home) { CPersistentRing *here = self->ring_home.r_next; cPersistentObject *o = OBJECT_FROM_RING(self, here); if (o->cache) { Py_INCREF(o); /* account for uncounted reference */ if (PyDict_DelItem(self->data, o->oid) < 0) return -1; } o->cache = NULL; Py_DECREF(self); self->ring_home.r_next = here->r_next; o->ring.r_prev = NULL; o->ring.r_next = NULL; Py_DECREF(o); here = here->r_next; } Py_XDECREF(self->jar); while (PyDict_Next(self->data, &pos, &k, &v)) { Py_INCREF(v); if (PyDict_SetItem(self->data, k, Py_None) < 0) return -1; } Py_XDECREF(self->data); self->data = NULL; self->jar = NULL; return 0; } static int cc_traverse(ccobject *self, visitproc visit, void *arg) { int err; CPersistentRing *here; /* If we're in the midst of cleaning up old objects, the ring contains * assorted junk we must not pass on to the visit() callback. This * should be rare (our cleanup code would need to have called back * into Python, which in turn triggered Python's gc). When it happens, * simply don't chase any pointers. The cache will appear to be a * source of external references then, and at worst we miss cleaning * up a dead cycle until the next time Python's gc runs. */ if (self->ring_lock) return 0; #define VISIT(SLOT) \ if (SLOT) { \ err = visit((PyObject *)(SLOT), arg); \ if (err) \ return err; \ } VISIT(self->jar); here = self->ring_home.r_next; /* It is possible that an object is traversed after it is cleared. In that case, there is no ring. 
*/ if (!here) return 0; while (here != &self->ring_home) { cPersistentObject *o = OBJECT_FROM_RING(self, here); VISIT(o); here = here->r_next; } #undef VISIT return 0; } static Py_ssize_t cc_length(ccobject *self) { return PyObject_Length(self->data); } static PyObject * cc_subscript(ccobject *self, PyObject *key) { PyObject *r; r = PyDict_GetItem(self->data, key); if (r == NULL) { PyErr_SetObject(PyExc_KeyError, key); return NULL; } Py_INCREF(r); return r; } static int cc_add_item(ccobject *self, PyObject *key, PyObject *v) { int result; PyObject *oid, *object_again, *jar; cPersistentObject *p; /* Sanity check the value given to make sure it is allowed in the cache */ if (PyType_Check(v)) { /* Its a persistent class, such as a ZClass. Thats ok. */ } else if (! PER_TypeCheck(v)) { /* If it's not an instance of a persistent class, (ie Python classes that derive from persistent.Persistent, BTrees, etc), report an error. */ PyErr_SetString(PyExc_TypeError, "Cache values must be persistent objects."); return -1; } /* Can't access v->oid directly because the object might be a * persistent class. */ oid = PyObject_GetAttr(v, py__p_oid); if (oid == NULL) return -1; if (! PyBytes_Check(oid)) { Py_DECREF(oid); PyErr_Format(PyExc_TypeError, "Cached object oid must be bytes, not a %s", oid->ob_type->tp_name); return -1; } /* we know they are both strings. * now check if they are the same string. 
*/ result = PyObject_RichCompareBool(key, oid, Py_NE); Py_DECREF(oid); if (result < 0) { return -1; } if (result) { PyErr_SetString(PyExc_ValueError, "Cache key does not match oid"); return -1; } /* useful sanity check, but not strictly an invariant of this class */ jar = PyObject_GetAttr(v, py__p_jar); if (jar == NULL) return -1; if (jar==Py_None) { Py_DECREF(jar); PyErr_SetString(PyExc_ValueError, "Cached object jar missing"); return -1; } Py_DECREF(jar); object_again = PyDict_GetItem(self->data, key); if (object_again) { if (object_again != v) { PyErr_SetString(PyExc_ValueError, "A different object already has the same oid"); return -1; } else { /* re-register under the same oid - no work needed */ return 0; } } if (PyType_Check(v)) { if (PyDict_SetItem(self->data, key, v) < 0) return -1; PyObject_GC_UnTrack((void *)self->data); self->klass_count++; return 0; } else { PerCache *cache = ((cPersistentObject *)v)->cache; if (cache) { if (cache != (PerCache *)self) /* This object is already in a different cache. */ PyErr_SetString(PyExc_ValueError, "Cache values may only be in one cache."); return -1; } /* else: This object is already one of ours, which is ok. It would be very strange if someone was trying to register the same object under a different key. */ } if (PyDict_SetItem(self->data, key, v) < 0) return -1; /* the dict should have a borrowed reference */ PyObject_GC_UnTrack((void *)self->data); Py_DECREF(v); p = (cPersistentObject *)v; Py_INCREF(self); p->cache = (PerCache *)self; if (p->state >= 0) { /* insert this non-ghost object into the ring just behind the home position. 
*/ self->non_ghost_count++; ring_add(&self->ring_home, &p->ring); /* this list should have a new reference to the object */ Py_INCREF(v); } return 0; } static int cc_del_item(ccobject *self, PyObject *key) { PyObject *v; cPersistentObject *p; /* unlink this item from the ring */ v = PyDict_GetItem(self->data, key); if (v == NULL) { PyErr_SetObject(PyExc_KeyError, key); return -1; } if (PyType_Check(v)) { self->klass_count--; } else { p = (cPersistentObject *)v; if (p->state >= 0) { self->non_ghost_count--; ring_del(&p->ring); /* The DelItem below will account for the reference held by the list. */ } else { /* This is a ghost object, so we haven't kept a reference count on it. For it have stayed alive this long someone else must be keeping a reference to it. Therefore we need to temporarily give it back a reference count before calling DelItem below */ Py_INCREF(v); } Py_DECREF((PyObject *)p->cache); p->cache = NULL; } if (PyDict_DelItem(self->data, key) < 0) { PyErr_SetString(PyExc_RuntimeError, "unexpectedly couldn't remove key in cc_ass_sub"); return -1; } return 0; } static int cc_ass_sub(ccobject *self, PyObject *key, PyObject *v) { if (!PyBytes_Check(key)) { PyErr_Format(PyExc_TypeError, "cPickleCache key must be bytes, not a %s", key->ob_type->tp_name); return -1; } if (v) return cc_add_item(self, key, v); else return cc_del_item(self, key); } static PyMappingMethods cc_as_mapping = { (lenfunc)cc_length, /* mp_length */ (binaryfunc)cc_subscript, /* mp_subscript */ (objobjargproc)cc_ass_sub, /* mp_ass_subscript */ }; static PyObject * cc_cache_data(ccobject *self, void *context) { return PyDict_Copy(self->data); } static PyGetSetDef cc_getsets[] = { {"cache_data", (getter)cc_cache_data}, {NULL} }; static PyMemberDef cc_members[] = { {"cache_size", T_INT, offsetof(ccobject, cache_size)}, {"cache_size_bytes", T_PYSSIZET, offsetof(ccobject, cache_size_bytes)}, {"total_estimated_size", T_PYSSIZET, offsetof(ccobject, total_estimated_size), READONLY}, 
{"cache_drain_resistance", T_INT, offsetof(ccobject, cache_drain_resistance)}, {"cache_non_ghost_count", T_INT, offsetof(ccobject, non_ghost_count), READONLY}, {"cache_klass_count", T_INT, offsetof(ccobject, klass_count), READONLY}, {NULL} }; /* This module is compiled as a shared library. Some compilers don't allow addresses of Python objects defined in other libraries to be used in static initializers here. The DEFERRED_ADDRESS macro is used to tag the slots where such addresses appear; the module init function must fill in the tagged slots at runtime. The argument is for documentation -- the macro ignores it. */ #define DEFERRED_ADDRESS(ADDR) 0 static PyTypeObject Cctype = { PyVarObject_HEAD_INIT(DEFERRED_ADDRESS(&PyType_Type), 0) "persistent.PickleCache", /* tp_name */ sizeof(ccobject), /* tp_basicsize */ 0, /* tp_itemsize */ (destructor)cc_dealloc, /* tp_dealloc */ 0, /* tp_print */ 0, /* tp_getattr */ 0, /* tp_setattr */ 0, /* tp_compare */ 0, /* tp_repr */ 0, /* tp_as_number */ 0, /* tp_as_sequence */ &cc_as_mapping, /* tp_as_mapping */ 0, /* tp_hash */ 0, /* tp_call */ 0, /* tp_str */ 0, /* tp_getattro */ 0, /* tp_setattro */ 0, /* tp_as_buffer */ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_GC, /* tp_flags */ 0, /* tp_doc */ (traverseproc)cc_traverse, /* tp_traverse */ (inquiry)cc_clear, /* tp_clear */ 0, /* tp_richcompare */ 0, /* tp_weaklistoffset */ 0, /* tp_iter */ 0, /* tp_iternext */ cc_methods, /* tp_methods */ cc_members, /* tp_members */ cc_getsets, /* tp_getset */ 0, /* tp_base */ 0, /* tp_dict */ 0, /* tp_descr_get */ 0, /* tp_descr_set */ 0, /* tp_dictoffset */ (initproc)cc_init, /* tp_init */ }; #ifdef PY3K static struct PyModuleDef moduledef = { PyModuleDef_HEAD_INIT, "cPickleCache", /* m_name */ cPickleCache_doc_string, /* m_doc */ -1, /* m_size */ NULL, /* m_methods */ NULL, /* m_reload */ NULL, /* m_traverse */ NULL, /* m_clear */ NULL, /* m_free */ }; #endif static PyObject* module_init(void) { PyObject *module; #ifdef PY3K 
((PyObject*)&Cctype)->ob_type = &PyType_Type; #else Cctype.ob_type = &PyType_Type; #endif Cctype.tp_new = &PyType_GenericNew; if (PyType_Ready(&Cctype) < 0) { return NULL; } #ifdef PY3K module = PyModule_Create(&moduledef); #else module = Py_InitModule3("cPickleCache", NULL, cPickleCache_doc_string); #endif #ifdef PY3K cPersistenceCAPI = (cPersistenceCAPIstruct *)PyCapsule_Import(CAPI_CAPSULE_NAME, 0); #else cPersistenceCAPI = (cPersistenceCAPIstruct *)PyCObject_Import( "persistent.cPersistence", "CAPI"); #endif if (!cPersistenceCAPI) return NULL; cPersistenceCAPI->percachedel = (percachedelfunc)cc_oid_unreferenced; py__p_changed = INTERN("_p_changed"); if (!py__p_changed) return NULL; py__p_deactivate = INTERN("_p_deactivate"); if (!py__p_deactivate) return NULL; py__p_jar = INTERN("_p_jar"); if (!py__p_jar) return NULL; py__p_oid = INTERN("_p_oid"); if (!py__p_oid) return NULL; if (PyModule_AddStringConstant(module, "cache_variant", "stiff/c") < 0) return NULL; /* This leaks a reference to Cctype, but it doesn't matter. */ if (PyModule_AddObject(module, "PickleCache", (PyObject *)&Cctype) < 0) return NULL; return module; } #ifdef PY3K PyMODINIT_FUNC PyInit_cPickleCache(void) { return module_init(); } #else PyMODINIT_FUNC initcPickleCache(void) { module_init(); } #endif ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1585243094.0 persistent-4.6.4/persistent/dict.py0000644000076500000240000000135700000000000020156 0ustar00jmaddenstaff00000000000000############################################################################## # # Copyright Zope Foundation and Contributors. # All Rights Reserved. # # This software is subject to the provisions of the Zope Public License, # Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED # WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS # FOR A PARTICULAR PURPOSE. # ############################################################################## # persistent.dict is deprecated. Use persistent.mapping from persistent.mapping import PersistentMapping as PersistentDict ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1585243094.0 persistent-4.6.4/persistent/interfaces.py0000644000076500000240000004400700000000000021355 0ustar00jmaddenstaff00000000000000############################################################################## # # Copyright (c) 2001, 2002 Zope Foundation and Contributors. # All Rights Reserved. # # This software is subject to the provisions of the Zope Public License, # Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. # THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED # WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS # FOR A PARTICULAR PURPOSE. # ############################################################################## """Persistence Interfaces """ from zope.interface import Interface from zope.interface import Attribute from persistent._compat import use_c_impl # Allowed values for _p_state. Use the C constants if available (which # are defined in cPersistence --- there is no cInterfaces --- so we # need to pass the corresponding ``mod_name``), otherwise define some # values here. 
GHOST = use_c_impl(-1, 'GHOST', mod_name='persistent.persistence') UPTODATE = use_c_impl(0, 'UPTODATE', mod_name='persistent.persistence') CHANGED = use_c_impl(1, 'CHANGED', mod_name='persistent.persistence') STICKY = use_c_impl(2, 'STICKY', mod_name='persistent.persistence') OID_TYPE = SERIAL_TYPE = bytes class IPersistent(Interface): """Python persistent interface A persistent object can be in one of several states: - Unsaved The object has been created but not saved in a data manager. In this state, the _p_changed attribute is non-None and false and the _p_jar attribute is None. - Saved The object has been saved and has not been changed since it was saved. In this state, the _p_changed attribute is non-None and false and the _p_jar attribute is set to a data manager. - Sticky This state is identical to the saved state except that the object cannot transition to the ghost state. This is a special state used by C methods of persistent objects to make sure that state is not unloaded in the middle of computation. In this state, the _p_changed attribute is non-None and false and the _p_jar attribute is set to a data manager. There is no Python API for detecting whether an object is in the sticky state. - Changed The object has been changed. In this state, the _p_changed attribute is true and the _p_jar attribute is set to a data manager. - Ghost the object is in memory but its state has not been loaded from the database (or its state has been unloaded). In this state, the object doesn't contain any application data. In this state, the _p_changed attribute is None, and the _p_jar attribute is set to the data manager from which the object was obtained. In all the above, _p_oid (the persistent object id) is set when _p_jar first gets set. The following state transitions are possible: - Unsaved -> Saved This transition occurs when an object is saved in the database. This usually happens when an unsaved object is added to (e.g. 
as an attribute or item of) a saved (or changed) object and the transaction is committed. - Saved -> Changed Sticky -> Changed Ghost -> Changed This transition occurs when someone sets an attribute or sets _p_changed to a true value on a saved, sticky or ghost object. When the transition occurs, the persistent object is required to call the register() method on its data manager, passing itself as the only argument. Prior to ZODB 3.6, setting _p_changed to a true value on a ghost object was ignored (the object remained a ghost, and getting its _p_changed attribute continued to return None). - Saved -> Sticky This transition occurs when C code marks the object as sticky to prevent its deactivation. - Saved -> Ghost This transition occurs when a saved object is deactivated or invalidated. See discussion below. - Sticky -> Saved This transition occurs when C code unmarks the object as sticky to allow its deactivation. - Changed -> Saved This transition occurs when a transaction is committed. After saving the state of a changed object during transaction commit, the data manager sets the object's _p_changed to a non-None false value. - Changed -> Ghost This transition occurs when a transaction is aborted. All changed objects are invalidated by the data manager by an abort. - Ghost -> Saved This transition occurs when an attribute or operation of a ghost is accessed and the object's state is loaded from the database. Note that there is a separate C API that is not included here. The C API requires a specific data layout and defines the sticky state. About Invalidation, Deactivation and the Sticky & Ghost States The sticky state is intended to be a short-lived state, to prevent an object's state from being discarded while we're in C routines. It is an error to invalidate an object in the sticky state. Deactivation is a request that an object discard its state (become a ghost). Deactivation is an optimization, and a request to deactivate may be ignored. 
There are two equivalent ways to request deactivation: - call _p_deactivate() - set _p_changed to None There are two ways to invalidate an object: call the _p_invalidate() method (preferred) or delete its _p_changed attribute. This cannot be ignored, and is used when semantics require invalidation. Normally, an invalidated object transitions to the ghost state. However, some objects cannot be ghosts. When these objects are invalidated, they immediately reload their state from their data manager, and are then in the saved state. reprs By default, persistent objects include the reprs of their _p_oid and _p_jar, if any, in their repr. If a subclass implements the optional method ``_p_repr``, it will be called and its results returned instead of the default repr; if this method raises an exception, that exception will be caught and its repr included in the default repr. """ _p_jar = Attribute( """The data manager for the object. The data manager should implement IPersistentDataManager (note that this constraint is not enforced). If there is no data manager, then this is None. Once assigned to a data manager, an object cannot be re-assigned to another. """) _p_oid = Attribute( """The object id. It is up to the data manager to assign this. The special value None is reserved to indicate that an object id has not been assigned. Non-None object ids must be non-empty strings. The 8-byte string consisting of 8 NUL bytes ('\x00\x00\x00\x00\x00\x00\x00\x00') is reserved to identify the database root object. Once assigned an OID, an object cannot be re-assigned another. """) _p_changed = Attribute( """The persistent state of the object. This is one of: None -- The object is a ghost. false but not None -- The object is saved (or has never been saved). true -- The object has been modified since it was last saved. 
The object state may be changed by assigning or deleting this attribute; however, assigning None is ignored if the object is not in the saved state, and may be ignored even if the object is in the saved state. At and after ZODB 3.6, setting _p_changed to a true value for a ghost object activates the object; prior to 3.6, setting _p_changed to a true value on a ghost object was ignored. Note that an object can transition to the changed state only if it has a data manager. When such a state change occurs, the 'register' method of the data manager must be called, passing the persistent object. Deleting this attribute forces invalidation independent of existing state, although it is an error if the sticky state is current. """) _p_serial = Attribute( """The object serial number. This member is used by the data manager to distiguish distinct revisions of a given persistent object. This is an 8-byte string (not Unicode). """) _p_mtime = Attribute( """The object's modification time (read-only). This is a float, representing seconds since the epoch (as returned by time.time). """) _p_state = Attribute( """The object's persistence state token. Must be one of GHOST, UPTODATE, CHANGED, or STICKY. """) _p_estimated_size = Attribute( """An estimate of the object's size in bytes. May be set by the data manager. """) # Attribute access protocol def __getattribute__(name): """ Handle activating ghosts before returning an attribute value. "Special" attributes and '_p_*' attributes don't require activation. """ def __setattr__(name, value): """ Handle activating ghosts before setting an attribute value. "Special" attributes and '_p_*' attributes don't require activation. """ def __delattr__(name): """ Handle activating ghosts before deleting an attribute value. "Special" attributes and '_p_*' attributes don't require activation. """ # Pickling protocol. def __getstate__(): """Get the object data. The state should not include persistent attributes ("_p_name"). 
The result must be picklable. """ def __setstate__(state): """Set the object data. """ def __reduce__(): """Reduce an object to contituent parts for serialization. """ # Custom methods def _p_activate(): """Activate the object. Change the object to the saved state if it is a ghost. """ def _p_deactivate(): """Deactivate the object. Possibly change an object in the saved state to the ghost state. It may not be possible to make some persistent objects ghosts, and, for optimization reasons, the implementation may choose to keep an object in the saved state. """ def _p_invalidate(): """Invalidate the object. Invalidate the object. This causes any data to be thrown away, even if the object is in the changed state. The object is moved to the ghost state; further accesses will cause object data to be reloaded. """ def _p_getattr(name): """Test whether the base class must handle the name The method unghostifies the object, if necessary. The method records the object access, if necessary. This method should be called by subclass __getattribute__ implementations before doing anything else. If the method returns True, then __getattribute__ implementations must delegate to the base class, Persistent. """ def _p_setattr(name, value): """Save persistent meta data This method should be called by subclass __setattr__ implementations before doing anything else. If it returns true, then the attribute was handled by the base class. The method unghostifies the object, if necessary. The method records the object access, if necessary. """ def _p_delattr(name): """Delete persistent meta data This method should be called by subclass __delattr__ implementations before doing anything else. If it returns true, then the attribute was handled by the base class. The method unghostifies the object, if necessary. The method records the object access, if necessary. """ # TODO: document conflict resolution. class IPersistentDataManager(Interface): """Provide services for managing persistent state. 
This interface is used by a persistent object to interact with its data manager in the context of a transaction. """ _cache = Attribute("The pickle cache associated with this connection.") def setstate(object): """Load the state for the given object. The object should be in the ghost state. The object's state will be set and the object will end up in the saved state. The object must provide the IPersistent interface. """ def oldstate(obj, tid): """Return copy of 'obj' that was written by transaction 'tid'. The returned object does not have the typical metadata (_p_jar, _p_oid, _p_serial) set. I'm not sure how references to other peristent objects are handled. Parameters obj: a persistent object from this Connection. tid: id of a transaction that wrote an earlier revision. Raises KeyError if tid does not exist or if tid deleted a revision of obj. """ def register(object): """Register an IPersistent with the current transaction. This method must be called when the object transitions to the changed state. A subclass could override this method to customize the default policy of one transaction manager for each thread. """ # Maybe later: ## def mtime(object): ## """Return the modification time of the object. ## The modification time may not be known, in which case None ## is returned. If non-None, the return value is the kind of ## timestamp supplied by Python's time.time(). ## """ class IPickleCache(Interface): """ API of the cache for a ZODB connection. """ def __getitem__(oid): """ -> the persistent object for OID. o Raise KeyError if not found. """ def __setitem__(oid, value): """ Save the persistent object under OID. o 'oid' must be a string, else raise ValueError. o Raise KeyError on duplicate """ def __delitem__(oid): """ Remove the persistent object for OID. o 'oid' must be a string, else raise ValueError. o Raise KeyError if not found. """ def get(oid, default=None): """ -> the persistent object for OID. o Return 'default' if not found. 
""" def __len__(): """ -> the number of OIDs in the cache. """ def items(): """-> a sequence of tuples (oid, value) for cached objects. o Only includes items in 'data' (no p-classes). """ def ringlen(): """ -> the number of persistent objects in the ring. o Only includes items in the ring (no ghosts or p-classes). """ def lru_items(): """ -> a sequence of tuples (oid, value) for cached objects. o Tuples will be in LRU order. o Only includes items in the ring (no ghosts or p-classes). """ def klass_items(): """-> a sequence of tuples (oid, value) for cached p-classes. o Only includes persistent classes. """ def incrgc(): """ Perform an incremental garbage collection sweep. o Reduce number of non-ghosts to 'cache_size', if possible. o Ghostify in LRU order. o Skip dirty or sticky objects. o Quit once we get down to 'cache_size'. """ def full_sweep(): """ Perform a full garbage collection sweep. o Reduce number of non-ghosts to 0, if possible. o Ghostify all non-sticky / non-changed objecs. """ def minimize(): """ Alias for 'full_sweep'. o XXX? """ def new_ghost(oid, obj): """ Add the given (ghost) object to the cache. Also, set its _p_jar and _p_oid, and ensure it is in the GHOST state. If the object doesn't define '_p_oid' / '_p_jar', raise. If the object's '_p_oid' is not None, raise. If the object's '_p_jar' is not None, raise. If 'oid' is already in the cache, raise. """ def invalidate(to_invalidate): """ Invalidate the indicated objects. o If 'to_invalidate' is a string, treat it as an OID. o Otherwise, iterate over it as a sequence of OIDs. o Any OID corresponding to a p-class will cause the corresponding p-class to be removed from the cache. o For all other OIDs, ghostify the corrsponding object and remove it from the ring. """ def debug_info(): """Return debugging data about objects in the cache. o Return a sequence of tuples, (oid, refcount, typename, state). 
""" def update_object_size_estimation(oid, new_size): """Update the cache's size estimation for 'oid', if known to the cache. """ cache_size = Attribute('Target size of the cache') cache_drain_resistance = Attribute('Factor for draining cache below ' 'target size') cache_non_ghost_count = Attribute('Number of non-ghosts in the cache ' '(XXX how is it different from ' 'ringlen?') cache_data = Attribute("Property: copy of our 'data' dict") cache_klass_count = Attribute("Property: len of 'persistent_classes'") class IExtendedPickleCache(IPickleCache): """ Extra operations for a pickle cache. """ def reify(to_reify): """ Reify the indicated objects. o If 'to_reify' is a string, treat it as an OID. o Otherwise, iterate over it as a sequence of OIDs. o For each OID, if present in 'data' and in GHOST state: o Call '_p_activate' on the object. o Add it to the ring. o If any OID is present but not in GHOST state, skip it. o Raise KeyErrory if any OID is not present. """ def mru(oid): """ Move the element corresonding to 'oid' to the head. o Raise KeyError if no element is found. """ ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1585243094.0 persistent-4.6.4/persistent/list.py0000644000076500000240000001256400000000000020210 0ustar00jmaddenstaff00000000000000############################################################################## # # Copyright (c) 2001, 2002 Zope Foundation and Contributors. # All Rights Reserved. # # This software is subject to the provisions of the Zope Public License, # Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED # WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS # FOR A PARTICULAR PURPOSE # ############################################################################## """Python implementation of persistent list. $Id$""" import sys import persistent from persistent._compat import UserList from persistent._compat import PYTHON2 # The slice object you get when you write list[:] _SLICE_ALL = slice(None, None, None) class PersistentList(UserList, persistent.Persistent): """A persistent wrapper for list objects. Mutating instances of this class will cause them to be marked as changed and automatically persisted. .. versionchanged:: 4.5.2 Using the `clear` method, or deleting a slice (e.g., ``del inst[:]`` or ``del inst[x:x]``) now only results in marking the instance as changed if it actually removed items. .. versionchanged:: 4.5.2 The `copy` method is available on Python 2. """ __super_getitem = UserList.__getitem__ __super_setitem = UserList.__setitem__ __super_delitem = UserList.__delitem__ __super_iadd = UserList.__iadd__ __super_imul = UserList.__imul__ __super_append = UserList.append __super_insert = UserList.insert __super_pop = UserList.pop __super_remove = UserList.remove __super_reverse = UserList.reverse __super_sort = UserList.sort __super_extend = UserList.extend __super_clear = ( UserList.clear if hasattr(UserList, 'clear') else lambda inst: inst.__delitem__(_SLICE_ALL) ) if not PYTHON2 and sys.version_info[:3] < (3, 7, 4): # Prior to 3.7.4, Python 3 (but not Python 2) failed to properly # return an instance of the same class. # See https://bugs.python.org/issue27639 # and https://github.com/zopefoundation/persistent/issues/112. # We only define the special method on the necessary versions to avoid # any speed penalty. 
def __getitem__(self, item): result = self.__super_getitem(item) if isinstance(item, slice): result = self.__class__(result) return result if sys.version_info[:3] < (3, 7, 4): # Likewise for __copy__, except even Python 2 needs it. # See https://github.com/python/cpython/commit/3645d29a1dc2102fdb0f5f0c0129ff2295bcd768 def __copy__(self): inst = self.__class__.__new__(self.__class__) inst.__dict__.update(self.__dict__) # Create a copy and avoid triggering descriptors inst.__dict__["data"] = self.__dict__["data"][:] return inst def __setitem__(self, i, item): self.__super_setitem(i, item) self._p_changed = 1 def __delitem__(self, i): # If they write del list[:] but we're empty, # no need to mark us changed. Likewise with # a slice that's empty, like list[1:1]. len_before = len(self.data) self.__super_delitem(i) if len(self.data) != len_before: self._p_changed = 1 if PYTHON2: # pragma: no cover __super_setslice = UserList.__setslice__ __super_delslice = UserList.__delslice__ def copy(self): return self.__class__(self) def __setslice__(self, i, j, other): self.__super_setslice(i, j, other) self._p_changed = 1 def __delslice__(self, i, j): # In the past we just called super, but we want to apply the # same _p_changed optimization logic that __delitem__ does. Don't # call it as ``self.__delitem__``, though, because user code in subclasses # on Python 2 may not be expecting to get a slice. PersistentList.__delitem__(self, slice(i, j)) def __iadd__(self, other): L = self.__super_iadd(other) self._p_changed = 1 return L def __imul__(self, n): L = self.__super_imul(n) self._p_changed = 1 return L def append(self, item): self.__super_append(item) self._p_changed = 1 def clear(self): """ Remove all items from the list. .. versionchanged:: 4.5.2 Now marks the list as changed, and is available on both Python 2 and Python 3. 
""" needs_changed = bool(self) self.__super_clear() if needs_changed: self._p_changed = 1 def insert(self, i, item): self.__super_insert(i, item) self._p_changed = 1 def pop(self, i=-1): rtn = self.__super_pop(i) self._p_changed = 1 return rtn def remove(self, item): self.__super_remove(item) self._p_changed = 1 def reverse(self): self.__super_reverse() self._p_changed = 1 def sort(self, *args, **kwargs): self.__super_sort(*args, **kwargs) self._p_changed = 1 def extend(self, other): self.__super_extend(other) self._p_changed = 1 ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1585243094.0 persistent-4.6.4/persistent/mapping.py0000644000076500000240000001346400000000000020670 0ustar00jmaddenstaff00000000000000############################################################################## # # Copyright (c) 2001, 2002 Zope Foundation and Contributors. # All Rights Reserved. # # This software is subject to the provisions of the Zope Public License, # Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. # THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED # WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS # FOR A PARTICULAR PURPOSE # ############################################################################## """Python implementation of persistent base types $Id$""" import sys import persistent from persistent._compat import IterableUserDict class default(object): def __init__(self, func): self.func = func def __get__(self, inst, class_): if inst is None: return self return self.func(inst) class PersistentMapping(IterableUserDict, persistent.Persistent): """A persistent wrapper for mapping objects. This class allows wrapping of mapping objects so that object changes are registered. As a side effect, mapping objects may be subclassed. 
A subclass of PersistentMapping or any code that adds new attributes should not create an attribute named _container. This is reserved for backwards compatibility reasons. """ # UserDict provides all of the mapping behavior. The # PersistentMapping class is responsible marking the persistent # state as changed when a method actually changes the state. At # the mapping API evolves, we may need to add more methods here. __super_delitem = IterableUserDict.__delitem__ __super_setitem = IterableUserDict.__setitem__ __super_clear = IterableUserDict.clear __super_update = IterableUserDict.update __super_setdefault = IterableUserDict.setdefault __super_pop = IterableUserDict.pop __super_popitem = IterableUserDict.popitem # Be sure to make a deep copy of our ``data`` (See PersistentList.) # See https://github.com/python/cpython/commit/3645d29a1dc2102fdb0f5f0c0129ff2295bcd768 # This was fixed in CPython 3.7.4, but we can't rely on that because it # doesn't handle our old ``_container`` appropriately (it goes directly # to ``self.__dict__``, bypassing the descriptor). The code here was initially # based on the version found in 3.7.4. def __copy__(self): inst = self.__class__.__new__(self.__class__) inst.__dict__.update(self.__dict__) # Create a copy and avoid triggering descriptors if '_container' in inst.__dict__: # BWC for ZODB < 3.3. data = inst.__dict__.pop('_container') else: data = inst.__dict__['data'] inst.__dict__["data"] = data.copy() return inst def __delitem__(self, key): self.__super_delitem(key) self._p_changed = 1 def __setitem__(self, key, v): self.__super_setitem(key, v) self._p_changed = 1 def clear(self): """ Remove all data from this dictionary. .. versionchanged:: 4.5.2 If there was nothing to remove, this object is no longer marked as modified. """ # Historically this method always marked ourself as changed, # so if there was a _container it was persisted as data. We want # to preserve that, even if we won't make any modifications otherwise. 
needs_changed = '_container' in self.__dict__ or bool(self) # Python 2 implements this by directly calling self.data.clear(), # but Python 3 does so by repeatedly calling self.popitem() self.__super_clear() if needs_changed: self._p_changed = 1 def update(self, *args, **kwargs): """ D.update([E, ]**F) -> None. .. versionchanged:: 4.5.2 Now accepts arbitrary keyword arguments. In the special case of a keyword argument named ``b`` that is a dictionary, the behaviour will change. """ self.__super_update(*args, **kwargs) self._p_changed = 1 def setdefault(self, key, *args, **kwargs): # pylint:disable=arguments-differ # (Python 3 and Python 2 have very different signatures.) # We could inline all of UserDict's implementation into the # method here, but I'd rather not depend at all on the # implementation in UserDict (simple as it is). if key not in self.data: self._p_changed = 1 return self.__super_setdefault(key, *args, **kwargs) def pop(self, key, *args, **kwargs): # pylint:disable=arguments-differ # (Python 3 and Python 2 have very different signatures.) self._p_changed = 1 return self.__super_pop(key, *args, **kwargs) def popitem(self): """ Remove an item. .. versionchanged:: 4.5.2 No longer marks this object as modified if it was empty and an exception raised. """ result = self.__super_popitem() self._p_changed = 1 return result # Old implementations (prior to 2001; see # https://github.com/zopefoundation/ZODB/commit/c64281cf2830b569eed4f211630a8a61d22a0f0b#diff-b0f568e20f51129c10a096abad27c64a) # used ``_container`` rather than ``data``. Use a descriptor to provide # ``data`` when we have ``_container`` instead @default def data(self): # pylint:disable=method-hidden # We don't want to cause a write on read, so we're careful not to # do anything that would cause us to become marked as changed, however, # if we're modified, then the saved record will have data, not # _container. 
data = self.__dict__.pop('_container') self.__dict__['data'] = data return data ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1585243094.0 persistent-4.6.4/persistent/persistence.py0000644000076500000240000005371400000000000021563 0ustar00jmaddenstaff00000000000000############################################################################## # # Copyright (c) 2011 Zope Foundation and Contributors. # All Rights Reserved. # # This software is subject to the provisions of the Zope Public License, # Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. # THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED # WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS # FOR A PARTICULAR PURPOSE. # ############################################################################## import struct from zope.interface import implementer from persistent import interfaces from persistent.interfaces import SERIAL_TYPE from persistent.timestamp import TimeStamp from persistent.timestamp import _ZERO from persistent._compat import copy_reg from persistent._compat import intern from persistent._compat import use_c_impl __all__ = [ 'Persistent', 'PersistentPy', ] # We use implementation details of PickleCachePy # pylint:disable=protected-access # we have lots of not-quite-correct continuation indentation. # TODO: Fix that in a whitespace-only commit. # pylint:disable=bad-continuation # There are a few places we need to work with exact types. # pylint:disable=unidiomatic-typecheck _INITIAL_SERIAL = _ZERO # Bitwise flags _CHANGED = 0x0001 _STICKY = 0x0002 _OGA = object.__getattribute__ _OSA = object.__setattr__ _ODA = object.__delattr__ # These names can be used from a ghost without causing it to be # activated. 
These are standardized with the C implementation SPECIAL_NAMES = ('__class__', '__del__', '__dict__', '__of__', '__setstate__',) # And this is an implementation detail of this class; it holds # the standard names plus the slot names, allowing for just one # check in __getattribute__ _SPECIAL_NAMES = set(SPECIAL_NAMES) # __ring is for use by PickleCachePy and is opaque to us. _SLOTS = ('__jar', '__oid', '__serial', '__flags', '__size', '__ring',) _SPECIAL_NAMES.update([intern('_Persistent' + x) for x in _SLOTS]) # Represent 8-byte OIDs as hex integer, just like # ZODB does. _OID_STRUCT = struct.Struct('>Q') _OID_UNPACK = _OID_STRUCT.unpack @use_c_impl @implementer(interfaces.IPersistent) class Persistent(object): """ Pure Python implmentation of Persistent base class """ __slots__ = _SLOTS def __new__(cls, *args, **kw): inst = super(Persistent, cls).__new__(cls) # We bypass the __setattr__ implementation of this object # at __new__ time, just like the C implementation does. This # makes us compatible with subclasses that want to access # properties like _p_changed in their setattr implementation _OSA(inst, '_Persistent__jar', None) _OSA(inst, '_Persistent__oid', None) _OSA(inst, '_Persistent__serial', None) _OSA(inst, '_Persistent__flags', None) _OSA(inst, '_Persistent__size', 0) _OSA(inst, '_Persistent__ring', None) return inst # _p_jar: see IPersistent. def _get_jar(self): return _OGA(self, '_Persistent__jar') def _set_jar(self, value): jar = _OGA(self, '_Persistent__jar') if self._p_is_in_cache(jar) and value is not None and jar != value: # The C implementation only forbids changing the jar # if we're already in a cache. 
Match its error message raise ValueError('can not change _p_jar of cached object') if _OGA(self, '_Persistent__jar') != value: _OSA(self, '_Persistent__jar', value) _OSA(self, '_Persistent__flags', 0) def _del_jar(self): jar = _OGA(self, '_Persistent__jar') if jar is not None: if self._p_is_in_cache(jar): raise ValueError("can't delete _p_jar of cached object") _OSA(self, '_Persistent__jar', None) _OSA(self, '_Persistent__flags', None) _p_jar = property(_get_jar, _set_jar, _del_jar) # _p_oid: see IPersistent. def _get_oid(self): return _OGA(self, '_Persistent__oid') def _set_oid(self, value): if value == _OGA(self, '_Persistent__oid'): return # The C implementation allows *any* value to be # used as the _p_oid. #if value is not None: # if not isinstance(value, OID_TYPE): # raise ValueError('Invalid OID type: %s' % value) # The C implementation only forbids changing the OID # if we're in a cache, regardless of what the current # value or jar is if self._p_is_in_cache(): # match the C error message raise ValueError('can not change _p_oid of cached object') _OSA(self, '_Persistent__oid', value) def _del_oid(self): jar = _OGA(self, '_Persistent__jar') oid = _OGA(self, '_Persistent__oid') if jar is not None: if oid and jar._cache.get(oid): raise ValueError('Cannot delete _p_oid of cached object') _OSA(self, '_Persistent__oid', None) _p_oid = property(_get_oid, _set_oid, _del_oid) # _p_serial: see IPersistent. def _get_serial(self): serial = _OGA(self, '_Persistent__serial') if serial is not None: return serial return _INITIAL_SERIAL def _set_serial(self, value): if not isinstance(value, SERIAL_TYPE): raise ValueError('Invalid SERIAL type: %s' % value) if len(value) != 8: raise ValueError('SERIAL must be 8 octets') _OSA(self, '_Persistent__serial', value) def _del_serial(self): _OSA(self, '_Persistent__serial', None) _p_serial = property(_get_serial, _set_serial, _del_serial) # _p_changed: see IPersistent. 
def _get_changed(self): if _OGA(self, '_Persistent__jar') is None: return False flags = _OGA(self, '_Persistent__flags') if flags is None: # ghost return None return bool(flags & _CHANGED) def _set_changed(self, value): if _OGA(self, '_Persistent__flags') is None: if value: self._p_activate() self._p_set_changed_flag(value) else: if value is None: # -> ghost self._p_deactivate() else: self._p_set_changed_flag(value) def _del_changed(self): self._p_invalidate() _p_changed = property(_get_changed, _set_changed, _del_changed) # _p_mtime def _get_mtime(self): # The C implementation automatically unghostifies the object # when _p_mtime is accessed. self._p_activate() self._p_accessed() serial = _OGA(self, '_Persistent__serial') return TimeStamp(serial).timeTime() if serial is not None else None _p_mtime = property(_get_mtime) # _p_state def _get_state(self): # Note the use of OGA and caching to avoid recursive calls to __getattribute__: # __getattribute__ calls _p_accessed calls cache.mru() calls _p_state if _OGA(self, '_Persistent__jar') is None: return interfaces.UPTODATE flags = _OGA(self, '_Persistent__flags') if flags is None: return interfaces.GHOST if flags & _CHANGED: result = interfaces.CHANGED else: result = interfaces.UPTODATE if flags & _STICKY: return interfaces.STICKY return result _p_state = property(_get_state) # _p_estimated_size: XXX don't want to reserve the space? 
def _get_estimated_size(self): return _OGA(self, '_Persistent__size') * 64 def _set_estimated_size(self, value): if isinstance(value, int): if value < 0: raise ValueError('_p_estimated_size must not be negative') _OSA(self, '_Persistent__size', _estimated_size_in_24_bits(value)) else: raise TypeError("_p_estimated_size must be an integer") def _del_estimated_size(self): _OSA(self, '_Persistent__size', 0) _p_estimated_size = property( _get_estimated_size, _set_estimated_size, _del_estimated_size) # The '_p_sticky' property is not (yet) part of the API: for now, # it exists to simplify debugging and testing assertions. def _get_sticky(self): flags = _OGA(self, '_Persistent__flags') if flags is None: return False return bool(flags & _STICKY) def _set_sticky(self, value): flags = _OGA(self, '_Persistent__flags') if flags is None: raise ValueError('Ghost') if value: flags |= _STICKY else: flags &= ~_STICKY _OSA(self, '_Persistent__flags', flags) _p_sticky = property(_get_sticky, _set_sticky) # The '_p_status' property is not (yet) part of the API: for now, # it exists to simplify debugging and testing assertions. def _get_status(self): if _OGA(self, '_Persistent__jar') is None: return 'unsaved' flags = _OGA(self, '_Persistent__flags') if flags is None: return 'ghost' if flags & _STICKY: return 'sticky' if flags & _CHANGED: return 'changed' return 'saved' _p_status = property(_get_status) # Methods from IPersistent. def __getattribute__(self, name): """ See IPersistent. 
""" oga = _OGA if (not name.startswith('_p_') and name not in _SPECIAL_NAMES): if oga(self, '_Persistent__flags') is None: oga(self, '_p_activate')() oga(self, '_p_accessed')() return oga(self, name) def __setattr__(self, name, value): special_name = (name in _SPECIAL_NAMES or name.startswith('_p_')) volatile = name.startswith('_v_') if not special_name: if _OGA(self, '_Persistent__flags') is None: _OGA(self, '_p_activate')() if not volatile: _OGA(self, '_p_accessed')() _OSA(self, name, value) if (_OGA(self, '_Persistent__jar') is not None and _OGA(self, '_Persistent__oid') is not None and not special_name and not volatile): before = _OGA(self, '_Persistent__flags') after = before | _CHANGED if before != after: _OSA(self, '_Persistent__flags', after) _OGA(self, '_p_register')() def __delattr__(self, name): special_name = (name in _SPECIAL_NAMES or name.startswith('_p_')) if not special_name: if _OGA(self, '_Persistent__flags') is None: _OGA(self, '_p_activate')() _OGA(self, '_p_accessed')() before = _OGA(self, '_Persistent__flags') after = before | _CHANGED if before != after: _OSA(self, '_Persistent__flags', after) if (_OGA(self, '_Persistent__jar') is not None and _OGA(self, '_Persistent__oid') is not None): _OGA(self, '_p_register')() _ODA(self, name) def _slotnames(self, _v_exclude=True): slotnames = copy_reg._slotnames(type(self)) return [x for x in slotnames if not x.startswith('_p_') and not (x.startswith('_v_') and _v_exclude) and not x.startswith('_Persistent__') and x not in _SLOTS] def __getstate__(self): """ See IPersistent. """ idict = getattr(self, '__dict__', None) slotnames = self._slotnames() if idict is not None: # TODO: Convert to a dictionary comprehension, avoid the intermediate # list. 
# pylint:disable=consider-using-dict-comprehension d = dict([x for x in idict.items() if not x[0].startswith('_p_') and not x[0].startswith('_v_')]) else: d = None if slotnames: s = {} for slotname in slotnames: value = getattr(self, slotname, self) if value is not self: s[slotname] = value return d, s return d def __setstate__(self, state): """ See IPersistent. """ if isinstance(state, tuple): inst_dict, slots = state else: inst_dict, slots = state, () idict = getattr(self, '__dict__', None) if inst_dict is not None: if idict is None: raise TypeError('No instance dict') idict.clear() for k, v in inst_dict.items(): # Normally the keys for instance attributes are interned. # Do that here, but only if it is possible to do so. idict[intern(k) if type(k) is str else k] = v slotnames = self._slotnames() if slotnames: for k, v in slots.items(): setattr(self, k, v) def __reduce__(self): """ See IPersistent. """ gna = getattr(self, '__getnewargs__', lambda: ()) return (copy_reg.__newobj__, (type(self),) + gna(), self.__getstate__()) def _p_activate(self): """ See IPersistent. """ oga = _OGA before = oga(self, '_Persistent__flags') if before is None: # Only do this if we're a ghost # Begin by marking up-to-date in case we bail early _OSA(self, '_Persistent__flags', 0) jar = oga(self, '_Persistent__jar') if jar is None: return oid = oga(self, '_Persistent__oid') if oid is None: return # If we're actually going to execute a set-state, # mark as changed to prevent any recursive call # (actually, our earlier check that we're a ghost should # prevent this, but the C implementation sets it to changed # while calling jar.setstate, and this is observable to clients). # The main point of this is to prevent changes made during # setstate from registering the object with the jar. 
_OSA(self, '_Persistent__flags', interfaces.CHANGED) try: jar.setstate(self) except: _OSA(self, '_Persistent__flags', before) raise else: # If we succeed, no matter what the implementation # of setstate did, mark ourself as up-to-date. The # C implementation unconditionally does this. _OSA(self, '_Persistent__flags', 0) # up-to-date # In the C implementation, _p_invalidate winds up calling # _p_deactivate. There are ZODB tests that depend on this; # it's not documented but there may be code in the wild # that does as well def _p_deactivate(self): """ See IPersistent. """ flags = _OGA(self, '_Persistent__flags') if flags is not None and not flags: self._p_invalidate_deactivate_helper() def _p_invalidate(self): """ See IPersistent. """ # If we think we have changes, we must pretend # like we don't so that deactivate does its job _OSA(self, '_Persistent__flags', 0) self._p_deactivate() def _p_invalidate_deactivate_helper(self, clear=True): jar = _OGA(self, '_Persistent__jar') if jar is None: return if _OGA(self, '_Persistent__flags') is not None: _OSA(self, '_Persistent__flags', None) if clear: try: idict = _OGA(self, '__dict__') except AttributeError: pass else: idict.clear() type_ = type(self) # for backward-compatibility reason we release __slots__ only if # class does not override __new__ if type_.__new__ is Persistent.__new__: for slotname in Persistent._slotnames(self, _v_exclude=False): try: getattr(type_, slotname).__delete__(self) except AttributeError: # AttributeError means slot variable was not initialized at all - # - we can simply skip its deletion. 
pass # Implementation detail: deactivating/invalidating # updates the size of the cache (if we have one) # by telling it this object no longer takes any bytes # (-1 is a magic number to compensate for the implementation, # which always adds one to the size given) try: cache = jar._cache except AttributeError: pass else: cache.update_object_size_estimation(_OGA(self, '_Persistent__oid'), -1) # See notes in PickleCache.sweep for why we have to do this cache._persistent_deactivate_ran = True def _p_getattr(self, name): """ See IPersistent. """ if name.startswith('_p_') or name in _SPECIAL_NAMES: return True self._p_activate() self._p_accessed() return False def _p_setattr(self, name, value): """ See IPersistent. """ if name.startswith('_p_'): _OSA(self, name, value) return True self._p_activate() self._p_accessed() return False def _p_delattr(self, name): """ See IPersistent. """ if name.startswith('_p_'): if name == '_p_oid' and self._p_is_in_cache(_OGA(self, '_Persistent__jar')): # The C implementation forbids deleting the oid # if we're already in a cache. Match its error message raise ValueError('can not change _p_jar of cached object') _ODA(self, name) return True self._p_activate() self._p_accessed() return False # Helper methods: not APIs: we name them with '_p_' to bypass # the __getattribute__ bit which bumps the cache. def _p_register(self): jar = _OGA(self, '_Persistent__jar') if jar is not None and _OGA(self, '_Persistent__oid') is not None: jar.register(self) def _p_set_changed_flag(self, value): if value: before = _OGA(self, '_Persistent__flags') after = before | _CHANGED if before != after: self._p_register() _OSA(self, '_Persistent__flags', after) else: flags = _OGA(self, '_Persistent__flags') flags &= ~_CHANGED _OSA(self, '_Persistent__flags', flags) def _p_accessed(self): # Notify the jar's pickle cache that we have been accessed. # This relies on what has been (until now) an implementation # detail, the '_cache' attribute of the jar. 
We made it a # private API to avoid the cycle of keeping a reference to # the cache on the persistent object. # The below is the equivalent of this, but avoids # several recursive through __getattribute__, especially for _p_state, # and benchmarks much faster # # if(self.__jar is None or # self.__oid is None or # self._p_state < 0 ): return oga = _OGA jar = oga(self, '_Persistent__jar') if jar is None: return oid = oga(self, '_Persistent__oid') if oid is None: return flags = oga(self, '_Persistent__flags') if flags is None: # ghost return # The KeyError arises in ZODB: ZODB.serialize.ObjectWriter # can assign a jar and an oid to newly seen persistent objects, # but because they are newly created, they aren't in the # pickle cache yet. There doesn't seem to be a way to distinguish # that at this level, all we can do is catch it. # The AttributeError arises in ZODB test cases try: jar._cache.mru(oid) except (AttributeError, KeyError): pass def _p_is_in_cache(self, jar=None): oid = _OGA(self, '_Persistent__oid') if not oid: return False jar = jar or _OGA(self, '_Persistent__jar') cache = getattr(jar, '_cache', None) if cache is not None: return cache.get(oid) is self return None def __repr__(self): # pylint:disable=broad-except p_repr_str = '' p_repr = getattr(type(self), '_p_repr', None) if p_repr is not None: try: return p_repr(self) except Exception as e: p_repr_str = ' _p_repr %r' % (e,) oid = _OGA(self, '_Persistent__oid') jar = _OGA(self, '_Persistent__jar') oid_str = '' jar_str = '' if oid is not None: try: if isinstance(oid, bytes) and len(oid) == 8: oid_str = ' oid 0x%x' % (_OID_UNPACK(oid)[0],) else: oid_str = ' oid %r' % (oid,) except Exception as e: oid_str = ' oid %r' % (e,) if jar is not None: try: jar_str = ' in %r' % (jar,) except Exception as e: jar_str = ' in %r' % (e,) return '<%s.%s object at 0x%x%s%s%s>' % ( # Match the C name for this exact class type(self).__module__ if type(self) is not Persistent else 'persistent', type(self).__name__ if 
type(self) is not Persistent else 'Persistent', id(self), oid_str, jar_str, p_repr_str ) def _estimated_size_in_24_bits(value): if value > 1073741696: return 16777215 return (value//64) + 1 # This name is bound by the ``@use_c_impl`` decorator to the class defined above. # We make sure and list it statically, though, to help out linters. PersistentPy = PersistentPy # pylint:disable=undefined-variable,self-assigning-variable ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1585243094.0 persistent-4.6.4/persistent/picklecache.py0000644000076500000240000004242100000000000021463 0ustar00jmaddenstaff00000000000000############################################################################## # # Copyright (c) 2009 Zope Foundation and Contributors. # All Rights Reserved. # # This software is subject to the provisions of the Zope Public License, # Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. # THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED # WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS # FOR A PARTICULAR PURPOSE. 
# ############################################################################## import gc from weakref import WeakValueDictionary from zope.interface import implementer from zope.interface import classImplements from persistent._compat import use_c_impl from persistent._compat import PYPY from persistent.interfaces import GHOST from persistent.interfaces import IPickleCache from persistent.interfaces import IExtendedPickleCache from persistent.interfaces import OID_TYPE from persistent.interfaces import UPTODATE from persistent.persistence import PersistentPy from persistent.persistence import _estimated_size_in_24_bits from persistent.ring import Ring __all__ = [ 'PickleCache', 'PickleCachePy', ] # We're tightly coupled to the PersistentPy implementation and access # its internals. # pylint:disable=protected-access _OGA = object.__getattribute__ _OSA = object.__setattr__ def _sweeping_ring(f): # A decorator for functions in the PickleCache # that are sweeping the entire ring (mutating it); # serves as a pseudo-lock to not mutate the ring further # in other functions def locked(self, *args, **kwargs): self._is_sweeping_ring = True try: return f(self, *args, **kwargs) finally: self._is_sweeping_ring = False return locked class _WeakValueDictionary(object): # Maps from OID -> Persistent object, but # only weakly references the Persistent object. This is similar # to ``weakref.WeakValueDictionary``, but is customized depending on the # platform. On PyPy, all objects can cheaply use a WeakRef, so that's # what we actually use. On CPython, though, ``PersistentPy`` cannot be weakly # referenced, so we rely on the fact that the ``id()`` of an object is its # memory location, and we use ``ctypes`` to cast that integer back to # the object. # # To remove stale addresses, we rely on the ``ffi.gc()`` object with the exact # same lifetime as the ``PersistentPy`` object. It calls us, we get the ``id`` # back out of the CData, and clean up. 
if PYPY: # pragma: no cover def __init__(self): self._data = WeakValueDictionary() def _from_addr(self, addr): return addr def _save_addr(self, oid, obj): return obj cleanup_hook = None else: def __init__(self): # careful not to require ctypes at import time; most likely the # C implementation is in use. import ctypes self._data = {} self._addr_to_oid = {} self._cast = ctypes.cast self._py_object = ctypes.py_object def _save_addr(self, oid, obj): i = id(obj) self._addr_to_oid[i] = oid return i def _from_addr(self, addr): return self._cast(addr, self._py_object).value def cleanup_hook(self, cdata): oid = self._addr_to_oid.pop(cdata.pobj_id, None) self._data.pop(oid, None) def __contains__(self, oid): return oid in self._data def __len__(self): return len(self._data) def __setitem__(self, key, value): addr = self._save_addr(key, value) self._data[key] = addr def pop(self, oid): return self._from_addr(self._data.pop(oid)) def items(self): from_addr = self._from_addr for oid, addr in self._data.items(): yield oid, from_addr(addr) def get(self, oid, default=None): addr = self._data.get(oid, self) if addr is self: return default return self._from_addr(addr) def __getitem__(self, oid): addr = self._data[oid] return self._from_addr(addr) @use_c_impl # We actually implement IExtendedPickleCache, but # the C version does not, and our interface declarations are # copied over by the decorator. So we make the declaration # of IExtendedPickleCache later. 
@implementer(IPickleCache) class PickleCache(object): # Tests may modify this to add additional types _CACHEABLE_TYPES = (type, PersistentPy) _SWEEPABLE_TYPES = (PersistentPy,) total_estimated_size = 0 cache_size_bytes = 0 # Set by functions that sweep the entire ring (via _sweeping_ring) # Serves as a pseudo-lock _is_sweeping_ring = False def __init__(self, jar, target_size=0, cache_size_bytes=0): # TODO: forward-port Dieter's bytes stuff self.jar = jar # We expect the jars to be able to have a pointer to # us; this is a reference cycle, but certain # aspects of invalidation and accessing depend on it. # The actual Connection objects we're used with do set this # automatically, but many test objects don't. # TODO: track this on the persistent objects themself? try: jar._cache = self except AttributeError: # Some ZODB tests pass in an object that cannot have an _cache pass self.cache_size = target_size self.drain_resistance = 0 self.non_ghost_count = 0 self.persistent_classes = {} self.data = _WeakValueDictionary() self.ring = Ring(self.data.cleanup_hook) self.cache_size_bytes = cache_size_bytes # IPickleCache API def __len__(self): """ See IPickleCache. """ return (len(self.persistent_classes) + len(self.data)) def __getitem__(self, oid): """ See IPickleCache. """ value = self.data.get(oid, self) if value is not self: return value return self.persistent_classes[oid] def __setitem__(self, oid, value): """ See IPickleCache. 
""" # The order of checks matters for C compatibility; # the ZODB tests depend on this # The C impl requires either a type or a Persistent subclass if not isinstance(value, self._CACHEABLE_TYPES): raise TypeError("Cache values must be persistent objects.") value_oid = value._p_oid if not isinstance(oid, OID_TYPE) or not isinstance(value_oid, OID_TYPE): raise TypeError('OID must be %s: key=%s _p_oid=%s' % (OID_TYPE, oid, value_oid)) if value_oid != oid: raise ValueError("Cache key does not match oid") if oid in self.persistent_classes or oid in self.data: # Have to be careful here, a GC might have just run # and cleaned up the object existing_data = self.get(oid) if existing_data is not None and existing_data is not value: # Raise the same type of exception as the C impl with the same # message. raise ValueError('A different object already has the same oid') # Match the C impl: it requires a jar. Let this raise AttributeError # if no jar is found. jar = value._p_jar if jar is None: raise ValueError("Cached object jar missing") # It also requires that it cannot be cached more than one place existing_cache = getattr(jar, '_cache', None) # type: PickleCache if (existing_cache is not None and existing_cache is not self and oid in existing_cache.data): raise ValueError("Cache values may only be in one cache.") if isinstance(value, type): # ZODB.persistentclass.PersistentMetaClass self.persistent_classes[oid] = value else: self.data[oid] = value if _OGA(value, '_p_state') != GHOST and value not in self.ring: self.ring.add(value) self.non_ghost_count += 1 elif self.data.cleanup_hook: # Ensure we begin monitoring for ``value`` to # be deallocated. self.ring.ring_node_for(value) def __delitem__(self, oid): """ See IPickleCache. 
""" if not isinstance(oid, OID_TYPE): raise TypeError('OID must be %s: %s' % (OID_TYPE, oid)) if oid in self.persistent_classes: del self.persistent_classes[oid] else: pobj = self.data.pop(oid) self.ring.delete(pobj) def get(self, oid, default=None): """ See IPickleCache. """ value = self.data.get(oid, self) if value is not self: return value return self.persistent_classes.get(oid, default) def mru(self, oid): """ See IPickleCache. """ if self._is_sweeping_ring: # accessess during sweeping, such as with an # overridden _p_deactivate, don't mutate the ring # because that could leave it inconsistent return False # marker return for tests value = self.data[oid] was_in_ring = value in self.ring if not was_in_ring: if _OGA(value, '_p_state') != GHOST: self.ring.add(value) self.non_ghost_count += 1 else: self.ring.move_to_head(value) return None def ringlen(self): """ See IPickleCache. """ return len(self.ring) def items(self): """ See IPickleCache. """ return self.data.items() def lru_items(self): """ See IPickleCache. """ return [ (obj._p_oid, obj) for obj in self.ring ] def klass_items(self): """ See IPickleCache. """ return self.persistent_classes.items() def incrgc(self, ignored=None): """ See IPickleCache. """ target = self.cache_size if self.drain_resistance >= 1: size = self.non_ghost_count target2 = size - 1 - (size // self.drain_resistance) if target2 < target: target = target2 # return value for testing return self._sweep(target, self.cache_size_bytes) def full_sweep(self, target=None): """ See IPickleCache. """ # return value for testing return self._sweep(0) minimize = full_sweep def new_ghost(self, oid, obj): """ See IPickleCache. 
""" if obj._p_oid is not None: raise ValueError('Object already has oid') if obj._p_jar is not None: raise ValueError('Object already has jar') if oid in self.persistent_classes or oid in self.data: raise KeyError('Duplicate OID: %s' % oid) obj._p_oid = oid obj._p_jar = self.jar if not isinstance(obj, type): if obj._p_state != GHOST: # The C implementation sets this stuff directly, # but we delegate to the class. However, we must be # careful to avoid broken _p_invalidate and _p_deactivate # that don't call the super class. See ZODB's # testConnection.doctest_proper_ghost_initialization_with_empty__p_deactivate obj._p_invalidate_deactivate_helper(False) self[oid] = obj def reify(self, to_reify): """ See IPickleCache. """ if isinstance(to_reify, OID_TYPE): #bytes to_reify = [to_reify] for oid in to_reify: value = self[oid] if value._p_state == GHOST: value._p_activate() self.non_ghost_count += 1 self.mru(oid) def invalidate(self, to_invalidate): """ See IPickleCache. """ if isinstance(to_invalidate, OID_TYPE): self._invalidate(to_invalidate) else: for oid in to_invalidate: self._invalidate(oid) def debug_info(self): result = [] for oid, klass in self.persistent_classes.items(): result.append(( oid, len(gc.get_referents(klass)), type(klass).__name__, klass._p_state, )) for oid, value in self.data.items(): result.append(( oid, len(gc.get_referents(value)), type(value).__name__, value._p_state, )) return result def update_object_size_estimation(self, oid, new_size): """ See IPickleCache. """ value = self.data.get(oid) if value is not None: # Recall that while the argument is given in bytes, # we have to work with 64-block chunks (plus one) # to match the C implementation. 
Hence the convoluted # arithmetic new_size_in_24 = _estimated_size_in_24_bits(new_size) p_est_size_in_24 = value._Persistent__size new_est_size_in_bytes = (new_size_in_24 - p_est_size_in_24) * 64 self.total_estimated_size += new_est_size_in_bytes cache_drain_resistance = property( lambda self: self.drain_resistance, lambda self, nv: setattr(self, 'drain_resistance', nv) ) cache_non_ghost_count = property(lambda self: self.non_ghost_count) cache_data = property(lambda self: dict(self.items())) cache_klass_count = property(lambda self: len(self.persistent_classes)) # Helpers # Set to true when a deactivation happens in our code. For # compatibility with the C implementation, we can only remove the # node and decrement our non-ghost count if our implementation # actually runs (broken subclasses can forget to call super; ZODB # has tests for this). This gets set to false everytime we examine # a node and checked afterwards. The C implementation has a very # incestuous relationship between cPickleCache and cPersistence: # the pickle cache calls _p_deactivate, which is responsible for # both decrementing the non-ghost count and removing its node from # the cache ring (and, if it gets deallocated, from the pickle # cache's dictionary). We're trying to keep that to a minimum, but # there's no way around it if we want full compatibility. _persistent_deactivate_ran = False @_sweeping_ring def _sweep(self, target, target_size_bytes=0): ejected = 0 ring = self.ring for node, value in ring.iteritems(): if ((target or target_size_bytes) # pylint:disable=too-many-boolean-expressions and (not target or self.non_ghost_count <= target) and (self.total_estimated_size <= target_size_bytes or not target_size_bytes)): break if value._p_state == UPTODATE: # The C implementation will only evict things that are specifically # in the up-to-date state self._persistent_deactivate_ran = False # sweeping an object out of the cache should also # ghost it---that's what C does. 
This winds up # calling `update_object_size_estimation`. # Also in C, if this was the last reference to the object, # it removes itself from the `data` dictionary. # If we're under PyPy or Jython, we need to run a GC collection # to make this happen...this is only noticeable though, when # we eject objects. Also, note that we can only take any of these # actions if our _p_deactivate ran, in case of buggy subclasses. # see _persistent_deactivate_ran value._p_deactivate() if (self._persistent_deactivate_ran # Test-cases sneak in non-Persistent objects, sigh, so naturally # they don't cooperate (without this check a bunch of test_picklecache # breaks) or not isinstance(value, self._SWEEPABLE_TYPES)): ring.delete_node(node) ejected += 1 self.non_ghost_count -= 1 return ejected @_sweeping_ring def _invalidate(self, oid): value = self.data.get(oid) if value is not None and value._p_state != GHOST: value._p_invalidate() self.ring.delete(value) self.non_ghost_count -= 1 elif oid in self.persistent_classes: persistent_class = self.persistent_classes.pop(oid) try: # ZODB.persistentclass.PersistentMetaClass objects # have this method and it must be called for transaction abort # and other forms of invalidation to work persistent_class._p_invalidate() except AttributeError: pass # This name is bound by the ``@use_c_impl`` decorator to the class defined above. # We make sure and list it statically, though, to help out linters. PickleCachePy = PickleCachePy # pylint:disable=undefined-variable,self-assigning-variable classImplements(PickleCachePy, IExtendedPickleCache) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1585243094.0 persistent-4.6.4/persistent/ring.c0000644000076500000240000000347500000000000017767 0ustar00jmaddenstaff00000000000000/***************************************************************************** Copyright (c) 2003 Zope Foundation and Contributors. All Rights Reserved. 
This software is subject to the provisions of the Zope Public License, Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS FOR A PARTICULAR PURPOSE ****************************************************************************/ #define RING_C "$Id$\n" /* Support routines for the doubly-linked list of cached objects. The cache stores a doubly-linked list of persistent objects, with space for the pointers allocated in the objects themselves. The cache stores the distinguished head of the list, which is not a valid persistent object. The next pointers traverse the ring in order starting with the least recently used object. The prev pointers traverse the ring in order starting with the most recently used object. */ #include "Python.h" #include "ring.h" void ring_add(CPersistentRing *ring, CPersistentRing *elt) { assert(!elt->r_next); elt->r_next = ring; elt->r_prev = ring->r_prev; ring->r_prev->r_next = elt; ring->r_prev = elt; } void ring_del(CPersistentRing *elt) { assert(elt->r_next); elt->r_next->r_prev = elt->r_prev; elt->r_prev->r_next = elt->r_next; elt->r_next = NULL; elt->r_prev = NULL; } void ring_move_to_head(CPersistentRing *ring, CPersistentRing *elt) { assert(elt->r_next); elt->r_prev->r_next = elt->r_next; elt->r_next->r_prev = elt->r_prev; elt->r_next = ring; elt->r_prev = ring->r_prev; ring->r_prev->r_next = elt; ring->r_prev = elt; } ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1585243094.0 persistent-4.6.4/persistent/ring.h0000644000076500000240000000512000000000000017761 0ustar00jmaddenstaff00000000000000/***************************************************************************** Copyright (c) 2003 Zope Foundation and Contributors. All Rights Reserved. 
This software is subject to the provisions of the Zope Public License, Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS FOR A PARTICULAR PURPOSE ****************************************************************************/ /* Support routines for the doubly-linked list of cached objects. The cache stores a headed, doubly-linked, circular list of persistent objects, with space for the pointers allocated in the objects themselves. The cache stores the distinguished head of the list, which is not a valid persistent object. The other list members are non-ghost persistent objects, linked in LRU (least-recently used) order. The r_next pointers traverse the ring starting with the least recently used object. The r_prev pointers traverse the ring starting with the most recently used object. Obscure: While each object is pointed at twice by list pointers (once by its predecessor's r_next, again by its successor's r_prev), the refcount on the object is bumped only by 1. This leads to some possibly surprising sequences of incref and decref code. Note that since the refcount is bumped at least once, the list does hold a strong reference to each object in it. */ typedef struct CPersistentRing_struct { struct CPersistentRing_struct *r_prev; struct CPersistentRing_struct *r_next; } CPersistentRing; /* The list operations here take constant time independent of the * number of objects in the list: */ /* Add elt as the most recently used object. elt must not already be * in the list, although this isn't checked. */ void ring_add(CPersistentRing *ring, CPersistentRing *elt); /* Remove elt from the list. elt must already be in the list, although * this isn't checked. 
*/ void ring_del(CPersistentRing *elt); /* elt must already be in the list, although this isn't checked. It's * unlinked from its current position, and relinked into the list as the * most recently used object (which is arguably the tail of the list * instead of the head -- but the name of this function could be argued * either way). This is equivalent to * * ring_del(elt); * ring_add(ring, elt); * * but may be a little quicker. */ void ring_move_to_head(CPersistentRing *ring, CPersistentRing *elt); ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1585243094.0 persistent-4.6.4/persistent/ring.py0000644000076500000240000001502100000000000020163 0ustar00jmaddenstaff00000000000000# -*- coding: utf-8 -*- ############################################################################## # # Copyright (c) 2015 Zope Foundation and Contributors. # All Rights Reserved. # # This software is subject to the provisions of the Zope Public License, # Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. # THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED # WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS # FOR A PARTICULAR PURPOSE. # ############################################################################## # pylint:disable=inherit-non-class,no-self-argument,redefined-builtin,c-extension-no-member # pylint:disable=protected-access from zope.interface import Interface from zope.interface import implementer from persistent import _ring class IRing(Interface): """Conceptually, a doubly-linked list for efficiently keeping track of least- and most-recently used :class:`persistent.interfaces.IPersistent` objects. This is meant to be used by the :class:`persistent.picklecache.PickleCache` and should not be considered a public API. 
This interface documentation exists to assist development of the picklecache and alternate implementations by explaining assumptions and performance requirements. """ def __len__(): # pylint:disable=no-method-argument """Return the number of persistent objects stored in the ring. Should be constant time. """ def __contains__(object): # pylint:disable=unexpected-special-method-signature """Answer whether the given persistent object is found in the ring. Must not rely on object equality or object hashing, but only identity or the `_p_oid`. Should be constant time. """ def add(object): """Add the persistent object to the ring as most-recently used. When an object is in the ring, the ring holds a strong reference to it so it can be deactivated later by the pickle cache. Should be constant time. The object should not already be in the ring, but this is not necessarily enforced. """ def delete(object): """Remove the object from the ring if it is present. Returns a true value if it was present and a false value otherwise. An ideal implementation should be constant time, but linear time is allowed. """ def move_to_head(object): """Place the object as the most recently used object in the ring. The object should already be in the ring, but this is not necessarily enforced, and attempting to move an object that is not in the ring has undefined consequences. An ideal implementation should be constant time, but linear time is allowed. """ def __iter__(): # pylint:disable=no-method-argument """Iterate over each persistent object in the ring, in the order of least recently used to most recently used. Mutating the ring while an iteration is in progress has undefined consequences. """ ffi = _ring.ffi _FFI_RING = _ring.lib _OGA = object.__getattribute__ _OSA = object.__setattr__ _handles = set() @implementer(IRing) class _CFFIRing(object): """A ring backed by a C implementation. All operations are constant time. It is only available on platforms with ``cffi`` installed. 
""" __slots__ = ('ring_home', 'ring_to_obj', 'cleanup_func') def __init__(self, cleanup_func=None): node = self.ring_home = ffi.new("CPersistentRing*") node.r_next = node node.r_prev = node self.cleanup_func = cleanup_func # The Persistent objects themselves are responsible for keeping # the CFFI nodes alive, but we need to be able to detect whether # or not any given object is in our ring, plus know how many there are. # In addition, once an object enters the ring, it must be kept alive # so that it can be deactivated. # Note that because this is a strong reference to the # persistent object, its cleanup function --- triggered by the ``ffi.gc`` # object it owns --- will never be fired while it is in this dict. self.ring_to_obj = {} def ring_node_for(self, persistent_object, create=True): ring_data = _OGA(persistent_object, '_Persistent__ring') if ring_data is None: if not create: return None if self.cleanup_func: node = ffi.new('CPersistentRingCFFI*') node.pobj_id = ffi.cast('uintptr_t', id(persistent_object)) gc_ptr = ffi.gc(node, self.cleanup_func) else: node = ffi.new("CPersistentRing*") gc_ptr = None ring_data = ( node, gc_ptr, ) _OSA(persistent_object, '_Persistent__ring', ring_data) return ring_data[0] def __len__(self): return len(self.ring_to_obj) def __contains__(self, pobj): node = self.ring_node_for(pobj, False) return node and node in self.ring_to_obj def add(self, pobj): node = self.ring_node_for(pobj) _FFI_RING.cffi_ring_add(self.ring_home, node) self.ring_to_obj[node] = pobj def delete(self, pobj): its_node = self.ring_node_for(pobj, False) our_obj = self.ring_to_obj.pop(its_node, self) if its_node is not None and our_obj is not self and its_node.r_next: _FFI_RING.cffi_ring_del(its_node) return 1 return None def delete_node(self, node): # Minimal sanity checking, assumes we're called from iter. 
self.ring_to_obj.pop(node) _FFI_RING.cffi_ring_del(node) def move_to_head(self, pobj): node = self.ring_node_for(pobj, False) _FFI_RING.cffi_ring_move_to_head(self.ring_home, node) def iteritems(self): head = self.ring_home here = head.r_next ring_to_obj = self.ring_to_obj while here != head: # We allow mutation during iteration, which means # we must get the next ``here`` value before # yielding, just in case the current value is # removed. current = here here = here.r_next pobj = ring_to_obj[current] yield current, pobj def __iter__(self): for _, v in self.iteritems(): yield v # Export the best available implementation Ring = _CFFIRing ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1585243094.9939716 persistent-4.6.4/persistent/tests/0000755000076500000240000000000000000000000020015 5ustar00jmaddenstaff00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1585243094.0 persistent-4.6.4/persistent/tests/__init__.py0000644000076500000240000000001200000000000022117 0ustar00jmaddenstaff00000000000000# package ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1585243094.0 persistent-4.6.4/persistent/tests/attrhooks.py0000644000076500000240000001016600000000000022411 0ustar00jmaddenstaff00000000000000############################################################################## # # Copyright (c) 2004 Zope Foundation and Contributors. # All Rights Reserved. # # This software is subject to the provisions of the Zope Public License, # Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. # THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED # WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS # FOR A PARTICULAR PURPOSE. 
#
##############################################################################
"""Overriding attr methods

Examples for overriding attribute access methods.
"""
from persistent import Persistent


def _resettingJar():
    from persistent.tests.utils import ResettingJar
    return ResettingJar()


def _rememberingJar():
    from persistent.tests.utils import RememberingJar
    return RememberingJar()


class OverridesGetattr(Persistent):
    """Example of overriding __getattr__
    """
    def __getattr__(self, name):
        """Get attributes that can't be gotten the usual way
        """
        # Don't pretend we have any special attributes.
        # BUG FIX: the original read ``name.endswrith("__")`` -- a typo for
        # ``endswith``.  str has no ``endswrith`` attribute, so every dunder
        # lookup raised AttributeError from the typo itself (with the wrong
        # message, and for *any* name starting with "__" regardless of how
        # it ended).  The ``pragma: no cover`` on this branch is why the
        # typo was never caught by the test suite.
        if name.startswith("__") and name.endswith("__"):
            raise AttributeError(name)  # pragma: no cover
        return name.upper(), self._p_changed


class VeryPrivate(Persistent):
    """Example of overriding __getattribute__, __setattr__, and __delattr__
    """
    def __init__(self, **kw):
        # Stash user data in a single dict slot so our __getattribute__ /
        # __setattr__ overrides can route everything through it.
        self.__dict__['__secret__'] = kw.copy()

    def __getattribute__(self, name):
        """Get an attribute value

        See the very important note in the comment below!
        """
        #################################################################
        # IMPORTANT! READ THIS! 8->
        #
        # We *always* give Persistent a chance first.
        # Persistent handles certain special attributes, like _p_
        # attributes. In particular, the base class handles __dict__
        # and __class__.
        #
        # We call _p_getattr. If it returns True, then we have to
        # use Persistent.__getattribute__ to get the value.
        #
        #################################################################
        if Persistent._p_getattr(self, name):
            return Persistent.__getattribute__(self, name)

        # Data should be in our secret dictionary:
        secret = self.__dict__['__secret__']
        if name in secret:
            return secret[name]

        # Maybe it's a method:
        meth = getattr(self.__class__, name, None)
        if meth is None:
            raise AttributeError(name)

        return meth.__get__(self, self.__class__)

    def __setattr__(self, name, value):
        """Set an attribute value
        """
        #################################################################
        # IMPORTANT! READ THIS!
8-> # # We *always* give Persistent a chance first. # Persistent handles certain special attributes, like _p_ # attributes. # # We call _p_setattr. If it returns True, then we are done. # It has already set the attribute. # ################################################################# if Persistent._p_setattr(self, name, value): return self.__dict__['__secret__'][name] = value if not name.startswith('tmp_'): self._p_changed = 1 def __delattr__(self, name): """Delete an attribute value """ ################################################################# # IMPORTANT! READ THIS! 8-> # # We *always* give Persistent a chance first. # Persistent handles certain special attributes, like _p_ # attributes. # # We call _p_delattr. If it returns True, then we are done. # It has already deleted the attribute. # ################################################################# if Persistent._p_delattr(self, name): return del self.__dict__['__secret__'][name] if not name.startswith('tmp_'): self._p_changed = 1 ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1585243094.0 persistent-4.6.4/persistent/tests/cucumbers.py0000644000076500000240000000534600000000000022367 0ustar00jmaddenstaff00000000000000############################################################################## # # Copyright (c) 2003 Zope Foundation and Contributors. # All Rights Reserved. # # This software is subject to the provisions of the Zope Public License, # Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. # THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED # WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS # FOR A PARTICULAR PURPOSE. # ############################################################################## # Example objects for pickling. 
from persistent import Persistent def print_dict(d): d = sorted(d.items()) print('{%s}' % (', '.join( [('%r: %r' % (k, v)) for (k, v) in d] ))) def cmpattrs(self, other, *attrs): result = 0 for attr in attrs: if attr[:3] in ('_v_', '_p_'): raise AssertionError("_v_ and _p_ attrs not allowed") lhs = getattr(self, attr, None) rhs = getattr(other, attr, None) result += lhs != rhs return result class Simple(Persistent): def __init__(self, name, **kw): self.__name__ = name self.__dict__.update(kw) self._v_favorite_color = 'blue' self._p_foo = 'bar' @property def _attrs(self): return list(self.__dict__.keys()) def __eq__(self, other): return cmpattrs(self, other, '__class__', *self._attrs) == 0 class Custom(Simple): def __new__(cls, x, y): r = Persistent.__new__(cls) r.x, r.y = x, y return r def __init__(self, x, y): self.a = 42 def __getnewargs__(self): return self.x, self.y def __getstate__(self): return self.a def __setstate__(self, a): self.a = a class Slotted(Persistent): __slots__ = 's1', 's2', '_p_splat', '_v_eek' def __init__(self, s1, s2): self.s1, self.s2 = s1, s2 self._v_eek = 1 self._p_splat = 2 @property def _attrs(self): raise NotImplementedError() def __eq__(self, other): return cmpattrs(self, other, '__class__', *self._attrs) == 0 class SubSlotted(Slotted): __slots__ = 's3', 's4' def __init__(self, s1, s2, s3): Slotted.__init__(self, s1, s2) self.s3 = s3 @property def _attrs(self): return ('s1', 's2', 's3', 's4') class SubSubSlotted(SubSlotted): def __init__(self, s1, s2, s3, **kw): SubSlotted.__init__(self, s1, s2, s3) self.__dict__.update(kw) self._v_favorite_color = 'blue' self._p_foo = 'bar' @property def _attrs(self): return ['s1', 's2', 's3', 's4'] + list(self.__dict__.keys()) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1585243094.0 persistent-4.6.4/persistent/tests/test__compat.py0000644000076500000240000000656700000000000023066 0ustar00jmaddenstaff00000000000000# -*- coding: utf-8 -*- 
############################################################################## # # Copyright (c) Zope Foundation and Contributors. # All Rights Reserved. # # This software is subject to the provisions of the Zope Public License, # Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. # THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED # WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS # FOR A PARTICULAR PURPOSE. # ############################################################################## """ Tests for ``persistent._compat`` """ import unittest import os from persistent import _compat as compat class TestCOptimizationsFuncs(unittest.TestCase): # pylint:disable=protected-access def setUp(self): self.env_val = os.environ.get('PURE_PYTHON', self) self.orig_pypy = compat.PYPY compat.PYPY = False def tearDown(self): compat.PYPY = self.orig_pypy if self.env_val is not self: # Reset to what it was to begin with. os.environ['PURE_PYTHON'] = self.env_val else: # pragma: no cover # It wasn't present before, make sure it's not present now. 
os.environ.pop('PURE_PYTHON', None) self.env_val = None def _set_env(self, val): if val is not None: os.environ['PURE_PYTHON'] = val else: os.environ.pop('PURE_PYTHON', None) def test_ignored_no_env_val(self): self._set_env(None) self.assertFalse(compat._c_optimizations_ignored()) def test_ignored_zero(self): self._set_env('0') self.assertFalse(compat._c_optimizations_ignored()) def test_ignored_empty(self): self._set_env('') self.assertFalse(compat._c_optimizations_ignored()) def test_ignored_other_values(self): for val in "1", "yes", "hi": self._set_env(val) self.assertTrue(compat._c_optimizations_ignored()) def test_ignored_pypy(self): # No matter what the environment variable is, PyPy always ignores compat.PYPY = True for val in None, "", "0", "1", "yes": __traceback_info__ = val self._set_env(val) self.assertTrue(compat._c_optimizations_ignored()) def test_required(self): for val, expected in ( ('', False), ('0', True), ('1', False), ('Yes', False) ): self._set_env(val) self.assertEqual(expected, compat._c_optimizations_required()) def test_should_attempt(self): for val, expected in ( (None, True), ('', True), ('0', True), ('1', False), ('Yes', False) ): self._set_env(val) self.assertEqual(expected, compat._should_attempt_c_optimizations()) def test_should_attempt_pypy(self): compat.PYPY = True for val, expected in ( (None, False), ('', False), ('0', True), ('1', False), ('Yes', False) ): __traceback_info__ = val self._set_env(val) self.assertEqual(expected, compat._should_attempt_c_optimizations()) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1585243094.0 persistent-4.6.4/persistent/tests/test_docs.py0000644000076500000240000000372000000000000022360 0ustar00jmaddenstaff00000000000000# -*- coding: utf-8 -*- ############################################################################## # # Copyright (c) 2001, 2002 Zope Foundation and Contributors. # All Rights Reserved. 
# # This software is subject to the provisions of the Zope Public License, # Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. # THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED # WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS # FOR A PARTICULAR PURPOSE. # ############################################################################## """ Tests for the documentation. """ from __future__ import absolute_import from __future__ import division from __future__ import print_function # disable: accessing protected members, too many methods # pylint: disable=W0212,R0904 import os.path import unittest import doctest import manuel.capture import manuel.codeblock import manuel.doctest import manuel.ignore import manuel.testing def test_suite(): here = os.path.dirname(os.path.abspath(__file__)) while not os.path.exists(os.path.join(here, 'setup.py')): prev, here = here, os.path.dirname(here) if here == prev: # Let's avoid infinite loops at root raise AssertionError('could not find my setup.py') docs = os.path.join(here, 'docs', 'api') files_to_test = ( 'cache.rst', 'attributes.rst', 'pickling.rst', ) paths = [os.path.join(docs, f) for f in files_to_test] m = manuel.ignore.Manuel() m += manuel.doctest.Manuel(optionflags=( doctest.NORMALIZE_WHITESPACE | doctest.ELLIPSIS | doctest.IGNORE_EXCEPTION_DETAIL )) m += manuel.codeblock.Manuel() m += manuel.capture.Manuel() suite = unittest.TestSuite() suite.addTest( manuel.testing.TestSuite( m, *paths ) ) return suite ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1585243094.0 persistent-4.6.4/persistent/tests/test_list.py0000644000076500000240000002511000000000000022400 0ustar00jmaddenstaff00000000000000############################################################################## # # Copyright (c) 2001, 2002 Zope Foundation and Contributors. 
# All Rights Reserved. # # This software is subject to the provisions of the Zope Public License, # Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. # THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED # WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS # FOR A PARTICULAR PURPOSE. # ############################################################################## """Tests for PersistentList """ import unittest from persistent.tests.utils import TrivialJar from persistent.tests.utils import copy_test l0 = [] l1 = [0] l2 = [0, 1] # pylint:disable=protected-access class OtherList: def __init__(self, initlist): self.__data = initlist def __len__(self): return len(self.__data) def __getitem__(self, i): return self.__data[i] class TestPList(unittest.TestCase): def _getTargetClass(self): from persistent.list import PersistentList return PersistentList def _makeJar(self): return TrivialJar() def _makeOne(self, *args): inst = self._getTargetClass()(*args) inst._p_jar = self._makeJar() return inst def test_volatile_attributes_not_persisted(self): # http://www.zope.org/Collectors/Zope/2052 m = self._getTargetClass()() m.foo = 'bar' m._v_baz = 'qux' state = m.__getstate__() self.assertTrue('foo' in state) self.assertFalse('_v_baz' in state) def testTheWorld(self): from persistent._compat import PYTHON2 # Test constructors pl = self._getTargetClass() u = pl() u0 = pl(l0) u1 = pl(l1) u2 = pl(l2) uu = pl(u) uu0 = pl(u0) uu1 = pl(u1) uu2 = pl(u2) pl(tuple(u)) pl(OtherList(u0)) pl("this is also a sequence") # Test __repr__ eq = self.assertEqual eq(str(u0), str(l0), "str(u0) == str(l0)") eq(repr(u1), repr(l1), "repr(u1) == repr(l1)") # Test __cmp__ and __len__ try: cmp except NameError: def cmp(a, b): if a == b: return 0 if a < b: return -1 return 1 def mycmp(a, b): r = cmp(a, b) if r < 0: return -1 if r > 0: return 1 return r to_test = [l0, l1, l2, u, 
u0, u1, u2, uu, uu0, uu1, uu2] for a in to_test: for b in to_test: eq(mycmp(a, b), mycmp(len(a), len(b)), "mycmp(a, b) == mycmp(len(a), len(b))") # Test __getitem__ for i, val in enumerate(u2): eq(val, i, "u2[i] == i") # Test __setitem__ uu2[0] = 0 uu2[1] = 100 with self.assertRaises(IndexError): uu2[2] = 200 # Test __delitem__ del uu2[1] del uu2[0] with self.assertRaises(IndexError): del uu2[0] # Test __getslice__ for i in range(-3, 4): eq(u2[:i], l2[:i], "u2[:i] == l2[:i]") eq(u2[i:], l2[i:], "u2[i:] == l2[i:]") for j in range(-3, 4): eq(u2[i:j], l2[i:j], "u2[i:j] == l2[i:j]") # Test __setslice__ for i in range(-3, 4): u2[:i] = l2[:i] eq(u2, l2, "u2 == l2") u2[i:] = l2[i:] eq(u2, l2, "u2 == l2") for j in range(-3, 4): u2[i:j] = l2[i:j] eq(u2, l2, "u2 == l2") uu2 = u2[:] uu2[:0] = [-2, -1] eq(uu2, [-2, -1, 0, 1], "uu2 == [-2, -1, 0, 1]") uu2[0:] = [] eq(uu2, [], "uu2 == []") # Test __contains__ for i in u2: self.assertTrue(i in u2, "i in u2") for i in min(u2)-1, max(u2)+1: self.assertTrue(i not in u2, "i not in u2") # Test __delslice__ uu2 = u2[:] del uu2[1:2] del uu2[0:1] eq(uu2, [], "uu2 == []") uu2 = u2[:] del uu2[1:] del uu2[:1] eq(uu2, [], "uu2 == []") # Test __add__, __radd__, __mul__ and __rmul__ #self.assertTrue(u1 + [] == [] + u1 == u1, "u1 + [] == [] + u1 == u1") self.assertTrue(u1 + [1] == u2, "u1 + [1] == u2") #self.assertTrue([-1] + u1 == [-1, 0], "[-1] + u1 == [-1, 0]") self.assertTrue(u2 == u2*1 == 1*u2, "u2 == u2*1 == 1*u2") self.assertTrue(u2+u2 == u2*2 == 2*u2, "u2+u2 == u2*2 == 2*u2") self.assertTrue(u2+u2+u2 == u2*3 == 3*u2, "u2+u2+u2 == u2*3 == 3*u2") # Test append u = u1[:] u.append(1) eq(u, u2, "u == u2") # Test insert u = u2[:] u.insert(0, -1) eq(u, [-1, 0, 1], "u == [-1, 0, 1]") # Test pop u = pl([0, -1, 1]) u.pop() eq(u, [0, -1], "u == [0, -1]") u.pop(0) eq(u, [-1], "u == [-1]") # Test remove u = u2[:] u.remove(1) eq(u, u1, "u == u1") # Test count u = u2*3 eq(u.count(0), 3, "u.count(0) == 3") eq(u.count(1), 3, "u.count(1) == 3") 
eq(u.count(2), 0, "u.count(2) == 0") # Test index eq(u2.index(0), 0, "u2.index(0) == 0") eq(u2.index(1), 1, "u2.index(1) == 1") with self.assertRaises(ValueError): u2.index(2) # Test reverse u = u2[:] u.reverse() eq(u, [1, 0], "u == [1, 0]") u.reverse() eq(u, u2, "u == u2") # Test sort u = pl([1, 0]) u.sort() eq(u, u2, "u == u2") # Test keyword arguments to sort if PYTHON2: # pragma: no cover u.sort(cmp=lambda x, y: cmp(y, x)) eq(u, [1, 0], "u == [1, 0]") u.sort(key=lambda x: -x) eq(u, [1, 0], "u == [1, 0]") u.sort(reverse=True) eq(u, [1, 0], "u == [1, 0]") # Passing any other keyword arguments results in a TypeError with self.assertRaises(TypeError): u.sort(blah=True) # Test extend u = u1[:] u.extend(u2) eq(u, u1 + u2, "u == u1 + u2") # Test iadd u = u1[:] u += u2 eq(u, u1 + u2, "u == u1 + u2") # Test imul u = u1[:] u *= 3 eq(u, u1 + u1 + u1, "u == u1 + u1 + u1") def test_setslice(self): inst = self._makeOne() self.assertFalse(inst._p_changed) inst[:] = [1, 2, 3] self.assertEqual(inst, [1, 2, 3]) self.assertTrue(inst._p_changed) def test_delslice_all_nonempty_list(self): # Delete everything from a non-empty list inst = self._makeOne([1, 2, 3]) self.assertFalse(inst._p_changed) self.assertEqual(inst, [1, 2, 3]) del inst[:] self.assertEqual(inst, []) self.assertTrue(inst._p_changed) def test_delslice_sub_nonempty_list(self): # delete a sub-list from a non-empty list inst = self._makeOne([0, 1, 2, 3]) self.assertFalse(inst._p_changed) del inst[1:2] self.assertEqual(inst, [0, 2, 3]) self.assertTrue(inst._p_changed) def test_delslice_empty_nonempty_list(self): # delete an empty sub-list from a non-empty list inst = self._makeOne([0, 1, 2, 3]) self.assertFalse(inst._p_changed) del inst[1:1] self.assertEqual(inst, [0, 1, 2, 3]) self.assertFalse(inst._p_changed) def test_delslice_all_empty_list(self): inst = self._makeOne([]) self.assertFalse(inst._p_changed) self.assertEqual(inst, []) del inst[:] self.assertEqual(inst, []) self.assertFalse(inst._p_changed) def 
test_iadd(self): inst = self._makeOne() self.assertFalse(inst._p_changed) inst += [1, 2, 3] self.assertEqual(inst, [1, 2, 3]) self.assertTrue(inst._p_changed) def test_extend(self): inst = self._makeOne() self.assertFalse(inst._p_changed) inst.extend([1, 2, 3]) self.assertEqual(inst, [1, 2, 3]) self.assertTrue(inst._p_changed) def test_imul(self): inst = self._makeOne([1]) self.assertFalse(inst._p_changed) inst *= 2 self.assertEqual(inst, [1, 1]) self.assertTrue(inst._p_changed) def test_append(self): inst = self._makeOne() self.assertFalse(inst._p_changed) inst.append(1) self.assertEqual(inst, [1]) self.assertTrue(inst._p_changed) def test_clear_nonempty(self): inst = self._makeOne([1, 2, 3, 4]) self.assertFalse(inst._p_changed) inst.clear() self.assertEqual(inst, []) self.assertTrue(inst._p_changed) def test_clear_empty(self): inst = self._makeOne([]) self.assertFalse(inst._p_changed) inst.clear() self.assertEqual(inst, []) self.assertFalse(inst._p_changed) def test_insert(self): inst = self._makeOne() self.assertFalse(inst._p_changed) inst.insert(0, 1) self.assertEqual(inst, [1]) self.assertTrue(inst._p_changed) def test_remove(self): inst = self._makeOne([1]) self.assertFalse(inst._p_changed) inst.remove(1) self.assertEqual(inst, []) self.assertTrue(inst._p_changed) def test_reverse(self): inst = self._makeOne([2, 1]) self.assertFalse(inst._p_changed) inst.reverse() self.assertEqual(inst, [1, 2]) self.assertTrue(inst._p_changed) def test_getslice_same_class(self): class MyList(self._getTargetClass()): pass inst = MyList() inst._p_jar = self._makeJar() # Entire thing, empty. inst2 = inst[:] self.assertIsNot(inst, inst2) self.assertEqual(inst, inst2) self.assertIsInstance(inst2, MyList) # The _p_jar is *not* propagated. 
self.assertIsNotNone(inst._p_jar) self.assertIsNone(inst2._p_jar) # Partial inst.extend((1, 2, 3)) inst2 = inst[1:2] self.assertEqual(inst2, [2]) self.assertIsInstance(inst2, MyList) self.assertIsNone(inst2._p_jar) def test_copy(self): inst = self._makeOne() inst.append(42) copy_test(self, inst) def test_suite(): return unittest.defaultTestLoader.loadTestsFromName(__name__) if __name__ == '__main__': unittest.main() ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1585243094.0 persistent-4.6.4/persistent/tests/test_mapping.py0000644000076500000240000002356700000000000023076 0ustar00jmaddenstaff00000000000000############################################################################## # # Copyright (c) Zope Foundation and Contributors. # All Rights Reserved. # # This software is subject to the provisions of the Zope Public License, # Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. # THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED # WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS # FOR A PARTICULAR PURPOSE. 
# ############################################################################## import unittest from persistent.tests.utils import TrivialJar from persistent.tests.utils import copy_test # pylint:disable=blacklisted-name, protected-access class Test_default(unittest.TestCase): def _getTargetClass(self): from persistent.mapping import default return default def _makeOne(self, func): return self._getTargetClass()(func) def test___get___from_class(self): def _test(inst): raise AssertionError("Must not be caled") descr = self._makeOne(_test) class Foo(object): testing = descr self.assertIs(Foo.testing, descr) def test___get___from_instance(self): _called_with = [] def _test(inst): _called_with.append(inst) return 'TESTING' descr = self._makeOne(_test) class Foo(object): testing = descr foo = Foo() self.assertEqual(foo.testing, 'TESTING') self.assertEqual(_called_with, [foo]) class PersistentMappingTests(unittest.TestCase): def _getTargetClass(self): from persistent.mapping import PersistentMapping return PersistentMapping def _makeJar(self): return TrivialJar() def _makeOne(self, *args, **kwargs): inst = self._getTargetClass()(*args, **kwargs) inst._p_jar = self._makeJar() return inst def test_volatile_attributes_not_persisted(self): # http://www.zope.org/Collectors/Zope/2052 m = self._makeOne() m.foo = 'bar' m._v_baz = 'qux' state = m.__getstate__() self.assertTrue('foo' in state) self.assertFalse('_v_baz' in state) def testTheWorld(self): # Test constructors l0 = {} l1 = {0:0} l2 = {0:0, 1:1} u = self._makeOne() u0 = self._makeOne(l0) u1 = self._makeOne(l1) u2 = self._makeOne(l2) uu = self._makeOne(u) uu0 = self._makeOne(u0) uu1 = self._makeOne(u1) uu2 = self._makeOne(u2) class OtherMapping(dict): def __init__(self, initmapping): dict.__init__(self) self.__data = initmapping def items(self): raise AssertionError("Not called") self._makeOne(OtherMapping(u0)) self._makeOne([(0, 0), (1, 1)]) # Test __repr__ eq = self.assertEqual eq(str(u0), str(l0), "str(u0) == 
str(l0)") eq(repr(u1), repr(l1), "repr(u1) == repr(l1)") # Test __cmp__ and __len__ try: cmp except NameError: def cmp(a, b): if a == b: return 0 if hasattr(a, 'items'): a = sorted(a.items()) b = sorted(b.items()) if a < b: return -1 return 1 def mycmp(a, b): r = cmp(a, b) if r < 0: return -1 if r > 0: return 1 return r to_test = [l0, l1, l2, u, u0, u1, u2, uu, uu0, uu1, uu2] for a in to_test: for b in to_test: eq(mycmp(a, b), mycmp(len(a), len(b)), "mycmp(a, b) == mycmp(len(a), len(b))") # Test __getitem__ for i, val in enumerate(u2): eq(val, i, "u2[i] == i") # Test get for i in range(len(u2)): eq(u2.get(i), i, "u2.get(i) == i") eq(u2.get(i, 5), i, "u2.get(i, 5) == i") for i in min(u2)-1, max(u2)+1: eq(u2.get(i), None, "u2.get(i) == None") eq(u2.get(i, 5), 5, "u2.get(i, 5) == 5") # Test __setitem__ uu2[0] = 0 uu2[1] = 100 uu2[2] = 200 # Test __delitem__ del uu2[1] del uu2[0] with self.assertRaises(KeyError): del uu2[0] # Test __contains__ for i in u2: self.assertTrue(i in u2, "i in u2") for i in min(u2)-1, max(u2)+1: self.assertTrue(i not in u2, "i not in u2") # Test update l = {"a":"b"} u = self._makeOne(l) u.update(u2) for i in u: self.assertTrue(i in l or i in u2, "i in l or i in u2") for i in l: self.assertTrue(i in u, "i in u") for i in u2: self.assertTrue(i in u, "i in u") # Test setdefault x = u2.setdefault(0, 5) eq(x, 0, "u2.setdefault(0, 5) == 0") x = u2.setdefault(5, 5) eq(x, 5, "u2.setdefault(5, 5) == 5") self.assertTrue(5 in u2, "5 in u2") # Test pop x = u2.pop(1) eq(x, 1, "u2.pop(1) == 1") self.assertTrue(1 not in u2, "1 not in u2") with self.assertRaises(KeyError): u2.pop(1) x = u2.pop(1, 7) eq(x, 7, "u2.pop(1, 7) == 7") # Test popitem items = list(u2.items()) key, value = u2.popitem() self.assertTrue((key, value) in items, "key, value in items") self.assertTrue(key not in u2, "key not in u2") # Test clear u2.clear() eq(u2, {}, "u2 == {}") def test___repr___converts_legacy_container_attr(self): # In the past, PM used a _container attribute. 
For some time, the # implementation continued to use a _container attribute in pickles # (__get/setstate__) to be compatible with older releases. This isn't # really necessary any more. In fact, releases for which this might # matter can no longer share databases with current releases. Because # releases as recent as 3.9.0b5 still use _container in saved state, we # need to accept such state, but we stop producing it. pm = self._makeOne() self.assertEqual(pm.__dict__, {'data': {}}) # Make it look like an older instance pm.__dict__.clear() pm.__dict__['_container'] = {'a': 1} self.assertEqual(pm.__dict__, {'_container': {'a': 1}}) pm._p_changed = 0 self.assertEqual(repr(pm), "{'a': 1}") self.assertEqual(pm.__dict__, {'data': {'a': 1}}) self.assertEqual(pm.__getstate__(), {'data': {'a': 1}}) def test_update_keywords(self): # Prior to https://github.com/zopefoundation/persistent/issues/126, # PersistentMapping didn't accept keyword arguments to update as # the builtin dict and the UserDict do. # Here we make sure it does. We use some names that have been # seen to be special in signatures as well to make sure that # we don't interpret them incorrectly. pm = self._makeOne() # Our older implementation was ``def update(self, b)``, so ``b`` # is potentially a keyword argument in the wild; the behaviour in that # corner case has changed. pm.update(b={'a': 42}) self.assertEqual(pm, {'b': {'a': 42}}) pm = self._makeOne() # Our previous implementation would explode with a TypeError pm.update(b=42) self.assertEqual(pm, {'b': 42}) pm = self._makeOne() # ``other`` shows up in a Python 3 signature. 
pm.update(other=42) self.assertEqual(pm, {'other': 42}) pm = self._makeOne() pm.update(other={'a': 42}) self.assertEqual(pm, {'other': {'a': 42}}) pm = self._makeOne() pm.update(a=1, b=2) self.assertEqual(pm, {'a': 1, 'b': 2}) def test_clear_nonempty(self): pm = self._makeOne({'a': 42}) self.assertFalse(pm._p_changed) pm.clear() self.assertTrue(pm._p_changed) def test_clear_empty(self): pm = self._makeOne() self.assertFalse(pm._p_changed) pm.clear() self.assertFalse(pm._p_changed) def test_clear_no_jar(self): # https://github.com/zopefoundation/persistent/issues/139 self._makeOne = self._getTargetClass() self.test_clear_empty() pm = self._makeOne(a=42) pm.clear() self.assertFalse(pm._p_changed) def test_clear_empty_legacy_container(self): pm = self._makeOne() pm.__dict__['_container'] = pm.__dict__.pop('data') self.assertFalse(pm._p_changed) pm.clear() # Migration happened self.assertIn('data', pm.__dict__) # and we are marked as changed. self.assertTrue(pm._p_changed) def test_copy(self): pm = self._makeOne() pm['key'] = 42 copy = copy_test(self, pm) self.assertEqual(42, copy['key']) def test_copy_legacy_container(self): pm = self._makeOne() pm['key'] = 42 pm.__dict__['_container'] = pm.__dict__.pop('data') self.assertNotIn('data', pm.__dict__) self.assertIn('_container', pm.__dict__) copy = copy_test(self, pm) self.assertNotIn('_container', copy.__dict__) self.assertIn('data', copy.__dict__) self.assertEqual(42, copy['key']) class Test_legacy_PersistentDict(unittest.TestCase): def _getTargetClass(self): from persistent.dict import PersistentDict return PersistentDict def test_PD_is_alias_to_PM(self): from persistent.mapping import PersistentMapping self.assertIs(self._getTargetClass(), PersistentMapping) def test_suite(): return unittest.defaultTestLoader.loadTestsFromName(__name__) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1585243094.0 
persistent-4.6.4/persistent/tests/test_persistence.py0000644000076500000240000022222100000000000023753 0ustar00jmaddenstaff00000000000000############################################################################## # # Copyright (c) 2011 Zope Foundation and Contributors. # All Rights Reserved. # # This software is subject to the provisions of the Zope Public License, # Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. # THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED # WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS # FOR A PARTICULAR PURPOSE. # ############################################################################## import re import unittest from persistent._compat import copy_reg from persistent._compat import PYPY from persistent._compat import PYTHON3 as PY3 from persistent.tests.utils import skipIfNoCExtension _is_pypy3 = PYPY and PY3 # pylint:disable=R0904,W0212,E1101 # pylint:disable=attribute-defined-outside-init,too-many-lines # pylint:disable=blacklisted-name,useless-object-inheritance # Hundreds of unused jar and OID vars make this useless # pylint:disable=unused-variable class _Persistent_Base(object): # py2/3 compat assertRaisesRegex = getattr(unittest.TestCase, 'assertRaisesRegex', unittest.TestCase.assertRaisesRegexp) def _getTargetClass(self): # concrete testcase classes must override raise NotImplementedError() def _makeCache(self, jar): # concrete testcase classes must override raise NotImplementedError() def _makeRealCache(self, jar): return self._makeCache(jar) def _makeOne(self, *args, **kw): return self._getTargetClass()(*args, **kw) def _makeJar(self): from zope.interface import implementer from persistent.interfaces import IPersistentDataManager @implementer(IPersistentDataManager) class _Jar(object): _cache = None # Set this to a value to have our `setstate` # pass it through to the object's 
__setstate__ setstate_calls_object = None # Set this to a value to have our `setstate` # set the _p_serial of the object setstate_sets_serial = None def __init__(self): self._loaded = [] self._registered = [] def setstate(self, obj): self._loaded.append(obj._p_oid) if self.setstate_calls_object is not None: obj.__setstate__(self.setstate_calls_object) if self.setstate_sets_serial is not None: obj._p_serial = self.setstate_sets_serial def register(self, obj): self._registered.append(obj._p_oid) jar = _Jar() jar._cache = self._makeCache(jar) return jar def _makeBrokenJar(self): from zope.interface import implementer from persistent.interfaces import IPersistentDataManager @implementer(IPersistentDataManager) class _BrokenJar(object): def __init__(self): self.called = 0 def register(self, ob): self.called += 1 raise NotImplementedError() def setstate(self, ob): raise NotImplementedError() jar = _BrokenJar() jar._cache = self._makeCache(jar) return jar def _makeOneWithJar(self, klass=None, broken_jar=False): OID = b'\x01' * 8 if klass is not None: inst = klass() else: inst = self._makeOne() jar = self._makeJar() if not broken_jar else self._makeBrokenJar() jar._cache.new_ghost(OID, inst) # assigns _p_jar, _p_oid return inst, jar, OID def test_class_conforms_to_IPersistent(self): from zope.interface.verify import verifyClass from persistent.interfaces import IPersistent verifyClass(IPersistent, self._getTargetClass()) def test_instance_conforms_to_IPersistent(self): from zope.interface.verify import verifyObject from persistent.interfaces import IPersistent verifyObject(IPersistent, self._makeOne()) def test_instance_cannot_be_weakly_referenced(self): if PYPY: # pragma: no cover self.skipTest('On PyPy, everything can be weakly referenced') import weakref inst = self._makeOne() with self.assertRaises(TypeError): weakref.ref(inst) def test_ctor(self): from persistent.persistence import _INITIAL_SERIAL inst = self._makeOne() self.assertEqual(inst._p_jar, None) 
self.assertEqual(inst._p_oid, None) self.assertEqual(inst._p_serial, _INITIAL_SERIAL) self.assertEqual(inst._p_changed, False) self.assertEqual(inst._p_sticky, False) self.assertEqual(inst._p_status, 'unsaved') def test_del_jar_no_jar(self): inst = self._makeOne() del inst._p_jar # does not raise self.assertEqual(inst._p_jar, None) def test_del_jar_while_in_cache(self): inst, _, OID = self._makeOneWithJar() def _test(): del inst._p_jar self.assertRaises(ValueError, _test) def test_del_jar_like_ZODB_abort(self): # When a ZODB connection aborts, it removes registered objects from # the cache, deletes their jar, deletes their OID, and finally sets # p_changed to false inst, jar, OID = self._makeOneWithJar() del jar._cache[OID] del inst._p_jar self.assertEqual(inst._p_jar, None) def test_del_jar_of_inactive_object_that_has_no_state(self): # If an object is ghosted, and we try to delete its # jar, we shouldn't activate the object. # Simulate a POSKeyError on _p_activate; this can happen aborting # a transaction using ZEO broken_jar = self._makeBrokenJar() inst = self._makeOne() inst._p_oid = 42 inst._p_jar = broken_jar # make it inactive inst._p_deactivate() self.assertEqual(inst._p_status, "ghost") # delete the jar; if we activated the object, the broken # jar would raise NotImplementedError del inst._p_jar def test_assign_p_jar_w_new_jar(self): inst, jar, OID = self._makeOneWithJar() new_jar = self._makeJar() with self.assertRaisesRegex(ValueError, "can not change _p_jar of cached object"): inst._p_jar = new_jar def test_assign_p_jar_w_valid_jar(self): jar = self._makeJar() inst = self._makeOne() inst._p_jar = jar self.assertEqual(inst._p_status, 'saved') self.assertTrue(inst._p_jar is jar) inst._p_jar = jar # reassign only to same DM def test_assign_p_jar_not_in_cache_allowed(self): jar = self._makeJar() inst = self._makeOne() inst._p_jar = jar # Both of these are allowed inst._p_jar = self._makeJar() inst._p_jar = None self.assertEqual(inst._p_jar, None) def 
test_assign_p_oid_w_invalid_oid(self): inst, jar, OID = self._makeOneWithJar() with self.assertRaisesRegex(ValueError, 'can not change _p_oid of cached object'): inst._p_oid = object() def test_assign_p_oid_w_valid_oid(self): OID = b'\x01' * 8 inst = self._makeOne() inst._p_oid = OID self.assertEqual(inst._p_oid, OID) inst._p_oid = OID # reassign only same OID def test_assign_p_oid_w_new_oid_wo_jar(self): OID1 = b'\x01' * 8 OID2 = b'\x02' * 8 inst = self._makeOne() inst._p_oid = OID1 inst._p_oid = OID2 self.assertEqual(inst._p_oid, OID2) def test_assign_p_oid_w_None_wo_jar(self): OID1 = b'\x01' * 8 inst = self._makeOne() inst._p_oid = OID1 inst._p_oid = None self.assertEqual(inst._p_oid, None) def test_assign_p_oid_w_new_oid_w_jar(self): inst, jar, OID = self._makeOneWithJar() new_OID = b'\x02' * 8 def _test(): inst._p_oid = new_OID self.assertRaises(ValueError, _test) def test_assign_p_oid_not_in_cache_allowed(self): jar = self._makeJar() inst = self._makeOne() inst._p_jar = jar inst._p_oid = 1 # anything goes inst._p_oid = 42 self.assertEqual(inst._p_oid, 42) def test_delete_p_oid_wo_jar(self): OID = b'\x01' * 8 inst = self._makeOne() inst._p_oid = OID del inst._p_oid self.assertEqual(inst._p_oid, None) def test_delete_p_oid_w_jar(self): inst, jar, OID = self._makeOneWithJar() with self.assertRaises(ValueError): del inst._p_oid def test_delete_p_oid_of_subclass_calling_p_delattr(self): class P(self._getTargetClass()): def __delattr__(self, name): super(P, self)._p_delattr(name) raise AssertionError("Should not get here") inst, _jar, _oid = self._makeOneWithJar(klass=P) with self.assertRaises(ValueError): del inst._p_oid def test_del_oid_like_ZODB_abort(self): # When a ZODB connection aborts, it removes registered objects from # the cache, deletes their jar, deletes their OID, and finally sets # p_changed to false inst, jar, OID = self._makeOneWithJar() del jar._cache[OID] del inst._p_oid self.assertEqual(inst._p_oid, None) def 
test_assign_p_serial_w_invalid_type(self): inst = self._makeOne() def _test(): inst._p_serial = object() self.assertRaises(ValueError, _test) def test_assign_p_serial_w_None(self): inst = self._makeOne() def _test(): inst._p_serial = None self.assertRaises(ValueError, _test) def test_assign_p_serial_too_short(self): inst = self._makeOne() def _test(): inst._p_serial = b'\x01\x02\x03' self.assertRaises(ValueError, _test) def test_assign_p_serial_too_long(self): inst = self._makeOne() def _test(): inst._p_serial = b'\x01\x02\x03' * 3 self.assertRaises(ValueError, _test) def test_assign_p_serial_w_valid_serial(self): SERIAL = b'\x01' * 8 inst = self._makeOne() inst._p_serial = SERIAL self.assertEqual(inst._p_serial, SERIAL) def test_delete_p_serial(self): from persistent.persistence import _INITIAL_SERIAL SERIAL = b'\x01' * 8 inst = self._makeOne() inst._p_serial = SERIAL self.assertEqual(inst._p_serial, SERIAL) del inst._p_serial self.assertEqual(inst._p_serial, _INITIAL_SERIAL) def test_query_p_changed_unsaved(self): inst = self._makeOne() self.assertEqual(inst._p_changed, False) def test_query_p_changed_ghost(self): inst, jar, OID = self._makeOneWithJar() inst._p_deactivate() self.assertEqual(inst._p_changed, None) def test_query_p_changed_saved(self): inst, jar, OID = self._makeOneWithJar() inst._p_activate() self.assertEqual(inst._p_changed, False) def test_query_p_changed_changed(self): inst, jar, OID = self._makeOneWithJar() inst._p_activate() inst._p_changed = True self.assertEqual(inst._p_changed, True) def test_assign_p_changed_none_from_unsaved(self): inst = self._makeOne() inst._p_changed = None self.assertEqual(inst._p_status, 'unsaved') def test_assign_p_changed_true_from_unsaved(self): inst = self._makeOne() inst._p_changed = True self.assertEqual(inst._p_status, 'unsaved') def test_assign_p_changed_false_from_unsaved(self): inst = self._makeOne() inst._p_changed = False self.assertEqual(inst._p_status, 'unsaved') def 
test_assign_p_changed_none_from_ghost(self): inst, jar, OID = self._makeOneWithJar() inst._p_deactivate() inst._p_changed = None self.assertEqual(inst._p_status, 'ghost') self.assertEqual(list(jar._loaded), []) self.assertEqual(list(jar._registered), []) def test_assign_p_changed_true_from_ghost(self): inst, jar, OID = self._makeOneWithJar() inst._p_deactivate() inst._p_changed = True self.assertEqual(inst._p_status, 'changed') self.assertEqual(list(jar._loaded), [OID]) self.assertEqual(list(jar._registered), [OID]) def test_assign_p_changed_false_from_ghost(self): inst, jar, OID = self._makeOneWithJar() inst._p_deactivate() inst._p_changed = False self.assertEqual(inst._p_status, 'ghost') # ??? this is what C does self.assertEqual(list(jar._loaded), []) self.assertEqual(list(jar._registered), []) def test_assign_p_changed_none_from_saved(self): inst, jar, OID = self._makeOneWithJar() inst._p_activate() jar._loaded = [] inst._p_changed = None self.assertEqual(inst._p_status, 'ghost') self.assertEqual(list(jar._loaded), []) self.assertEqual(list(jar._registered), []) def test_assign_p_changed_true_from_saved(self): inst, jar, OID = self._makeOneWithJar() inst._p_activate() # XXX jar._loaded[:] = [] inst._p_changed = True self.assertEqual(inst._p_status, 'changed') self.assertEqual(list(jar._loaded), []) self.assertEqual(list(jar._registered), [OID]) def test_assign_p_changed_false_from_saved(self): inst, jar, OID = self._makeOneWithJar() inst._p_activate() jar._loaded = [] inst._p_changed = False self.assertEqual(inst._p_status, 'saved') self.assertEqual(list(jar._loaded), []) self.assertEqual(list(jar._registered), []) def test_assign_p_changed_none_from_changed(self): inst, jar, OID = self._makeOneWithJar() inst._p_activate() inst._p_changed = True jar._loaded = [] jar._registered = [] inst._p_changed = None # assigning None is ignored when dirty self.assertEqual(inst._p_status, 'changed') self.assertEqual(list(jar._loaded), []) 
self.assertEqual(list(jar._registered), []) def test_assign_p_changed_true_from_changed(self): inst, jar, OID = self._makeOneWithJar() inst._p_activate() inst._p_changed = True jar._loaded = [] jar._registered = [] inst._p_changed = True self.assertEqual(inst._p_status, 'changed') self.assertEqual(list(jar._loaded), []) self.assertEqual(list(jar._registered), []) def test_assign_p_changed_false_from_changed(self): inst, jar, OID = self._makeOneWithJar() inst._p_activate() inst._p_changed = True jar._loaded = [] jar._registered = [] inst._p_changed = False self.assertEqual(inst._p_status, 'saved') self.assertEqual(list(jar._loaded), []) self.assertEqual(list(jar._registered), []) def test_assign_p_changed_none_when_sticky(self): inst, jar, OID = self._makeOneWithJar() inst._p_activate() # XXX inst._p_changed = False inst._p_sticky = True inst._p_changed = None self.assertEqual(inst._p_status, 'sticky') self.assertEqual(inst._p_changed, False) self.assertEqual(inst._p_sticky, True) def test_delete_p_changed_from_unsaved(self): inst = self._makeOne() del inst._p_changed self.assertEqual(inst._p_status, 'unsaved') def test_delete_p_changed_from_unsaved_w_dict(self): class Derived(self._getTargetClass()): pass inst = Derived() inst.foo = 'bar' del inst._p_changed self.assertEqual(inst._p_status, 'unsaved') self.assertEqual(inst.foo, 'bar') def test_delete_p_changed_from_ghost(self): inst, jar, OID = self._makeOneWithJar() inst._p_deactivate() del inst._p_changed self.assertEqual(inst._p_status, 'ghost') self.assertEqual(list(jar._loaded), []) self.assertEqual(list(jar._registered), []) def test_delete_p_changed_from_saved(self): inst, jar, OID = self._makeOneWithJar() inst._p_activate() jar._loaded = [] jar._registered = [] del inst._p_changed self.assertEqual(inst._p_status, 'ghost') self.assertEqual(list(jar._loaded), []) self.assertEqual(list(jar._registered), []) def test_delete_p_changed_from_changed(self): inst, jar, OID = self._makeOneWithJar() inst._p_activate() 
inst._p_changed = True jar._loaded = [] jar._registered = [] del inst._p_changed self.assertEqual(inst._p_status, 'ghost') self.assertEqual(list(jar._loaded), []) self.assertEqual(list(jar._registered), []) def test_delete_p_changed_when_sticky(self): inst, jar, OID = self._makeOneWithJar() inst._p_activate() # XXX inst._p_changed = False inst._p_sticky = True del inst._p_changed self.assertEqual(inst._p_status, 'ghost') self.assertEqual(inst._p_changed, None) self.assertEqual(inst._p_sticky, False) def test_assign_p_sticky_true_when_ghost(self): inst, jar, OID = self._makeOneWithJar() inst._p_deactivate() # XXX def _test(): inst._p_sticky = True self.assertRaises(ValueError, _test) def test_assign_p_sticky_false_when_ghost(self): inst, jar, OID = self._makeOneWithJar() inst._p_deactivate() # XXX def _test(): inst._p_sticky = False self.assertRaises(ValueError, _test) def test_assign_p_sticky_true_non_ghost(self): inst, jar, OID = self._makeOneWithJar() inst._p_activate() # XXX inst._p_changed = False inst._p_sticky = True self.assertTrue(inst._p_sticky) def test_assign_p_sticky_false_non_ghost(self): inst, jar, OID = self._makeOneWithJar() inst._p_activate() # XXX inst._p_changed = False inst._p_sticky = False self.assertFalse(inst._p_sticky) def test__p_status_unsaved(self): inst = self._makeOne() self.assertEqual(inst._p_status, 'unsaved') def test__p_status_ghost(self): inst, jar, OID = self._makeOneWithJar() inst._p_deactivate() self.assertEqual(inst._p_status, 'ghost') def test__p_status_changed(self): inst, jar, OID = self._makeOneWithJar() inst._p_changed = True self.assertEqual(inst._p_status, 'changed') def test__p_status_changed_sticky(self): # 'sticky' is not a state, but a separate flag. 
inst, jar, OID = self._makeOneWithJar() inst._p_activate() inst._p_changed = True inst._p_sticky = True self.assertEqual(inst._p_status, 'sticky') def test__p_status_saved(self): inst, jar, OID = self._makeOneWithJar() inst._p_activate() # XXX inst._p_changed = False self.assertEqual(inst._p_status, 'saved') def test__p_status_saved_sticky(self): # 'sticky' is not a state, but a separate flag. inst, jar, OID = self._makeOneWithJar() inst._p_activate() inst._p_changed = False inst._p_sticky = True self.assertEqual(inst._p_status, 'sticky') def test__p_mtime_no_serial(self): inst = self._makeOne() self.assertEqual(inst._p_mtime, None) def test__p_mtime_w_serial(self): from persistent.timestamp import TimeStamp WHEN_TUPLE = (2011, 2, 15, 13, 33, 27.5) ts = TimeStamp(*WHEN_TUPLE) inst, jar, OID = self._makeOneWithJar() inst._p_serial = ts.raw() self.assertEqual(inst._p_mtime, ts.timeTime()) def test__p_mtime_activates_object(self): # Accessing _p_mtime implicitly unghostifies the object from persistent.timestamp import TimeStamp WHEN_TUPLE = (2011, 2, 15, 13, 33, 27.5) ts = TimeStamp(*WHEN_TUPLE) inst, jar, OID = self._makeOneWithJar() jar.setstate_sets_serial = ts.raw() inst._p_invalidate() self.assertEqual(inst._p_status, 'ghost') self.assertEqual(inst._p_mtime, ts.timeTime()) self.assertEqual(inst._p_status, 'saved') def test__p_state_unsaved(self): inst = self._makeOne() inst._p_changed = True self.assertEqual(inst._p_state, 0) def test__p_state_ghost(self): inst, jar, OID = self._makeOneWithJar() inst._p_deactivate() self.assertEqual(inst._p_state, -1) def test__p_state_changed(self): inst, jar, OID = self._makeOneWithJar() inst._p_changed = True self.assertEqual(inst._p_state, 1) def test__p_state_changed_sticky(self): # 'sticky' is not a state, but a separate flag. 
inst, jar, OID = self._makeOneWithJar() inst._p_activate() inst._p_changed = True inst._p_sticky = True self.assertEqual(inst._p_state, 2) def test__p_state_saved(self): inst, jar, OID = self._makeOneWithJar() inst._p_activate() # XXX inst._p_changed = False self.assertEqual(inst._p_state, 0) def test__p_state_saved_sticky(self): # 'sticky' is not a state, but a separate flag. inst, jar, OID = self._makeOneWithJar() inst._p_activate() inst._p_changed = False inst._p_sticky = True self.assertEqual(inst._p_state, 2) def test_query_p_estimated_size_new(self): inst = self._makeOne() self.assertEqual(inst._p_estimated_size, 0) def test_query_p_estimated_size_del(self): inst = self._makeOne() inst._p_estimated_size = 123 self.assertEqual(inst._p_estimated_size, 128) del inst._p_estimated_size self.assertEqual(inst._p_estimated_size, 0) def test_assign_p_estimated_size_wrong_type(self): inst = self._makeOne() with self.assertRaises(TypeError): inst._p_estimated_size = None try: constructor = long except NameError: constructor = str with self.assertRaises(TypeError): inst._p_estimated_size = constructor(1) def test_assign_p_estimated_size_negative(self): inst = self._makeOne() def _test(): inst._p_estimated_size = -1 self.assertRaises(ValueError, _test) def test_assign_p_estimated_size_small(self): inst = self._makeOne() inst._p_estimated_size = 123 self.assertEqual(inst._p_estimated_size, 128) def test_assign_p_estimated_size_just_over_threshold(self): inst = self._makeOne() inst._p_estimated_size = 1073741697 self.assertEqual(inst._p_estimated_size, 16777215 * 64) def test_assign_p_estimated_size_bigger(self): inst = self._makeOne() inst._p_estimated_size = 1073741697 * 2 self.assertEqual(inst._p_estimated_size, 16777215 * 64) def test___getattribute___p__names(self): NAMES = ['_p_jar', '_p_oid', '_p_changed', '_p_serial', '_p_state', '_p_estimated_size', '_p_sticky', '_p_status', ] inst, jar, OID = self._makeOneWithJar() self._clearMRU(jar) for name in NAMES: 
getattr(inst, name) self._checkMRU(jar, []) # _p_mtime is special, it activates the object getattr(inst, '_p_mtime') self._checkMRU(jar, [OID]) def test___getattribute__special_name(self): from persistent.persistence import SPECIAL_NAMES inst, jar, OID = self._makeOneWithJar() self._clearMRU(jar) for name in SPECIAL_NAMES: getattr(inst, name, None) self._checkMRU(jar, []) def test___getattribute__normal_name_from_unsaved(self): class Derived(self._getTargetClass()): normal = 'value' inst = Derived() self.assertEqual(getattr(inst, 'normal', None), 'value') def test___getattribute__normal_name_from_ghost(self): class Derived(self._getTargetClass()): normal = 'value' inst, jar, OID = self._makeOneWithJar(Derived) inst._p_deactivate() self._clearMRU(jar) self.assertEqual(getattr(inst, 'normal', None), 'value') self._checkMRU(jar, [OID]) def test___getattribute__normal_name_from_saved(self): class Derived(self._getTargetClass()): normal = 'value' inst, jar, OID = self._makeOneWithJar(Derived) inst._p_changed = False self._clearMRU(jar) self.assertEqual(getattr(inst, 'normal', None), 'value') self._checkMRU(jar, [OID]) def test___getattribute__normal_name_from_changed(self): class Derived(self._getTargetClass()): normal = 'value' inst, jar, OID = self._makeOneWithJar(Derived) inst._p_changed = True self._clearMRU(jar) self.assertEqual(getattr(inst, 'normal', None), 'value') self._checkMRU(jar, [OID]) def test___getattribute___non_cooperative(self): # Getting attributes is NOT cooperative with the superclass. # This comes from the C implementation and is maintained # for backwards compatibility. (For example, Persistent and # ExtensionClass.Base/Acquisition take special care to mix together.) 
class Base(object): def __getattribute__(self, name): if name == 'magic': return 42 return super(Base, self).__getattribute__(name) # pragma: no cover self.assertEqual(getattr(Base(), 'magic'), 42) class Derived(self._getTargetClass(), Base): pass self.assertRaises(AttributeError, getattr, Derived(), 'magic') def test___setattr___p__names(self): SERIAL = b'\x01' * 8 inst, jar, OID = self._makeOneWithJar() inst._p_activate() NAMES = [('_p_jar', jar), ('_p_oid', OID), ('_p_changed', False), ('_p_serial', SERIAL), ('_p_estimated_size', 0), ('_p_sticky', False), ] self._clearMRU(jar) for name, value in NAMES: setattr(inst, name, value) self._checkMRU(jar, []) def test___setattr___v__name(self): class Derived(self._getTargetClass()): pass inst, jar, OID = self._makeOneWithJar(Derived) self._clearMRU(jar) inst._v_foo = 'bar' self.assertEqual(inst._p_status, 'saved') self._checkMRU(jar, []) def test___setattr__normal_name_from_unsaved(self): class Derived(self._getTargetClass()): normal = 'before' inst = Derived() setattr(inst, 'normal', 'after') self.assertEqual(getattr(inst, 'normal', None), 'after') self.assertEqual(inst._p_status, 'unsaved') def test___setattr__normal_name_from_ghost(self): class Derived(self._getTargetClass()): normal = 'before' inst, jar, OID = self._makeOneWithJar(Derived) inst._p_deactivate() self._clearMRU(jar) setattr(inst, 'normal', 'after') self._checkMRU(jar, [OID]) self.assertEqual(jar._registered, [OID]) self.assertEqual(getattr(inst, 'normal', None), 'after') self.assertEqual(inst._p_status, 'changed') def test___setattr__normal_name_from_saved(self): class Derived(self._getTargetClass()): normal = 'before' inst, jar, OID = self._makeOneWithJar(Derived) inst._p_changed = False self._clearMRU(jar) setattr(inst, 'normal', 'after') self._checkMRU(jar, [OID]) self.assertEqual(jar._registered, [OID]) self.assertEqual(getattr(inst, 'normal', None), 'after') self.assertEqual(inst._p_status, 'changed') def 
test___setattr__normal_name_from_changed(self): class Derived(self._getTargetClass()): normal = 'before' inst, jar, OID = self._makeOneWithJar(Derived) inst._p_changed = True self._clearMRU(jar) jar._registered = [] setattr(inst, 'normal', 'after') self._checkMRU(jar, [OID]) self.assertEqual(jar._registered, []) self.assertEqual(getattr(inst, 'normal', None), 'after') self.assertEqual(inst._p_status, 'changed') def test___delattr___p__names(self): NAMES = ['_p_changed', '_p_serial', ] inst, jar, OID = self._makeOneWithJar() self._clearMRU(jar) jar._registered = [] for name in NAMES: delattr(inst, name) self._checkMRU(jar, []) self.assertEqual(jar._registered, []) def test___delattr__normal_name_from_unsaved(self): class Derived(self._getTargetClass()): normal = 'before' def __init__(self): self.__dict__['normal'] = 'after' inst = Derived() delattr(inst, 'normal') self.assertEqual(getattr(inst, 'normal', None), 'before') def test___delattr__normal_name_from_ghost(self): class Derived(self._getTargetClass()): normal = 'before' inst, jar, OID = self._makeOneWithJar(Derived) inst._p_deactivate() self._clearMRU(jar) jar._registered = [] def _test(): delattr(inst, 'normal') self.assertRaises(AttributeError, _test) self.assertEqual(inst._p_status, 'changed') # ??? 
this is what C does self._checkMRU(jar, [OID]) self.assertEqual(jar._registered, [OID]) self.assertEqual(getattr(inst, 'normal', None), 'before') def test___delattr__normal_name_from_saved(self): class Derived(self._getTargetClass()): normal = 'before' def __init__(self): self.__dict__['normal'] = 'after' inst, jar, OID = self._makeOneWithJar(Derived) inst._p_changed = False self._clearMRU(jar) jar._registered = [] delattr(inst, 'normal') self._checkMRU(jar, [OID]) self.assertEqual(jar._registered, [OID]) self.assertEqual(getattr(inst, 'normal', None), 'before') def test___delattr__normal_name_from_changed(self): class Derived(self._getTargetClass()): normal = 'before' def __init__(self): self.__dict__['normal'] = 'after' inst, jar, OID = self._makeOneWithJar(Derived) inst._p_changed = True self._clearMRU(jar) jar._registered = [] delattr(inst, 'normal') self._checkMRU(jar, [OID]) self.assertEqual(jar._registered, []) self.assertEqual(getattr(inst, 'normal', None), 'before') def test___getstate__(self): inst = self._makeOne() self.assertEqual(inst.__getstate__(), None) def test___getstate___derived_w_dict(self): class Derived(self._getTargetClass()): pass inst = Derived() inst.foo = 'bar' inst._p_baz = 'bam' inst._v_qux = 'spam' self.assertEqual(inst.__getstate__(), {'foo': 'bar'}) def test___getstate___derived_w_slots(self): class Derived(self._getTargetClass()): __slots__ = ('foo', 'baz', '_p_baz', '_v_qux') inst = Derived() inst.foo = 'bar' inst._p_baz = 'bam' inst._v_qux = 'spam' self.assertEqual(inst.__getstate__(), (None, {'foo': 'bar'})) def test___getstate___derived_w_slots_in_base_and_derived(self): class Base(self._getTargetClass()): __slots__ = ('foo',) class Derived(Base): __slots__ = ('baz', 'qux',) inst = Derived() inst.foo = 'bar' inst.baz = 'bam' inst.qux = 'spam' self.assertEqual(inst.__getstate__(), (None, {'foo': 'bar', 'baz': 'bam', 'qux': 'spam'})) def test___getstate___derived_w_slots_in_base_but_not_derived(self): class 
Base(self._getTargetClass()): __slots__ = ('foo',) class Derived(Base): pass inst = Derived() inst.foo = 'bar' inst.baz = 'bam' inst.qux = 'spam' self.assertEqual(inst.__getstate__(), ({'baz': 'bam', 'qux': 'spam'}, {'foo': 'bar'})) def test___setstate___empty(self): inst = self._makeOne() inst.__setstate__(None) # doesn't raise, but doesn't change anything def test___setstate___nonempty(self): from persistent.persistence import _INITIAL_SERIAL inst = self._makeOne() self.assertRaises((ValueError, TypeError), inst.__setstate__, {'bogus': 1}) self.assertEqual(inst._p_jar, None) self.assertEqual(inst._p_oid, None) self.assertEqual(inst._p_serial, _INITIAL_SERIAL) self.assertEqual(inst._p_changed, False) self.assertEqual(inst._p_sticky, False) def test___setstate___nonempty_derived_w_dict(self): class Derived(self._getTargetClass()): pass inst = Derived() inst.foo = 'bar' inst.__setstate__({'baz': 'bam'}) self.assertEqual(inst.__dict__, {'baz': 'bam'}) def test___setstate___nonempty_derived_w_dict_w_two_keys(self): class Derived(self._getTargetClass()): pass inst = Derived() inst.foo = 'bar' inst.__setstate__({'baz': 'bam', 'biz': 'boz'}) self.assertEqual(inst.__dict__, {'baz': 'bam', 'biz': 'boz'}) def test___setstate___derived_w_slots(self): class Derived(self._getTargetClass()): __slots__ = ('foo', '_p_baz', '_v_qux') inst = Derived() inst.__setstate__((None, {'foo': 'bar'})) self.assertEqual(inst.foo, 'bar') def test___setstate___derived_w_slots_in_base_classes(self): class Base(self._getTargetClass()): __slots__ = ('foo',) class Derived(Base): __slots__ = ('baz', 'qux',) inst = Derived() inst.__setstate__((None, {'foo': 'bar', 'baz': 'bam', 'qux': 'spam'})) self.assertEqual(inst.foo, 'bar') self.assertEqual(inst.baz, 'bam') self.assertEqual(inst.qux, 'spam') def test___setstate___derived_w_slots_in_base_but_not_derived(self): class Base(self._getTargetClass()): __slots__ = ('foo',) class Derived(Base): pass inst = Derived() inst.__setstate__(({'baz': 'bam', 
'qux': 'spam'}, {'foo': 'bar'})) self.assertEqual(inst.foo, 'bar') self.assertEqual(inst.baz, 'bam') self.assertEqual(inst.qux, 'spam') if not _is_pypy3: def test___setstate___interns_dict_keys(self): class Derived(self._getTargetClass()): pass inst1 = Derived() inst2 = Derived() key1 = 'key' key2 = 'ke'; key2 += 'y' # construct in a way that won't intern the literal self.assertFalse(key1 is key2) inst1.__setstate__({key1: 1}) inst2.__setstate__({key2: 2}) key1 = list(inst1.__dict__.keys())[0] key2 = list(inst2.__dict__.keys())[0] self.assertTrue(key1 is key2) from persistent._compat import IterableUserDict inst1 = Derived() inst2 = Derived() key1 = 'key' key2 = 'ke'; key2 += 'y' # construct in a way that won't intern the literal self.assertFalse(key1 is key2) state1 = IterableUserDict({key1: 1}) state2 = IterableUserDict({key2: 2}) k1 = list(state1.keys())[0] k2 = list(state2.keys())[0] self.assertFalse(k1 is k2) # verify inst1.__setstate__(state1) inst2.__setstate__(state2) key1 = list(inst1.__dict__.keys())[0] key2 = list(inst2.__dict__.keys())[0] self.assertTrue(key1 is key2) def test___setstate___doesnt_fail_on_non_string_keys(self): class Derived(self._getTargetClass()): pass inst1 = Derived() inst1.__setstate__({1: 2}) self.assertTrue(1 in inst1.__dict__) class MyStr(str): pass mystr = MyStr('mystr') inst1.__setstate__({mystr: 2}) self.assertTrue(mystr in inst1.__dict__) def test___setstate___doesnt_fail_on_non_dict(self): class Derived(self._getTargetClass()): pass inst1 = Derived() from persistent._compat import IterableUserDict state = IterableUserDict({'foobar': [1, 2]}) inst1.__setstate__(state) self.assertTrue(hasattr(inst1, 'foobar')) def test___reduce__(self): inst = self._makeOne() first, second, third = inst.__reduce__() self.assertTrue(first is copy_reg.__newobj__) self.assertEqual(second, (self._getTargetClass(),)) self.assertEqual(third, None) def test___reduce__w_subclass_having_getnewargs(self): class Derived(self._getTargetClass()): def 
__getnewargs__(self): return ('a', 'b') inst = Derived() first, second, third = inst.__reduce__() self.assertTrue(first is copy_reg.__newobj__) self.assertEqual(second, (Derived, 'a', 'b')) self.assertEqual(third, {}) def test___reduce__w_subclass_having_getstate(self): class Derived(self._getTargetClass()): def __getstate__(self): return {} inst = Derived() first, second, third = inst.__reduce__() self.assertTrue(first is copy_reg.__newobj__) self.assertEqual(second, (Derived,)) self.assertEqual(third, {}) def test___reduce__w_subclass_having_getnewargs_and_getstate(self): class Derived(self._getTargetClass()): def __getnewargs__(self): return ('a', 'b') def __getstate__(self): return {'foo': 'bar'} inst = Derived() first, second, third = inst.__reduce__() self.assertTrue(first is copy_reg.__newobj__) self.assertEqual(second, (Derived, 'a', 'b')) self.assertEqual(third, {'foo': 'bar'}) def _get_cucumber(self, name): # Checks that it's actually a subclass of what we're testing; # if it isn't, the test is skipped. The cucumbers module can # only subclass either the C or Python implementation, not # both from persistent.tests import cucumbers cls = getattr(cucumbers, name) if not issubclass(cls, self._getTargetClass()): self.skipTest("Cucumber is not correct implementation") return cls def test_pickle_roundtrip_simple(self): import pickle # XXX s.b. 'examples' Simple = self._get_cucumber('Simple') inst = Simple('testing') copy = pickle.loads(pickle.dumps(inst)) self.assertEqual(copy, inst) for protocol in 0, 1, 2: copy = pickle.loads(pickle.dumps(inst, protocol)) self.assertEqual(copy, inst) def test_pickle_roundtrip_w_getnewargs_and_getstate(self): import pickle # XXX s.b. 
'examples' Custom = self._get_cucumber('Custom') inst = Custom('x', 'y') copy = pickle.loads(pickle.dumps(inst)) self.assertEqual(copy, inst) for protocol in 0, 1, 2: copy = pickle.loads(pickle.dumps(inst, protocol)) self.assertEqual(copy, inst) def test_pickle_roundtrip_w_slots_missing_slot(self): import pickle # XXX s.b. 'examples' SubSlotted = self._get_cucumber('SubSlotted') inst = SubSlotted('x', 'y', 'z') copy = pickle.loads(pickle.dumps(inst)) self.assertEqual(copy, inst) for protocol in 0, 1, 2: copy = pickle.loads(pickle.dumps(inst, protocol)) self.assertEqual(copy, inst) def test_pickle_roundtrip_w_slots_filled_slot(self): import pickle # XXX s.b. 'examples' SubSlotted = self._get_cucumber('SubSlotted') inst = SubSlotted('x', 'y', 'z') inst.s4 = 'a' copy = pickle.loads(pickle.dumps(inst)) self.assertEqual(copy, inst) for protocol in 0, 1, 2: copy = pickle.loads(pickle.dumps(inst, protocol)) self.assertEqual(copy, inst) def test_pickle_roundtrip_w_slots_and_empty_dict(self): import pickle # XXX s.b. 'examples' SubSubSlotted = self._get_cucumber('SubSubSlotted') inst = SubSubSlotted('x', 'y', 'z') copy = pickle.loads(pickle.dumps(inst)) self.assertEqual(copy, inst) for protocol in 0, 1, 2: copy = pickle.loads(pickle.dumps(inst, protocol)) self.assertEqual(copy, inst) def test_pickle_roundtrip_w_slots_and_filled_dict(self): import pickle # XXX s.b. 
'examples' SubSubSlotted = self._get_cucumber('SubSubSlotted') inst = SubSubSlotted('x', 'y', 'z', foo='bar', baz='bam') inst.s4 = 'a' copy = pickle.loads(pickle.dumps(inst)) self.assertEqual(copy, inst) for protocol in 0, 1, 2: copy = pickle.loads(pickle.dumps(inst, protocol)) self.assertEqual(copy, inst) def test__p_activate_from_unsaved(self): inst = self._makeOne() inst._p_activate() # noop w/o jar self.assertEqual(inst._p_status, 'unsaved') def test__p_activate_from_ghost(self): inst, jar, OID = self._makeOneWithJar() inst._p_deactivate() inst._p_activate() self.assertEqual(inst._p_status, 'saved') def test__p_activate_from_saved(self): inst, jar, OID = self._makeOneWithJar() inst._p_changed = False inst._p_activate() # noop from 'saved' state self.assertEqual(inst._p_status, 'saved') def test__p_activate_only_sets_state_once(self): inst, jar, OID = self._makeOneWithJar() # No matter how many times we call _p_activate, it # only sets state once, the first time inst._p_invalidate() # make it a ghost self.assertEqual(list(jar._loaded), []) inst._p_activate() self.assertEqual(list(jar._loaded), [OID]) inst._p_activate() self.assertEqual(list(jar._loaded), [OID]) def test__p_activate_leaves_object_in_saved_even_if_object_mutated_self(self): # If the object's __setstate__ set's attributes # when called by p_activate, the state is still # 'saved' when done. 
Furthemore, the object is not # registered with the jar class WithSetstate(self._getTargetClass()): state = None def __setstate__(self, state): self.state = state inst, jar, OID = self._makeOneWithJar(klass=WithSetstate) inst._p_invalidate() # make it a ghost self.assertEqual(inst._p_status, 'ghost') jar.setstate_calls_object = 42 inst._p_activate() # It get loaded self.assertEqual(list(jar._loaded), [OID]) # and __setstate__ got called to mutate the object self.assertEqual(inst.state, 42) # but it's still in the saved state self.assertEqual(inst._p_status, 'saved') # and it is not registered as changed by the jar self.assertEqual(list(jar._registered), []) def test__p_deactivate_from_unsaved(self): inst = self._makeOne() inst._p_deactivate() self.assertEqual(inst._p_status, 'unsaved') def test__p_deactivate_from_unsaved_w_dict(self): class Derived(self._getTargetClass()): normal = 'before' def __init__(self): self.__dict__['normal'] = 'after' inst = Derived() inst._p_changed = True inst._p_deactivate() self.assertEqual(inst._p_status, 'unsaved') self.assertEqual(inst.__dict__, {'normal': 'after'}) def test__p_deactivate_from_ghost(self): inst, jar, OID = self._makeOneWithJar() inst._p_deactivate() self.assertEqual(inst._p_status, 'ghost') self.assertEqual(list(jar._loaded), []) self.assertEqual(list(jar._registered), []) def test__p_deactivate_from_saved(self): inst, jar, OID = self._makeOneWithJar() inst._p_activate() jar._loaded = [] inst._p_deactivate() self.assertEqual(inst._p_status, 'ghost') self.assertEqual(list(jar._loaded), []) self.assertEqual(list(jar._registered), []) def test__p_deactivate_from_saved_w_dict(self): class Derived(self._getTargetClass()): normal = 'before' def __init__(self): self.__dict__['normal'] = 'after' inst, jar, OID = self._makeOneWithJar(Derived) inst._p_activate() jar._loaded = [] inst._p_deactivate() self.assertEqual(inst._p_status, 'ghost') self.assertEqual(inst.__dict__, {}) self.assertEqual(list(jar._loaded), []) 
self.assertEqual(list(jar._registered), []) def test__p_deactivate_from_changed(self): class Derived(self._getTargetClass()): normal = 'before' inst, jar, OID = self._makeOneWithJar(Derived) inst.normal = 'after' jar._loaded = [] jar._registered = [] inst._p_deactivate() # assigning None is ignored when dirty self.assertEqual(inst._p_status, 'changed') self.assertEqual(inst.__dict__, {'normal': 'after'}) self.assertEqual(list(jar._loaded), []) self.assertEqual(list(jar._registered), []) def test__p_deactivate_from_changed_w_dict(self): inst, jar, OID = self._makeOneWithJar() inst._p_activate() inst._p_changed = True jar._loaded = [] jar._registered = [] inst._p_deactivate() # assigning None is ignored when dirty self.assertEqual(inst._p_status, 'changed') self.assertEqual(list(jar._loaded), []) self.assertEqual(list(jar._registered), []) def test__p_deactivate_when_sticky(self): inst, jar, OID = self._makeOneWithJar() inst._p_activate() # XXX inst._p_changed = False inst._p_sticky = True inst._p_deactivate() self.assertEqual(inst._p_status, 'sticky') self.assertEqual(inst._p_changed, False) self.assertEqual(inst._p_sticky, True) def test__p_invalidate_from_unsaved(self): inst = self._makeOne() inst._p_invalidate() self.assertEqual(inst._p_status, 'unsaved') def test__p_invalidate_from_unsaved_w_dict(self): class Derived(self._getTargetClass()): normal = 'before' def __init__(self): self.__dict__['normal'] = 'after' inst = Derived() inst._p_invalidate() self.assertEqual(inst._p_status, 'unsaved') self.assertEqual(inst.__dict__, {'normal': 'after'}) def test__p_invalidate_from_ghost(self): inst, jar, OID = self._makeOneWithJar() inst._p_deactivate() inst._p_invalidate() self.assertEqual(inst._p_status, 'ghost') self.assertEqual(list(jar._loaded), []) self.assertEqual(list(jar._registered), []) def test__p_invalidate_from_saved(self): inst, jar, OID = self._makeOneWithJar() inst._p_activate() jar._loaded = [] jar._registered = [] inst._p_invalidate() 
self.assertEqual(inst._p_status, 'ghost') self.assertEqual(list(jar._loaded), []) self.assertEqual(list(jar._registered), []) def test__p_invalidate_from_saved_w_dict(self): class Derived(self._getTargetClass()): normal = 'before' def __init__(self): self.__dict__['normal'] = 'after' inst, jar, OID = self._makeOneWithJar(Derived) inst._p_activate() jar._loaded = [] jar._registered = [] inst._p_invalidate() self.assertEqual(inst._p_status, 'ghost') self.assertEqual(inst.__dict__, {}) self.assertEqual(list(jar._loaded), []) self.assertEqual(list(jar._registered), []) def test__p_invalidate_from_changed(self): inst, jar, OID = self._makeOneWithJar() inst._p_activate() inst._p_changed = True jar._loaded = [] jar._registered = [] inst._p_invalidate() self.assertEqual(inst._p_status, 'ghost') self.assertEqual(list(jar._loaded), []) self.assertEqual(list(jar._registered), []) def test__p_invalidate_from_changed_w_dict(self): class Derived(self._getTargetClass()): normal = 'before' def __init__(self): self.__dict__['normal'] = 'after' inst, jar, OID = self._makeOneWithJar(Derived) inst._p_activate() inst._p_changed = True jar._loaded = [] jar._registered = [] inst._p_invalidate() self.assertEqual(inst._p_status, 'ghost') self.assertEqual(inst.__dict__, {}) self.assertEqual(list(jar._loaded), []) self.assertEqual(list(jar._registered), []) def test__p_invalidate_from_changed_w_slots(self): class Derived(self._getTargetClass()): __slots__ = { 'myattr1': 'value1', 'myattr2': 'value2', 'unset': None } def __init__(self): self.myattr1 = 'value1' self.myattr2 = 'value2' inst, jar, OID = self._makeOneWithJar(Derived) inst._p_activate() inst._p_changed = True jar._loaded = [] jar._registered = [] for slot, expected_value in Derived.__slots__.items(): slot_descriptor = getattr(Derived, slot) if expected_value: self.assertEqual(slot_descriptor.__get__(inst), expected_value) else: with self.assertRaises(AttributeError): slot_descriptor.__get__(inst) inst._p_invalidate() 
self.assertEqual(inst._p_status, 'ghost') self.assertEqual(list(jar._loaded), []) for slot in Derived.__slots__: __traceback_info__ = slot, inst slot_descriptor = getattr(Derived, slot) with self.assertRaises(AttributeError): slot_descriptor.__get__(inst) self.assertEqual(list(jar._loaded), []) self.assertEqual(list(jar._registered), []) def test__p_invalidate_from_changed_w_slots_compat(self): # check that (for backward-compatibility reason) slots are not released # for classes where __new__ is overwritten. Attributes in __dict__ # should be always released. class Derived(self._getTargetClass()): __slots__ = ('myattr1', 'myattr2', '__dict__') def __new__(cls): obj = cls.__base__.__new__(cls) obj.myattr1 = 'value1' obj.myattr2 = 'value2' obj.foo = 'foo1' # .foo & .bar are in __dict__ obj.bar = 'bar2' return obj inst, jar, OID = self._makeOneWithJar(Derived) inst._p_activate() inst._p_changed = True jar._loaded = [] jar._registered = [] self.assertEqual(Derived.myattr1.__get__(inst), 'value1') self.assertEqual(Derived.myattr2.__get__(inst), 'value2') self.assertEqual(inst.__dict__, {'foo': 'foo1', 'bar': 'bar2'}) inst._p_invalidate() self.assertEqual(inst._p_status, 'ghost') self.assertEqual(list(jar._loaded), []) self.assertEqual(Derived.myattr1.__get__(inst), 'value1') self.assertEqual(Derived.myattr2.__get__(inst), 'value2') self.assertEqual(inst.__dict__, {}) self.assertEqual(list(jar._loaded), []) self.assertEqual(list(jar._registered), []) def test_p_invalidate_with_slots_broken_jar(self): # If jar.setstate() raises a POSKeyError (or any error) # clearing an object with unset slots doesn't result in a # SystemError, the original error is propagated class Derived(self._getTargetClass()): __slots__ = ('slot1',) # Pre-cache in __slotnames__; cpersistent goes directly for this # and avoids a call to copy_reg. (If it calls the python code in # copy_reg, the pending exception will be immediately propagated by # copy_reg, not by us.) 
copy_reg._slotnames(Derived) inst, jar, OID = self._makeOneWithJar(Derived, broken_jar=True) inst._p_invalidate() self.assertEqual(inst._p_status, 'ghost') self.assertRaises(NotImplementedError, inst._p_activate) def test__p_invalidate_from_sticky(self): inst, jar, OID = self._makeOneWithJar() inst._p_activate() # XXX inst._p_changed = False inst._p_sticky = True self.assertEqual(inst._p_status, 'sticky') inst._p_invalidate() self.assertEqual(inst._p_status, 'ghost') self.assertEqual(inst._p_changed, None) self.assertEqual(inst._p_sticky, False) def test__p_invalidate_from_sticky_w_dict(self): class Derived(self._getTargetClass()): def __init__(self): self.normal = 'value' inst, jar, OID = self._makeOneWithJar(Derived) inst._p_activate() # XXX inst._p_changed = False inst._p_sticky = True inst._p_invalidate() self.assertEqual(inst._p_status, 'ghost') self.assertEqual(inst._p_changed, None) self.assertEqual(inst._p_sticky, False) self.assertEqual(inst.__dict__, {}) def test__p_getattr_w__p__names(self): NAMES = ['_p_jar', '_p_oid', '_p_changed', '_p_serial', '_p_mtime', '_p_state', '_p_estimated_size', '_p_sticky', '_p_status', ] inst, jar, OID = self._makeOneWithJar() inst._p_deactivate() for name in NAMES: self.assertTrue(inst._p_getattr(name)) self.assertEqual(inst._p_status, 'ghost') self.assertEqual(list(jar._loaded), []) self._checkMRU(jar, []) def test__p_getattr_w_special_names(self): from persistent.persistence import SPECIAL_NAMES inst, jar, OID = self._makeOneWithJar() inst._p_deactivate() for name in SPECIAL_NAMES: self.assertTrue(inst._p_getattr(name)) self.assertEqual(inst._p_status, 'ghost') self.assertEqual(list(jar._loaded), []) self._checkMRU(jar, []) def test__p_getattr_w_normal_name(self): inst, jar, OID = self._makeOneWithJar() inst._p_deactivate() self.assertFalse(inst._p_getattr('normal')) self.assertEqual(inst._p_status, 'saved') self.assertEqual(list(jar._loaded), [OID]) self._checkMRU(jar, [OID]) def test__p_setattr_w__p__name(self): SERIAL 
= b'\x01' * 8 inst, jar, OID = self._makeOneWithJar() inst._p_deactivate() self.assertTrue(inst._p_setattr('_p_serial', SERIAL)) self.assertEqual(inst._p_status, 'ghost') self.assertEqual(inst._p_serial, SERIAL) self.assertEqual(list(jar._loaded), []) self._checkMRU(jar, []) def test__p_setattr_w_normal_name(self): inst, jar, OID = self._makeOneWithJar() inst._p_deactivate() self.assertFalse(inst._p_setattr('normal', 'value')) # _p_setattr doesn't do the actual write for normal names self.assertEqual(inst._p_status, 'saved') self.assertEqual(list(jar._loaded), [OID]) self._checkMRU(jar, [OID]) def test__p_delattr_w__p__names(self): NAMES = ['_p_changed', '_p_serial', ] inst, jar, OID = self._makeOneWithJar() inst._p_changed = True jar._loaded = [] for name in NAMES: self.assertTrue(inst._p_delattr(name)) self.assertEqual(inst._p_status, 'ghost') self.assertEqual(inst._p_changed, None) self.assertEqual(list(jar._loaded), []) self._checkMRU(jar, []) def test__p_delattr_w_normal_name(self): class Derived(self._getTargetClass()): normal = 'value' inst, jar, OID = self._makeOneWithJar(Derived) inst._p_deactivate() self.assertFalse(inst._p_delattr('normal')) # _p_delattr doesn't do the actual delete for normal names self.assertEqual(inst._p_status, 'saved') self.assertEqual(list(jar._loaded), [OID]) self._checkMRU(jar, [OID]) def test_set__p_changed_w_broken_jar(self): # When an object is modified, it registers with its data manager. # If that registration fails, the exception is propagated and the # object stays in the up-to-date state. # It shouldn't change to the modified state, because it won't # be saved when the transaction commits. 
class P(self._getTargetClass()): def __init__(self): self.x = 0 p = P() p._p_oid = b'1' p._p_jar = self._makeBrokenJar() self.assertEqual(p._p_state, 0) self.assertEqual(p._p_jar.called, 0) def _try(): p._p_changed = 1 self.assertRaises(NotImplementedError, _try) self.assertEqual(p._p_jar.called, 1) self.assertEqual(p._p_state, 0) def test__p_activate_w_broken_jar(self): # Make sure that exceptions that occur inside the data manager's # ``setstate()`` method propagate out to the caller. class P(self._getTargetClass()): def __init__(self): self.x = 0 p = P() p._p_oid = b'1' p._p_jar = self._makeBrokenJar() p._p_deactivate() self.assertEqual(p._p_state, -1) self.assertRaises(NotImplementedError, p._p_activate) self.assertEqual(p._p_state, -1) def test__ancient_dict_layout_bug(self): # We once had a bug in the `Persistent` class that calculated an # incorrect offset for the ``__dict__`` attribute. It assigned # ``__dict__`` and ``_p_jar`` to the same location in memory. # This is a simple test to make sure they have different locations. 
class P(self._getTargetClass()): def __init__(self): self.x = 0 def inc(self): self.x += 1 p = P() p.inc() p.inc() self.assertTrue('x' in p.__dict__) self.assertTrue(p._p_jar is None) def test_w_diamond_inheritance(self): class A(self._getTargetClass()): pass class B(self._getTargetClass()): pass class C(A, B): pass class D(object): pass class E(D, B): pass # no raise A(), B(), C(), D(), E() def test_w_alternate_metaclass(self): class alternateMeta(type): pass class alternate(object): __metaclass__ = alternateMeta class mixedMeta(alternateMeta, type): pass # no raise class mixed1(alternate, self._getTargetClass()): pass class mixed2(self._getTargetClass(), alternate): pass def test_setattr_in_subclass_is_not_called_creating_an_instance(self): class subclass(self._getTargetClass()): _v_setattr_called = False def __setattr__(self, name, value): raise AssertionError("Should not be called") inst = subclass() self.assertEqual(object.__getattribute__(inst, '_v_setattr_called'), False) def test_can_set__p_attrs_if_subclass_denies_setattr(self): # ZODB defines a PersistentBroken subclass that only lets us # set things that start with _p, so make sure we can do that class Broken(self._getTargetClass()): def __setattr__(self, name, value): if name.startswith('_p_'): super(Broken, self).__setattr__(name, value) else: raise AssertionError("Can't change broken objects") KEY = b'123' jar = self._makeJar() broken = Broken() broken._p_oid = KEY broken._p_jar = jar broken._p_changed = True broken._p_changed = 0 def test_p_invalidate_calls_p_deactivate(self): class P(self._getTargetClass()): deactivated = False def _p_deactivate(self): self.deactivated = True p = P() p._p_invalidate() self.assertTrue(p.deactivated) def test_new_ghost_success_not_already_ghost_dict(self): # https://github.com/zopefoundation/persistent/issues/49 # calling new_ghost on an object that already has state just changes # its flags, it doesn't destroy the state. 
from persistent.interfaces import GHOST from persistent.interfaces import UPTODATE class TestPersistent(self._getTargetClass()): pass KEY = b'123' jar = self._makeJar() cache = self._makeRealCache(jar) candidate = TestPersistent() candidate.set_by_new = 1 self.assertEqual(candidate._p_state, UPTODATE) cache.new_ghost(KEY, candidate) self.assertIs(cache.get(KEY), candidate) self.assertEqual(candidate._p_oid, KEY) self.assertEqual(candidate._p_state, GHOST) self.assertEqual(candidate.set_by_new, 1) def test_new_ghost_success_not_already_ghost_slot(self): # https://github.com/zopefoundation/persistent/issues/49 # calling new_ghost on an object that already has state just changes # its flags, it doesn't destroy the state. from persistent.interfaces import GHOST from persistent.interfaces import UPTODATE class TestPersistent(self._getTargetClass()): __slots__ = ('set_by_new', '__weakref__') KEY = b'123' jar = self._makeJar() cache = self._makeRealCache(jar) candidate = TestPersistent() candidate.set_by_new = 1 self.assertEqual(candidate._p_state, UPTODATE) cache.new_ghost(KEY, candidate) self.assertIs(cache.get(KEY), candidate) self.assertEqual(candidate._p_oid, KEY) self.assertEqual(candidate._p_state, GHOST) self.assertEqual(candidate.set_by_new, 1) # The number 12345678 as a p64, 8-byte string _PACKED_OID = b'\x00\x00\x00\x00\x00\xbcaN' # The number 12345678 printed in hex _HEX_OID = '0xbc614e' def _normalize_repr(self, r): # addresses r = re.sub(r'at 0x[0-9a-fA-F]*', 'at 0xdeadbeef', r) # Python 3.7 removed the trailing , in exception reprs r = r.replace("',)", "')") return r def _normalized_repr(self, o): return self._normalize_repr(repr(o)) def test_repr_no_oid_no_jar(self): p = self._makeOne() result = self._normalized_repr(p) self.assertEqual(result, '') def test_repr_no_oid_in_jar(self): p = self._makeOne() class Jar(object): def __repr__(self): return '' p._p_jar = Jar() result = self._normalized_repr(p) self.assertEqual( result, ">") def 
test_repr_oid_no_jar(self): p = self._makeOne() p._p_oid = self._PACKED_OID result = self._normalized_repr(p) self.assertEqual( result, "") def test_64bit_oid(self): import struct p = self._makeOne() oid_value = 2 << 62 self.assertEqual(oid_value.bit_length(), 64) oid = struct.pack(">Q", oid_value) self.assertEqual(oid, b'\x80\x00\x00\x00\x00\x00\x00\x00') p._p_oid = oid result = self._normalized_repr(p) self.assertEqual( result, '' ) def test_repr_no_oid_repr_jar_raises_exception(self): p = self._makeOne() class Jar(object): def __repr__(self): raise Exception('jar repr failed') p._p_jar = Jar() result = self._normalized_repr(p) self.assertEqual( result, "") def test_repr_oid_raises_exception_no_jar(self): p = self._makeOne() class BadOID(bytes): def __repr__(self): raise Exception("oid repr failed") # Our OID is bytes, 8 bytes long. We don't call its repr. p._p_oid = BadOID(self._PACKED_OID) result = self._normalized_repr(p) self.assertEqual( result, "") # Anything other than 8 bytes, though, we do. p._p_oid = BadOID(b'1234567') result = self._normalized_repr(p) self.assertEqual( result, "") def test_repr_oid_and_jar_raise_exception(self): p = self._makeOne() class BadOID(bytes): def __repr__(self): raise Exception("oid repr failed") p._p_oid = BadOID(b'1234567') class Jar(object): def __repr__(self): raise Exception('jar repr failed') p._p_jar = Jar() result = self._normalized_repr(p) self.assertEqual( result, "") def test_repr_no_oid_repr_jar_raises_baseexception(self): p = self._makeOne() class Jar(object): def __repr__(self): raise BaseException('jar repr failed') p._p_jar = Jar() with self.assertRaisesRegex(BaseException, 'jar repr failed'): repr(p) def test_repr_oid_raises_baseexception_no_jar(self): p = self._makeOne() class BadOID(bytes): def __repr__(self): raise BaseException("oid repr failed") p._p_oid = BadOID(b'12345678') # An 8 byte byte string doesn't have repr called. repr(p) # Anything other does. 
p._p_oid = BadOID(b'1234567') with self.assertRaisesRegex(BaseException, 'oid repr failed'): repr(p) def test_repr_oid_and_jar(self): p = self._makeOne() p._p_oid = self._PACKED_OID class Jar(object): def __repr__(self): return '' p._p_jar = Jar() result = self._normalized_repr(p) self.assertEqual( result, ">") def test__p_repr(self): class P(self._getTargetClass()): def _p_repr(self): return "Override" p = P() self.assertEqual("Override", repr(p)) def test__p_repr_exception(self): class P(self._getTargetClass()): def _p_repr(self): raise Exception("_p_repr failed") p = P() result = self._normalized_repr(p) self.assertEqual( result, "") p._p_oid = self._PACKED_OID result = self._normalized_repr(p) self.assertEqual( result, "") class Jar(object): def __repr__(self): return '' p._p_jar = Jar() result = self._normalized_repr(p) self.assertEqual( result, " _p_repr Exception('_p_repr failed')>") def test__p_repr_in_instance_ignored(self): class P(self._getTargetClass()): pass p = P() p._p_repr = lambda: "Instance" result = self._normalized_repr(p) self.assertEqual(result, '') def test__p_repr_baseexception(self): class P(self._getTargetClass()): def _p_repr(self): raise BaseException("_p_repr failed") p = P() with self.assertRaisesRegex(BaseException, '_p_repr failed'): repr(p) class PyPersistentTests(unittest.TestCase, _Persistent_Base): def _getTargetClass(self): from persistent.persistence import PersistentPy assert PersistentPy.__module__ == 'persistent.persistence', PersistentPy.__module__ return PersistentPy def _makeCache(self, jar): class _Cache(object): def __init__(self, jar): self._jar = jar self._mru = [] self._data = {} def mru(self, oid): self._mru.append(oid) def new_ghost(self, oid, obj): obj._p_jar = self._jar obj._p_oid = oid self._data[oid] = obj def get(self, oid): return self._data.get(oid) def __delitem__(self, oid): del self._data[oid] def update_object_size_estimation(self, oid, new_size): pass return _Cache(jar) def _makeRealCache(self, jar): 
from persistent.picklecache import PickleCachePy as PickleCache return PickleCache(jar, 10) def _checkMRU(self, jar, value): self.assertEqual(list(jar._cache._mru), value) def _clearMRU(self, jar): jar._cache._mru[:] = [] def test_accessed_with_jar_and_oid_but_not_in_cache(self): # This scenario arises in ZODB: ZODB.serialize.ObjectWriter # can assign a jar and an oid to newly seen persistent objects, # but because they are newly created, they aren't in the # pickle cache yet. # Nothing should blow up when this happens KEY = b'123' jar = self._makeJar() c1 = self._makeOne() c1._p_oid = KEY c1._p_jar = jar def mru(oid): # Mimic what the real cache does if oid not in jar._cache._mru: raise KeyError(oid) raise AssertionError("Should never get here") jar._cache.mru = mru c1._p_accessed() self._checkMRU(jar, []) def test_accessed_invalidated_with_jar_and_oid_but_no_cache(self): # This scenario arises in ZODB tests where the jar is faked KEY = b'123' class Jar(object): accessed = False def __getattr__(self, name): if name == '_cache': self.accessed = True raise AttributeError(name) def register(self, *args): pass c1 = self._makeOne() c1._p_oid = KEY c1._p_jar = Jar() c1._p_changed = True self.assertEqual(c1._p_state, 1) c1._p_accessed() self.assertTrue(c1._p_jar.accessed) c1._p_jar.accessed = False c1._p_invalidate_deactivate_helper() self.assertTrue(c1._p_jar.accessed) c1._p_jar.accessed = False c1._Persistent__flags = None # coverage c1._p_invalidate_deactivate_helper() self.assertTrue(c1._p_jar.accessed) def test_p_activate_with_jar_without_oid(self): # Works, but nothing happens inst = self._makeOne() inst._p_jar = object() inst._p_oid = None object.__setattr__(inst, '_Persistent__flags', None) inst._p_activate() def test_p_accessed_with_jar_without_oid(self): # Works, but nothing happens inst = self._makeOne() inst._p_jar = object() inst._p_accessed() def test_p_accessed_with_jar_with_oid_as_ghost(self): # Works, but nothing happens inst = self._makeOne() 
inst._p_jar = object() inst._p_oid = 42 inst._Persistent__flags = None inst._p_accessed() @skipIfNoCExtension class CPersistentTests(unittest.TestCase, _Persistent_Base): def _getTargetClass(self): from persistent._compat import _c_optimizations_available as get_c return get_c()['persistent.persistence'].Persistent def _checkMRU(self, jar, value): pass # Figure this out later def _clearMRU(self, jar): pass # Figure this out later def _makeCache(self, jar): from persistent._compat import _c_optimizations_available as get_c PickleCache = get_c()['persistent.picklecache'].PickleCache return PickleCache(jar) @skipIfNoCExtension class Test_simple_new(unittest.TestCase): def _callFUT(self, x): from persistent._compat import _c_optimizations_available as get_c simple_new = get_c()['persistent.persistence'].simple_new return simple_new(x) def test_w_non_type(self): self.assertRaises(TypeError, self._callFUT, '') def test_w_type(self): TO_CREATE = [type, list, tuple, object, dict] for typ in TO_CREATE: self.assertTrue(isinstance(self._callFUT(typ), typ)) def test_suite(): return unittest.defaultTestLoader.loadTestsFromName(__name__) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1585243094.0 persistent-4.6.4/persistent/tests/test_picklecache.py0000644000076500000240000011736700000000000023700 0ustar00jmaddenstaff00000000000000############################################################################## # # Copyright (c) 2009 Zope Foundation and Contributors. # All Rights Reserved. # # This software is subject to the provisions of the Zope Public License, # Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. # THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED # WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS # FOR A PARTICULAR PURPOSE. 
# ############################################################################## import gc import unittest from persistent.interfaces import UPTODATE from persistent._compat import PYPY from persistent.tests.utils import skipIfNoCExtension # pylint:disable=protected-access,too-many-lines,too-many-public-methods # pylint:disable=attribute-defined-outside-init,redefined-outer-name _marker = object() class DummyPersistent(object): _Persistent__ring = None def _p_invalidate(self): from persistent.interfaces import GHOST self._p_state = GHOST _p_deactivate = _p_invalidate def _p_invalidate_deactivate_helper(self, clear=True): self._p_invalidate() def _p_activate(self): self._p_state = UPTODATE class DummyConnection(object): pass def _len(seq): return len(list(seq)) class PickleCacheTestMixin(object): # py2/3 compat assertRaisesRegex = getattr(unittest.TestCase, 'assertRaisesRegex', unittest.TestCase.assertRaisesRegexp) def _getTargetClass(self): from persistent.picklecache import PickleCachePy as BasePickleCache class PickleCache(BasePickleCache): _CACHEABLE_TYPES = BasePickleCache._CACHEABLE_TYPES + (DummyPersistent,) return PickleCache def _getTargetInterface(self): from persistent.interfaces import IPickleCache return IPickleCache def _makeOne(self, jar=None, target_size=10): if jar is None: jar = DummyConnection() return self._getTargetClass()(jar, target_size) def _getDummyPersistentClass(self): return DummyPersistent def _getRealPersistentClass(self): from persistent.persistence import PersistentPy return PersistentPy def _makePersist(self, state=None, oid=b'foo', jar=_marker, kind=_marker): from persistent.interfaces import GHOST if state is None: state = GHOST if jar is _marker: jar = DummyConnection() kind = self._getDummyPersistentClass() if kind is _marker else kind persist = kind() try: persist._p_state = state except AttributeError: pass persist._p_oid = oid persist._p_jar = jar return persist def test_class_conforms_to_IPickleCache(self): from 
zope.interface.verify import verifyClass verifyClass(self._getTargetInterface(), self._getTargetClass()) def test_instance_conforms_to_IPickleCache(self): from zope.interface.verify import verifyObject verifyObject(self._getTargetInterface(), self._makeOne()) def test_empty(self): cache = self._makeOne() self.assertEqual(len(cache), 0) self.assertEqual(_len(cache.items()), 0) self.assertEqual(_len(cache.klass_items()), 0) self.assertEqual(cache.ringlen(), 0) self.assertEqual(len(cache.lru_items()), 0) self.assertEqual(cache.cache_size, 10) self.assertEqual(cache.cache_drain_resistance, 0) self.assertEqual(cache.cache_non_ghost_count, 0) self.assertEqual(dict(cache.cache_data), {}) self.assertEqual(cache.cache_klass_count, 0) def test___getitem___nonesuch_raises_KeyError(self): cache = self._makeOne() self.assertRaises(KeyError, lambda: cache['nonesuch']) def test_get_nonesuch_no_default(self): cache = self._makeOne() self.assertEqual(cache.get('nonesuch'), None) def test_get_nonesuch_w_default(self): cache = self._makeOne() default = object self.assertIs(cache.get('nonesuch', default), default) def test___setitem___non_string_oid_raises_TypeError(self): cache = self._makeOne() with self.assertRaises(TypeError): cache[object()] = self._makePersist() def test___setitem___duplicate_oid_same_obj(self): KEY = b'original' cache = self._makeOne() original = self._makePersist(oid=KEY) cache[KEY] = original cache[KEY] = original def test___setitem___duplicate_oid_raises_ValueError(self): KEY = b'original' cache = self._makeOne() original = self._makePersist(oid=KEY) cache[KEY] = original duplicate = self._makePersist(oid=KEY) with self.assertRaises(ValueError): cache[KEY] = duplicate def test___setitem___ghost(self): from persistent.interfaces import GHOST KEY = b'ghost' cache = self._makeOne() ghost = self._makePersist(state=GHOST, oid=KEY) cache[KEY] = ghost self.assertEqual(len(cache), 1) items = list(cache.items()) self.assertEqual(len(items), 1) 
self.assertEqual(_len(cache.klass_items()), 0) self.assertEqual(items[0][0], KEY) self.assertIs(items[0][1], ghost) self.assertIs(cache[KEY], ghost) return cache def test___setitem___mismatch_key_oid(self): KEY = b'uptodate' cache = self._makeOne() uptodate = self._makePersist(state=UPTODATE) with self.assertRaises(ValueError): cache[KEY] = uptodate def test___setitem___non_ghost(self): KEY = b'uptodate' cache = self._makeOne() uptodate = self._makePersist(state=UPTODATE, oid=KEY) cache[KEY] = uptodate self.assertEqual(len(cache), 1) items = list(cache.items()) self.assertEqual(len(items), 1) self.assertEqual(_len(cache.klass_items()), 0) self.assertEqual(items[0][0], KEY) self.assertEqual(cache.ringlen(), 1) self.assertTrue(items[0][1] is uptodate) self.assertTrue(cache[KEY] is uptodate) self.assertTrue(cache.get(KEY) is uptodate) def test___setitem___persistent_class(self): KEY = b'pclass' class pclass(object): _p_oid = KEY _p_jar = DummyConnection() cache = self._makeOne(pclass._p_jar) cache[KEY] = pclass kitems = list(cache.klass_items()) self.assertEqual(len(cache), 1) self.assertEqual(len(kitems), 1) self.assertEqual(kitems[0][0], KEY) self.assertIs(kitems[0][1], pclass) self.assertIs(cache[KEY], pclass) self.assertIs(cache.get(KEY), pclass) return cache def test___delitem___non_string_oid_raises_TypeError(self): cache = self._makeOne() with self.assertRaises(TypeError): del cache[object()] def test___delitem___nonesuch_raises_KeyError(self): cache = self._makeOne() with self.assertRaises(KeyError): del cache[b'nonesuch'] def test___delitem___w_persistent_class(self): KEY = b'pclass' cache = self._makeOne() class pclass(object): _p_oid = KEY _p_jar = DummyConnection() cache = self._makeOne() cache[KEY] = pclass del cache[KEY] self.assertIs(cache.get(KEY, self), self) self.assertEqual(cache.ringlen(), 0) return cache, KEY def test___delitem___w_normal_object(self): KEY = b'uptodate' cache = self._makeOne() uptodate = self._makePersist(state=UPTODATE, oid=KEY) 
cache[KEY] = uptodate del cache[KEY] self.assertTrue(cache.get(KEY, self) is self) def test___delitem___w_ghost(self): from persistent.interfaces import GHOST cache = self._makeOne() KEY = b'ghost' ghost = self._makePersist(state=GHOST, oid=KEY) cache[KEY] = ghost del cache[KEY] self.assertTrue(cache.get(KEY, self) is self) def test___delitem___w_remaining_object(self): cache = self._makeOne() REMAINS = b'remains' UPTODATE = b'uptodate' remains = self._makePersist(state=UPTODATE, oid=REMAINS) uptodate = self._makePersist(state=UPTODATE, oid=UPTODATE) cache[REMAINS] = remains cache[UPTODATE] = uptodate del cache[UPTODATE] self.assertTrue(cache.get(UPTODATE, self) is self) self.assertTrue(cache.get(REMAINS, self) is remains) def test_lruitems(self): cache = self._makeOne() ONE = b'one' TWO = b'two' THREE = b'three' cache[ONE] = self._makePersist(oid=b'one', state=UPTODATE) cache[TWO] = self._makePersist(oid=b'two', state=UPTODATE) cache[THREE] = self._makePersist(oid=b'three', state=UPTODATE) items = cache.lru_items() self.assertEqual(_len(items), 3) self.assertEqual(items[0][0], ONE) self.assertEqual(items[1][0], TWO) self.assertEqual(items[2][0], THREE) def _numbered_oid(self, i): # Python 3.4 doesn't support % on bytes, # so we go the long way oid_s = 'oid_%04d' % i return oid_s.encode('ascii') def _populate_cache(self, cache, count=100, state_0=UPTODATE, state_rest=UPTODATE): oids = [] for i in range(100): oid = self._numbered_oid(i) oids.append(oid) state = state_0 if i == 0 else state_rest cache[oid] = self._makePersist(oid=oid, state=state) return oids def test_incrgc_simple(self): cache = self._makeOne() oids = self._populate_cache(cache) self.assertEqual(cache.cache_non_ghost_count, 100) cache.incrgc() gc.collect() # banish the ghosts who are no longer in the ring self.assertEqual(cache.cache_non_ghost_count, 10) items = cache.lru_items() self.assertEqual(_len(items), 10) self.assertEqual(items[0][0], b'oid_0090') self.assertEqual(items[1][0], b'oid_0091') 
self.assertEqual(items[2][0], b'oid_0092') self.assertEqual(items[3][0], b'oid_0093') self.assertEqual(items[4][0], b'oid_0094') self.assertEqual(items[5][0], b'oid_0095') self.assertEqual(items[6][0], b'oid_0096') self.assertEqual(items[7][0], b'oid_0097') self.assertEqual(items[8][0], b'oid_0098') self.assertEqual(items[9][0], b'oid_0099') for oid in oids[:90]: self.assertIsNone(cache.get(oid)) for oid in oids[90:]: self.assertIsNotNone(cache.get(oid)) def test_incrgc_w_smaller_drain_resistance(self): cache = self._makeOne() cache.cache_drain_resistance = 2 self._populate_cache(cache) self.assertEqual(cache.cache_non_ghost_count, 100) cache.incrgc() self.assertEqual(cache.cache_non_ghost_count, 10) def test_incrgc_w_larger_drain_resistance(self): cache = self._makeOne() cache.cache_drain_resistance = 2 cache.cache_size = 90 self._populate_cache(cache) self.assertEqual(cache.cache_non_ghost_count, 100) cache.incrgc() self.assertEqual(cache.cache_non_ghost_count, 49) def test_full_sweep(self): cache = self._makeOne() oids = self._populate_cache(cache) self.assertEqual(cache.cache_non_ghost_count, 100) cache.full_sweep() gc.collect() # banish the ghosts who are no longer in the ring self.assertEqual(cache.cache_non_ghost_count, 0) for oid in oids: self.assertTrue(cache.get(oid) is None) def test_minimize(self): cache = self._makeOne() oids = self._populate_cache(cache) self.assertEqual(cache.cache_non_ghost_count, 100) cache.minimize() gc.collect() # banish the ghosts who are no longer in the ring self.assertEqual(cache.cache_non_ghost_count, 0) for oid in oids: self.assertTrue(cache.get(oid) is None) def test_minimize_turns_into_ghosts(self): from persistent.interfaces import GHOST cache = self._makeOne() oid = self._numbered_oid(1) obj = cache[oid] = self._makePersist(oid=oid, state=UPTODATE) self.assertEqual(cache.cache_non_ghost_count, 1) cache.minimize() gc.collect() # banish the ghosts who are no longer in the ring self.assertEqual(cache.cache_non_ghost_count, 
0) self.assertEqual(obj._p_state, GHOST) def test_new_ghost_non_persistent_object(self): cache = self._makeOne() with self.assertRaises((AttributeError, TypeError)): cache.new_ghost(b'123', object()) def test_new_ghost_obj_already_has_oid(self): from persistent.interfaces import GHOST candidate = self._makePersist(oid=b'123', state=GHOST) cache = self._makeOne() with self.assertRaises(ValueError): cache.new_ghost(b'123', candidate) def test_new_ghost_obj_already_has_jar(self): cache = self._makeOne() candidate = self._makePersist(oid=None, jar=object()) with self.assertRaises(ValueError): cache.new_ghost(b'123', candidate) def test_new_ghost_obj_already_in_cache(self): KEY = b'123' cache = self._makeOne() candidate = self._makePersist(oid=KEY) cache[KEY] = candidate # Now, normally we can't get in the cache without an oid and jar # (the C implementation doesn't allow it), so if we try to create # a ghost, we get the value error self.assertRaises(ValueError, cache.new_ghost, KEY, candidate) return cache, KEY, candidate def test_new_ghost_success_already_ghost(self): from persistent.interfaces import GHOST KEY = b'123' jar = DummyConnection() cache = self._makeOne(jar) candidate = self._makePersist(oid=None, jar=None) cache.new_ghost(KEY, candidate) self.assertTrue(cache.get(KEY) is candidate) self.assertEqual(candidate._p_oid, KEY) self.assertEqual(candidate._p_jar, jar) self.assertEqual(candidate._p_state, GHOST) def test_new_ghost_success_not_already_ghost(self): from persistent.interfaces import GHOST KEY = b'123' jar = DummyConnection() cache = self._makeOne(jar) candidate = self._makePersist(oid=None, jar=None, state=UPTODATE) cache.new_ghost(KEY, candidate) self.assertTrue(cache.get(KEY) is candidate) self.assertEqual(candidate._p_oid, KEY) self.assertEqual(candidate._p_jar, jar) self.assertEqual(candidate._p_state, GHOST) def test_new_ghost_w_pclass_non_ghost(self): KEY = b'123' class Pclass(object): _p_oid = None _p_jar = None cache = self._makeOne() 
cache.new_ghost(KEY, Pclass) self.assertTrue(cache.get(KEY) is Pclass) self.assertEqual(Pclass._p_oid, KEY) return cache, Pclass, KEY def test_new_ghost_w_pclass_ghost(self): KEY = b'123' class Pclass(object): _p_oid = None _p_jar = None cache = self._makeOne() cache.new_ghost(KEY, Pclass) self.assertTrue(cache.get(KEY) is Pclass) self.assertEqual(Pclass._p_oid, KEY) return cache, Pclass, KEY def test_invalidate_miss_single(self): KEY = b'123' cache = self._makeOne() cache.invalidate(KEY) # doesn't raise def test_invalidate_miss_multiple(self): KEY = b'123' KEY2 = b'456' cache = self._makeOne() cache.invalidate([KEY, KEY2]) # doesn't raise def test_invalidate_hit_single_non_ghost(self): from persistent.interfaces import GHOST KEY = b'123' jar = DummyConnection() cache = self._makeOne(jar) candidate = self._makePersist(oid=b'123', jar=jar, state=UPTODATE) cache[KEY] = candidate self.assertEqual(cache.ringlen(), 1) cache.invalidate(KEY) self.assertEqual(cache.ringlen(), 0) self.assertEqual(candidate._p_state, GHOST) def test_invalidate_hit_multiple_non_ghost(self): from persistent.interfaces import GHOST KEY = b'123' KEY2 = b'456' jar = DummyConnection() cache = self._makeOne() c1 = self._makePersist(oid=KEY, jar=jar, state=UPTODATE) cache[KEY] = c1 c2 = self._makePersist(oid=KEY2, jar=jar, state=UPTODATE) cache[KEY2] = c2 self.assertEqual(cache.ringlen(), 2) # These should be in the opposite order of how they were # added to the ring to ensure ring traversal works cache.invalidate([KEY2, KEY]) self.assertEqual(cache.ringlen(), 0) self.assertEqual(c1._p_state, GHOST) self.assertEqual(c2._p_state, GHOST) def test_debug_info_w_persistent_class(self): KEY = b'pclass' class pclass(object): _p_oid = KEY _p_jar = DummyConnection() cache = self._makeOne(pclass._p_jar) pclass._p_state = UPTODATE cache[KEY] = pclass gc.collect() # pypy vs. 
refcounting info = cache.debug_info() self.assertEqual(len(info), 1) # C and Python return different length tuples, # and the refcounts are off by one. oid = info[0][0] typ = info[0][2] self.assertEqual(oid, KEY) self.assertEqual(typ, 'type') return pclass, info[0] def test_debug_info_w_normal_object(self): KEY = b'uptodate' cache = self._makeOne() uptodate = self._makePersist(state=UPTODATE, oid=KEY) cache[KEY] = uptodate gc.collect() # pypy vs. refcounting info = cache.debug_info() self.assertEqual(len(info), 1) # C and Python return different length tuples, # and the refcounts are off by one. oid = info[0][0] typ = info[0][2] self.assertEqual(oid, KEY) self.assertEqual(typ, type(uptodate).__name__) return uptodate, info[0] def test_debug_info_w_ghost(self): from persistent.interfaces import GHOST KEY = b'ghost' cache = self._makeOne() ghost = self._makePersist(state=GHOST, oid=KEY) cache[KEY] = ghost gc.collect() # pypy vs. refcounting info = cache.debug_info() self.assertEqual(len(info), 1) oid, _refc, typ, state = info[0] self.assertEqual(oid, KEY) self.assertEqual(typ, type(ghost).__name__) # In the C implementation, we couldn't actually set the _p_state # directly. 
self.assertEqual(state, ghost._p_state) return ghost, info[0] def test_setting_non_persistent_item(self): cache = self._makeOne() with self.assertRaisesRegex(TypeError, "Cache values must be persistent objects."): cache[b'12345678'] = object() def test_setting_without_jar(self): cache = self._makeOne() p = self._makePersist(jar=None) with self.assertRaisesRegex(ValueError, "Cached object jar missing"): cache[p._p_oid] = p def test_setting_already_cached(self): jar = DummyConnection() cache1 = self._makeOne(jar) p = self._makePersist(jar=jar) cache1[p._p_oid] = p cache2 = self._makeOne() with self.assertRaisesRegex(ValueError, "Cache values may only be in one cache"): cache2[p._p_oid] = p def test_cache_size(self): size = 42 cache = self._makeOne(target_size=size) self.assertEqual(cache.cache_size, size) cache.cache_size = 64 self.assertEqual(cache.cache_size, 64) def test_sweep_empty(self): cache = self._makeOne() # Python returns 0, C returns None self.assertFalse(cache.incrgc()) def test_invalidate_persistent_class_calls_p_invalidate(self): KEY = b'pclass' class pclass(object): _p_oid = KEY _p_jar = DummyConnection() invalidated = False @classmethod def _p_invalidate(cls): cls.invalidated = True cache = self._makeOne(pclass._p_jar) cache[KEY] = pclass cache.invalidate(KEY) self.assertTrue(pclass.invalidated) def test_cache_raw(self): raw = self._makePersist(kind=self._getRealPersistentClass()) cache = self._makeOne(raw._p_jar) cache[raw._p_oid] = raw self.assertEqual(1, len(cache)) self.assertIs(cache.get(raw._p_oid), raw) del raw self.assertEqual(1, len(cache)) class PythonPickleCacheTests(PickleCacheTestMixin, unittest.TestCase): # Tests that depend on the implementation details of the # Python PickleCache and the Python persistent object. # Anything that involves directly setting the _p_state of a persistent # object has to be here, we can't do that in the C implementation. 
def _getTargetInterface(self): from persistent.interfaces import IExtendedPickleCache return IExtendedPickleCache def test_sweep_of_non_deactivating_object(self): jar = DummyConnection() cache = self._makeOne(jar) p = self._makePersist(jar=jar) p._p_state = 0 # non-ghost, get in the ring cache[p._p_oid] = p def bad_deactivate(): "Doesn't call super, for it's own reasons, so can't be ejected" p._p_deactivate = bad_deactivate cache._SWEEPABLE_TYPES = DummyPersistent self.assertEqual(cache.full_sweep(), 0) del cache._SWEEPABLE_TYPES del p._p_deactivate self.assertEqual(cache.full_sweep(), 1) def test_update_object_size_estimation_simple(self): cache = self._makeOne() p = self._makePersist(jar=cache.jar) cache[p._p_oid] = p # The cache accesses the private attribute directly to bypass # the bit conversion. # Note that the _p_estimated_size is set *after* # the update call is made in ZODB's serialize p._Persistent__size = 0 cache.update_object_size_estimation(p._p_oid, 2) self.assertEqual(cache.total_estimated_size, 64) # A missing object does nothing cache.update_object_size_estimation(None, 2) self.assertEqual(cache.total_estimated_size, 64) def test_reify_miss_single(self): KEY = b'123' cache = self._makeOne() self.assertRaises(KeyError, cache.reify, KEY) def test_reify_miss_multiple(self): KEY = b'123' KEY2 = b'456' cache = self._makeOne() self.assertRaises(KeyError, cache.reify, [KEY, KEY2]) def test_reify_hit_single_non_ghost(self): KEY = b'123' jar = DummyConnection() cache = self._makeOne(jar) candidate = self._makePersist(oid=KEY, jar=jar, state=UPTODATE) cache[KEY] = candidate self.assertEqual(cache.ringlen(), 1) cache.reify(KEY) self.assertEqual(cache.ringlen(), 1) self.assertEqual(candidate._p_state, UPTODATE) def test_cannot_update_mru_while_already_locked(self): cache = self._makeOne() cache._is_sweeping_ring = True updated = cache.mru(None) self.assertFalse(updated) def test___delitem___w_persistent_class(self): cache, key = super(PythonPickleCacheTests, 
self).test___delitem___w_persistent_class() self.assertNotIn(key, cache.persistent_classes) def test___setitem___ghost(self): cache = super(PythonPickleCacheTests, self).test___setitem___ghost() self.assertEqual(cache.ringlen(), 0) def test___setitem___persistent_class(self): cache = super(PythonPickleCacheTests, self).test___setitem___persistent_class() self.assertEqual(_len(cache.items()), 0) def test_new_ghost_w_pclass_non_ghost(self): cache, Pclass, key = super(PythonPickleCacheTests, self).test_new_ghost_w_pclass_non_ghost() self.assertEqual(Pclass._p_jar, cache.jar) self.assertIs(cache.persistent_classes[key], Pclass) def test_new_ghost_w_pclass_ghost(self): cache, Pclass, key = super(PythonPickleCacheTests, self).test_new_ghost_w_pclass_ghost() self.assertEqual(Pclass._p_jar, cache.jar) self.assertIs(cache.persistent_classes[key], Pclass) def test_mru_nonesuch_raises_KeyError(self): cache = self._makeOne() self.assertRaises(KeyError, cache.mru, b'nonesuch') def test_mru_normal(self): ONE = b'one' TWO = b'two' THREE = b'three' cache = self._makeOne() cache[ONE] = self._makePersist(oid=b'one', state=UPTODATE) cache[TWO] = self._makePersist(oid=b'two', state=UPTODATE) cache[THREE] = self._makePersist(oid=b'three', state=UPTODATE) cache.mru(TWO) self.assertEqual(cache.ringlen(), 3) items = cache.lru_items() self.assertEqual(_len(items), 3) self.assertEqual(items[0][0], ONE) self.assertEqual(items[1][0], THREE) self.assertEqual(items[2][0], TWO) def test_mru_ghost(self): from persistent.interfaces import GHOST ONE = b'one' TWO = b'two' THREE = b'three' cache = self._makeOne() cache[ONE] = self._makePersist(oid=b'one', state=UPTODATE) two = cache[TWO] = self._makePersist(oid=b'two', state=GHOST) # two must live to survive gc self.assertIsNotNone(two) cache[THREE] = self._makePersist(oid=b'three', state=UPTODATE) cache.mru(TWO) self.assertEqual(cache.ringlen(), 2) items = cache.lru_items() self.assertEqual(_len(items), 2) self.assertEqual(items[0][0], ONE) 
self.assertEqual(items[1][0], THREE) def test_mru_was_ghost_now_active(self): from persistent.interfaces import GHOST ONE = b'one' TWO = b'two' THREE = b'three' cache = self._makeOne() cache[ONE] = self._makePersist(oid=b'one', state=UPTODATE) two = cache[TWO] = self._makePersist(oid=b'two', state=GHOST) cache[THREE] = self._makePersist(oid=b'three', state=UPTODATE) two._p_state = UPTODATE cache.mru(TWO) self.assertEqual(cache.ringlen(), 3) items = cache.lru_items() self.assertEqual(_len(items), 3) self.assertEqual(items[0][0], ONE) self.assertEqual(items[1][0], THREE) self.assertEqual(items[2][0], TWO) def test_mru_first(self): ONE = b'one' TWO = b'two' THREE = b'three' cache = self._makeOne() cache[ONE] = self._makePersist(oid=b'one', state=UPTODATE) cache[TWO] = self._makePersist(oid=b'two', state=UPTODATE) cache[THREE] = self._makePersist(oid=b'three', state=UPTODATE) cache.mru(ONE) self.assertEqual(cache.ringlen(), 3) items = cache.lru_items() self.assertEqual(_len(items), 3) self.assertEqual(items[0][0], TWO) self.assertEqual(items[1][0], THREE) self.assertEqual(items[2][0], ONE) def test_mru_last(self): ONE = b'one' TWO = b'two' THREE = b'three' cache = self._makeOne() cache[ONE] = self._makePersist(oid=b'one', state=UPTODATE) cache[TWO] = self._makePersist(oid=b'two', state=UPTODATE) cache[THREE] = self._makePersist(oid=b'three', state=UPTODATE) cache.mru(THREE) self.assertEqual(cache.ringlen(), 3) items = cache.lru_items() self.assertEqual(_len(items), 3) self.assertEqual(items[0][0], ONE) self.assertEqual(items[1][0], TWO) self.assertEqual(items[2][0], THREE) def test_invalidate_hit_pclass(self): KEY = b'123' class Pclass(object): _p_oid = KEY _p_jar = DummyConnection() cache = self._makeOne(Pclass._p_jar) cache[KEY] = Pclass self.assertIs(cache.persistent_classes[KEY], Pclass) cache.invalidate(KEY) self.assertNotIn(KEY, cache.persistent_classes) def test_debug_info_w_normal_object(self): obj, info = super(PythonPickleCacheTests, 
self).test_debug_info_w_normal_object() self.assertEqual(info[1], len(gc.get_referents(obj))) self.assertEqual(info[3], UPTODATE) def test_debug_info_w_ghost(self): ghost, info = super(PythonPickleCacheTests, self).test_debug_info_w_ghost() self.assertEqual(info[1], len(gc.get_referents(ghost))) def test_debug_info_w_persistent_class(self): pclass, info = super(PythonPickleCacheTests, self).test_debug_info_w_persistent_class() self.assertEqual(info[3], UPTODATE) self.assertEqual(info[1], len(gc.get_referents(pclass))) def test_full_sweep_w_sticky(self): from persistent.interfaces import STICKY cache = self._makeOne() oids = self._populate_cache(cache, state_0=STICKY) self.assertEqual(cache.cache_non_ghost_count, 100) cache.full_sweep() gc.collect() # banish the ghosts who are no longer in the ring self.assertEqual(cache.cache_non_ghost_count, 1) self.assertTrue(cache.get(oids[0]) is not None) for oid in oids[1:]: self.assertTrue(cache.get(oid) is None) def test_full_sweep_w_changed(self): from persistent.interfaces import CHANGED cache = self._makeOne() oids = self._populate_cache(cache, state_0=CHANGED) self.assertEqual(cache.cache_non_ghost_count, 100) cache.full_sweep() gc.collect() # banish the ghosts who are no longer in the ring self.assertEqual(cache.cache_non_ghost_count, 1) self.assertTrue(cache.get(oids[0]) is not None) for oid in oids[1:]: self.assertTrue(cache.get(oid) is None) def test_init_with_cacheless_jar(self): # Sometimes ZODB tests pass objects that don't # have a _cache class Jar(object): was_set = False def __setattr__(self, name, value): if name == '_cache': object.__setattr__(self, 'was_set', True) raise AttributeError(name) jar = Jar() self._makeOne(jar) self.assertTrue(jar.was_set) def test_invalidate_hit_multiple_mixed(self): from persistent.interfaces import GHOST KEY = b'123' KEY2 = b'456' jar = DummyConnection() cache = self._makeOne() c1 = self._makePersist(oid=KEY, jar=jar, state=GHOST) cache[KEY] = c1 c2 = 
self._makePersist(oid=KEY2, jar=jar, state=UPTODATE) cache[KEY2] = c2 self.assertEqual(cache.ringlen(), 1) cache.invalidate([KEY, KEY2]) self.assertEqual(cache.ringlen(), 0) self.assertEqual(c1._p_state, GHOST) self.assertEqual(c2._p_state, GHOST) def test_invalidate_hit_single_ghost(self): from persistent.interfaces import GHOST KEY = b'123' jar = DummyConnection() cache = self._makeOne(jar) candidate = self._makePersist(oid=b'123', jar=jar, state=GHOST) cache[KEY] = candidate self.assertEqual(cache.ringlen(), 0) cache.invalidate(KEY) self.assertEqual(cache.ringlen(), 0) self.assertEqual(candidate._p_state, GHOST) def test_reify_hit_multiple_mixed(self): from persistent.interfaces import GHOST KEY = b'123' KEY2 = b'456' jar = DummyConnection() cache = self._makeOne(jar) c1 = self._makePersist(oid=KEY, jar=jar, state=GHOST) cache[KEY] = c1 c2 = self._makePersist(oid=KEY2, jar=jar, state=UPTODATE) cache[KEY2] = c2 self.assertEqual(cache.ringlen(), 1) cache.reify([KEY, KEY2]) self.assertEqual(cache.ringlen(), 2) self.assertEqual(c1._p_state, UPTODATE) self.assertEqual(c2._p_state, UPTODATE) def test_reify_hit_single_ghost(self): from persistent.interfaces import GHOST KEY = b'123' jar = DummyConnection() cache = self._makeOne() candidate = self._makePersist(oid=KEY, jar=jar, state=GHOST) cache[KEY] = candidate self.assertEqual(cache.ringlen(), 0) cache.reify(KEY) self.assertEqual(cache.ringlen(), 1) items = cache.lru_items() self.assertEqual(items[0][0], KEY) self.assertTrue(items[0][1] is candidate) self.assertEqual(candidate._p_state, UPTODATE) def test_cache_garbage_collection_bytes_also_deactivates_object(self): class MyPersistent(self._getDummyPersistentClass()): def _p_deactivate(self): # mimic what the real persistent object does to update the cache # size; if we don't get deactivated by sweeping, the cache size # won't shrink so this also validates that _p_deactivate gets # called when ejecting an object. 
cache.update_object_size_estimation(self._p_oid, -1) cache = self._makeOne() cache.cache_size = 1000 oids = [] for i in range(100): oid = self._numbered_oid(i) oids.append(oid) o = cache[oid] = self._makePersist(oid=oid, kind=MyPersistent, state=UPTODATE) o._Persistent__size = 0 # must start 0, ZODB sets it AFTER updating the size cache.update_object_size_estimation(oid, 64) o._Persistent__size = 2 self.assertEqual(cache.cache_non_ghost_count, 100) # A GC at this point does nothing cache.incrgc() self.assertEqual(cache.cache_non_ghost_count, 100) self.assertEqual(len(cache), 100) # Now if we set a byte target: cache.cache_size_bytes = 1 # verify the change worked as expected self.assertEqual(cache.cache_size_bytes, 1) # verify our entrance assumption is fulfilled self.assertTrue(cache.cache_size > 100) self.assertTrue(cache.total_estimated_size > 1) # A gc shrinks the bytes cache.incrgc() self.assertEqual(cache.total_estimated_size, 0) # It also shrank the measured size of the cache, # though this may require a GC to be visible. if PYPY: # pragma: no cover gc.collect() self.assertEqual(len(cache), 1) def test_new_ghost_obj_already_in_cache(self): base_result = super(PythonPickleCacheTests, self).test_new_ghost_obj_already_in_cache() cache, key, candidate = base_result # If we're sneaky and remove the OID and jar, then we get the duplicate # key error. Removing them only works because we're not using a real # persistent object. candidate._p_oid = None self.assertRaises(ValueError, cache.new_ghost, key, candidate) candidate._p_jar = None self.assertRaises(KeyError, cache.new_ghost, key, candidate) def test_cache_garbage_collection_bytes_with_cache_size_0(self): class MyPersistent(self._getDummyPersistentClass()): def _p_deactivate(self): # mimic what the real persistent object does to update # the cache size; if we don't get deactivated by # sweeping, the cache size won't shrink so this also # validates that _p_deactivate gets called when # ejecting an object. 
cache.update_object_size_estimation(self._p_oid, -1) cache = self._makeOne() cache.cache_size = 0 cache.cache_size_bytes = 400 oids = [] for i in range(100): oid = self._numbered_oid(i) oids.append(oid) o = cache[oid] = self._makePersist(oid=oid, kind=MyPersistent, state=UPTODATE) # must start 0, ZODB sets it AFTER updating the size o._Persistent__size = 0 cache.update_object_size_estimation(oid, 1) o._Persistent__size = 1 del o # leave it only in the cache self.assertEqual(cache.cache_non_ghost_count, 100) self.assertEqual(cache.total_estimated_size, 64 * 100) cache.incrgc() gc.collect() # banish the ghosts who are no longer in the ring self.assertEqual(cache.total_estimated_size, 64 * 6) self.assertEqual(cache.cache_non_ghost_count, 6) self.assertEqual(len(cache), 6) cache.full_sweep() gc.collect() # banish the ghosts who are no longer in the ring self.assertEqual(cache.total_estimated_size, 0) self.assertEqual(cache.cache_non_ghost_count, 0) self.assertEqual(len(cache), 0) @skipIfNoCExtension class CPickleCacheTests(PickleCacheTestMixin, unittest.TestCase): def _getTargetClass(self): from persistent._compat import _c_optimizations_available as get_c return get_c()['persistent.picklecache'].PickleCache def _getRealPersistentClass(self): from persistent._compat import _c_optimizations_available as get_c return get_c()['persistent.persistence'].Persistent def _getDummyPersistentClass(self): class DummyPersistent(self._getRealPersistentClass()): __slots__ = () return DummyPersistent def test_inst_does_not_conform_to_IExtendedPickleCache(self): # Test that ``@use_c_impl`` is only applying the correct # interface declaration to the C implementation. from persistent.interfaces import IExtendedPickleCache from zope.interface.verify import verifyObject from zope.interface.exceptions import Invalid # We don't claim to implement it. self.assertFalse(IExtendedPickleCache.providedBy(self._makeOne())) # And we don't even provide everything it asks for. 
# (Exact error depends on version of zope.interface and what we # fail to implement. 5.0 probably raises MultipleInvalid). with self.assertRaises(Invalid): verifyObject(IExtendedPickleCache, self._makeOne(), tentative=True) def test___setitem___persistent_class(self): cache = super(CPickleCacheTests, self).test___setitem___persistent_class() self.assertEqual(_len(cache.items()), 1) def test_cache_garbage_collection_bytes_with_cache_size_0(self): class DummyConnection(object): def register(self, obj): pass dummy_connection = DummyConnection() dummy_connection.register(1) # for coveralls def makePersistent(oid): persist = self._getDummyPersistentClass()() persist._p_oid = oid persist._p_jar = dummy_connection return persist cache = self._getTargetClass()(dummy_connection) dummy_connection._cache = cache cache.cache_size = 0 cache.cache_size_bytes = 400 oids = [] for i in range(100): oid = self._numbered_oid(i) oids.append(oid) o = cache[oid] = makePersistent(oid) cache.update_object_size_estimation(oid, 1) o._p_estimated_size = 1 del o # leave it only in the cache self.assertEqual(cache.cache_non_ghost_count, 100) self.assertEqual(cache.total_estimated_size, 64 * 100) cache.incrgc() self.assertEqual(cache.total_estimated_size, 64 * 6) self.assertEqual(cache.cache_non_ghost_count, 6) self.assertEqual(len(cache), 6) cache.full_sweep() gc.collect() # banish the ghosts who are no longer in the ring self.assertEqual(cache.total_estimated_size, 0) self.assertEqual(cache.cache_non_ghost_count, 0) self.assertEqual(len(cache), 0) def test_suite(): return unittest.defaultTestLoader.loadTestsFromName(__name__) if __name__ == '__main__': unittest.main() ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1585243094.0 persistent-4.6.4/persistent/tests/test_ring.py0000644000076500000240000000643600000000000022376 0ustar00jmaddenstaff00000000000000############################################################################## # # Copyright 
(c) 2015 Zope Foundation and Contributors. # All Rights Reserved. # # This software is subject to the provisions of the Zope Public License, # Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. # THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED # WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS # FOR A PARTICULAR PURPOSE. # ############################################################################## import unittest from .. import ring # pylint:disable=protected-access class DummyPersistent(object): _p_oid = None _Persistent__ring = None __next_oid = 0 @classmethod def _next_oid(cls): cls.__next_oid += 1 return cls.__next_oid def __init__(self, oid=None): if oid is None: self._p_oid = self._next_oid() def __repr__(self): # pragma: no cover return "" % (self._p_oid, id(self)) class CFFIRingTests(unittest.TestCase): def _getTargetClass(self): return ring._CFFIRing def _makeOne(self): return self._getTargetClass()() def test_empty_len(self): self.assertEqual(0, len(self._makeOne())) def test_empty_contains(self): r = self._makeOne() self.assertNotIn(DummyPersistent(), r) def test_empty_iter(self): self.assertEqual([], list(self._makeOne())) def test_add_one_len1(self): r = self._makeOne() p = DummyPersistent() r.add(p) self.assertEqual(1, len(r)) def test_add_one_contains(self): r = self._makeOne() p = DummyPersistent() r.add(p) self.assertIn(p, r) def test_delete_one_len0(self): r = self._makeOne() p = DummyPersistent() r.add(p) r.delete(p) self.assertEqual(0, len(r)) def test_delete_one_multiple(self): r = self._makeOne() p = DummyPersistent() r.add(p) r.delete(p) self.assertEqual(0, len(r)) self.assertFalse(p in r) r.delete(p) self.assertEqual(0, len(r)) self.assertFalse(p in r) def test_delete_from_wrong_ring(self): r1 = self._makeOne() r2 = self._makeOne() p1 = DummyPersistent() p2 = DummyPersistent() r1.add(p1) r2.add(p2) 
r2.delete(p1) self.assertEqual(1, len(r1)) self.assertEqual(1, len(r2)) self.assertEqual([p1], list(r1)) self.assertEqual([p2], list(r2)) def test_move_to_head(self): r = self._makeOne() p1 = DummyPersistent() p2 = DummyPersistent() p3 = DummyPersistent() r.add(p1) r.add(p2) r.add(p3) __traceback_info__ = [ p1._Persistent__ring, p2._Persistent__ring, p3._Persistent__ring, ] self.assertEqual(3, len(r)) self.assertEqual([p1, p2, p3], list(r)) r.move_to_head(p1) self.assertEqual([p2, p3, p1], list(r)) r.move_to_head(p3) self.assertEqual([p2, p1, p3], list(r)) r.move_to_head(p3) self.assertEqual([p2, p1, p3], list(r)) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1585243094.0 persistent-4.6.4/persistent/tests/test_timestamp.py0000644000076500000240000003665200000000000023445 0ustar00jmaddenstaff00000000000000############################################################################## # # Copyright (c) 2011 Zope Foundation and Contributors. # All Rights Reserved. # # This software is subject to the provisions of the Zope Public License, # Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. # THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED # WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS # FOR A PARTICULAR PURPOSE. 
# ############################################################################## import unittest from contextlib import contextmanager from persistent.tests.utils import skipIfNoCExtension class Test__UTC(unittest.TestCase): def _getTargetClass(self): from persistent.timestamp import _UTC return _UTC def _makeOne(self, *args, **kw): return self._getTargetClass()(*args, **kw) def test_tzname(self): utc = self._makeOne() # datetime.timezone.utc changed the tzname in 3.6 # Now it is just 'UTC', but prior to that it was 'UTC+00:00' self.assertEqual(utc.tzname(None)[0:3], 'UTC') def test_utcoffset(self): from datetime import timedelta utc = self._makeOne() self.assertEqual(utc.utcoffset(None), timedelta(0)) def test_dst(self): utc = self._makeOne() self.assertEqual(utc.dst(None), None) def test_fromutc(self): import datetime source = datetime.datetime.now(self._getTargetClass()()) utc = self._makeOne() self.assertEqual(utc.fromutc(source), source) class Test__UTCClass(Test__UTC): def _getTargetClass(self): from persistent.timestamp import _UTCClass return _UTCClass class TimeStampTestsMixin(object): # Tests that work for either implementation. 
def _getTargetClass(self): raise NotImplementedError def _makeOne(self, *args, **kw): return self._getTargetClass()(*args, **kw) def test_ctor_invalid_arglist(self): BAD_ARGS = [(), (1,), (1, 2), (1, 2, 3), (1, 2, 3, 4), (1, 2, 3, 4, 5), ('1', '2', '3', '4', '5', '6'), (1, 2, 3, 4, 5, 6, 7), (b'123',), ] for args in BAD_ARGS: with self.assertRaises((TypeError, ValueError)): self._makeOne(*args) def test_ctor_from_invalid_strings(self): BAD_ARGS = ['' '\x00', '\x00' * 2, '\x00' * 3, '\x00' * 4, '\x00' * 5, '\x00' * 7, ] for args in BAD_ARGS: self.assertRaises((TypeError, ValueError), self._makeOne, *args) def test_ctor_from_string(self): from persistent.timestamp import _makeUTC ZERO = _makeUTC(1900, 1, 1, 0, 0, 0) EPOCH = _makeUTC(1970, 1, 1, 0, 0, 0) DELTA = ZERO - EPOCH DELTA_SECS = DELTA.days * 86400 + DELTA.seconds SERIAL = b'\x00' * 8 ts = self._makeOne(SERIAL) self.assertEqual(ts.raw(), SERIAL) self.assertEqual(ts.year(), 1900) self.assertEqual(ts.month(), 1) self.assertEqual(ts.day(), 1) self.assertEqual(ts.hour(), 0) self.assertEqual(ts.minute(), 0) self.assertEqual(ts.second(), 0.0) self.assertEqual(ts.timeTime(), DELTA_SECS) def test_ctor_from_string_non_zero(self): before = self._makeOne(2011, 2, 16, 14, 37, 22.80544) after = self._makeOne(before.raw()) self.assertEqual(before.raw(), after.raw()) self.assertEqual(before.timeTime(), 1297867042.80544) def test_ctor_from_elements(self): from persistent.timestamp import _makeUTC ZERO = _makeUTC(1900, 1, 1, 0, 0, 0) EPOCH = _makeUTC(1970, 1, 1, 0, 0, 0) DELTA = ZERO - EPOCH DELTA_SECS = DELTA.days * 86400 + DELTA.seconds SERIAL = b'\x00' * 8 ts = self._makeOne(1900, 1, 1, 0, 0, 0.0) self.assertEqual(ts.raw(), SERIAL) self.assertEqual(ts.year(), 1900) self.assertEqual(ts.month(), 1) self.assertEqual(ts.day(), 1) self.assertEqual(ts.hour(), 0) self.assertEqual(ts.minute(), 0) self.assertEqual(ts.second(), 0.0) self.assertEqual(ts.timeTime(), DELTA_SECS) def test_laterThan_invalid(self): ERRORS = (ValueError, 
TypeError) SERIAL = b'\x01' * 8 ts = self._makeOne(SERIAL) self.assertRaises(ERRORS, ts.laterThan, None) self.assertRaises(ERRORS, ts.laterThan, '') self.assertRaises(ERRORS, ts.laterThan, ()) self.assertRaises(ERRORS, ts.laterThan, []) self.assertRaises(ERRORS, ts.laterThan, {}) self.assertRaises(ERRORS, ts.laterThan, object()) def test_laterThan_self_is_earlier(self): SERIAL1 = b'\x01' * 8 SERIAL2 = b'\x02' * 8 ts1 = self._makeOne(SERIAL1) ts2 = self._makeOne(SERIAL2) later = ts1.laterThan(ts2) self.assertEqual(later.raw(), b'\x02' * 7 + b'\x03') def test_laterThan_self_is_later(self): SERIAL1 = b'\x01' * 8 SERIAL2 = b'\x02' * 8 ts1 = self._makeOne(SERIAL1) ts2 = self._makeOne(SERIAL2) later = ts2.laterThan(ts1) self.assertTrue(later is ts2) def test_repr(self): SERIAL = b'\x01' * 8 ts = self._makeOne(SERIAL) self.assertEqual(repr(ts), repr(SERIAL)) def test_comparisons_to_non_timestamps(self): import operator from persistent._compat import PYTHON2 # Check the corner cases when comparing non-comparable types ts = self._makeOne(2011, 2, 16, 14, 37, 22.0) def check_common(op, passes): if passes == 'neither': self.assertFalse(op(ts, None)) self.assertFalse(op(None, ts)) return True if passes == 'both': self.assertTrue(op(ts, None)) self.assertTrue(op(None, ts)) return True return False def check_py2(op, passes): # pragma: no cover if passes == 'first': self.assertTrue(op(ts, None)) self.assertFalse(op(None, ts)) else: self.assertFalse(op(ts, None)) self.assertTrue(op(None, ts)) def check_py3(op, passes): self.assertRaises(TypeError, op, ts, None) self.assertRaises(TypeError, op, None, ts) check = check_py2 if PYTHON2 else check_py3 for op_name, passes in (('lt', 'second'), ('gt', 'first'), ('le', 'second'), ('ge', 'first'), ('eq', 'neither'), ('ne', 'both')): op = getattr(operator, op_name) if not check_common(op, passes): check(op, passes) class Instant(object): # Namespace to hold some constants. # A particular instant in time. 
now = 1229959248.3 # That instant in time split as the result of this expression: # (time.gmtime(now)[:5] + (now % 60,)) now_ts_args = (2008, 12, 22, 15, 20, 48.299999952316284) # We happen to know that on a 32-bit platform, the hashcode # of a TimeStamp at that instant should be exactly # -1419374591 # and the 64-bit should be exactly: # -3850693964765720575 bit_32_hash = -1419374591 bit_64_hash = -3850693964765720575 MAX_32_BITS = 2 ** 31 - 1 MAX_64_BITS = 2 ** 63 - 1 def __init__(self): from persistent import timestamp as MUT self.MUT = MUT # pylint:disable=protected-access self.orig_maxint = MUT._MAXINT self.is_32_bit_hash = self.orig_maxint == self.MAX_32_BITS self.orig_c_long = None self.c_int64 = None self.c_int32 = None if MUT.c_long is not None: import ctypes self.orig_c_long = MUT.c_long self.c_int32 = ctypes.c_int32 self.c_int64 = ctypes.c_int64 # win32, even on 64-bit long, has funny sizes self.is_32_bit_hash = self.c_int32 == ctypes.c_long self.expected_hash = self.bit_32_hash if self.is_32_bit_hash else self.bit_64_hash @contextmanager def _use_hash(self, maxint, c_long): # pylint:disable=protected-access try: self.MUT._MAXINT = maxint self.MUT.c_long = c_long yield finally: self.MUT._MAXINT = self.orig_maxint self.MUT.c_long = self.orig_c_long def use_32bit(self): return self._use_hash(self.MAX_32_BITS, self.c_int32) def use_64bit(self): return self._use_hash(self.MAX_64_BITS, self.c_int64) class pyTimeStampTests(TimeStampTestsMixin, unittest.TestCase): # Tests specific to the Python implementation def _getTargetClass(self): from persistent.timestamp import TimeStampPy return TimeStampPy def test_py_hash_32_64_bit(self): # Fake out the python version to think it's on a 32-bit # platform and test the same; also verify 64 bit instant = Instant() with instant.use_32bit(): py = self._makeOne(*Instant.now_ts_args) self.assertEqual(hash(py), Instant.bit_32_hash) with instant.use_64bit(): # call __hash__ directly to avoid interpreter truncation # in hash() 
on 32-bit platforms self.assertEqual(py.__hash__(), Instant.bit_64_hash) self.assertEqual(py.__hash__(), instant.expected_hash) class CTimeStampTests(TimeStampTestsMixin, unittest.TestCase): def _getTargetClass(self): from persistent.timestamp import TimeStamp return TimeStamp def test_hash_32_or_64_bit(self): ts = self._makeOne(*Instant.now_ts_args) self.assertIn(hash(ts), (Instant.bit_32_hash, Instant.bit_64_hash)) @skipIfNoCExtension class PyAndCComparisonTests(unittest.TestCase): """ Compares C and Python implementations. """ def _make_many_instants(self): # Given the above data, return many slight variations on # it to test matching yield Instant.now_ts_args for i in range(2000): yield Instant.now_ts_args[:-1] + (Instant.now_ts_args[-1] + (i % 60.0)/100.0, ) def _makeC(self, *args, **kwargs): from persistent._compat import _c_optimizations_available as get_c return get_c()['persistent.timestamp'].TimeStamp(*args, **kwargs) def _makePy(self, *args, **kwargs): from persistent.timestamp import TimeStampPy return TimeStampPy(*args, **kwargs) def _make_C_and_Py(self, *args, **kwargs): return self._makeC(*args, **kwargs), self._makePy(*args, **kwargs) def test_reprs_equal(self): for args in self._make_many_instants(): c, py = self._make_C_and_Py(*args) self.assertEqual(repr(c), repr(py)) def test_strs_equal(self): for args in self._make_many_instants(): c, py = self._make_C_and_Py(*args) self.assertEqual(str(c), str(py)) def test_raw_equal(self): c, py = self._make_C_and_Py(*Instant.now_ts_args) self.assertEqual(c.raw(), py.raw()) def test_equal(self): c, py = self._make_C_and_Py(*Instant.now_ts_args) self.assertEqual(c, py) def test_hash_equal(self): c, py = self._make_C_and_Py(*Instant.now_ts_args) self.assertEqual(hash(c), hash(py)) def test_hash_equal_constants(self): # The simple constants make it easier to diagnose # a difference in algorithms is_32_bit = Instant().is_32_bit_hash c, py = self._make_C_and_Py(b'\x00\x00\x00\x00\x00\x00\x00\x00') 
self.assertEqual(hash(c), 8) self.assertEqual(hash(c), hash(py)) c, py = self._make_C_and_Py(b'\x00\x00\x00\x00\x00\x00\x00\x01') self.assertEqual(hash(c), 9) self.assertEqual(hash(c), hash(py)) c, py = self._make_C_and_Py(b'\x00\x00\x00\x00\x00\x00\x01\x00') self.assertEqual(hash(c), 1000011) self.assertEqual(hash(c), hash(py)) # overflow kicks in here on 32-bit platforms c, py = self._make_C_and_Py(b'\x00\x00\x00\x00\x00\x01\x00\x00') expected = -721379967 if is_32_bit else 1000006000001 self.assertEqual(hash(c), expected) self.assertEqual(hash(c), hash(py)) c, py = self._make_C_and_Py(b'\x00\x00\x00\x00\x01\x00\x00\x00') expected = 583896275 if is_32_bit else 1000009000027000019 self.assertEqual(hash(c), expected) self.assertEqual(hash(c), hash(py)) # Overflow kicks in at this point on 64-bit platforms c, py = self._make_C_and_Py(b'\x00\x00\x00\x01\x00\x00\x00\x00') expected = 1525764953 if is_32_bit else -4442925868394654887 self.assertEqual(hash(c), expected) self.assertEqual(hash(c), hash(py)) c, py = self._make_C_and_Py(b'\x00\x00\x01\x00\x00\x00\x00\x00') expected = -429739973 if is_32_bit else -3993531167153147845 self.assertEqual(hash(c), expected) self.assertEqual(hash(c), hash(py)) c, py = self._make_C_and_Py(b'\x01\x00\x00\x00\x00\x00\x00\x00') expected = 263152323 if is_32_bit else -3099646879006235965 self.assertEqual(hash(c), expected) self.assertEqual(hash(c), hash(py)) def test_ordering(self): small_c = self._makeC(b'\x00\x00\x00\x00\x00\x00\x00\x01') small_py = self._makePy(b'\x00\x00\x00\x00\x00\x00\x00\x01') big_c = self._makeC(b'\x01\x00\x00\x00\x00\x00\x00\x00') big_py = self._makePy(b'\x01\x00\x00\x00\x00\x00\x00\x00') self.assertTrue(small_py < big_py) self.assertTrue(small_py <= big_py) self.assertTrue(small_py < big_c) self.assertTrue(small_py <= big_c) self.assertTrue(small_py <= small_c) self.assertTrue(small_c < big_c) self.assertTrue(small_c <= big_c) self.assertTrue(small_c <= big_py) self.assertTrue(big_c > small_py) 
self.assertTrue(big_c >= big_py) self.assertFalse(big_c == small_py) self.assertFalse(small_py == big_c) self.assertTrue(big_c != small_py) self.assertTrue(small_py != big_c) def test_seconds_precision(self, seconds=6.123456789): # https://github.com/zopefoundation/persistent/issues/41 args = (2001, 2, 3, 4, 5, seconds) c = self._makeC(*args) py = self._makePy(*args) self.assertEqual(c, py) self.assertEqual(c.second(), py.second()) py2 = self._makePy(c.raw()) self.assertEqual(py2, c) c2 = self._makeC(c.raw()) self.assertEqual(c2, c) def test_seconds_precision_half(self): # make sure our rounding matches self.test_seconds_precision(seconds=6.5) self.test_seconds_precision(seconds=6.55) self.test_seconds_precision(seconds=6.555) self.test_seconds_precision(seconds=6.5555) self.test_seconds_precision(seconds=6.55555) self.test_seconds_precision(seconds=6.555555) self.test_seconds_precision(seconds=6.5555555) self.test_seconds_precision(seconds=6.55555555) self.test_seconds_precision(seconds=6.555555555) def test_suite(): return unittest.defaultTestLoader.loadTestsFromName(__name__) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1585243094.0 persistent-4.6.4/persistent/tests/test_wref.py0000644000076500000240000002605300000000000022377 0ustar00jmaddenstaff00000000000000############################################################################## # # Copyright (c) 2003 Zope Foundation and Contributors. # All Rights Reserved. # # This software is subject to the provisions of the Zope Public License, # Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. # THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED # WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS # FOR A PARTICULAR PURPOSE. 
# ############################################################################## import unittest class WeakRefTests(unittest.TestCase): def _getTargetClass(self): from persistent.wref import WeakRef return WeakRef def _makeOne(self, ob): return self._getTargetClass()(ob) def test_ctor_target_wo_jar(self): target = _makeTarget() wref = self._makeOne(target) self.assertTrue(wref._v_ob is target) self.assertEqual(wref.oid, b'OID') self.assertTrue(wref.dm is None) self.assertFalse('database_name' in wref.__dict__) def test_ctor_target_w_jar(self): target = _makeTarget() target._p_jar = jar = _makeJar() wref = self._makeOne(target) self.assertTrue(wref._v_ob is target) self.assertEqual(wref.oid, b'OID') self.assertTrue(wref.dm is jar) self.assertEqual(wref.database_name, 'testing') def test___call___target_in_volatile(self): target = _makeTarget() target._p_jar = jar = _makeJar() wref = self._makeOne(target) self.assertTrue(wref() is target) def test___call___target_in_jar(self): target = _makeTarget() target._p_jar = jar = _makeJar() jar[target._p_oid] = target wref = self._makeOne(target) del wref._v_ob self.assertTrue(wref() is target) def test___call___target_not_in_jar(self): target = _makeTarget() target._p_jar = jar = _makeJar() wref = self._makeOne(target) del wref._v_ob self.assertTrue(wref() is None) def test___hash___w_target(self): target = _makeTarget() target._p_jar = jar = _makeJar() wref = self._makeOne(target) self.assertEqual(hash(wref), hash(target)) def test___hash___wo_target(self): target = _makeTarget() target._p_jar = jar = _makeJar() wref = self._makeOne(target) del wref._v_ob self.assertRaises(TypeError, hash, wref) def test___eq___w_non_weakref(self): target = _makeTarget() lhs = self._makeOne(target) self.assertNotEqual(lhs, object()) # Test belt-and-suspenders directly self.assertFalse(lhs.__eq__(object())) def test___eq___w_both_same_target(self): target = _makeTarget() lhs = self._makeOne(target) rhs_target = _makeTarget() rhs = 
self._makeOne(target) self.assertEqual(lhs, rhs) def test___eq___w_both_different_targets(self): lhs_target = _makeTarget(oid='LHS') lhs = self._makeOne(lhs_target) rhs_target = _makeTarget(oid='RHS') rhs = self._makeOne(rhs_target) self.assertNotEqual(lhs, rhs) def test___eq___w_lhs_gone_target_not_in_jar(self): target = _makeTarget() target._p_jar = jar = _makeJar() lhs = self._makeOne(target) del lhs._v_ob rhs = self._makeOne(target) self.assertRaises(TypeError, lambda: lhs == rhs) def test___eq___w_lhs_gone_target_in_jar(self): target = _makeTarget() target._p_jar = jar = _makeJar() jar[target._p_oid] = target lhs = self._makeOne(target) del lhs._v_ob rhs_target = _makeTarget() rhs = self._makeOne(target) self.assertEqual(lhs, rhs) def test___eq___w_rhs_gone_target_not_in_jar(self): target = _makeTarget() target._p_jar = jar = _makeJar() lhs = self._makeOne(target) rhs = self._makeOne(target) del rhs._v_ob self.assertRaises(TypeError, lambda: lhs == rhs) def test___eq___w_rhs_gone_target_in_jar(self): target = _makeTarget() target._p_jar = jar = _makeJar() jar[target._p_oid] = target lhs = self._makeOne(target) rhs = self._makeOne(target) del rhs._v_ob self.assertEqual(lhs, rhs) class PersistentWeakKeyDictionaryTests(unittest.TestCase): def _getTargetClass(self): from persistent.wref import PersistentWeakKeyDictionary return PersistentWeakKeyDictionary def _makeOne(self, adict, **kw): return self._getTargetClass()(adict, **kw) def test_ctor_w_adict_none_no_kwargs(self): pwkd = self._makeOne(None) self.assertEqual(pwkd.data, {}) def test_ctor_w_adict_as_dict(self): jar = _makeJar() key = jar['key'] = _makeTarget(oid='KEY') key._p_jar = jar value = jar['value'] = _makeTarget(oid='VALUE') value._p_jar = jar pwkd = self._makeOne({key: value}) self.assertTrue(pwkd[key] is value) def test_ctor_w_adict_as_items(self): jar = _makeJar() key = jar['key'] = _makeTarget(oid='KEY') key._p_jar = jar value = jar['value'] = _makeTarget(oid='VALUE') value._p_jar = jar pwkd = 
self._makeOne([(key, value)]) self.assertTrue(pwkd[key] is value) def test___getstate___empty(self): pwkd = self._makeOne(None) self.assertEqual(pwkd.__getstate__(), {'data': []}) def test___getstate___filled(self): from persistent.wref import WeakRef jar = _makeJar() key = jar['key'] = _makeTarget(oid='KEY') key._p_jar = jar value = jar['value'] = _makeTarget(oid='VALUE') value._p_jar = jar pwkd = self._makeOne([(key, value)]) self.assertEqual(pwkd.__getstate__(), {'data': [(WeakRef(key), value)]}) def test___setstate___empty(self): from persistent.wref import WeakRef jar = _makeJar() KEY = b'KEY' KEY2 = b'KEY2' KEY3 = b'KEY3' VALUE = b'VALUE' VALUE2 = b'VALUE2' VALUE3 = b'VALUE3' key = jar[KEY] = _makeTarget(oid=KEY) key._p_jar = jar kref = WeakRef(key) value = jar[VALUE] = _makeTarget(oid=VALUE) value._p_jar = jar key2 = _makeTarget(oid=KEY2) key2._p_jar = jar # not findable kref2 = WeakRef(key2) del kref2._v_ob # force a miss value2 = jar[VALUE2] = _makeTarget(oid=VALUE2) value2._p_jar = jar key3 = jar[KEY3] = _makeTarget(oid=KEY3) # findable key3._p_jar = jar kref3 = WeakRef(key3) del kref3._v_ob # force a miss, but win in the lookup value3 = jar[VALUE3] = _makeTarget(oid=VALUE3) value3._p_jar = jar pwkd = self._makeOne(None) pwkd.__setstate__({'data': [(kref, value), (kref2, value2), (kref3, value3)]}) self.assertTrue(pwkd[key] is value) self.assertTrue(pwkd.get(key2) is None) self.assertTrue(pwkd[key3] is value3) def test___setitem__(self): jar = _makeJar() key = jar['key'] = _makeTarget(oid='KEY') key._p_jar = jar value = jar['value'] = _makeTarget(oid='VALUE') value._p_jar = jar pwkd = self._makeOne(None) pwkd[key] = value self.assertTrue(pwkd[key] is value) def test___getitem___miss(self): jar = _makeJar() key = jar['key'] = _makeTarget(oid='KEY') key._p_jar = jar value = jar['value'] = _makeTarget(oid='VALUE') value._p_jar = jar pwkd = self._makeOne(None) def _try(): return pwkd[key] self.assertRaises(KeyError, _try) def test___delitem__(self): jar = 
_makeJar() key = jar['key'] = _makeTarget(oid='KEY') key._p_jar = jar value = jar['value'] = _makeTarget(oid='VALUE') value._p_jar = jar pwkd = self._makeOne([(key, value)]) del pwkd[key] self.assertTrue(pwkd.get(key) is None) def test___delitem___miss(self): jar = _makeJar() key = jar['key'] = _makeTarget(oid='KEY') key._p_jar = jar value = jar['value'] = _makeTarget(oid='VALUE') value._p_jar = jar pwkd = self._makeOne(None) def _try(): del pwkd[key] self.assertRaises(KeyError, _try) def test_get_miss_w_explicit_default(self): jar = _makeJar() key = jar['key'] = _makeTarget(oid='KEY') key._p_jar = jar value = jar['value'] = _makeTarget(oid='VALUE') value._p_jar = jar pwkd = self._makeOne(None) self.assertTrue(pwkd.get(key, value) is value) def test___contains___miss(self): jar = _makeJar() key = jar['key'] = _makeTarget(oid='KEY') key._p_jar = jar pwkd = self._makeOne(None) self.assertFalse(key in pwkd) def test___contains___hit(self): jar = _makeJar() key = jar['key'] = _makeTarget(oid='KEY') key._p_jar = jar value = jar['value'] = _makeTarget(oid='VALUE') value._p_jar = jar pwkd = self._makeOne([(key, value)]) self.assertTrue(key in pwkd) def test___iter___empty(self): jar = _makeJar() pwkd = self._makeOne(None) self.assertEqual(list(pwkd), []) def test___iter___filled(self): jar = _makeJar() key = jar['key'] = _makeTarget(oid='KEY') key._p_jar = jar value = jar['value'] = _makeTarget(oid='VALUE') value._p_jar = jar pwkd = self._makeOne([(key, value)]) self.assertEqual(list(pwkd), [key]) def test_update_w_other_pwkd(self): jar = _makeJar() key = jar['key'] = _makeTarget(oid='KEY') key._p_jar = jar value = jar['value'] = _makeTarget(oid='VALUE') value._p_jar = jar source = self._makeOne([(key, value)]) target = self._makeOne(None) target.update(source) self.assertTrue(target[key] is value) def test_update_w_dict(self): jar = _makeJar() key = jar['key'] = _makeTarget(oid='KEY') key._p_jar = jar value = jar['value'] = _makeTarget(oid='VALUE') value._p_jar = jar 
source = dict([(key, value)]) target = self._makeOne(None) target.update(source) self.assertTrue(target[key] is value) def _makeTarget(oid=b'OID'): from persistent import Persistent class Derived(Persistent): def __hash__(self): return hash(self._p_oid) def __eq__(self, other): return self._p_oid == other._p_oid def __repr__(self): # pragma: no cover return 'Derived: %s' % self._p_oid derived = Derived() derived._p_oid = oid return derived def _makeJar(): class _DB(object): database_name = 'testing' class _Jar(dict): db = lambda self: _DB() return _Jar() def test_suite(): return unittest.defaultTestLoader.loadTestsFromName(__name__) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1585243094.0 persistent-4.6.4/persistent/tests/utils.py0000644000076500000240000000652400000000000021536 0ustar00jmaddenstaff00000000000000 class TrivialJar(object): """ Jar that only supports registering objects so ``_p_changed`` can be tested. """ def register(self, ob): """Does nothing""" class ResettingJar(object): """Testing stub for _p_jar attribute. """ def __init__(self): from persistent import PickleCache # XXX stub it! from persistent.interfaces import IPersistentDataManager from zope.interface import directlyProvides self.cache = self._cache = PickleCache(self) self.oid = 1 self.registered = {} directlyProvides(self, IPersistentDataManager) def add(self, obj): import struct obj._p_oid = struct.pack(">Q", self.oid) self.oid += 1 obj._p_jar = self self.cache[obj._p_oid] = obj # the following methods must be implemented to be a jar def setstate(self, obj): # Trivial setstate() implementation that just re-initializes # the object. This isn't what setstate() is supposed to do, # but it suffices for the tests. obj.__class__.__init__(obj) class RememberingJar(object): """Testing stub for _p_jar attribute. """ def __init__(self): from persistent import PickleCache # XXX stub it! 
self.cache = PickleCache(self) self.oid = 1 self.registered = {} def add(self, obj): import struct obj._p_oid = struct.pack(">Q", self.oid) self.oid += 1 obj._p_jar = self self.cache[obj._p_oid] = obj # Remember object's state for later. self.obj = obj self.remembered = obj.__getstate__() def fake_commit(self): self.remembered = self.obj.__getstate__() self.obj._p_changed = 0 # the following methods must be implemented to be a jar def register(self, obj): self.registered[obj] = 1 def setstate(self, obj): # Trivial setstate() implementation that resets the object's # state as of the time it was added to the jar. # This isn't what setstate() is supposed to do, # but it suffices for the tests. obj.__setstate__(self.remembered) def copy_test(self, obj): import copy # Test copy.copy. Do this first, because depending on the # version of Python, `UserDict.copy()` may wind up # mutating the original object's ``data`` (due to our # BWC with ``_container``). This shows up only as a failure # of coverage. obj.test = [1234] # Make sure instance vars are also copied. 
def skipIfNoCExtension(o):
    """Test decorator: skip when the C extension cannot be used.

    When the C optimizations *should* be attempted but are unavailable,
    the test is marked an expected failure instead of skipped, so build
    problems surface rather than being silently ignored.
    """
    import unittest
    from persistent._compat import _should_attempt_c_optimizations
    from persistent._compat import _c_optimizations_available
    from persistent._compat import _c_optimizations_ignored

    if _should_attempt_c_optimizations() and not _c_optimizations_available():  # pragma: no cover
        return unittest.expectedFailure(o)

    should_skip = _c_optimizations_ignored() or not _c_optimizations_available()
    return unittest.skipIf(should_skip, "The C extension is not available")(o)
from ctypes import c_long except ImportError: # pragma: no cover # XXX: This is broken on 64-bit windows, where # sizeof(long) != sizeof(Py_ssize_t) # sizeof(long) == 4, sizeof(Py_ssize_t) == 8 # It can be fixed by setting _MAXINT = 2 ** 31 - 1 on all # win32 platforms, but then that breaks PyPy3 64 bit for an unknown # reason. c_long = None def _wraparound(x): return int(((x + (_MAXINT + 1)) & ((_MAXINT << 1) + 1)) - (_MAXINT + 1)) else: def _wraparound(x): return c_long(x).value class _UTCClass(datetime.tzinfo): # A Python 2 implementation of a UTC tzinfo. def tzname(self, dt): return 'UTC' def utcoffset(self, dt): return datetime.timedelta(0, 0, 0) def dst(self, dt): return None def fromutc(self, dt): return dt try: from datetime import timezone def _UTC(): return timezone.utc except ImportError: # pragma: no cover # Python 2 def _UTC(_inst=_UTCClass()): return _inst def _makeUTC(y, mo, d, h, mi, s): s = round(s, 6) # microsecond precision, to match the C implementation usec, sec = math.modf(s) sec = int(sec) usec = int(usec * 1e6) return datetime.datetime(y, mo, d, h, mi, sec, usec, tzinfo=_UTC()) _EPOCH = _makeUTC(1970, 1, 1, 0, 0, 0) _TS_SECOND_BYTES_BIAS = 60.0 / (1<<16) / (1<<16) def _makeRaw(year, month, day, hour, minute, second): a = (((year - 1900) * 12 + month - 1) * 31 + day - 1) a = (a * 24 + hour) * 60 + minute b = int(second / _TS_SECOND_BYTES_BIAS) # Don't round() this; the C version just truncates return struct.pack('>II', a, b) def _parseRaw(octets): a, b = struct.unpack('>II', octets) minute = a % 60 hour = a // 60 % 24 day = a // (60 * 24) % 31 + 1 month = a // (60 * 24 * 31) % 12 + 1 year = a // (60 * 24 * 31 * 12) + 1900 second = b * _TS_SECOND_BYTES_BIAS return (year, month, day, hour, minute, second) @use_c_impl class TimeStamp(object): __slots__ = ('_raw', '_elements') def __init__(self, *args): self._elements = None if len(args) == 1: raw = args[0] if not isinstance(raw, _RAWTYPE): raise TypeError('Raw octets must be of type: %s' % 
_RAWTYPE) if len(raw) != 8: raise TypeError('Raw must be 8 octets') self._raw = raw elif len(args) == 6: self._raw = _makeRaw(*args) # pylint:disable=no-value-for-parameter # Note that we don't preserve the incoming arguments in self._elements, # we derive them from the raw value. This is because the incoming # seconds value could have more precision than would survive # in the raw data, so we must be consistent. else: raise TypeError('Pass either a single 8-octet arg ' 'or 5 integers and a float') self._elements = _parseRaw(self._raw) def raw(self): return self._raw def __repr__(self): return repr(self._raw) def __str__(self): return "%4.4d-%2.2d-%2.2d %2.2d:%2.2d:%09.6f" % ( self.year(), self.month(), self.day(), self.hour(), self.minute(), self.second()) def year(self): return self._elements[0] def month(self): return self._elements[1] def day(self): return self._elements[2] def hour(self): return self._elements[3] def minute(self): return self._elements[4] def second(self): return self._elements[5] def timeTime(self): """ -> seconds since epoch, as a float. """ delta = _makeUTC(*self._elements) - _EPOCH return delta.days * 86400 + delta.seconds + delta.microseconds / 1e6 def laterThan(self, other): """ Return a timestamp instance which is later than 'other'. If self already qualifies, return self. Otherwise, return a new instance one moment later than 'other'. 
""" if not isinstance(other, self.__class__): raise ValueError() # pylint:disable=protected-access if self._raw > other._raw: return self a, b = struct.unpack('>II', other._raw) later = struct.pack('>II', a, b + 1) return self.__class__(later) def __eq__(self, other): try: return self.raw() == other.raw() except AttributeError: return NotImplemented def __ne__(self, other): try: return self.raw() != other.raw() except AttributeError: return NotImplemented def __hash__(self): # Match the C implementation a = bytearray(self._raw) x = a[0] << 7 for i in a: x = (1000003 * x) ^ i x ^= 8 x = _wraparound(x) if x == -1: # pragma: no cover # The C version has this condition, but it's not clear # why; it's also not immediately obvious what bytestring # would generate this---hence the no-cover x = -2 return x # Now the rest of the comparison operators # Sigh. Python 2.6 doesn't have functools.total_ordering # so we have to do it by hand def __lt__(self, other): try: return self.raw() < other.raw() except AttributeError: return NotImplemented def __gt__(self, other): try: return self.raw() > other.raw() except AttributeError: return NotImplemented def __le__(self, other): try: return self.raw() <= other.raw() except AttributeError: return NotImplemented def __ge__(self, other): try: return self.raw() >= other.raw() except AttributeError: return NotImplemented # This name is bound by the ``@use_c_impl`` decorator to the class defined above. # We make sure and list it statically, though, to help out linters. TimeStampPy = TimeStampPy # pylint:disable=undefined-variable,self-assigning-variable ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1585243094.0 persistent-4.6.4/persistent/wref.py0000644000076500000240000001000300000000000020162 0ustar00jmaddenstaff00000000000000############################################################################## # # Copyright (c) 2003 Zope Foundation and Contributors. # All Rights Reserved. 
WeakRefMarker = object()


class WeakRef(object):
    """Persistent weak reference.

    Used much like a Python weak reference.  The major difference is
    that there is no way to register a callback for when the referent
    is removed from the database.
    """

    # The marker _p_oid lets the serialization machinery give weakrefs
    # special handling.
    _p_oid = WeakRefMarker

    def __init__(self, ob):
        self._v_ob = ob
        self.oid = ob._p_oid
        self.dm = ob._p_jar
        if self.dm is not None:
            self.database_name = self.dm.db().database_name

    def __call__(self):
        try:
            return self._v_ob
        except AttributeError:
            pass
        # Volatile cache is gone; try to reload from the data manager.
        try:
            self._v_ob = self.dm[self.oid]
        except (KeyError, AttributeError):
            return None
        return self._v_ob

    def __hash__(self):
        target = self()
        if target is None:
            raise TypeError('Weakly-referenced object has gone away')
        return hash(target)

    def __eq__(self, other):
        # Non-WeakRef objects are simply unequal (belt-and-suspenders:
        # the tests rely on this returning False, not NotImplemented).
        if not isinstance(other, WeakRef):
            return False
        mine = self()
        if mine is None:
            raise TypeError('Weakly-referenced object has gone away')
        theirs = other()
        if theirs is None:
            raise TypeError('Weakly-referenced object has gone away')
        return mine == theirs
def __init__(self, adict=None, **kwargs): self.data = {} if adict is not None: keys = getattr(adict, "keys", None) if keys is None: adict = dict(adict) self.update(adict) # XXX 'kwargs' is pointless, because keys must be strings, but we # are going to try (and fail) to wrap a WeakRef around them. if kwargs: # pragma: no cover self.update(kwargs) def __getstate__(self): state = Persistent.__getstate__(self) state['data'] = list(state['data'].items()) return state def __setstate__(self, state): state['data'] = dict([ (k, v) for (k, v) in state['data'] if k() is not None ]) Persistent.__setstate__(self, state) def __setitem__(self, key, value): self.data[WeakRef(key)] = value def __getitem__(self, key): return self.data[WeakRef(key)] def __delitem__(self, key): del self.data[WeakRef(key)] def get(self, key, default=None): """D.get(k[, d]) -> D[k] if k in D, else d. """ return self.data.get(WeakRef(key), default) def __contains__(self, key): return WeakRef(key) in self.data def __iter__(self): for k in self.data: yield k() def update(self, adict): if isinstance(adict, PersistentWeakKeyDictionary): self.data.update(adict.data) else: for k, v in adict.items(): self.data[WeakRef(k)] = v # TODO: May need more methods and tests. 
././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1585243094.9905174 persistent-4.6.4/persistent.egg-info/0000755000076500000240000000000000000000000020345 5ustar00jmaddenstaff00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1585243094.0 persistent-4.6.4/persistent.egg-info/PKG-INFO0000644000076500000240000005327400000000000021455 0ustar00jmaddenstaff00000000000000Metadata-Version: 2.1 Name: persistent Version: 4.6.4 Summary: Translucent persistent objects Home-page: https://github.com/zopefoundation/persistent/ Author: Zope Corporation Author-email: zodb-dev@zope.org License: ZPL 2.1 Description: ``persistent``: automatic persistence for Python objects ========================================================= .. image:: https://travis-ci.org/zopefoundation/persistent.svg?branch=master :target: https://travis-ci.org/zopefoundation/persistent .. image:: https://coveralls.io/repos/github/zopefoundation/persistent/badge.svg?branch=master :target: https://coveralls.io/github/zopefoundation/persistent?branch=master .. image:: https://readthedocs.org/projects/persistent/badge/?version=latest :target: http://persistent.readthedocs.org/en/latest/ :alt: Documentation Status .. image:: https://img.shields.io/pypi/v/persistent.svg :target: https://pypi.org/project/persistent :alt: Latest release .. image:: https://img.shields.io/pypi/pyversions/persistent.svg :target: https://pypi.org/project/persistent :alt: Python versions This package contains a generic persistence implementation for Python. It forms the core protocol for making objects interact "transparently" with a database such as the ZODB. Please see the Sphinx documentation (``docs/index.rst``) for further information, or view the documentation at Read The Docs, for either the latest (``http://persistent.readthedocs.io/en/latest/``) or stable release (``http://persistent.readthedocs.io/en/stable/``). .. 
note:: Use of this standalone ``persistent`` release is not recommended or supported with ZODB < 3.11. ZODB 3.10 and earlier bundle their own version of the ``persistent`` package. ``persistent`` Changelog ======================== 4.6.4 (2020-03-26) ------------------ - Fix an overly specific test failure using zope.interface 5. See `issue 144 `_. - Fix two reference leaks that could theoretically occur as the result of obscure errors. See `issue 143 `_. 4.6.3 (2020-03-18) ------------------ - Fix a crash in the test suite under a 32-bit CPython on certain 32-bit platforms. See `issue 137 `_. Fix by `Jerry James `_. 4.6.2 (2020-03-12) ------------------ - Fix an ``AssertionError`` clearing a non-empty ``PersistentMapping`` that has no connection. See `issue 139 `_. 4.6.1 (2020-03-06) ------------------ - Stop installing C header files on PyPy (which is what persistent before 4.6.0 used to do), fixes `issue 135 `_. 4.6.0 (2020-03-05) ------------------ - Fix slicing of ``PersistentList`` to always return instances of the same class. It was broken on Python 3 prior to 3.7.4. - Fix copying of ``PersistentList`` and ``PersistentMapping`` using ``copy.copy`` to also copy the underlying data object. This was broken prior to Python 3.7.4. - Update the handling of the ``PURE_PYTHON`` environment variable. Now, a value of "0" requires that the C extensions be used; any other non-empty value prevents the extensions from being used. Also, all C extensions are required together or none of them will be used. This prevents strange errors that arise from a mismatch of Python and C implementations. See `issue 131 `_. Note that some private implementation details such as the names of the pure-Python implementations have changed. - Fix ``PersistentList`` to mark itself as changed after calling ``clear`` (if needed). See `PR 115 `_. - Fix ``PersistentMapping.update`` to accept keyword arguments like the native ``UserDict``. 
Previously, most uses of keyword arguments resulted in ``TypeError``; in the undocumented and extremely unlikely event of a single keyword argument called ``b`` that happens to be a dictionary, the behaviour will change. Also adjust the signatures of ``setdefault`` and ``pop`` to match the native version. - Fix ``PersistentList.clear``, ``PersistentMapping.clear`` and ``PersistentMapping.popitem`` to no longer mark the object as changed if it was empty. - Add preliminary support for Python 3.9a3+. See `issue 124 `_. - Fix the Python implementation of the PickleCache to be able to store objects that cannot be weakly referenced. See `issue 133 `_. Note that ``ctypes`` is required to use the Python implementation (except on PyPy). 4.5.1 (2019-11-06) ------------------ - Add support for Python 3.8. - Update documentation to Python 3. 4.5.0 (2019-05-09) ------------------ - Fully test the C implementation of the PickleCache, and fix discrepancies between it and the Python implementation: - The C implementation now raises ``ValueError`` instead of ``AssertionError`` for certain types of bad inputs. - The Python implementation uses the C wording for error messages. - The C implementation properly implements ``IPickleCache``; methods unique to the Python implementation were moved to ``IExtendedPickleCache``. - The Python implementation raises ``AttributeError`` if a persistent class doesn't have a ``p_jar`` attribute. See `issue 102 `_. - Allow sweeping cache without ``cache_size``. ``cache_size_bytes`` works with ``cache_size=0``, no need to set ``cache_size`` to a large value. - Require ``CFFI`` on CPython for pure-Python operation. This drops support for Jython (which was untested). See `issue 77 `_. - Fix DeprecationWarning about ``PY_SSIZE_T_CLEAN``. See `issue 108 `_. - Drop support for Python 3.4. 4.4.3 (2018-10-22) ------------------ - Fix the repr of the persistent objects to include the module name when using the C extension. 
This matches the pure-Python behaviour and the behaviour prior to 4.4.0. See `issue 92 `_. - Change the repr of persistent objects to format the OID as in integer in hexadecimal notation if it is an 8-byte byte string, as ZODB does. This eliminates some issues in doctests. See `issue 95 `_. 4.4.2 (2018-08-28) ------------------ - Explicitly use unsigned constants for packing and unpacking C timestamps, fixing an arithmetic issue for GCC when optimizations are enabled and ``-fwrapv`` is *not* enabled. See `issue 86 `_. 4.4.1 (2018-08-23) ------------------ - Fix installation of source packages on PyPy. See `issue 88 `_. 4.4.0 (2018-08-22) ------------------ - Use unsigned constants when doing arithmetic on C timestamps, possibly avoiding some overflow issues with some compilers or compiler settings. See `issue 86 `_. - Change the default representation of ``Persistent`` objects to include the representation of their OID and jar, if set. Also add the ability for subclasses to implement ``_p_repr()`` instead of overriding ``__repr__`` for better exception handling. See `issue 11 `_. - Reach and maintain 100% test coverage. - Simplify ``__init__.py``, including removal of an attempted legacy import of ``persistent.TimeStamp``. See `PR 80 `_. - Add support for Python 3.7 and drop support for Python 3.3. - Build the CFFI modules (used on PyPy or when PURE_PYTHON is set) `at installation or wheel building time `_ when CFFI is available. This replaces `the deprecated way `_ of building them at import time. If binary wheels are distributed, it eliminates the need to have a functioning C compiler to use PyPy. See `issue 75 `_. - Fix deleting the ``_p_oid`` of a pure-Python persistent object when it is in a cache. - Fix deleting special (``_p``) attributes of a pure-Python persistent object that overrides ``__delattr__`` and correctly calls ``_p_delattr``. - Remove some internal compatibility shims that are no longer necessary. See `PR 82 `_. 
- Make the return value of ``TimeStamp.second()`` consistent across C and Python implementations when the ``TimeStamp`` was created from 6 arguments with floating point seconds. Also make it match across trips through ``TimeStamp.raw()``. Previously, the C version could initially have erroneous rounding and too much false precision, while the Python version could have too much precision. The raw/repr values have not changed. See `issue 41 `_. 4.3.0 (2018-07-30) ------------------ - Fix the possibility of a rare crash in the C extension when deallocating items. See https://github.com/zopefoundation/persistent/issues/66 - Change cPickleCache's comparison of object sizes to determine whether an object can go in the cache to use ``PyObject_TypeCheck()``. This matches what the pure Python implementation does and is a stronger test that the object really is compatible with the cache. Previously, an object could potentially include ``cPersistent_HEAD`` and *not* set ``tp_base`` to ``cPersistenceCAPI->pertype`` and still be eligible for the pickle cache; that is no longer the case. See `issue 69 `_. 4.2.4.2 (2017-04-23) -------------------- - Packaging-only release: fix Python 2.7 ``manylinux`` wheels. 4.2.4.1 (2017-04-21) -------------------- - Packaging-only release: get ``manylinux`` wheel built automatically. 4.2.4 (2017-03-20) ------------------ - Avoid raising a ``SystemError: error return without exception set`` when loading an object with slots whose jar generates an exception (such as a ZODB ``POSKeyError``) in ``setstate``. 4.2.3 (2017-03-08) ------------------ - Fix the hashcode of Python ``TimeStamp`` objects on 64-bit Python on Windows. See https://github.com/zopefoundation/persistent/pull/55 - Stop calling ``gc.collect`` every time ``PickleCache.incrgc`` is called (every transaction boundary) in pure-Python mode (PyPy). This means that the reported size of the cache may be wrong (until the next GC), but it is much faster. 
This should not have any observable effects for user code. - Stop clearing the dict and slots of objects added to ``PickleCache.new_ghost`` (typically these values are passed to ``__new__`` from the pickle data) in pure-Python mode (PyPy). This matches the behaviour of the C code. - Add support for Python 3.6. - Fix ``__setstate__`` interning when ``state`` parameter is not a built-in dict 4.2.2 (2016-11-29) ------------------ - Drop use of ``ctypes`` for determining maximum integer size, to increase pure-Python compatibility. See https://github.com/zopefoundation/persistent/pull/31 - Ensure that ``__slots__`` attributes are cleared when a persistent object is ghostified. (This excluses classes that override ``__new__``. See https://github.com/zopefoundation/persistent/wiki/Notes_on_state_new_and_slots if you're curious.) 4.2.1 (2016-05-26) ------------------ - Fix the hashcode of C ``TimeStamp`` objects on 64-bit Python 3 on Windows. 4.2.0 (2016-05-05) ------------------ - Fixed the Python(/PYPY) implementation ``TimeStamp.timeTime`` method to have subsecond precision. - When testing ``PURE_PYTHON`` environments under ``tox``, avoid poisoning the user's global wheel cache. - Add support for Python 3.5. - Drop support for Python 2.6 and 3.2. 4.1.1 (2015-06-02) ------------------ - Fix manifest and re-upload to fix stray files included in 4.1.0. 4.1.0 (2015-05-19) ------------------ - Make the Python implementation of ``Persistent`` and ``PickleCache`` behave more similarly to the C implementation. In particular, the Python version can now run the complete ZODB and ZEO test suites. - Fix the hashcode of the Python ``TimeStamp`` on 32-bit platforms. 4.0.9 (2015-04-08) ------------------ - Make the C and Python ``TimeStamp`` objects behave more alike. The Python version now produces the same ``repr`` and ``.raw()`` output as the C version, and has the same hashcode. In addition, the Python version is now supports ordering and equality like the C version. 
- Intern keys of object state in ``__setstate__`` to reduce memory usage when unpickling multiple objects with the same attributes. - Add support for PyPy3. - 100% branch coverage. 4.0.8 (2014-03-20) ------------------ - Add support for Python 3.4. - In pure-Python ``Persistent``, avoid loading state in ``_p_activate`` for non-ghost objects (which could corrupt their state). (PR #9) - In pure-Python, and don't throw ``POSKeyError`` if ``_p_activate`` is called on an object that has never been committed. (PR #9) - In pure-Python ``Persistent``, avoid calling a subclass's ``__setattr__`` at instance creation time. (PR #8) - Make it possible to delete ``_p_jar`` / ``_p_oid`` of a pure-Python ``Persistent`` object which has been removed from the jar's cache (fixes aborting a ZODB Connection that has added objects). (PR #7) 4.0.7 (2014-02-20) ------------------ - Avoid a KeyError from ``_p_accessed()`` on newly-created objects under pure-Python: these objects may be assigned to a jar, but not yet added to its cache. (PR #6) - Avoid a failure in ``Persistent.__setstate__`` when the state dict contains exactly two keys. (PR #5) - Fix a hang in ``picklecache`` invalidation if OIDs are manually passed out-of-order. (PR #4) - Add ``PURE_PYTHON`` environment variable support: if set, the C extensions will not be built, imported, or tested. 4.0.6 (2013-01-03) ------------------ - Updated Trove classifiers. 4.0.5 (2012-12-14) ------------------ - Fixed the C-extensions under Py3k (previously they compiled but were not importable). 4.0.4 (2012-12-11) ------------------ - Added support for Python 3.3. - C extenstions now build under Python 3.2, passing the same tests as the pure-Python reference implementation. 4.0.3 (2012-11-19) ------------------ - Fixed: In the C implimentation, an integer was compared with a pointer, with undefined results and a compiler warning. - Fixed: the Python implementation of the ``_p_estimated_size`` propety didn't support deletion. 
- Simplified implementation of the ``_p_estimated_size`` property to only accept integers. A TypeError is raised if an incorrect type is provided. 4.0.2 (2012-08-27) ------------------ - Correct initialization functions in renamed ``_timestamp`` extension. 4.0.1 (2012-08-26) ------------------ - Worked around test failure due to overflow to long on 32-bit systems. - Renamed ``TimeStamp`` extension module to avoid clash with pure-Python ``timestamp`` module on case-insensitive filesystems. N.B: the canonical way to import the ``TimeStamp`` class is now:: from persistent.timestamp import TimeStamp which will yield the class from the extension module (if available), falling back to the pure-Python reference implementation. 4.0.0 (2012-08-11) ------------------ Platform Changes ################ - Added explicit support for Python 3.2 and PyPy. - Note that the C implementations of Persistent, PickleCache, and Timestamp are not built (yet) on these platforms. - Dropped support for Python < 2.6. Testing Changes ############### - 100% unit test coverage. - Removed all ``ZODB``-dependent tests: - Rewrote some to avoid the dependency - Cloned the remainder into new ``ZODB.tests`` modules. - Refactored some doctests refactored as unittests. - Completed pure-Python reference implementations of 'Persistent', 'PickleCache', and 'TimeStamp'. - All covered platforms tested under ``tox``. - Added support for continuous integration using ``tox`` and ``jenkins``. - Added ``setup.py dev`` alias (installs ``nose`` and ``coverage``). - Dropped dependency on ``zope.testing`` / ``zope.testrunner``: tests now run with ``setup.py test``. Documentation Changes ##################### - Refactored many Doctests as Sphinx documentation (snippets are exercised via 'tox'). - Added ``setup.py docs`` alias (installs ``Sphinx`` and ``repoze.sphinx.autointerface``). 
Platform: any Classifier: Development Status :: 6 - Mature Classifier: License :: OSI Approved :: Zope Public License Classifier: Programming Language :: Python Classifier: Programming Language :: Python :: 2 Classifier: Programming Language :: Python :: 2.7 Classifier: Programming Language :: Python :: 3 Classifier: Programming Language :: Python :: 3.5 Classifier: Programming Language :: Python :: 3.6 Classifier: Programming Language :: Python :: 3.7 Classifier: Programming Language :: Python :: 3.8 Classifier: Programming Language :: Python :: Implementation :: CPython Classifier: Programming Language :: Python :: Implementation :: PyPy Classifier: Framework :: ZODB Classifier: Topic :: Database Classifier: Topic :: Software Development :: Libraries :: Python Modules Classifier: Operating System :: Microsoft :: Windows Classifier: Operating System :: Unix Provides-Extra: test Provides-Extra: testing Provides-Extra: docs ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1585243094.0 persistent-4.6.4/persistent.egg-info/SOURCES.txt0000644000076500000240000000265300000000000022237 0ustar00jmaddenstaff00000000000000.coveragerc .manylinux-install.sh .manylinux.sh .travis.yml CHANGES.rst COPYRIGHT.txt LICENSE.txt MANIFEST.in README.rst appveyor.yml bootstrap.py buildout.cfg rtd.txt setup.cfg setup.py tox.ini docs/Makefile docs/api.rst docs/conf.py docs/glossary.rst docs/index.rst docs/make.bat docs/using.rst docs/api/attributes.rst docs/api/cache.rst docs/api/collections.rst docs/api/interfaces.rst docs/api/pickling.rst persistent/__init__.py persistent/_compat.h persistent/_compat.py persistent/_ring_build.py persistent/_timestamp.c persistent/cPersistence.c persistent/cPersistence.h persistent/cPickleCache.c persistent/dict.py persistent/interfaces.py persistent/list.py persistent/mapping.py persistent/persistence.py persistent/picklecache.py persistent/ring.c persistent/ring.h persistent/ring.py persistent/timestamp.py 
persistent/wref.py persistent.egg-info/PKG-INFO persistent.egg-info/SOURCES.txt persistent.egg-info/dependency_links.txt persistent.egg-info/not-zip-safe persistent.egg-info/requires.txt persistent.egg-info/top_level.txt persistent/tests/__init__.py persistent/tests/attrhooks.py persistent/tests/cucumbers.py persistent/tests/test__compat.py persistent/tests/test_docs.py persistent/tests/test_list.py persistent/tests/test_mapping.py persistent/tests/test_persistence.py persistent/tests/test_picklecache.py persistent/tests/test_ring.py persistent/tests/test_timestamp.py persistent/tests/test_wref.py persistent/tests/utils.py././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1585243094.0 persistent-4.6.4/persistent.egg-info/dependency_links.txt0000644000076500000240000000000100000000000024413 0ustar00jmaddenstaff00000000000000 ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1585243094.0 persistent-4.6.4/persistent.egg-info/not-zip-safe0000644000076500000240000000000100000000000022573 0ustar00jmaddenstaff00000000000000 ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1585243094.0 persistent-4.6.4/persistent.egg-info/requires.txt0000644000076500000240000000023100000000000022741 0ustar00jmaddenstaff00000000000000zope.interface [:platform_python_implementation == "CPython"] cffi [docs] Sphinx repoze.sphinx.autointerface [test] zope.testrunner manuel [testing] ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1585243094.0 persistent-4.6.4/persistent.egg-info/top_level.txt0000644000076500000240000000001300000000000023071 0ustar00jmaddenstaff00000000000000persistent ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1585243094.0 persistent-4.6.4/rtd.txt0000644000076500000240000000003400000000000016002 
0ustar00jmaddenstaff00000000000000repoze.sphinx.autointerface ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1585243094.9947891 persistent-4.6.4/setup.cfg0000644000076500000240000000021500000000000016272 0ustar00jmaddenstaff00000000000000[aliases] dev = develop easy_install persistent[testing] docs = develop easy_install persistent[docs] [egg_info] tag_build = tag_date = 0 ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1585243094.0 persistent-4.6.4/setup.py0000644000076500000240000001051000000000000016162 0ustar00jmaddenstaff00000000000000############################################################################## # # Copyright (c) 2008 Zope Foundation and Contributors. # All Rights Reserved. # # This software is subject to the provisions of the Zope Public License, # Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. # THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED # WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS # FOR A PARTICULAR PURPOSE. # ############################################################################## import platform import os from setuptools import Extension from setuptools import find_packages from setuptools import setup version = '4.6.4' here = os.path.abspath(os.path.dirname(__file__)) def _read_file(filename): with open(os.path.join(here, filename)) as f: return f.read() README = (_read_file('README.rst') + '\n\n' + _read_file('CHANGES.rst')) define_macros = ( # We currently use macros like PyBytes_AS_STRING # and internal functions like _PyObject_GetDictPtr # that make it impossible to use the stable (limited) API. 
# ('Py_LIMITED_API', '0x03050000'), ) ext_modules = [ Extension( name='persistent.cPersistence', sources=[ 'persistent/cPersistence.c', 'persistent/ring.c', ], depends=[ 'persistent/cPersistence.h', 'persistent/ring.h', 'persistent/ring.c', ], define_macros=list(define_macros), ), Extension( name='persistent.cPickleCache', sources=[ 'persistent/cPickleCache.c', 'persistent/ring.c', ], depends=[ 'persistent/cPersistence.h', 'persistent/ring.h', 'persistent/ring.c', ], define_macros=list(define_macros), ), Extension( name='persistent._timestamp', sources=[ 'persistent/_timestamp.c', ], define_macros=list(define_macros), ), ] is_pypy = platform.python_implementation() == 'PyPy' if is_pypy: # Header installation doesn't work on PyPy: # https://github.com/zopefoundation/persistent/issues/135 headers = [] else: headers = [ 'persistent/cPersistence.h', 'persistent/ring.h', ] setup(name='persistent', version=version, description='Translucent persistent objects', long_description=README, classifiers=[ "Development Status :: 6 - Mature", "License :: OSI Approved :: Zope Public License", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy", "Framework :: ZODB", "Topic :: Database", "Topic :: Software Development :: Libraries :: Python Modules", "Operating System :: Microsoft :: Windows", "Operating System :: Unix", ], author="Zope Corporation", author_email="zodb-dev@zope.org", url="https://github.com/zopefoundation/persistent/", license="ZPL 2.1", platforms=["any"], # Make sure we don't get 'terryfy' included in wheels # created on macOS CI packages=find_packages(include=("persistent",)), 
include_package_data=True, zip_safe=False, ext_modules=ext_modules, cffi_modules=['persistent/_ring_build.py:ffi'], headers=headers, extras_require={ 'test': [ 'zope.testrunner', 'manuel', ], 'testing': (), 'docs': [ 'Sphinx', 'repoze.sphinx.autointerface', ], }, install_requires=[ 'zope.interface', "cffi ; platform_python_implementation == 'CPython'", ], setup_requires=[ "cffi ; platform_python_implementation == 'CPython'", ], entry_points={}) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1585243094.0 persistent-4.6.4/tox.ini0000644000076500000240000000206700000000000015773 0ustar00jmaddenstaff00000000000000[tox] envlist = py27,py35,py36,py37,py37-pure,py38,py39,py27-pure,pypy,pypy3,coverage,docs [testenv] deps = .[test] commands = zope-testrunner --test-path=. [] [testenv:py27-pure] basepython = python2.7 setenv = PURE_PYTHON = 1 [testenv:py37-pure] basepython = python3.7 setenv = PURE_PYTHON = 1 [testenv:coverage] usedevelop = true basepython = python3.6 commands = coverage run -m zope.testrunner --test-path=. python -c 'import os, subprocess; subprocess.check_call("coverage run -a -m zope.testrunner --test-path=.", env=dict(os.environ, PURE_PYTHON="1"), shell=True)' python -c 'import os, subprocess; subprocess.check_call("coverage run -a -m zope.testrunner --test-path=.", env=dict(os.environ, PURE_PYTHON="0"), shell=True)' coverage report --fail-under=100 deps = {[testenv]deps} coverage [testenv:docs] basepython = python3.6 commands = sphinx-build -b html -d docs/_build/doctrees docs docs/_build/html sphinx-build -b doctest -d docs/_build/doctrees docs docs/_build/doctest deps = .[docs]