persistent-4.2.2/0000755000076600000240000000000013017351012013613 5ustar jimstaff00000000000000persistent-4.2.2/.coveragerc0000644000076600000240000000013512577543740015760 0ustar jimstaff00000000000000[report] exclude_lines = # pragma: no cover class I[A-Z]\w+\((Interface|I[A-Z].*)\): persistent-4.2.2/.travis.yml0000644000076600000240000000364613017331206015740 0ustar jimstaff00000000000000language: python sudo: false matrix: include: - os: linux python: 2.7 - os: linux python: 3.3 - os: linux python: 3.4 - os: linux python: 3.5 - os: linux python: pypy - os: linux python: pypy3 - os: osx language: generic env: TERRYFY_PYTHON='homebrew 2' - os: osx language: generic env: TERRYFY_PYTHON='macpython 3.4' - os: osx language: generic env: TERRYFY_PYTHON='homebrew 3.5' before_install: - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then git clone https://github.com/MacPython/terryfy; fi - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then source terryfy/travis_tools.sh; fi - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then get_python_environment $TERRYFY_PYTHON venv; fi - if [[ "$TERRYFY_PYTHON" == "homebrew 3" ]]; then alias pip=`which pip3` ; fi install: - pip install -e . 
script: - python setup.py -q test -q notifications: email: false after_success: - echo [distutils] > ~/.pypirc - echo index-servers = pypi >> ~/.pypirc - echo [pypi] >> ~/.pypirc - echo repository=https://pypi.python.org/pypi >> ~/.pypirc - echo username=zope.wheelbuilder >> ~/.pypirc - echo password=$PYPIPASSWORD >> ~/.pypirc - if [[ $TRAVIS_TAG && "$TRAVIS_OS_NAME" == "osx" ]]; then pip install twine; fi - if [[ $TRAVIS_TAG && "$TRAVIS_OS_NAME" == "osx" ]]; then python setup.py bdist_wheel; fi - if [[ $TRAVIS_TAG && "$TRAVIS_OS_NAME" == "osx" ]]; then twine upload dist/*; fi env: global: secure: "avoDyPgppusNzldim5fq35sk7HAn3B8zs2KgAe+8Yr3MDpFaxNV96rhNkTdHgsNNw4N+PVew1sFUxLY7HzACyywLFPUPT+YMRcPeqiuekkQoQxatYR0dEfHrxPpNyvXRxIV+nrKTEaIxQNyDcJcyRw6K7NSNxuu2NR6Dj+xJmKY=" persistent-4.2.2/bootstrap.py0000644000076600000240000001644212712637364016233 0ustar jimstaff00000000000000############################################################################## # # Copyright (c) 2006 Zope Foundation and Contributors. # All Rights Reserved. # # This software is subject to the provisions of the Zope Public License, # Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. # THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED # WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS # FOR A PARTICULAR PURPOSE. # ############################################################################## """Bootstrap a buildout-based project Simply run this script in a directory containing a buildout.cfg. The script accepts buildout command-line options, so you can use the -c option to specify an alternate configuration file. """ import os import shutil import sys import tempfile from optparse import OptionParser __version__ = '2015-07-01' # See zc.buildout's changelog if this version is up to date. 
tmpeggs = tempfile.mkdtemp(prefix='bootstrap-') usage = '''\ [DESIRED PYTHON FOR BUILDOUT] bootstrap.py [options] Bootstraps a buildout-based project. Simply run this script in a directory containing a buildout.cfg, using the Python that you want bin/buildout to use. Note that by using --find-links to point to local resources, you can keep this script from going over the network. ''' parser = OptionParser(usage=usage) parser.add_option("--version", action="store_true", default=False, help=("Return bootstrap.py version.")) parser.add_option("-t", "--accept-buildout-test-releases", dest='accept_buildout_test_releases', action="store_true", default=False, help=("Normally, if you do not specify a --version, the " "bootstrap script and buildout gets the newest " "*final* versions of zc.buildout and its recipes and " "extensions for you. If you use this flag, " "bootstrap and buildout will get the newest releases " "even if they are alphas or betas.")) parser.add_option("-c", "--config-file", help=("Specify the path to the buildout configuration " "file to be used.")) parser.add_option("-f", "--find-links", help=("Specify a URL to search for buildout releases")) parser.add_option("--allow-site-packages", action="store_true", default=False, help=("Let bootstrap.py use existing site packages")) parser.add_option("--buildout-version", help="Use a specific zc.buildout version") parser.add_option("--setuptools-version", help="Use a specific setuptools version") parser.add_option("--setuptools-to-dir", help=("Allow for re-use of existing directory of " "setuptools versions")) options, args = parser.parse_args() if options.version: print("bootstrap.py version %s" % __version__) sys.exit(0) ###################################################################### # load/install setuptools try: from urllib.request import urlopen except ImportError: from urllib2 import urlopen ez = {} if os.path.exists('ez_setup.py'): exec(open('ez_setup.py').read(), ez) else: 
exec(urlopen('https://bootstrap.pypa.io/ez_setup.py').read(), ez) if not options.allow_site_packages: # ez_setup imports site, which adds site packages # this will remove them from the path to ensure that incompatible versions # of setuptools are not in the path import site # inside a virtualenv, there is no 'getsitepackages'. # We can't remove these reliably if hasattr(site, 'getsitepackages'): for sitepackage_path in site.getsitepackages(): # Strip all site-packages directories from sys.path that # are not sys.prefix; this is because on Windows # sys.prefix is a site-package directory. if sitepackage_path != sys.prefix: sys.path[:] = [x for x in sys.path if sitepackage_path not in x] setup_args = dict(to_dir=tmpeggs, download_delay=0) if options.setuptools_version is not None: setup_args['version'] = options.setuptools_version if options.setuptools_to_dir is not None: setup_args['to_dir'] = options.setuptools_to_dir ez['use_setuptools'](**setup_args) import setuptools import pkg_resources # This does not (always?) update the default working set. We will # do it. 
for path in sys.path: if path not in pkg_resources.working_set.entries: pkg_resources.working_set.add_entry(path) ###################################################################### # Install buildout ws = pkg_resources.working_set setuptools_path = ws.find( pkg_resources.Requirement.parse('setuptools')).location # Fix sys.path here as easy_install.pth added before PYTHONPATH cmd = [sys.executable, '-c', 'import sys; sys.path[0:0] = [%r]; ' % setuptools_path + 'from setuptools.command.easy_install import main; main()', '-mZqNxd', tmpeggs] find_links = os.environ.get( 'bootstrap-testing-find-links', options.find_links or ('http://downloads.buildout.org/' if options.accept_buildout_test_releases else None) ) if find_links: cmd.extend(['-f', find_links]) requirement = 'zc.buildout' version = options.buildout_version if version is None and not options.accept_buildout_test_releases: # Figure out the most recent final version of zc.buildout. import setuptools.package_index _final_parts = '*final-', '*final' def _final_version(parsed_version): try: return not parsed_version.is_prerelease except AttributeError: # Older setuptools for part in parsed_version: if (part[:1] == '*') and (part not in _final_parts): return False return True index = setuptools.package_index.PackageIndex( search_path=[setuptools_path]) if find_links: index.add_find_links((find_links,)) req = pkg_resources.Requirement.parse(requirement) if index.obtain(req) is not None: best = [] bestv = None for dist in index[req.project_name]: distv = dist.parsed_version if _final_version(distv): if bestv is None or distv > bestv: best = [dist] bestv = distv elif distv == bestv: best.append(dist) if best: best.sort() version = best[-1].version if version: requirement = '=='.join((requirement, version)) cmd.append(requirement) import subprocess if subprocess.call(cmd) != 0: raise Exception( "Failed to execute command:\n%s" % repr(cmd)[1:-1]) ###################################################################### 
# Import and run buildout ws.add_entry(tmpeggs) ws.require(requirement) import zc.buildout.buildout if not [a for a in args if '=' not in a]: args.append('bootstrap') # if -c was provided, we push it back into args for buildout' main function if options.config_file is not None: args[0:0] = ['-c', options.config_file] zc.buildout.buildout.main(args) shutil.rmtree(tmpeggs) persistent-4.2.2/buildout.cfg0000644000076600000240000000030412577543740016145 0ustar jimstaff00000000000000[buildout] develop = . parts = test scripts [test] recipe = zc.recipe.testrunner eggs = persistent [test] [scripts] recipe = zc.recipe.egg eggs = persistent [test] interpreter = py persistent-4.2.2/CHANGES.rst0000644000076600000240000001274613017350561015437 0ustar jimstaff00000000000000``persistent`` Changelog ======================== 4.2.2 (2016-11-29) ------------------ - Drop use of ``ctypes`` for determining maximum integer size, to increase pure-Python compatibility. - Ensure that ``__slots__`` attributes are cleared when a persistent object is ghostified. (This excluses classes that override ``__new__``. See https://github.com/zopefoundation/persistent/wiki/Notes_on_state_new_and_slots if you're curious.) 4.2.1 (2016-05-26) ------------------ - Fix the hashcode of C ``TimeStamp`` objects on 64-bit Python 3 on Windows. 4.2.0 (2016-05-05) ------------------ - Fixed the Python(/PYPY) implementation ``TimeStamp.timeTime`` method to have subsecond precision. - When testing ``PURE_PYTHON`` environments under ``tox``, avoid poisoning the user's global wheel cache. - Add support for Python 3.5. - Drop support for Python 2.6 and 3.2. 4.1.1 (2015-06-02) ------------------ - Fix manifest and re-upload to fix stray files included in 4.1.0. 4.1.0 (2015-05-19) ------------------ - Make the Python implementation of ``Persistent`` and ``PickleCache`` behave more similarly to the C implementation. In particular, the Python version can now run the complete ZODB and ZEO test suites. 
- Fix the hashcode of the Python ``TimeStamp`` on 32-bit platforms. 4.0.9 (2015-04-08) ------------------ - Make the C and Python ``TimeStamp`` objects behave more alike. The Python version now produces the same ``repr`` and ``.raw()`` output as the C version, and has the same hashcode. In addition, the Python version is now supports ordering and equality like the C version. - Intern keys of object state in ``__setstate__`` to reduce memory usage when unpickling multiple objects with the same attributes. - Add support for PyPy3. - 100% branch coverage. 4.0.8 (2014-03-20) ------------------ - Add support for Python 3.4. - In pure-Python ``Persistent``, avoid loading state in ``_p_activate`` for non-ghost objects (which could corrupt their state). (PR #9) - In pure-Python, and don't throw ``POSKeyError`` if ``_p_activate`` is called on an object that has never been committed. (PR #9) - In pure-Python ``Persistent``, avoid calling a subclass's ``__setattr__`` at instance creation time. (PR #8) - Make it possible to delete ``_p_jar`` / ``_p_oid`` of a pure-Python ``Persistent`` object which has been removed from the jar's cache (fixes aborting a ZODB Connection that has added objects). (PR #7) 4.0.7 (2014-02-20) ------------------ - Avoid a KeyError from ``_p_accessed()`` on newly-created objects under pure-Python: these objects may be assigned to a jar, but not yet added to its cache. (PR #6) - Avoid a failure in ``Persistent.__setstate__`` when the state dict contains exactly two keys. (PR #5) - Fix a hang in ``picklecache`` invalidation if OIDs are manually passed out-of-order. (PR #4) - Add ``PURE_PYTHON`` environment variable support: if set, the C extensions will not be built, imported, or tested. 4.0.6 (2013-01-03) ------------------ - Updated Trove classifiers. 4.0.5 (2012-12-14) ------------------ - Fixed the C-extensions under Py3k (previously they compiled but were not importable). 4.0.4 (2012-12-11) ------------------ - Added support for Python 3.3. 
- C extenstions now build under Python 3.2, passing the same tests as the pure-Python reference implementation. 4.0.3 (2012-11-19) ------------------ - Fixed: In the C implimentation, an integer was compared with a pointer, with undefined results and a compiler warning. - Fixed: the Python implementation of the ``_p_estimated_size`` propety didn't support deletion. - Simplified implementation of the ``_p_estimated_size`` property to only accept integers. A TypeError is raised if an incorrect type is provided. 4.0.2 (2012-08-27) ------------------ - Correct initialization functions in renamed ``_timestamp`` extension. 4.0.1 (2012-08-26) ------------------ - Worked around test failure due to overflow to long on 32-bit systems. - Renamed ``TimeStamp`` extension module to avoid clash with pure-Python ``timestamp`` module on case-insensitive filesystems. N.B: the canonical way to import the ``TimeStamp`` class is now:: from persistent.timestamp import TimeStamp which will yield the class from the extension module (if available), falling back to the pure-Python reference implementation. 4.0.0 (2012-08-11) ------------------ Platform Changes ################ - Added explicit support for Python 3.2 and PyPy. - Note that the C implementations of Persistent, PickleCache, and Timestamp are not built (yet) on these platforms. - Dropped support for Python < 2.6. Testing Changes ############### - 100% unit test coverage. - Removed all ``ZODB``-dependent tests: - Rewrote some to avoid the dependency - Cloned the remainder into new ``ZODB.tests`` modules. - Refactored some doctests refactored as unittests. - Completed pure-Python reference implementations of 'Persistent', 'PickleCache', and 'TimeStamp'. - All covered platforms tested under ``tox``. - Added support for continuous integration using ``tox`` and ``jenkins``. - Added ``setup.py dev`` alias (installs ``nose`` and ``coverage``). 
- Dropped dependency on ``zope.testing`` / ``zope.testrunner``: tests now run with ``setup.py test``. Documentation Changes ##################### - Refactored many Doctests as Sphinx documentation (snippets are exercised via 'tox'). - Added ``setup.py docs`` alias (installs ``Sphinx`` and ``repoze.sphinx.autointerface``). persistent-4.2.2/COPYRIGHT.txt0000644000076600000240000000004012577543740015743 0ustar jimstaff00000000000000Zope Foundation and Contributorspersistent-4.2.2/docs/0000755000076600000240000000000013017351012014543 5ustar jimstaff00000000000000persistent-4.2.2/docs/api/0000755000076600000240000000000013017351012015314 5ustar jimstaff00000000000000persistent-4.2.2/docs/api/attributes.rst0000644000076600000240000001403012577543740020257 0ustar jimstaff00000000000000Customizing Attribute Access ============================ Hooking :meth:`__getattr__` --------------------------- The __getattr__ method works pretty much the same for persistent classes as it does for other classes. No special handling is needed. If an object is a ghost, then it will be activated before __getattr__ is called. In this example, our objects returns a tuple with the attribute name, converted to upper case and the value of _p_changed, for any attribute that isn't handled by the default machinery. .. doctest:: >>> from persistent.tests.attrhooks import OverridesGetattr >>> o = OverridesGetattr() >>> o._p_changed False >>> o._p_oid >>> o._p_jar >>> o.spam ('SPAM', False) >>> o.spam = 1 >>> o.spam 1 We'll save the object, so it can be deactivated: .. doctest:: >>> from persistent.tests.attrhooks import _resettingJar >>> jar = _resettingJar() >>> jar.add(o) >>> o._p_deactivate() >>> o._p_changed And now, if we ask for an attribute it doesn't have, .. doctest:: >>> o.eggs ('EGGS', False) And we see that the object was activated before calling the :meth:`__getattr__` method. 
Hooking All Access ------------------ In this example, we'll provide an example that shows how to override the :meth:`__getattribute__`, :meth:`__setattr__`, and :meth:`__delattr__` methods. We'll create a class that stores it's attributes in a secret dictionary within the instance dictionary. The class will have the policy that variables with names starting with ``tmp_`` will be volatile. Our sample class takes initial values as keyword arguments to the constructor: .. doctest:: >>> from persistent.tests.attrhooks import VeryPrivate >>> o = VeryPrivate(x=1) Hooking :meth:`__getattribute__`` ################################# The :meth:`__getattribute__` method is called for all attribute accesses. It overrides the attribute access support inherited from Persistent. .. doctest:: >>> o._p_changed False >>> o._p_oid >>> o._p_jar >>> o.x 1 >>> o.y Traceback (most recent call last): ... AttributeError: y Next, we'll save the object in a database so that we can deactivate it: .. doctest:: >>> from persistent.tests.attrhooks import _rememberingJar >>> jar = _rememberingJar() >>> jar.add(o) >>> o._p_deactivate() >>> o._p_changed And we'll get some data: .. doctest:: >>> o.x 1 which activates the object: .. doctest:: >>> o._p_changed False It works for missing attribes too: .. doctest:: >>> o._p_deactivate() >>> o._p_changed >>> o.y Traceback (most recent call last): ... AttributeError: y >>> o._p_changed False Hooking :meth:`__setattr__`` ############################ The :meth:`__setattr__` method is called for all attribute assignments. It overrides the attribute assignment support inherited from Persistent. Implementors of :meth:`__setattr__` methods: 1. Must call Persistent._p_setattr first to allow it to handle some attributes and to make sure that the object is activated if necessary, and 2. Must set _p_changed to mark objects as changed. .. doctest:: >>> o = VeryPrivate() >>> o._p_changed False >>> o._p_oid >>> o._p_jar >>> o.x Traceback (most recent call last): ... 
AttributeError: x >>> o.x = 1 >>> o.x 1 Because the implementation doesn't store attributes directly in the instance dictionary, we don't have a key for the attribute: .. doctest:: >>> 'x' in o.__dict__ False Next, we'll give the object a "remembering" jar so we can deactivate it: .. doctest:: >>> jar = _rememberingJar() >>> jar.add(o) >>> o._p_deactivate() >>> o._p_changed We'll modify an attribute .. doctest:: >>> o.y = 2 >>> o.y 2 which reactivates it, and markes it as modified, because our implementation marked it as modified: .. doctest:: >>> o._p_changed True Now, if fake a commit: .. doctest:: >>> jar.fake_commit() >>> o._p_changed False And deactivate the object: .. doctest:: >>> o._p_deactivate() >>> o._p_changed and then set a variable with a name starting with ``tmp_``, The object will be activated, but not marked as modified, because our :meth:`__setattr__` implementation doesn't mark the object as changed if the name starts with ``tmp_``: .. doctest:: >>> o.tmp_foo = 3 >>> o._p_changed False >>> o.tmp_foo 3 Hooking :meth:`__delattr__`` ############################ The __delattr__ method is called for all attribute deletions. It overrides the attribute deletion support inherited from Persistent. Implementors of :meth:`__delattr__` methods: 1. Must call Persistent._p_delattr first to allow it to handle some attributes and to make sure that the object is activated if necessary, and 2. Must set _p_changed to mark objects as changed. .. doctest:: >>> o = VeryPrivate(x=1, y=2, tmp_z=3) >>> o._p_changed False >>> o._p_oid >>> o._p_jar >>> o.x 1 >>> del o.x >>> o.x Traceback (most recent call last): ... AttributeError: x Next, we'll save the object in a jar so that we can deactivate it: .. doctest:: >>> jar = _rememberingJar() >>> jar.add(o) >>> o._p_deactivate() >>> o._p_changed If we delete an attribute: .. doctest:: >>> del o.y The object is activated. It is also marked as changed because our implementation marked it as changed. .. 
doctest:: >>> o._p_changed True >>> o.y Traceback (most recent call last): ... AttributeError: y >>> o.tmp_z 3 Now, if fake a commit: .. doctest:: >>> jar.fake_commit() >>> o._p_changed False And deactivate the object: .. doctest:: >>> o._p_deactivate() >>> o._p_changed and then delete a variable with a name starting with ``tmp_``, The object will be activated, but not marked as modified, because our :meth:`__delattr__` implementation doesn't mark the object as changed if the name starts with ``tmp_``: .. doctest:: >>> del o.tmp_z >>> o._p_changed False >>> o.tmp_z Traceback (most recent call last): ... AttributeError: tmp_z persistent-4.2.2/docs/api/cache.rst0000644000076600000240000000343512577543740017143 0ustar jimstaff00000000000000Caching Persistent Objects ========================== Creating Objects ``de novo`` ---------------------------- Creating ghosts from scratch, as opposed to ghostifying a non-ghost is rather tricky. :class:`~persistent.interfaces.IPeristent` doesn't really provide the right interface given that: - :meth:`_p_deactivate` and :meth:`_p_invalidate` are overridable, and could assume that the object's state is properly initialized. - Assigning :attr:`_p_changed` to None just calls :meth:`_p_deactivate`. - Deleting :attr:`_p_changed` just calls :meth:`_p_invalidate`. .. note:: The current cache implementation is intimately tied up with the persistence implementation and has internal access to the persistence state. The cache implementation can update the persistence state for newly created and ininitialized objects directly. The future persistence and cache implementations will be far more decoupled. The persistence implementation will only manage object state and generate object-usage events. The cache implemnentation(s) will be rersponsible for managing persistence-related (meta-)state, such as _p_state, _p_changed, _p_oid, etc. So in that future implemention, the cache will be more central to managing object persistence information. 
Caches have a :meth:`new_ghost` method that: - adds an object to the cache, and - initializes its persistence data. .. doctest:: >>> import persistent >>> from persistent.tests.utils import ResettingJar >>> class C(persistent.Persistent): ... pass >>> jar = ResettingJar() >>> cache = persistent.PickleCache(jar, 10, 100) >>> ob = C.__new__(C) >>> cache.new_ghost('1', ob) >>> ob._p_changed >>> ob._p_jar is jar True >>> ob._p_oid '1' >>> cache.cache_non_ghost_count 0 persistent-4.2.2/docs/api/interfaces.rst0000644000076600000240000000053712577543740020223 0ustar jimstaff00000000000000:mod:`persistent.interfaces` =================================== .. automodule:: persistent.interfaces .. autointerface:: IPersistent :members: :member-order: bysource .. autointerface:: IPersistentDataManager :members: :member-order: bysource .. autointerface:: IPickleCache :members: :member-order: bysource persistent-4.2.2/docs/api/pickling.rst0000644000076600000240000000734012577543740017677 0ustar jimstaff00000000000000Pickling Persistent Objects =========================== Persistent objects are designed to make the standard Python pickling machinery happy: .. doctest:: >>> import pickle >>> from persistent.tests.cucumbers import Simple >>> from persistent.tests.cucumbers import print_dict >>> x = Simple('x', aaa=1, bbb='foo') >>> print_dict(x.__getstate__()) {'__name__': 'x', 'aaa': 1, 'bbb': 'foo'} >>> f, (c,), state = x.__reduce__() >>> f.__name__ '__newobj__' >>> f.__module__ 'copy_reg' >>> c.__name__ 'Simple' >>> print_dict(state) {'__name__': 'x', 'aaa': 1, 'bbb': 'foo'} >>> import pickle >>> pickle.loads(pickle.dumps(x)) == x True >>> pickle.loads(pickle.dumps(x, 0)) == x True >>> pickle.loads(pickle.dumps(x, 1)) == x True >>> pickle.loads(pickle.dumps(x, 2)) == x True >>> x.__setstate__({'z': 1}) >>> x.__dict__ {'z': 1} This support even works well for derived classes which customize pickling by overriding :meth:`__getnewargs__`, :meth:`__getstate__` and :meth:`__setstate__`. 
.. doctest:: >>> from persistent.tests.cucumbers import Custom >>> x = Custom('x', 'y') >>> x.__getnewargs__() ('x', 'y') >>> x.a = 99 >>> (f, (c, ax, ay), a) = x.__reduce__() >>> f.__name__ '__newobj__' >>> f.__module__ 'copy_reg' >>> c.__name__ 'Custom' >>> ax, ay, a ('x', 'y', 99) >>> pickle.loads(pickle.dumps(x)) == x True >>> pickle.loads(pickle.dumps(x, 0)) == x True >>> pickle.loads(pickle.dumps(x, 1)) == x True >>> pickle.loads(pickle.dumps(x, 2)) == x True The support works for derived classes which define :attr:`__slots__`. It ignores any slots which map onto the "persistent" namespace (prefixed with ``_p_``) or the "volatile" namespace (prefixed with ``_v_``): .. doctest:: >>> import copy_reg >>> from persistent.tests.cucumbers import SubSlotted >>> x = SubSlotted('x', 'y', 'z') Note that we haven't yet assiged a value to the ``s4`` attribute: .. doctest:: >>> d, s = x.__getstate__() >>> d >>> print_dict(s) {'s1': 'x', 's2': 'y', 's3': 'z'} >>> import pickle >>> pickle.loads(pickle.dumps(x)) == x True >>> pickle.loads(pickle.dumps(x, 0)) == x True >>> pickle.loads(pickle.dumps(x, 1)) == x True >>> pickle.loads(pickle.dumps(x, 2)) == x True After assigning it: .. doctest:: >>> x.s4 = 'spam' >>> d, s = x.__getstate__() >>> d >>> print_dict(s) {'s1': 'x', 's2': 'y', 's3': 'z', 's4': 'spam'} >>> pickle.loads(pickle.dumps(x)) == x True >>> pickle.loads(pickle.dumps(x, 0)) == x True >>> pickle.loads(pickle.dumps(x, 1)) == x True >>> pickle.loads(pickle.dumps(x, 2)) == x True :class:`persistent.Persistent` supports derived classes which have base classes defining :attr:`__slots`, but which do not define attr:`__slots__` themselves: .. 
doctest:: >>> from persistent.tests.cucumbers import SubSubSlotted >>> x = SubSubSlotted('x', 'y', 'z') >>> d, s = x.__getstate__() >>> print_dict(d) {} >>> print_dict(s) {'s1': 'x', 's2': 'y', 's3': 'z'} >>> import pickle >>> pickle.loads(pickle.dumps(x)) == x True >>> pickle.loads(pickle.dumps(x, 0)) == x True >>> pickle.loads(pickle.dumps(x, 1)) == x True >>> pickle.loads(pickle.dumps(x, 2)) == x True >>> x.s4 = 'spam' >>> x.foo = 'bar' >>> x.baz = 'bam' >>> d, s = x.__getstate__() >>> print_dict(d) {'baz': 'bam', 'foo': 'bar'} >>> print_dict(s) {'s1': 'x', 's2': 'y', 's3': 'z', 's4': 'spam'} >>> pickle.loads(pickle.dumps(x)) == x True >>> pickle.loads(pickle.dumps(x, 0)) == x True >>> pickle.loads(pickle.dumps(x, 1)) == x True >>> pickle.loads(pickle.dumps(x, 2)) == x True persistent-4.2.2/docs/api.rst0000644000076600000240000000025012577543740016070 0ustar jimstaff00000000000000:mod:`persistent` API documentation =================================== .. toctree:: :maxdepth: 2 api/interfaces api/attributes api/pickling api/cache persistent-4.2.2/docs/conf.py0000644000076600000240000002073512577543740016076 0ustar jimstaff00000000000000# -*- coding: utf-8 -*- # # persistent documentation build configuration file, created by # sphinx-quickstart on Wed Feb 16 20:50:32 2011. # # This file is execfile()d with the current directory set to its containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. import sys, os # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. 
#sys.path.insert(0, os.path.abspath('.')) # -- General configuration ----------------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. #needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. extensions = [ 'sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.intersphinx', 'sphinx.ext.todo', 'sphinx.ext.coverage', 'sphinx.ext.ifconfig', 'sphinx.ext.viewcode', 'repoze.sphinx.autointerface', ] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. #source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. project = u'persistent' copyright = u'2011, ZODB Developers ' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. version = '3.10' # The full version, including alpha/beta/rc tags. release = '3.10b1' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. #language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. #today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = ['_build'] # The reST default role (used for this markup: `text`) to use for all documents. #default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. 
#add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). #add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. #show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. #modindex_common_prefix = [] # -- Options for HTML output --------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. html_theme = 'default' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. #html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. #html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". #html_title = None # A shorter title for the navigation bar. Default is the same as html_title. #html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. #html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. #html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. 
#html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. #html_use_smartypants = True # Custom sidebar templates, maps document names to template names. #html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. #html_additional_pages = {} # If false, no module index is generated. #html_domain_indices = True # If false, no index is generated. #html_use_index = True # If true, the index is split into individual pages for each letter. #html_split_index = False # If true, links to the reST sources are added to the pages. #html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. #html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. #html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. #html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). #html_file_suffix = None # Output file base name for HTML help builder. htmlhelp_basename = 'persistentdoc' # -- Options for LaTeX output -------------------------------------------------- # The paper size ('letter' or 'a4'). #latex_paper_size = 'letter' # The font size ('10pt', '11pt' or '12pt'). #latex_font_size = '10pt' # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). latex_documents = [ ('index', 'persistent.tex', u'persistent Documentation', u'ZODB Developers \\textless{}zope-dev@zope.org\\textgreater{}', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. 
#latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. #latex_use_parts = False # If true, show page references after internal links. #latex_show_pagerefs = False # If true, show URL addresses after external links. #latex_show_urls = False # Additional stuff for the LaTeX preamble. #latex_preamble = '' # Documents to append as an appendix to all manuals. #latex_appendices = [] # If false, no module index is generated. #latex_domain_indices = True # -- Options for manual page output -------------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ ('index', 'persistent', u'persistent Documentation', [u'ZODB Developers '], 1) ] # -- Options for Epub output --------------------------------------------------- # Bibliographic Dublin Core info. epub_title = u'persistent' epub_author = u'ZODB Developers ' epub_publisher = u'ZODB Developers ' epub_copyright = u'2011, ZODB Developers ' # The language of the text. It defaults to the language option # or en if the language is not set. #epub_language = '' # The scheme of the identifier. Typical schemes are ISBN or URL. #epub_scheme = '' # The unique identifier of the text. This can be a ISBN number # or the project homepage. #epub_identifier = '' # A unique identification for the text. #epub_uid = '' # HTML files that should be inserted before the pages created by sphinx. # The format is a list of tuples containing the path and title. #epub_pre_files = [] # HTML files shat should be inserted after the pages created by sphinx. # The format is a list of tuples containing the path and title. #epub_post_files = [] # A list of files that should not be packed into the epub file. #epub_exclude_files = [] # The depth of the table of contents in toc.ncx. #epub_tocdepth = 3 # Allow duplicate toc entries. 
#epub_tocdup = True # Example configuration for intersphinx: refer to the Python standard library. intersphinx_mapping = {'http://docs.python.org/': None} persistent-4.2.2/docs/glossary.rst0000644000076600000240000000273212577543740017171 0ustar jimstaff00000000000000.. _glossary: Glossary ======== .. glossary:: :sorted: data manager The object responsible for storing and loading an object's :term:`pickled data` in a backing store. Also called a :term:`jar`. jar Alias for :term:`data manager`: short for "pickle jar", because it traditionally holds the :term:`pickled data` of persistent objects. object cache An MRU cache for objects associated with a given :term:`data manager`. ghost An object whose :term:`pickled data` has not yet been loaded from its :term:`jar`. Accessing or mutating any of its attributes causes that data to be loaded, which is referred to as :term:`activation`. volatile attribute Attributes of a persistent object which are *not* caputured as part of its :term:`pickled data`. These attributes thus disappear during :term:`deactivation` or :term:`invalidation`. pickled data The serialized data of a persistent object, stored in and retrieved from a backing store by a :term:`data manager`. activation Moving an object from the ``GHOST`` state to the ``UPTODATE`` state, load its :term:`pickled data` from its :term:`jar`. deactivation Moving an object from the ``UPTODATE`` state to the ``GHOST`` state, discarding its :term:`pickled data`. invalidation Moving an object from either the ``UPTODATE`` state or the ``CHANGED`` state to the ``GHOST`` state, discarding its :term:`pickled data`. persistent-4.2.2/docs/index.rst0000644000076600000240000000071612577543740016435 0ustar jimstaff00000000000000:mod:`persistent`: automatic persistence for Python objects ============================================================ This package contains a generic persistence implementation for Python. 
It forms the core protocol for making objects interact "transparently" with a database such as the ZODB. Contents: .. toctree:: :maxdepth: 2 using api glossary Indices and tables ================== * :ref:`genindex` * :ref:`modindex` * :ref:`search` persistent-4.2.2/docs/make.bat0000644000076600000240000001064712577543740016205 0ustar jimstaff00000000000000@ECHO OFF REM Command file for Sphinx documentation if "%SPHINXBUILD%" == "" ( set SPHINXBUILD=sphinx-build ) set BUILDDIR=_build set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . if NOT "%PAPER%" == "" ( set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% ) if "%1" == "" goto help if "%1" == "help" ( :help echo.Please use `make ^` where ^ is one of echo. html to make standalone HTML files echo. dirhtml to make HTML files named index.html in directories echo. singlehtml to make a single large HTML file echo. pickle to make pickle files echo. json to make JSON files echo. htmlhelp to make HTML files and a HTML help project echo. qthelp to make HTML files and a qthelp project echo. devhelp to make HTML files and a Devhelp project echo. epub to make an epub echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter echo. text to make text files echo. man to make manual pages echo. changes to make an overview over all changed/added/deprecated items echo. linkcheck to check all external links for integrity echo. doctest to run all doctests embedded in the documentation if enabled goto end ) if "%1" == "clean" ( for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i del /q /s %BUILDDIR%\* goto end ) if "%1" == "html" ( %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/html. goto end ) if "%1" == "dirhtml" ( %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. 
goto end ) if "%1" == "singlehtml" ( %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. goto end ) if "%1" == "pickle" ( %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can process the pickle files. goto end ) if "%1" == "json" ( %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can process the JSON files. goto end ) if "%1" == "htmlhelp" ( %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can run HTML Help Workshop with the ^ .hhp project file in %BUILDDIR%/htmlhelp. goto end ) if "%1" == "qthelp" ( %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can run "qcollectiongenerator" with the ^ .qhcp project file in %BUILDDIR%/qthelp, like this: echo.^> qcollectiongenerator %BUILDDIR%\qthelp\persistent.qhcp echo.To view the help file: echo.^> assistant -collectionFile %BUILDDIR%\qthelp\persistent.ghc goto end ) if "%1" == "devhelp" ( %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp if errorlevel 1 exit /b 1 echo. echo.Build finished. goto end ) if "%1" == "epub" ( %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub if errorlevel 1 exit /b 1 echo. echo.Build finished. The epub file is in %BUILDDIR%/epub. goto end ) if "%1" == "latex" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex if errorlevel 1 exit /b 1 echo. echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. goto end ) if "%1" == "text" ( %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text if errorlevel 1 exit /b 1 echo. echo.Build finished. The text files are in %BUILDDIR%/text. goto end ) if "%1" == "man" ( %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man if errorlevel 1 exit /b 1 echo. 
echo.Build finished. The manual pages are in %BUILDDIR%/man. goto end ) if "%1" == "changes" ( %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes if errorlevel 1 exit /b 1 echo. echo.The overview file is in %BUILDDIR%/changes. goto end ) if "%1" == "linkcheck" ( %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck if errorlevel 1 exit /b 1 echo. echo.Link check complete; look for any errors in the above output ^ or in %BUILDDIR%/linkcheck/output.txt. goto end ) if "%1" == "doctest" ( %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest if errorlevel 1 exit /b 1 echo. echo.Testing of doctests in the sources finished, look at the ^ results in %BUILDDIR%/doctest/output.txt. goto end ) :end persistent-4.2.2/docs/Makefile0000644000076600000240000001077612577543740016243 0ustar jimstaff00000000000000# Makefile for Sphinx documentation # # You can set these variables from the command line. SPHINXOPTS = SPHINXBUILD = sphinx-build PAPER = BUILDDIR = _build # Internal variables. PAPEROPT_a4 = -D latex_paper_size=a4 PAPEROPT_letter = -D latex_paper_size=letter ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 
.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest help: @echo "Please use \`make ' where is one of" @echo " html to make standalone HTML files" @echo " dirhtml to make HTML files named index.html in directories" @echo " singlehtml to make a single large HTML file" @echo " pickle to make pickle files" @echo " json to make JSON files" @echo " htmlhelp to make HTML files and a HTML help project" @echo " qthelp to make HTML files and a qthelp project" @echo " devhelp to make HTML files and a Devhelp project" @echo " epub to make an epub" @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" @echo " latexpdf to make LaTeX files and run them through pdflatex" @echo " text to make text files" @echo " man to make manual pages" @echo " changes to make an overview of all changed/added/deprecated items" @echo " linkcheck to check all external links for integrity" @echo " doctest to run all doctests embedded in the documentation (if enabled)" clean: -rm -rf $(BUILDDIR)/* html: $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." dirhtml: $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." singlehtml: $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml @echo @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." pickle: $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle @echo @echo "Build finished; now you can process the pickle files." json: $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json @echo @echo "Build finished; now you can process the JSON files." htmlhelp: $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp @echo @echo "Build finished; now you can run HTML Help Workshop with the" \ ".hhp project file in $(BUILDDIR)/htmlhelp." 
qthelp: $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp @echo @echo "Build finished; now you can run "qcollectiongenerator" with the" \ ".qhcp project file in $(BUILDDIR)/qthelp, like this:" @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/persistent.qhcp" @echo "To view the help file:" @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/persistent.qhc" devhelp: $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp @echo @echo "Build finished." @echo "To view the help file:" @echo "# mkdir -p $$HOME/.local/share/devhelp/persistent" @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/persistent" @echo "# devhelp" epub: $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub @echo @echo "Build finished. The epub file is in $(BUILDDIR)/epub." latex: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." @echo "Run \`make' in that directory to run these through (pdf)latex" \ "(use \`make latexpdf' here to do that automatically)." latexpdf: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through pdflatex..." make -C $(BUILDDIR)/latex all-pdf @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." text: $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text @echo @echo "Build finished. The text files are in $(BUILDDIR)/text." man: $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man @echo @echo "Build finished. The manual pages are in $(BUILDDIR)/man." changes: $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes @echo @echo "The overview file is in $(BUILDDIR)/changes." linkcheck: $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck @echo @echo "Link check complete; look for any errors in the above output " \ "or in $(BUILDDIR)/linkcheck/output.txt." 
doctest: $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest @echo "Testing of doctests in the sources finished, look at the " \ "results in $(BUILDDIR)/doctest/output.txt." persistent-4.2.2/docs/using.rst0000644000076600000240000002670412577543740016460 0ustar jimstaff00000000000000Using :mod:`persistent` in your application =========================================== Inheriting from :class:`persistent.Persistent` ---------------------------------------------- The basic mechanism for making your application's objects persistent is mix-in interitance. Instances whose classes derive from :class:`persistent.Persistent` are automatically capable of being created as :term:`ghost` instances, being associated with a database connection (called the :term:`jar`), and notifying the connection when they have been changed. Relationship to a Data Manager and its Cache -------------------------------------------- Except immediately after their creation, persistent objects are normally associated with a :term:`data manager` (also referred to as a :term:`jar`). An object's data manager is stored in its ``_p_jar`` attribute. The data manager is responsible for loading and saving the state of the persistent object to some sort of backing store, including managing any interactions with transaction machinery. Each data manager maintains an :term:`object cache`, which keeps track of the currently loaded objects, as well as any objects they reference which have not yet been loaded: such an object is called a :term:`ghost`. The cache is stored on the data manager in its ``_cache`` attribute. A persistent object remains in the ghost state until the application attempts to access or mutate one of its attributes: at that point, the object requests that its data manager load its state. The persistent object also notifies the cache that it has been loaded, as well as on each subsequent attribute access. 
The cache keeps a "most-recently-used" list of its objects, and removes objects in least-recently-used order when it is asked to reduce its working set. The examples below use a stub data manager class, and its stub cache class: .. doctest:: >>> class Cache(object): ... def __init__(self): ... self._mru = [] ... def mru(self, oid): ... self._mru.append(oid) >>> from zope.interface import implements >>> from persistent.interfaces import IPersistentDataManager >>> class DM(object): ... implements(IPersistentDataManager) ... def __init__(self): ... self._cache = Cache() ... self.registered = 0 ... def register(self, ob): ... self.registered += 1 ... def setstate(self, ob): ... ob.__setstate__({'x': 42}) .. note:: Notic that the ``DM`` class always sets the ``x`` attribute to the value ``42`` when activating an object. Persistent objects without a Data Manager ----------------------------------------- Before aersistent instance has been associtated with a a data manager ( i.e., its ``_p_jar`` is still ``None``). The examples below use a class, ``P``, defined as: .. doctest:: >>> from persistent import Persistent >>> from persistent.interfaces import GHOST, UPTODATE, CHANGED >>> class P(Persistent): ... def __init__(self): ... self.x = 0 ... def inc(self): ... self.x += 1 Instances of the derived ``P`` class which are not (yet) assigned to a :term:`data manager` behave as other Python instances, except that they have some extra attributes: .. doctest:: >>> p = P() >>> p.x 0 The :attr:`_p_changed` attribute is a three-state flag: it can be one of ``None`` (the object is not loaded), ``False`` (the object has not been changed since it was loaded) or ``True`` (the object has been changed). Until the object is assigned a :term:`jar`, this attribute will always be ``False``. .. doctest:: >>> p._p_changed False The :attr:`_p_state` attribute is an integaer, representing which of the "persistent lifecycle" states the object is in. 
Until the object is assigned a :term:`jar`, this attribute will always be ``0`` (the ``UPTODATE`` constant): .. doctest:: >>> p._p_state == UPTODATE True The :attr:`_p_jar` attribute is the object's :term:`data manager`. Since it has not yet been assigned, its value is ``None``: .. doctest:: >>> print p._p_jar None The :attr:`_p_oid` attribute is the :term:`object id`, a unique value normally assigned by the object's :term:`data manager`. Since the object has not yet been associated with its :term:`jar`, its value is ``None``: .. doctest:: >>> print p._p_oid None Without a data manager, modifying a persistent object has no effect on its ``_p_state`` or ``_p_changed``. .. doctest:: >>> p.inc() >>> p.inc() >>> p.x 2 >>> p._p_changed False >>> p._p_state 0 Try all sorts of different ways to change the object's state: .. doctest:: >>> p._p_deactivate() >>> p._p_state 0 >>> p._p_changed False >>> p._p_changed = True >>> p._p_changed False >>> p._p_state 0 >>> del p._p_changed >>> p._p_changed False >>> p._p_state 0 >>> p.x 2 Associating an Object with a Data Manager ----------------------------------------- Once associated with a data manager, a persistent object's behavior changes: .. doctest:: >>> p = P() >>> dm = DM() >>> p._p_oid = "00000012" >>> p._p_jar = dm >>> p._p_changed False >>> p._p_state 0 >>> p.__dict__ {'x': 0} >>> dm.registered 0 Modifying the object marks it as changed and registers it with the data manager. Subsequent modifications don't have additional side-effects. .. doctest:: >>> p.inc() >>> p.x 1 >>> p.__dict__ {'x': 1} >>> p._p_changed True >>> p._p_state 1 >>> dm.registered 1 >>> p.inc() >>> p._p_changed True >>> p._p_state 1 >>> dm.registered 1 Object which register themselves with the data manager are candidates for storage to the backing store at a later point in time. 
Explicitly controlling ``_p_state`` ----------------------------------- Persistent objects expose three methods for moving an object into and out of the "ghost" state:: :meth:`persistent.Persistent._p_activate`, :meth:`persistent.Persistent._p_activate_p_deactivate`, and :meth:`persistent.Persistent._p_invalidate`: .. doctest:: >>> p = P() >>> p._p_oid = '00000012' >>> p._p_jar = DM() After being assigned a jar, the object is initially in the ``UPTODATE`` state: .. doctest:: >>> p._p_state 0 From that state, ``_p_deactivate`` rests the object to the ``GHOST`` state: .. doctest:: >>> p._p_deactivate() >>> p._p_state -1 From the ``GHOST`` state, ``_p_activate`` reloads the object's data and moves it to the ``UPTODATE`` state: .. doctest:: >>> p._p_activate() >>> p._p_state 0 >>> p.x 42 Changing the object puts it in the ``CHANGED`` state: .. doctest:: >>> p.inc() >>> p.x 43 >>> p._p_state 1 Attempting to deactivate in the ``CHANGED`` state is a no-op: .. doctest:: >>> p._p_deactivate() >>> p.__dict__ {'x': 43} >>> p._p_changed True >>> p._p_state 1 ``_p_invalidate`` forces objects into the ``GHOST`` state; it works even on objects in the ``CHANGED`` state, which is the key difference between deactivation and invalidation: .. doctest:: >>> p._p_invalidate() >>> p.__dict__ {} >>> p._p_state -1 You can manually reset the ``_p_changed`` field to ``False``: in this case, the object changes to the ``UPTODATE`` state but retains its modifications: .. doctest:: >>> p.inc() >>> p.x 43 >>> p._p_changed = False >>> p._p_state 0 >>> p._p_changed False >>> p.x 43 For an object in the "ghost" state, assigning ``True`` (or any value which is coercible to ``True``) to its ``_p_changed`` attributes activates the object, which is exactly the same as calling ``_p_activate``: .. 
doctest:: >>> p._p_invalidate() >>> p._p_state -1 >>> p._p_changed = True >>> p._p_changed True >>> p._p_state 1 >>> p.x 42 The pickling protocol --------------------- Because persistent objects need to control how they are pickled and unpickled, the :class:`persistent.Persistent` base class overrides the implementations of ``__getstate__()`` and ``__setstate__()``: .. doctest:: >>> p = P() >>> dm = DM() >>> p._p_oid = "00000012" >>> p._p_jar = dm >>> p.__getstate__() {'x': 0} >>> p._p_state 0 Calling ``__setstate__`` always leaves the object in the uptodate state. .. doctest:: >>> p.__setstate__({'x': 5}) >>> p._p_state 0 A :term:`volatile attribute` is an attribute those whose name begins with a special prefix (``_v__``). Unlike normal attributes, volatile attributes do not get stored in the object's :term:`pickled data`. .. doctest:: >>> p._v_foo = 2 >>> p.__getstate__() {'x': 5} Assigning to volatile attributes doesn't cause the object to be marked as changed: .. doctest:: >>> p._p_state 0 The ``_p_serial`` attribute is not affected by calling setstate. .. doctest:: >>> p._p_serial = "00000012" >>> p.__setstate__(p.__getstate__()) >>> p._p_serial '00000012' Estimated Object Size --------------------- We can store a size estimation in ``_p_estimated_size``. Its default is 0. The size estimation can be used by a cache associated with the data manager to help in the implementation of its replacement strategy or its size bounds. .. doctest:: >>> p._p_estimated_size 0 >>> p._p_estimated_size = 1000 >>> p._p_estimated_size 1024 Huh? Why is the estimated size coming out different than what we put in? The reason is that the size isn't stored exactly. For backward compatibility reasons, the size needs to fit in 24 bits, so, internally, it is adjusted somewhat. Of course, the estimated size must not be negative. .. doctest:: >>> p._p_estimated_size = -1 Traceback (most recent call last): .... 
ValueError: _p_estimated_size must not be negative Overriding the attribute protocol --------------------------------- Subclasses which override the attribute-management methods provided by :class:`persistent.Persistent`, but must obey some constraints: :meth:`__getattribute__`` When overriding ``__getattribute__``, the derived class implementation **must** first call :meth:`persistent.Persistent._p_getattr`, passing the name being accessed. This method ensures that the object is activated, if needed, and handles the "special" attributes which do not require activation (e.g., ``_p_oid``, ``__class__``, ``__dict__``, etc.) If ``_p_getattr`` returns ``True``, the derived class implementation **must** delegate to the base class implementation for the attribute. :meth:`__setattr__` When overriding ``__setattr__``, the derived class implementation **must** first call :meth:`persistent.Persistent._p_setattr`, passing the name being accessed and the value. This method ensures that the object is activated, if needed, and handles the "special" attributes which do not require activation (``_p_*``). If ``_p_setattr`` returns ``True``, the derived implementation must assume that the attribute value has been set by the base class. :meth:`__detattr__` When overriding ``__detattr__``, the derived class implementation **must** first call :meth:`persistent.Persistent._p_detattr`, passing the name being accessed. This method ensures that the object is activated, if needed, and handles the "special" attributes which do not require activation (``_p_*``). If ``_p_delattr`` returns ``True``, the derived implementation must assume that the attribute has been deleted base class. :meth:`__getattr__` For the `__getattr__` method, the behavior is like that for regular Python classes and for earlier versions of ZODB 3. 
persistent-4.2.2/LICENSE.txt0000644000076600000240000000402612577543740015465 0ustar jimstaff00000000000000Zope Public License (ZPL) Version 2.1 A copyright notice accompanies this license document that identifies the copyright holders. This license has been certified as open source. It has also been designated as GPL compatible by the Free Software Foundation (FSF). Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions in source code must retain the accompanying copyright notice, this list of conditions, and the following disclaimer. 2. Redistributions in binary form must reproduce the accompanying copyright notice, this list of conditions, and the following disclaimer in the documentation and/or other materials provided with the distribution. 3. Names of the copyright holders must not be used to endorse or promote products derived from this software without prior written permission from the copyright holders. 4. The right to distribute this software or to use it for any purpose does not give you the right to use Servicemarks (sm) or Trademarks (tm) of the copyright holders. Use of them is covered by separate agreement with the copyright holders. 5. If any files are modified, you must cause the modified files to carry prominent notices stating that you changed the files and the date of any change. Disclaimer THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDERS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. persistent-4.2.2/MANIFEST.in0000644000076600000240000000052012577543740015373 0ustar jimstaff00000000000000include *.txt include *.rst recursive-include docs * recursive-include persistent * global-exclude *.dll global-exclude *.pyc global-exclude *.pyo global-exclude *.so global-exclude coverage.xml prune docs/_build prune persistent/__pycache__ include .coveragerc include .travis.yml include buildout.cfg include tox.ini include *.py persistent-4.2.2/persistent/0000755000076600000240000000000013017351012016013 5ustar jimstaff00000000000000persistent-4.2.2/persistent/__init__.py0000644000076600000240000000540612577543740020156 0ustar jimstaff00000000000000############################################################################## # # Copyright (c) 2001, 2002 Zope Foundation and Contributors. # All Rights Reserved. # # This software is subject to the provisions of the Zope Public License, # Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. # THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED # WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS # FOR A PARTICULAR PURPOSE # ############################################################################## """Prefer C implementations of Persistent / PickleCache / TimeStamp. Fall back to pure Python implementations. 
""" import os PURE_PYTHON = os.environ.get('PURE_PYTHON') if not PURE_PYTHON: try: from persistent.cPersistence import Persistent from persistent.cPersistence import GHOST from persistent.cPersistence import UPTODATE from persistent.cPersistence import CHANGED from persistent.cPersistence import STICKY from persistent.cPersistence import simple_new except ImportError: #pragma NO COVER from persistent.persistence import Persistent from persistent.persistence import GHOST from persistent.persistence import UPTODATE from persistent.persistence import CHANGED from persistent.persistence import STICKY else: from persistent._compat import copy_reg copy_reg.constructor(simple_new) # Make an interface declaration for Persistent, if zope.interface # is available. Note that the Python version already does this. try: from zope.interface import classImplements except ImportError: #pragma NO COVER pass else: from persistent.interfaces import IPersistent classImplements(Persistent, IPersistent) try: from persistent.cPickleCache import PickleCache except ImportError: #pragma NO COVER from persistent.picklecache import PickleCache try: import persistent.TimeStamp except ImportError: #pragma NO COVER import persistent.timestamp as TimeStamp import sys sys.modules['persistent.TimeStamp' ] = sys.modules['persistent.timestamp'] else: #pragma NO COVER from persistent.persistence import Persistent from persistent.persistence import GHOST from persistent.persistence import UPTODATE from persistent.persistence import CHANGED from persistent.persistence import STICKY from persistent.picklecache import PickleCache import persistent.timestamp as TimeStamp import sys sys.modules['persistent.TimeStamp'] = sys.modules['persistent.timestamp'] persistent-4.2.2/persistent/_compat.h0000644000076600000240000000277312577543740017644 0ustar jimstaff00000000000000/***************************************************************************** Copyright (c) 2012 Zope Foundation and Contributors. 
All Rights Reserved. This software is subject to the provisions of the Zope Public License, Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS FOR A PARTICULAR PURPOSE ****************************************************************************/ #ifndef PERSISTENT__COMPAT_H #define PERSISTENT__COMPAT_H #include "Python.h" #if PY_MAJOR_VERSION >= 3 #define PY3K #endif #ifdef PY3K #define INTERN PyUnicode_InternFromString #define INTERN_INPLACE PyUnicode_InternInPlace #define NATIVE_CHECK_EXACT PyUnicode_CheckExact #define NATIVE_FROM_STRING_AND_SIZE PyUnicode_FromStringAndSize #define Py_TPFLAGS_HAVE_RICHCOMPARE 0 #define INT_FROM_LONG(x) PyLong_FromLong(x) #define INT_CHECK(x) PyLong_Check(x) #define INT_AS_LONG(x) PyLong_AS_LONG(x) #define CAPI_CAPSULE_NAME "persistent.cPersistence.CAPI" #else #define INTERN PyString_InternFromString #define INTERN_INPLACE PyString_InternInPlace #define NATIVE_CHECK_EXACT PyString_CheckExact #define NATIVE_FROM_STRING_AND_SIZE PyString_FromStringAndSize #define INT_FROM_LONG(x) PyInt_FromLong(x) #define INT_CHECK(x) PyInt_Check(x) #define INT_AS_LONG(x) PyInt_AS_LONG(x) #endif #endif persistent-4.2.2/persistent/_compat.py0000644000076600000240000000276412577543740020045 0ustar jimstaff00000000000000############################################################################## # # Copyright (c) 2012 Zope Foundation and Contributors. # All Rights Reserved. # # This software is subject to the provisions of the Zope Public License, # Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED # WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS # FOR A PARTICULAR PURPOSE. # ############################################################################## import sys if sys.version_info[0] > 2: #pragma NO COVER import copyreg as copy_reg from collections import UserDict as IterableUserDict from collections import UserList from sys import intern def _u(s): return s def _b(s): if isinstance(s, str): return s.encode('unicode_escape') return s def _native(s): if isinstance(s, bytes): return s.decode('unicode_escape') return s PYTHON3 = True PYTHON2 = False else: #pragma NO COVER import copy_reg from UserDict import IterableUserDict from UserList import UserList def _u(s): return unicode(s, 'unicode_escape') def _native(s): if isinstance(s, unicode): return s.encode('unicode_escape') return s _b = _native PYTHON3 = False PYTHON2 = True intern = intern persistent-4.2.2/persistent/_timestamp.c0000644000076600000240000003312713017331206020332 0ustar jimstaff00000000000000/***************************************************************************** Copyright (c) 2001, 2004 Zope Foundation and Contributors. All Rights Reserved. This software is subject to the provisions of the Zope Public License, Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS FOR A PARTICULAR PURPOSE ****************************************************************************/ #include "Python.h" #include "bytesobject.h" #include #include "_compat.h" PyObject *TimeStamp_FromDate(int, int, int, int, int, double); PyObject *TimeStamp_FromString(const char *); static char TimeStampModule_doc[] = "A 64-bit TimeStamp used as a ZODB serial number.\n" "\n" "$Id$\n"; typedef struct { PyObject_HEAD unsigned char data[8]; } TimeStamp; /* The first dimension of the arrays below is non-leapyear / leapyear */ static char month_len[2][12] = { {31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31}, {31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31} }; static short joff[2][12] = { {0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334}, {0, 31, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335} }; static double gmoff=0; /* TODO: May be better (faster) to store in a file static. 
*/ #define SCONV ((double)60) / ((double)(1<<16)) / ((double)(1<<16)) static int leap(int year) { return year % 4 == 0 && (year % 100 != 0 || year % 400 == 0); } static int days_in_month(int year, int month) { return month_len[leap(year)][month]; } static double TimeStamp_yad(int y) { double d, s; y -= 1900; d = (y - 1) * 365; if (y > 0) { s = 1.0; y -= 1; } else { s = -1.0; y = -y; } return d + s * (y / 4 - y / 100 + (y + 300) / 400); } static double TimeStamp_abst(int y, int mo, int d, int m, int s) { return (TimeStamp_yad(y) + joff[leap(y)][mo] + d) * 86400 + m * 60 + s; } static int TimeStamp_init_gmoff(void) { struct tm *t; time_t z=0; t = gmtime(&z); if (t == NULL) { PyErr_SetString(PyExc_SystemError, "gmtime failed"); return -1; } gmoff = TimeStamp_abst(t->tm_year+1900, t->tm_mon, t->tm_mday - 1, t->tm_hour * 60 + t->tm_min, t->tm_sec); return 0; } static void TimeStamp_dealloc(TimeStamp *ts) { PyObject_Del(ts); } static PyObject* TimeStamp_richcompare(TimeStamp *self, TimeStamp *other, int op) { PyObject *result = NULL; int cmp; if (Py_TYPE(other) != Py_TYPE(self)) { result = Py_NotImplemented; } else { cmp = memcmp(self->data, other->data, 8); switch (op) { case Py_LT: result = (cmp < 0) ? Py_True : Py_False; break; case Py_LE: result = (cmp <= 0) ? Py_True : Py_False; break; case Py_EQ: result = (cmp == 0) ? Py_True : Py_False; break; case Py_NE: result = (cmp != 0) ? Py_True : Py_False; break; case Py_GT: result = (cmp > 0) ? Py_True : Py_False; break; case Py_GE: result = (cmp >= 0) ? 
Py_True : Py_False; break; } } Py_XINCREF(result); return result; } #ifdef PY3K static Py_hash_t #else static long #endif TimeStamp_hash(TimeStamp *self) { register unsigned char *p = (unsigned char *)self->data; register int len = 8; register long x = *p << 7; while (--len >= 0) x = (1000003*x) ^ *p++; x ^= 8; if (x == -1) x = -2; return x; } typedef struct { /* TODO: reverse-engineer what's in these things and comment them */ int y; int m; int d; int mi; } TimeStampParts; static void TimeStamp_unpack(TimeStamp *self, TimeStampParts *p) { unsigned long v; v = (self->data[0] * 16777216 + self->data[1] * 65536 + self->data[2] * 256 + self->data[3]); p->y = v / 535680 + 1900; p->m = (v % 535680) / 44640 + 1; p->d = (v % 44640) / 1440 + 1; p->mi = v % 1440; } static double TimeStamp_sec(TimeStamp *self) { unsigned int v; v = (self->data[4] * 16777216 + self->data[5] * 65536 + self->data[6] * 256 + self->data[7]); return SCONV * v; } static PyObject * TimeStamp_year(TimeStamp *self) { TimeStampParts p; TimeStamp_unpack(self, &p); return INT_FROM_LONG(p.y); } static PyObject * TimeStamp_month(TimeStamp *self) { TimeStampParts p; TimeStamp_unpack(self, &p); return INT_FROM_LONG(p.m); } static PyObject * TimeStamp_day(TimeStamp *self) { TimeStampParts p; TimeStamp_unpack(self, &p); return INT_FROM_LONG(p.d); } static PyObject * TimeStamp_hour(TimeStamp *self) { TimeStampParts p; TimeStamp_unpack(self, &p); return INT_FROM_LONG(p.mi / 60); } static PyObject * TimeStamp_minute(TimeStamp *self) { TimeStampParts p; TimeStamp_unpack(self, &p); return INT_FROM_LONG(p.mi % 60); } static PyObject * TimeStamp_second(TimeStamp *self) { return PyFloat_FromDouble(TimeStamp_sec(self)); } static PyObject * TimeStamp_timeTime(TimeStamp *self) { TimeStampParts p; TimeStamp_unpack(self, &p); return PyFloat_FromDouble(TimeStamp_abst(p.y, p.m - 1, p.d - 1, p.mi, 0) + TimeStamp_sec(self) - gmoff); } static PyObject * TimeStamp_raw(TimeStamp *self) { return PyBytes_FromStringAndSize((const 
char*)self->data, 8); } static PyObject * TimeStamp_repr(TimeStamp *self) { PyObject *raw, *result; raw = TimeStamp_raw(self); result = PyObject_Repr(raw); Py_DECREF(raw); return result; } static PyObject * TimeStamp_str(TimeStamp *self) { char buf[128]; TimeStampParts p; int len; TimeStamp_unpack(self, &p); len =sprintf(buf, "%4.4d-%2.2d-%2.2d %2.2d:%2.2d:%09.6f", p.y, p.m, p.d, p.mi / 60, p.mi % 60, TimeStamp_sec(self)); return NATIVE_FROM_STRING_AND_SIZE(buf, len); } static PyObject * TimeStamp_laterThan(TimeStamp *self, PyObject *obj) { TimeStamp *o = NULL; TimeStampParts p; unsigned char new[8]; int i; if (Py_TYPE(obj) != Py_TYPE(self)) { PyErr_SetString(PyExc_TypeError, "expected TimeStamp object"); return NULL; } o = (TimeStamp *)obj; if (memcmp(self->data, o->data, 8) > 0) { Py_INCREF(self); return (PyObject *)self; } memcpy(new, o->data, 8); for (i = 7; i > 3; i--) { if (new[i] == 255) new[i] = 0; else { new[i]++; return TimeStamp_FromString((const char*)new); } } /* All but the first two bytes are the same. Need to increment the year, month, and day explicitly. 
*/ TimeStamp_unpack(o, &p); if (p.mi >= 1439) { p.mi = 0; if (p.d == month_len[leap(p.y)][p.m - 1]) { p.d = 1; if (p.m == 12) { p.m = 1; p.y++; } else p.m++; } else p.d++; } else p.mi++; return TimeStamp_FromDate(p.y, p.m, p.d, p.mi / 60, p.mi % 60, 0); } static struct PyMethodDef TimeStamp_methods[] = { {"year", (PyCFunction)TimeStamp_year, METH_NOARGS}, {"minute", (PyCFunction)TimeStamp_minute, METH_NOARGS}, {"month", (PyCFunction)TimeStamp_month, METH_NOARGS}, {"day", (PyCFunction)TimeStamp_day, METH_NOARGS}, {"hour", (PyCFunction)TimeStamp_hour, METH_NOARGS}, {"second", (PyCFunction)TimeStamp_second, METH_NOARGS}, {"timeTime", (PyCFunction)TimeStamp_timeTime, METH_NOARGS}, {"laterThan", (PyCFunction)TimeStamp_laterThan, METH_O}, {"raw", (PyCFunction)TimeStamp_raw, METH_NOARGS}, {NULL, NULL}, }; #define DEFERRED_ADDRESS(ADDR) 0 static PyTypeObject TimeStamp_type = { PyVarObject_HEAD_INIT(DEFERRED_ADDRESS(NULL), 0) "persistent.TimeStamp", sizeof(TimeStamp), /* tp_basicsize */ 0, /* tp_itemsize */ (destructor)TimeStamp_dealloc, /* tp_dealloc */ 0, /* tp_print */ 0, /* tp_getattr */ 0, /* tp_setattr */ 0, /* tp_compare */ (reprfunc)TimeStamp_repr, /* tp_repr */ 0, /* tp_as_number */ 0, /* tp_as_sequence */ 0, /* tp_as_mapping */ (hashfunc)TimeStamp_hash, /* tp_hash */ 0, /* tp_call */ (reprfunc)TimeStamp_str, /* tp_str */ 0, /* tp_getattro */ 0, /* tp_setattro */ 0, /* tp_as_buffer */ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_RICHCOMPARE, /* tp_flags */ 0, /* tp_doc */ 0, /* tp_traverse */ 0, /* tp_clear */ (richcmpfunc)&TimeStamp_richcompare, /* tp_richcompare */ 0, /* tp_weaklistoffset */ 0, /* tp_iter */ 0, /* tp_iternext */ TimeStamp_methods, /* tp_methods */ 0, /* tp_members */ 0, /* tp_getset */ 0, /* tp_base */ 0, /* tp_dict */ 0, /* tp_descr_get */ 0, /* tp_descr_set */ }; PyObject * TimeStamp_FromString(const char *buf) { /* buf must be exactly 8 characters */ TimeStamp *ts = (TimeStamp *)PyObject_New(TimeStamp, &TimeStamp_type); 
memcpy(ts->data, buf, 8); return (PyObject *)ts; } #define CHECK_RANGE(VAR, LO, HI) if ((VAR) < (LO) || (VAR) > (HI)) { \ return PyErr_Format(PyExc_ValueError, \ # VAR " must be between %d and %d: %d", \ (LO), (HI), (VAR)); \ } PyObject * TimeStamp_FromDate(int year, int month, int day, int hour, int min, double sec) { TimeStamp *ts = NULL; int d; unsigned int v; if (year < 1900) return PyErr_Format(PyExc_ValueError, "year must be greater than 1900: %d", year); CHECK_RANGE(month, 1, 12); d = days_in_month(year, month - 1); if (day < 1 || day > d) return PyErr_Format(PyExc_ValueError, "day must be between 1 and %d: %d", d, day); CHECK_RANGE(hour, 0, 23); CHECK_RANGE(min, 0, 59); /* Seconds are allowed to be anything, so chill If we did want to be pickly, 60 would be a better choice. if (sec < 0 || sec > 59) return PyErr_Format(PyExc_ValueError, "second must be between 0 and 59: %f", sec); */ ts = (TimeStamp *)PyObject_New(TimeStamp, &TimeStamp_type); v = (((year - 1900) * 12 + month - 1) * 31 + day - 1); v = (v * 24 + hour) * 60 + min; ts->data[0] = v / 16777216; ts->data[1] = (v % 16777216) / 65536; ts->data[2] = (v % 65536) / 256; ts->data[3] = v % 256; sec /= SCONV; v = (unsigned int)sec; ts->data[4] = v / 16777216; ts->data[5] = (v % 16777216) / 65536; ts->data[6] = (v % 65536) / 256; ts->data[7] = v % 256; return (PyObject *)ts; } PyObject * TimeStamp_TimeStamp(PyObject *obj, PyObject *args) { char *buf = NULL; int len = 0, y, mo, d, h = 0, m = 0; double sec = 0; #ifdef PY3K if (PyArg_ParseTuple(args, "y#", &buf, &len)) #else if (PyArg_ParseTuple(args, "s#", &buf, &len)) #endif { if (len != 8) { PyErr_SetString(PyExc_ValueError, "8-byte array expected"); return NULL; } return TimeStamp_FromString(buf); } PyErr_Clear(); if (!PyArg_ParseTuple(args, "iii|iid", &y, &mo, &d, &h, &m, &sec)) return NULL; return TimeStamp_FromDate(y, mo, d, h, m, sec); } static PyMethodDef TimeStampModule_functions[] = { {"TimeStamp", TimeStamp_TimeStamp, METH_VARARGS}, {NULL, NULL}, 
}; #ifdef PY3K static struct PyModuleDef moduledef = { PyModuleDef_HEAD_INIT, "_timestamp", /* m_name */ TimeStampModule_doc, /* m_doc */ -1, /* m_size */ TimeStampModule_functions, /* m_methods */ NULL, /* m_reload */ NULL, /* m_traverse */ NULL, /* m_clear */ NULL, /* m_free */ }; #endif static PyObject* module_init(void) { PyObject *module; if (TimeStamp_init_gmoff() < 0) return NULL; #ifdef PY3K module = PyModule_Create(&moduledef); #else module = Py_InitModule4("_timestamp", TimeStampModule_functions, TimeStampModule_doc, NULL, PYTHON_API_VERSION); #endif if (module == NULL) return NULL; #ifdef PY3K ((PyObject*)&TimeStamp_type)->ob_type = &PyType_Type; #else TimeStamp_type.ob_type = &PyType_Type; #endif TimeStamp_type.tp_getattro = PyObject_GenericGetAttr; return module; } #ifdef PY3K PyMODINIT_FUNC PyInit__timestamp(void) { return module_init(); } #else PyMODINIT_FUNC init_timestamp(void) { module_init(); } #endif persistent-4.2.2/persistent/cPersistence.c0000644000076600000240000012303613017350405020620 0ustar jimstaff00000000000000/***************************************************************************** Copyright (c) 2001, 2002 Zope Foundation and Contributors. All Rights Reserved. This software is subject to the provisions of the Zope Public License, Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS FOR A PARTICULAR PURPOSE ****************************************************************************/ static char cPersistence_doc_string[] = "Defines Persistent mixin class for persistent objects.\n" "\n" "$Id$\n"; #include "cPersistence.h" #include "structmember.h" struct ccobject_head_struct { CACHE_HEAD }; /* These two objects are initialized when the module is loaded */ static PyObject *TimeStamp, *py_simple_new; /* Strings initialized by init_strings() below. */ static PyObject *py_keys, *py_setstate, *py___dict__, *py_timeTime; static PyObject *py__p_changed, *py__p_deactivate; static PyObject *py___getattr__, *py___setattr__, *py___delattr__; static PyObject *py___slotnames__, *copy_reg_slotnames, *__newobj__; static PyObject *py___getnewargs__, *py___getstate__; static PyObject *py_unsaved, *py_ghost, *py_saved, *py_changed, *py_sticky; static int init_strings(void) { #define INIT_STRING(S) \ if (!(py_ ## S = INTERN(#S))) \ return -1; INIT_STRING(keys); INIT_STRING(setstate); INIT_STRING(timeTime); INIT_STRING(__dict__); INIT_STRING(_p_changed); INIT_STRING(_p_deactivate); INIT_STRING(__getattr__); INIT_STRING(__setattr__); INIT_STRING(__delattr__); INIT_STRING(__slotnames__); INIT_STRING(__getnewargs__); INIT_STRING(__getstate__); INIT_STRING(unsaved); INIT_STRING(ghost); INIT_STRING(saved); INIT_STRING(changed); INIT_STRING(sticky); #undef INIT_STRING return 0; } #ifdef Py_DEBUG static void fatal_1350(cPersistentObject *self, const char *caller, const char *detail) { char buf[1000]; PyOS_snprintf(buf, sizeof(buf), "cPersistence.c %s(): object at %p with type %.200s\n" "%s.\n" "The only known cause is multiple threads trying to ghost and\n" "unghost the object simultaneously.\n" "That's not legal, but ZODB can't stop it.\n" "See Collector 
#1350.\n", caller, self, Py_TYPE(self)->tp_name, detail); Py_FatalError(buf); } #endif static void ghostify(cPersistentObject*); static PyObject * pickle_slotnames(PyTypeObject *cls); static PyObject * convert_name(PyObject *name); /* Load the state of the object, unghostifying it. Upon success, return 1. * If an error occurred, re-ghostify the object and return -1. */ static int unghostify(cPersistentObject *self) { if (self->state < 0 && self->jar) { PyObject *r; /* Is it ever possible to not have a cache? */ if (self->cache) { /* Create a node in the ring for this unghostified object. */ self->cache->non_ghost_count++; self->cache->total_estimated_size += _estimated_size_in_bytes(self->estimated_size); ring_add(&self->cache->ring_home, &self->ring); Py_INCREF(self); } /* set state to CHANGED while setstate() call is in progress to prevent a recursive call to _PyPersist_Load(). */ self->state = cPersistent_CHANGED_STATE; /* Call the object's __setstate__() */ r = PyObject_CallMethod(self->jar, "setstate", "O", (PyObject *)self); if (r == NULL) { ghostify(self); return -1; } self->state = cPersistent_UPTODATE_STATE; Py_DECREF(r); if (self->cache && self->ring.r_next == NULL) { #ifdef Py_DEBUG fatal_1350(self, "unghostify", "is not in the cache despite that we just " "unghostified it"); #else PyErr_Format(PyExc_SystemError, "object at %p with type " "%.200s not in the cache despite that we just " "unghostified it", self, Py_TYPE(self)->tp_name); return -1; #endif } } return 1; } /****************************************************************************/ static PyTypeObject Pertype; static void accessed(cPersistentObject *self) { /* Do nothing unless the object is in a cache and not a ghost. */ if (self->cache && self->state >= 0 && self->ring.r_next) ring_move_to_head(&self->cache->ring_home, &self->ring); } static void ghostify(cPersistentObject *self) { PyObject **dictptr, *slotnames; /* are we already a ghost? 
*/ if (self->state == cPersistent_GHOST_STATE) return; /* Is it ever possible to not have a cache? */ if (self->cache == NULL) { self->state = cPersistent_GHOST_STATE; return; } if (self->ring.r_next == NULL) { /* There's no way to raise an error in this routine. */ #ifdef Py_DEBUG fatal_1350(self, "ghostify", "claims to be in a cache but isn't"); #else return; #endif } /* If we're ghostifying an object, we better have some non-ghosts. */ assert(self->cache->non_ghost_count > 0); self->cache->non_ghost_count--; self->cache->total_estimated_size -= _estimated_size_in_bytes(self->estimated_size); ring_del(&self->ring); self->state = cPersistent_GHOST_STATE; /* clear __dict__ */ dictptr = _PyObject_GetDictPtr((PyObject *)self); if (dictptr && *dictptr) { Py_DECREF(*dictptr); *dictptr = NULL; } /* clear all slots besides _p_* * ( for backward-compatibility reason we do this only if class does not * override __new__ ) */ if (Py_TYPE(self)->tp_new == Pertype.tp_new) { slotnames = pickle_slotnames(Py_TYPE(self)); if (slotnames && slotnames != Py_None) { int i; for (i = 0; i < PyList_GET_SIZE(slotnames); i++) { PyObject *name; char *cname; int is_special; name = PyList_GET_ITEM(slotnames, i); #ifdef PY3K if (PyUnicode_Check(name)) { PyObject *converted = convert_name(name); cname = PyBytes_AS_STRING(converted); #else if (PyBytes_Check(name)) { cname = PyBytes_AS_STRING(name); #endif is_special = !strncmp(cname, "_p_", 3); #ifdef PY3K Py_DECREF(converted); #endif if (is_special) /* skip persistent */ { continue; } } /* NOTE: this skips our delattr hook */ if (PyObject_GenericSetAttr((PyObject *)self, name, NULL) < 0) /* delattr of non-set slot will raise AttributeError - we * simply ignore. */ PyErr_Clear(); } } Py_XDECREF(slotnames); } /* We remove the reference to the just ghosted object that the ring * holds. Note that the dictionary of oids->objects has an uncounted * reference, so if the ring's reference was the only one, this frees * the ghost object. 
Note further that the object's dealloc knows to * inform the dictionary that it is going away. */ Py_DECREF(self); } static int changed(cPersistentObject *self) { if ((self->state == cPersistent_UPTODATE_STATE || self->state == cPersistent_STICKY_STATE) && self->jar) { PyObject *meth, *arg, *result; static PyObject *s_register; if (s_register == NULL) s_register = INTERN("register"); meth = PyObject_GetAttr((PyObject *)self->jar, s_register); if (meth == NULL) return -1; arg = PyTuple_New(1); if (arg == NULL) { Py_DECREF(meth); return -1; } Py_INCREF(self); PyTuple_SET_ITEM(arg, 0, (PyObject *)self); result = PyEval_CallObject(meth, arg); Py_DECREF(arg); Py_DECREF(meth); if (result == NULL) return -1; Py_DECREF(result); self->state = cPersistent_CHANGED_STATE; } return 0; } static int readCurrent(cPersistentObject *self) { if ((self->state == cPersistent_UPTODATE_STATE || self->state == cPersistent_STICKY_STATE) && self->jar && self->oid) { static PyObject *s_readCurrent=NULL; PyObject *r; if (s_readCurrent == NULL) s_readCurrent = INTERN("readCurrent"); r = PyObject_CallMethodObjArgs(self->jar, s_readCurrent, self, NULL); if (r == NULL) return -1; Py_DECREF(r); } return 0; } static PyObject * Per__p_deactivate(cPersistentObject *self) { if (self->state == cPersistent_UPTODATE_STATE && self->jar) { PyObject **dictptr = _PyObject_GetDictPtr((PyObject *)self); if (dictptr && *dictptr) { Py_DECREF(*dictptr); *dictptr = NULL; } /* Note that we need to set to ghost state unless we are called directly. Methods that override this need to do the same! 
*/ ghostify(self); if (PyErr_Occurred()) return NULL; } Py_INCREF(Py_None); return Py_None; } static PyObject * Per__p_activate(cPersistentObject *self) { if (unghostify(self) < 0) return NULL; Py_INCREF(Py_None); return Py_None; } static int Per_set_changed(cPersistentObject *self, PyObject *v); static PyObject * Per__p_invalidate(cPersistentObject *self) { signed char old_state = self->state; if (old_state != cPersistent_GHOST_STATE) { if (Per_set_changed(self, NULL) < 0) return NULL; ghostify(self); if (PyErr_Occurred()) return NULL; } Py_INCREF(Py_None); return Py_None; } static PyObject * pickle_slotnames(PyTypeObject *cls) { PyObject *slotnames; slotnames = PyDict_GetItem(cls->tp_dict, py___slotnames__); if (slotnames) { int n = PyObject_Not(slotnames); if (n < 0) return NULL; if (n) slotnames = Py_None; Py_INCREF(slotnames); return slotnames; } slotnames = PyObject_CallFunctionObjArgs(copy_reg_slotnames, (PyObject*)cls, NULL); if (slotnames && !(slotnames == Py_None || PyList_Check(slotnames))) { PyErr_SetString(PyExc_TypeError, "copy_reg._slotnames didn't return a list or None"); Py_DECREF(slotnames); return NULL; } return slotnames; } static PyObject * pickle_copy_dict(PyObject *state) { PyObject *copy, *key, *value; char *ckey; Py_ssize_t pos = 0; copy = PyDict_New(); if (!copy) return NULL; if (!state) return copy; while (PyDict_Next(state, &pos, &key, &value)) { int is_special; #ifdef PY3K if (key && PyUnicode_Check(key)) { PyObject *converted = convert_name(key); ckey = PyBytes_AS_STRING(converted); #else if (key && PyBytes_Check(key)) { ckey = PyBytes_AS_STRING(key); #endif is_special = (*ckey == '_' && (ckey[1] == 'v' || ckey[1] == 'p') && ckey[2] == '_'); #ifdef PY3K Py_DECREF(converted); #endif if (is_special) /* skip volatile and persistent */ continue; } if (PyObject_SetItem(copy, key, value) < 0) goto err; } return copy; err: Py_DECREF(copy); return NULL; } static char pickle___getstate__doc[] = "Get the object serialization state\n" "\n" "If 
the object has no assigned slots and has no instance dictionary, then \n" "None is returned.\n" "\n" "If the object has no assigned slots and has an instance dictionary, then \n" "the a copy of the instance dictionary is returned. The copy has any items \n" "with names starting with '_v_' or '_p_' ommitted.\n" "\n" "If the object has assigned slots, then a two-element tuple is returned. \n" "The first element is either None or a copy of the instance dictionary, \n" "as described above. The second element is a dictionary with items \n" "for each of the assigned slots.\n" ; static PyObject * pickle___getstate__(PyObject *self) { PyObject *slotnames=NULL, *slots=NULL, *state=NULL; PyObject **dictp; int n=0; slotnames = pickle_slotnames(Py_TYPE(self)); if (!slotnames) return NULL; dictp = _PyObject_GetDictPtr(self); if (dictp) state = pickle_copy_dict(*dictp); else { state = Py_None; Py_INCREF(state); } if (slotnames != Py_None) { int i; slots = PyDict_New(); if (!slots) goto end; for (i = 0; i < PyList_GET_SIZE(slotnames); i++) { PyObject *name, *value; char *cname; int is_special; name = PyList_GET_ITEM(slotnames, i); #ifdef PY3K if (PyUnicode_Check(name)) { PyObject *converted = convert_name(name); cname = PyBytes_AS_STRING(converted); #else if (PyBytes_Check(name)) { cname = PyBytes_AS_STRING(name); #endif is_special = (*cname == '_' && (cname[1] == 'v' || cname[1] == 'p') && cname[2] == '_'); #ifdef PY3K Py_DECREF(converted); #endif if (is_special) /* skip volatile and persistent */ { continue; } } /* Unclear: Will this go through our getattr hook? 
*/ value = PyObject_GetAttr(self, name); if (value == NULL) PyErr_Clear(); else { int err = PyDict_SetItem(slots, name, value); Py_DECREF(value); if (err < 0) goto end; n++; } } } if (n) state = Py_BuildValue("(NO)", state, slots); end: Py_XDECREF(slotnames); Py_XDECREF(slots); return state; } static int pickle_setattrs_from_dict(PyObject *self, PyObject *dict) { PyObject *key, *value; Py_ssize_t pos = 0; if (!PyDict_Check(dict)) { PyErr_SetString(PyExc_TypeError, "Expected dictionary"); return -1; } while (PyDict_Next(dict, &pos, &key, &value)) { if (PyObject_SetAttr(self, key, value) < 0) return -1; } return 0; } static char pickle___setstate__doc[] = "Set the object serialization state\n\n" "The state should be in one of 3 forms:\n\n" "- None\n\n" " Ignored\n\n" "- A dictionary\n\n" " In this case, the object's instance dictionary will be cleared and \n" " updated with the new state.\n\n" "- A two-tuple with a string as the first element. \n\n" " In this case, the method named by the string in the first element will\n" " be called with the second element.\n\n" " This form supports migration of data formats.\n\n" "- A two-tuple with None or a Dictionary as the first element and\n" " with a dictionary as the second element.\n\n" " If the first element is not None, then the object's instance dictionary \n" " will be cleared and updated with the value.\n\n" " The items in the second element will be assigned as attributes.\n" ; static PyObject * pickle___setstate__(PyObject *self, PyObject *state) { PyObject *slots=NULL; if (PyTuple_Check(state)) { if (!PyArg_ParseTuple(state, "OO:__setstate__", &state, &slots)) return NULL; } if (state != Py_None) { PyObject **dict; PyObject *d_key, *d_value; Py_ssize_t i; dict = _PyObject_GetDictPtr(self); if (!dict) { PyErr_SetString(PyExc_TypeError, "this object has no instance dictionary"); return NULL; } if (!*dict) { *dict = PyDict_New(); if (!*dict) return NULL; } PyDict_Clear(*dict); i = 0; while (PyDict_Next(state, &i, 
&d_key, &d_value)) { /* normally the keys for instance attributes are interned. we should try to do that here. */ if (NATIVE_CHECK_EXACT(d_key)) { Py_INCREF(d_key); INTERN_INPLACE(&d_key); Py_DECREF(d_key); } if (PyObject_SetItem(*dict, d_key, d_value) < 0) return NULL; } } if (slots && pickle_setattrs_from_dict(self, slots) < 0) return NULL; Py_INCREF(Py_None); return Py_None; } static char pickle___reduce__doc[] = "Reduce an object to contituent parts for serialization\n" ; static PyObject * pickle___reduce__(PyObject *self) { PyObject *args=NULL, *bargs=NULL, *state=NULL, *getnewargs=NULL; int l, i; getnewargs = PyObject_GetAttr(self, py___getnewargs__); if (getnewargs) { bargs = PyObject_CallFunctionObjArgs(getnewargs, NULL); Py_DECREF(getnewargs); if (!bargs) return NULL; l = PyTuple_Size(bargs); if (l < 0) goto end; } else { PyErr_Clear(); l = 0; } args = PyTuple_New(l+1); if (args == NULL) goto end; Py_INCREF(Py_TYPE(self)); PyTuple_SET_ITEM(args, 0, (PyObject*)(Py_TYPE(self))); for (i = 0; i < l; i++) { Py_INCREF(PyTuple_GET_ITEM(bargs, i)); PyTuple_SET_ITEM(args, i+1, PyTuple_GET_ITEM(bargs, i)); } state = PyObject_CallMethodObjArgs(self, py___getstate__, NULL); if (!state) goto end; state = Py_BuildValue("(OON)", __newobj__, args, state); end: Py_XDECREF(bargs); Py_XDECREF(args); return state; } /* Return the object's state, a dict or None. If the object has no dict, it's state is None. Otherwise, return a dict containing all the attributes that don't start with "_v_". The caller should not modify this dict, as it may be a reference to the object's __dict__. */ static PyObject * Per__getstate__(cPersistentObject *self) { /* TODO: Should it be an error to call __getstate__() on a ghost? */ if (unghostify(self) < 0) return NULL; /* TODO: should we increment stickyness? Tim doesn't understand that question. S*/ return pickle___getstate__((PyObject*)self); } /* The Persistent base type provides a traverse function, but not a clear function. 
An instance of a Persistent subclass will have its dict cleared through subtype_clear(). There is always a cycle between a persistent object and its cache. When the cycle becomes unreachable, the clear function for the cache will break the cycle. Thus, the persistent object need not have a clear function. It would be complex to write a clear function for the objects, if we needed one, because of the reference count tricks done by the cache. */ static void Per_dealloc(cPersistentObject *self) { if (self->state >= 0) { /* If the cache has been cleared, then a non-ghost object isn't in the ring any longer. */ if (self->ring.r_next != NULL) { /* if we're ghostifying an object, we better have some non-ghosts */ assert(self->cache->non_ghost_count > 0); self->cache->non_ghost_count--; self->cache->total_estimated_size -= _estimated_size_in_bytes(self->estimated_size); ring_del(&self->ring); } } if (self->cache) cPersistenceCAPI->percachedel(self->cache, self->oid); Py_XDECREF(self->cache); Py_XDECREF(self->jar); Py_XDECREF(self->oid); Py_TYPE(self)->tp_free(self); } static int Per_traverse(cPersistentObject *self, visitproc visit, void *arg) { int err; #define VISIT(SLOT) \ if (SLOT) { \ err = visit((PyObject *)(SLOT), arg); \ if (err) \ return err; \ } VISIT(self->jar); VISIT(self->oid); VISIT(self->cache); #undef VISIT return 0; } /* convert_name() returns a new reference to a string name or sets an exception and returns NULL. */ static PyObject * convert_name(PyObject *name) { #ifdef Py_USING_UNICODE /* The Unicode to string conversion is done here because the existing tp_setattro slots expect a string object as name and we wouldn't want to break those. */ if (PyUnicode_Check(name)) { name = PyUnicode_AsEncodedString(name, NULL, NULL); } else #endif if (!PyBytes_Check(name)) { PyErr_SetString(PyExc_TypeError, "attribute name must be a string"); return NULL; } else Py_INCREF(name); return name; } /* Returns true if the object requires unghostification. 
There are several special attributes that we allow access to without requiring that the object be unghostified: __class__ __del__ __dict__ __of__ __setstate__ */ static int unghost_getattr(const char *s) { if (*s++ != '_') return 1; if (*s == 'p') { s++; if (*s == '_') return 0; /* _p_ */ else return 1; } else if (*s == '_') { s++; switch (*s) { case 'c': return strcmp(s, "class__"); case 'd': s++; if (!strcmp(s, "el__")) return 0; /* __del__ */ if (!strcmp(s, "ict__")) return 0; /* __dict__ */ return 1; case 'o': return strcmp(s, "of__"); case 's': return strcmp(s, "setstate__"); default: return 1; } } return 1; } static PyObject* Per_getattro(cPersistentObject *self, PyObject *name) { PyObject *result = NULL; /* guilty until proved innocent */ PyObject *converted; char *s; converted = convert_name(name); if (!converted) goto Done; s = PyBytes_AS_STRING(converted); if (unghost_getattr(s)) { if (unghostify(self) < 0) goto Done; accessed(self); } result = PyObject_GenericGetAttr((PyObject *)self, name); Done: Py_XDECREF(converted); return result; } /* Exposed as _p_getattr method. Test whether base getattr should be used */ static PyObject * Per__p_getattr(cPersistentObject *self, PyObject *name) { PyObject *result = NULL; /* guilty until proved innocent */ PyObject *converted; char *s; converted = convert_name(name); if (!converted) goto Done; s = PyBytes_AS_STRING(converted); if (*s != '_' || unghost_getattr(s)) { if (unghostify(self) < 0) goto Done; accessed(self); result = Py_False; } else result = Py_True; Py_INCREF(result); Done: Py_XDECREF(converted); return result; } /* TODO: we should probably not allow assignment of __class__ and __dict__. 
*/ static int Per_setattro(cPersistentObject *self, PyObject *name, PyObject *v) { int result = -1; /* guilty until proved innocent */ PyObject *converted; char *s; converted = convert_name(name); if (!converted) goto Done; s = PyBytes_AS_STRING(converted); if (strncmp(s, "_p_", 3) != 0) { if (unghostify(self) < 0) goto Done; accessed(self); if (strncmp(s, "_v_", 3) != 0 && self->state != cPersistent_CHANGED_STATE) { if (changed(self) < 0) goto Done; } } result = PyObject_GenericSetAttr((PyObject *)self, name, v); Done: Py_XDECREF(converted); return result; } static int Per_p_set_or_delattro(cPersistentObject *self, PyObject *name, PyObject *v) { int result = -1; /* guilty until proved innocent */ PyObject *converted; char *s; converted = convert_name(name); if (!converted) goto Done; s = PyBytes_AS_STRING(converted); if (strncmp(s, "_p_", 3)) { if (unghostify(self) < 0) goto Done; accessed(self); result = 0; } else { if (PyObject_GenericSetAttr((PyObject *)self, name, v) < 0) goto Done; result = 1; } Done: Py_XDECREF(converted); return result; } static PyObject * Per__p_setattr(cPersistentObject *self, PyObject *args) { PyObject *name, *v, *result; int r; if (!PyArg_ParseTuple(args, "OO:_p_setattr", &name, &v)) return NULL; r = Per_p_set_or_delattro(self, name, v); if (r < 0) return NULL; result = r ? Py_True : Py_False; Py_INCREF(result); return result; } static PyObject * Per__p_delattr(cPersistentObject *self, PyObject *name) { int r; PyObject *result; r = Per_p_set_or_delattro(self, name, NULL); if (r < 0) return NULL; result = r ? Py_True : Py_False; Py_INCREF(result); return result; } static PyObject * Per_get_changed(cPersistentObject *self) { if (self->state < 0) { Py_INCREF(Py_None); return Py_None; } return PyBool_FromLong(self->state == cPersistent_CHANGED_STATE); } static int Per_set_changed(cPersistentObject *self, PyObject *v) { int deactivate = 0; int true; if (!v) { /* delattr is used to invalidate an object even if it has changed. 
*/ if (self->state != cPersistent_GHOST_STATE) self->state = cPersistent_UPTODATE_STATE; deactivate = 1; } else if (v == Py_None) deactivate = 1; if (deactivate) { PyObject *res, *meth; meth = PyObject_GetAttr((PyObject *)self, py__p_deactivate); if (meth == NULL) return -1; res = PyObject_CallObject(meth, NULL); if (res) Py_DECREF(res); else { /* an error occured in _p_deactivate(). It's not clear what we should do here. The code is obviously ignoring the exception, but it shouldn't return 0 for a getattr and set an exception. The simplest change is to clear the exception, but that simply masks the error. This prints an error to stderr just like exceptions in __del__(). It would probably be better to log it but that would be painful from C. */ PyErr_WriteUnraisable(meth); } Py_DECREF(meth); return 0; } /* !deactivate. If passed a true argument, mark self as changed (starting * with ZODB 3.6, that includes activating the object if it's a ghost). * If passed a false argument, and the object isn't a ghost, set the * state as up-to-date. */ true = PyObject_IsTrue(v); if (true == -1) return -1; if (true) { if (self->state < 0) { if (unghostify(self) < 0) return -1; } return changed(self); } /* We were passed a false, non-None argument. If we're not a ghost, * mark self as up-to-date. */ if (self->state >= 0) self->state = cPersistent_UPTODATE_STATE; return 0; } static PyObject * Per_get_oid(cPersistentObject *self) { PyObject *oid = self->oid ? 
self->oid : Py_None; Py_INCREF(oid); return oid; } static int Per_set_oid(cPersistentObject *self, PyObject *v) { if (self->cache) { int result; if (v == NULL) { PyErr_SetString(PyExc_ValueError, "can't delete _p_oid of cached object"); return -1; } result = PyObject_RichCompareBool(self->oid, v, Py_NE); if (result < 0) return -1; if (result) { PyErr_SetString(PyExc_ValueError, "can not change _p_oid of cached object"); return -1; } } Py_XDECREF(self->oid); Py_XINCREF(v); self->oid = v; return 0; } static PyObject * Per_get_jar(cPersistentObject *self) { PyObject *jar = self->jar ? self->jar : Py_None; Py_INCREF(jar); return jar; } static int Per_set_jar(cPersistentObject *self, PyObject *v) { if (self->cache) { int result; if (v == NULL) { PyErr_SetString(PyExc_ValueError, "can't delete _p_jar of cached object"); return -1; } result = PyObject_RichCompareBool(self->jar, v, Py_NE); if (result < 0) return -1; if (result) { PyErr_SetString(PyExc_ValueError, "can not change _p_jar of cached object"); return -1; } } Py_XDECREF(self->jar); Py_XINCREF(v); self->jar = v; return 0; } static PyObject * Per_get_serial(cPersistentObject *self) { return PyBytes_FromStringAndSize(self->serial, 8); } static int Per_set_serial(cPersistentObject *self, PyObject *v) { if (v) { if (PyBytes_Check(v) && PyBytes_GET_SIZE(v) == 8) memcpy(self->serial, PyBytes_AS_STRING(v), 8); else { PyErr_SetString(PyExc_ValueError, "_p_serial must be an 8-character bytes array"); return -1; } } else memset(self->serial, 0, 8); return 0; } static PyObject * Per_get_mtime(cPersistentObject *self) { PyObject *t, *v; if (unghostify(self) < 0) return NULL; accessed(self); if (memcmp(self->serial, "\0\0\0\0\0\0\0\0", 8) == 0) { Py_INCREF(Py_None); return Py_None; } #ifdef PY3K t = PyObject_CallFunction(TimeStamp, "y#", self->serial, 8); #else t = PyObject_CallFunction(TimeStamp, "s#", self->serial, 8); #endif if (!t) { return NULL; } v = PyObject_CallMethod(t, "timeTime", ""); Py_DECREF(t); return v; } 
/* _p_state getter: the raw state code (-1 ghost, 0 up-to-date, 1 changed,
   2 sticky) as a Python int. */
static PyObject *
Per_get_state(cPersistentObject *self)
{
    return INT_FROM_LONG(self->state);
}

/* _p_estimated_size getter: the stored 24-bit value scaled back to bytes. */
static PyObject *
Per_get_estimated_size(cPersistentObject *self)
{
    return INT_FROM_LONG(_estimated_size_in_bytes(self->estimated_size));
}

/* _p_estimated_size setter: accepts a non-negative integer (stored in
   64-byte units, see cPersistence.h); deletion resets it to 0.
   Returns 0 on success, -1 with ValueError/TypeError on bad input. */
static int
Per_set_estimated_size(cPersistentObject *self, PyObject *v)
{
    if (v)
    {
        if (INT_CHECK(v))
        {
            long lv = INT_AS_LONG(v);
            if (lv < 0)
            {
                PyErr_SetString(PyExc_ValueError,
                                "_p_estimated_size must not be negative");
                return -1;
            }
            self->estimated_size = _estimated_size_in_24_bits(lv);
        }
        else
        {
            PyErr_SetString(PyExc_TypeError,
                            "_p_estimated_size must be an integer");
            return -1;
        }
    }
    else
        self->estimated_size = 0;
    return 0;
}

/* _p_status getter: human-readable state -- 'unsaved' when there is no
   jar, otherwise one of 'ghost'/'sticky'/'saved'/'changed'.
   NOTE(review): if state held an unexpected value this would return NULL
   without setting an exception; the four cases cover all defined states. */
static PyObject *
Per_get_status(cPersistentObject *self)
{
    PyObject *result = NULL;

    if (!self->jar)
    {
        result = py_unsaved;
    }
    else
    {
        switch (self->state)
        {
            case cPersistent_GHOST_STATE:
                result = py_ghost;
                break;
            case cPersistent_STICKY_STATE:
                result = py_sticky;
                break;
            case cPersistent_UPTODATE_STATE:
                result = py_saved;
                break;
            case cPersistent_CHANGED_STATE:
                result = py_changed;
                break;
        }
    }

    if (result)
    {
        Py_INCREF(result);
    }
    return result;
}

/* _p_sticky getter: True iff the object is in the sticky state. */
static PyObject*
Per_get_sticky(cPersistentObject *self)
{
    return PyBool_FromLong(self->state == cPersistent_STICKY_STATE);
}

/* _p_sticky setter: toggles between sticky and up-to-date.  Raises
   ValueError for ghosts; silently does nothing when there is no jar.
   NOTE(review): a -1 error return from PyObject_IsTrue() is treated as
   true and the pending exception is left set -- confirm upstream intent. */
static int
Per_set_sticky(cPersistentObject *self, PyObject* value)
{
    if (self->state < 0)
    {
        PyErr_SetString(PyExc_ValueError,
                        "can't set sticky flag on a ghost");
        return -1;
    }
    if (self->jar)
    {
        if (PyObject_IsTrue(value))
        {
            self->state = cPersistent_STICKY_STATE;
        }
        else
        {
            self->state = cPersistent_UPTODATE_STATE;
        }
    }
    return 0;
}

/* Property table for the _p_* management attributes. */
static PyGetSetDef Per_getsets[] =
{
    {"_p_changed", (getter)Per_get_changed, (setter)Per_set_changed},
    {"_p_jar", (getter)Per_get_jar, (setter)Per_set_jar},
    {"_p_mtime", (getter)Per_get_mtime},
    {"_p_oid", (getter)Per_get_oid, (setter)Per_set_oid},
    {"_p_serial", (getter)Per_get_serial, (setter)Per_set_serial},
    {"_p_state", (getter)Per_get_state},
    {"_p_estimated_size",
     (getter)Per_get_estimated_size, (setter)Per_set_estimated_size
    },
    {"_p_status", (getter)Per_get_status},
    {"_p_sticky", (getter)Per_get_sticky, (setter)Per_set_sticky},
    {NULL}
};

/* Method table for Persistent. */
static struct PyMethodDef Per_methods[] =
{
    {"_p_deactivate", (PyCFunction)Per__p_deactivate, METH_NOARGS,
     "_p_deactivate() -- Deactivate the object"},
    {"_p_activate", (PyCFunction)Per__p_activate, METH_NOARGS,
     "_p_activate() -- Activate the object"},
    {"_p_invalidate", (PyCFunction)Per__p_invalidate, METH_NOARGS,
     "_p_invalidate() -- Invalidate the object"},
    {"_p_getattr", (PyCFunction)Per__p_getattr, METH_O,
     "_p_getattr(name) -- Test whether the base class must handle the name\n"
     "\n"
     "The method unghostifies the object, if necessary.\n"
     "The method records the object access, if necessary.\n"
     "\n"
     "This method should be called by subclass __getattribute__\n"
     "implementations before doing anything else. If the method\n"
     "returns True, then __getattribute__ implementations must delegate\n"
     "to the base class, Persistent.\n"
    },
    {"_p_setattr", (PyCFunction)Per__p_setattr, METH_VARARGS,
     "_p_setattr(name, value) -- Save persistent meta data\n"
     "\n"
     "This method should be called by subclass __setattr__ implementations\n"
     "before doing anything else. If it returns true, then the attribute\n"
     "was handled by the base class.\n"
     "\n"
     "The method unghostifies the object, if necessary.\n"
     "The method records the object access, if necessary.\n"
    },
    {"_p_delattr", (PyCFunction)Per__p_delattr, METH_O,
     "_p_delattr(name) -- Delete persistent meta data\n"
     "\n"
     "This method should be called by subclass __delattr__ implementations\n"
     "before doing anything else. If it returns true, then the attribute\n"
     "was handled by the base class.\n"
     "\n"
     "The method unghostifies the object, if necessary.\n"
     "The method records the object access, if necessary.\n"
    },
    {"__getstate__", (PyCFunction)Per__getstate__, METH_NOARGS,
     pickle___getstate__doc },
    {"__setstate__", (PyCFunction)pickle___setstate__, METH_O,
     pickle___setstate__doc},
    {"__reduce__", (PyCFunction)pickle___reduce__, METH_NOARGS,
     pickle___reduce__doc},

    {NULL, NULL} /* sentinel */
};

/* This module is compiled as a shared library.  Some compilers don't
   allow addresses of Python objects defined in other libraries to be
   used in static initializers here.  The DEFERRED_ADDRESS macro is
   used to tag the slots where such addresses appear; the module init
   function must fill in the tagged slots at runtime.  The argument is
   for documentation -- the macro ignores it.
*/
#define DEFERRED_ADDRESS(ADDR) 0

/* The persistent.Persistent type object; deferred slots (base type, etc.)
   are completed in module_init(). */
static PyTypeObject Pertype =
{
    PyVarObject_HEAD_INIT(DEFERRED_ADDRESS(&PyType_Type), 0)
    "persistent.Persistent",            /* tp_name */
    sizeof(cPersistentObject),          /* tp_basicsize */
    0,                                  /* tp_itemsize */
    (destructor)Per_dealloc,            /* tp_dealloc */
    0,                                  /* tp_print */
    0,                                  /* tp_getattr */
    0,                                  /* tp_setattr */
    0,                                  /* tp_compare */
    0,                                  /* tp_repr */
    0,                                  /* tp_as_number */
    0,                                  /* tp_as_sequence */
    0,                                  /* tp_as_mapping */
    0,                                  /* tp_hash */
    0,                                  /* tp_call */
    0,                                  /* tp_str */
    (getattrofunc)Per_getattro,         /* tp_getattro */
    (setattrofunc)Per_setattro,         /* tp_setattro */
    0,                                  /* tp_as_buffer */
    Py_TPFLAGS_DEFAULT |
    Py_TPFLAGS_BASETYPE |
    Py_TPFLAGS_HAVE_GC,                 /* tp_flags */
    0,                                  /* tp_doc */
    (traverseproc)Per_traverse,         /* tp_traverse */
    0,                                  /* tp_clear */
    0,                                  /* tp_richcompare */
    0,                                  /* tp_weaklistoffset */
    0,                                  /* tp_iter */
    0,                                  /* tp_iternext */
    Per_methods,                        /* tp_methods */
    0,                                  /* tp_members */
    Per_getsets,                        /* tp_getset */
};

/* End of code for Persistent objects */
/* -------------------------------------------------------- */

typedef int (*intfunctionwithpythonarg)(PyObject*);

/* Load the object's state if necessary and become sticky
*/
static int
Per_setstate(cPersistentObject *self)
{
    if (unghostify(self) < 0)
        return -1;
    self->state = cPersistent_STICKY_STATE;
    return 0;
}

/* simple_new(type) -- create an instance via the type's __new__ with no
   arguments; used to build ghost instances without running __init__. */
static PyObject *
simple_new(PyObject *self, PyObject *type_object)
{
    if (!PyType_Check(type_object))
    {
        PyErr_SetString(PyExc_TypeError,
                        "simple_new argument must be a type object.");
        return NULL;
    }
    return PyType_GenericNew((PyTypeObject *)type_object, NULL, NULL);
}

static PyMethodDef cPersistence_methods[] =
{
    {"simple_new", simple_new, METH_O,
     "Create an object by simply calling a class's __new__ method without "
     "arguments."},
    {NULL, NULL}
};

/* The C API vtable exported to other extension modules (e.g. BTrees,
   cPickleCache) through the module's "CAPI" capsule/CObject. */
static cPersistenceCAPIstruct
truecPersistenceCAPI = {
    &Pertype,
    (getattrofunc)Per_getattro,   /*tp_getattr with object key*/
    (setattrofunc)Per_setattro,   /*tp_setattr with object key*/
    changed,
    accessed,
    ghostify,
    (intfunctionwithpythonarg)Per_setstate,
    NULL, /* The percachedel slot is initialized in cPickleCache.c when
             the module is loaded.  It uses a function in a different
             shared library. */
    readCurrent
};

#ifdef PY3K
static struct PyModuleDef moduledef =
{
    PyModuleDef_HEAD_INIT,
    "cPersistence",                     /* m_name */
    cPersistence_doc_string,            /* m_doc */
    -1,                                 /* m_size */
    cPersistence_methods,               /* m_methods */
    NULL,                               /* m_reload */
    NULL,                               /* m_traverse */
    NULL,                               /* m_clear */
    NULL,                               /* m_free */
};
#endif

/* Shared Python-2/3 module initialization: readies Pertype, publishes
   Persistent, the CAPI pointer, the state constants, and caches
   copyreg._slotnames, copyreg.__newobj__ and TimeStamp.
   Returns the module, or NULL with an exception set. */
static PyObject*
module_init(void)
{
    PyObject *module, *ts_module, *capi;
    PyObject *copy_reg;

    if (init_strings() < 0)
        return NULL;

#ifdef PY3K
    module = PyModule_Create(&moduledef);
#else
    module = Py_InitModule3("cPersistence", cPersistence_methods,
                            cPersistence_doc_string);
#endif

    /* Fill in the DEFERRED_ADDRESS slots before readying the type. */
#ifdef PY3K
    ((PyObject*)&Pertype)->ob_type = &PyType_Type;
#else
    Pertype.ob_type = &PyType_Type;
#endif
    Pertype.tp_new = PyType_GenericNew;
    if (PyType_Ready(&Pertype) < 0)
        return NULL;
    if (PyModule_AddObject(module, "Persistent", (PyObject *)&Pertype) < 0)
        return NULL;

    cPersistenceCAPI = &truecPersistenceCAPI;
#ifdef PY3K
    capi = PyCapsule_New(cPersistenceCAPI, CAPI_CAPSULE_NAME, NULL);
#else
    capi = PyCObject_FromVoidPtr(cPersistenceCAPI, NULL);
#endif
    if (!capi)
        return NULL;
    if (PyModule_AddObject(module, "CAPI", capi) < 0)
        return NULL;

    if (PyModule_AddIntConstant(module, "GHOST", cPersistent_GHOST_STATE) < 0)
        return NULL;

    if (PyModule_AddIntConstant(module, "UPTODATE",
                                cPersistent_UPTODATE_STATE) < 0)
        return NULL;

    if (PyModule_AddIntConstant(module, "CHANGED",
                                cPersistent_CHANGED_STATE) < 0)
        return NULL;

    if (PyModule_AddIntConstant(module, "STICKY",
                                cPersistent_STICKY_STATE) < 0)
        return NULL;

    py_simple_new = PyObject_GetAttrString(module, "simple_new");
    if (!py_simple_new)
        return NULL;

#ifdef PY3K
    copy_reg = PyImport_ImportModule("copyreg");
#else
    copy_reg = PyImport_ImportModule("copy_reg");
#endif
    if (!copy_reg)
        return NULL;

    copy_reg_slotnames = PyObject_GetAttrString(copy_reg, "_slotnames");
    if (!copy_reg_slotnames)
    {
        Py_DECREF(copy_reg);
        return NULL;
    }

    __newobj__ = PyObject_GetAttrString(copy_reg, "__newobj__");
    if (!__newobj__)
    {
        Py_DECREF(copy_reg);
        return NULL;
    }

    if (!TimeStamp)
    {
        ts_module = PyImport_ImportModule("persistent.timestamp");
        if (!ts_module)
            return NULL;
        TimeStamp = PyObject_GetAttrString(ts_module, "TimeStamp");
        Py_DECREF(ts_module);
        /* fall through to immediate return on error */
    }

    return module;
}

#ifdef PY3K
PyMODINIT_FUNC
PyInit_cPersistence(void)
{
    return module_init();
}
#else
PyMODINIT_FUNC
initcPersistence(void)
{
    module_init();
}
#endif
/* NOTE: tar archive member boundary (extraction residue) -- the header file
   persistent/cPersistence.h follows. */
persistent-4.2.2/persistent/cPersistence.h0000644000076600000240000001177212577543740020640 0ustar jimstaff00000000000000/*****************************************************************************

  Copyright (c) 2001, 2002 Zope Foundation and Contributors.
  All Rights Reserved.

  This software is subject to the provisions of the Zope Public License,
  Version 2.1 (ZPL).  A copy of the ZPL should accompany this distribution.
THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
  WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
  WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
  FOR A PARTICULAR PURPOSE

 ****************************************************************************/

#ifndef CPERSISTENCE_H
#define CPERSISTENCE_H

#include "_compat.h"
#include "bytesobject.h"

#include "ring.h"

/* Leading fields shared by the pickle cache object (ccobject in
   cPickleCache.c); both struct layouts must start with these. */
#define CACHE_HEAD \
    PyObject_HEAD \
    CPersistentRing ring_home; \
    int non_ghost_count; \
    Py_ssize_t total_estimated_size;

struct ccobject_head_struct;

typedef struct ccobject_head_struct PerCache;

/* How big is a persistent object?

   12  PyGC_Head is two pointers and an int
    8  PyObject_HEAD is an int and a pointer

   12  jar, oid, cache pointers
    8  ring struct
    8  serialno
    4  state + extra
    4  size info

  (56) so far

    4  dict ptr
    4  weaklist ptr
  -------------------------
   68  only need 62, but obmalloc rounds up to multiple of eight

  Even a ghost requires 64 bytes.  It's possible to make a persistent
  instance with slots and no dict, which changes the storage needed.
*/

#define cPersistent_HEAD \
    PyObject_HEAD \
    PyObject *jar; \
    PyObject *oid; \
    PerCache *cache; \
    CPersistentRing ring; \
    char serial[8]; \
    signed state:8; \
    unsigned estimated_size:24;

/* We recently added estimated_size.  We originally added it as a new
   unsigned long field after a signed char state field and a
   3-character reserved field.  This didn't work because there are
   packages in the wild that have their own copies of cPersistence.h
   that didn't see the update.

   To get around this, we used the reserved space by making
   estimated_size a 24-bit bit field in the space occupied by the old
   3-character reserved field.  To fit in 24 bits, we made the units
   of estimated_size 64-character blocks.  This allows us to handle
   up to a GB.  We should never see that, but to be paranoid, we also
   truncate sizes greater than 1GB.  We also set the minimum size to
   64 bytes.

   We use the _estimated_size_in_24_bits and _estimated_size_in_bytes
   macros both to avoid repetition and to make intent a little clearer.
*/
#define _estimated_size_in_24_bits(I) ((I) > 1073741696 ? 16777215 : (I)/64+1)
#define _estimated_size_in_bytes(I) ((I)*64)

/* Object life-cycle states stored in cPersistentObject.state. */
#define cPersistent_GHOST_STATE -1
#define cPersistent_UPTODATE_STATE 0
#define cPersistent_CHANGED_STATE 1
#define cPersistent_STICKY_STATE 2

typedef struct {
    cPersistent_HEAD
} cPersistentObject;

typedef void (*percachedelfunc)(PerCache *, PyObject *);

/* C API vtable published by cPersistence via its "CAPI" module attribute. */
typedef struct {
    PyTypeObject *pertype;
    getattrofunc getattro;
    setattrofunc setattro;
    int (*changed)(cPersistentObject*);
    void (*accessed)(cPersistentObject*);
    void (*ghostify)(cPersistentObject*);
    int (*setstate)(PyObject*);
    percachedelfunc percachedel;
    int (*readCurrent)(cPersistentObject*);
} cPersistenceCAPIstruct;

#define cPersistenceType cPersistenceCAPI->pertype

#ifndef DONT_USE_CPERSISTENCECAPI
static cPersistenceCAPIstruct *cPersistenceCAPI;
#endif

/* Note: spelling "cPersistance" is as in the original source. */
#define cPersistanceModuleName "cPersistence"

#define PER_TypeCheck(O) PyObject_TypeCheck((O), cPersistenceCAPI->pertype)

/* Activate O if it is a ghost (returning R on failure) and make an
   up-to-date O sticky so it cannot be ghostified while in use. */
#define PER_USE_OR_RETURN(O,R) {if((O)->state==cPersistent_GHOST_STATE && cPersistenceCAPI->setstate((PyObject*)(O)) < 0) return (R); else if ((O)->state==cPersistent_UPTODATE_STATE) (O)->state=cPersistent_STICKY_STATE;}

#define PER_CHANGED(O) (cPersistenceCAPI->changed((cPersistentObject*)(O)))

#define PER_READCURRENT(O, E) \
  if (cPersistenceCAPI->readCurrent((cPersistentObject*)(O)) < 0) { E; }

#define PER_GHOSTIFY(O) (cPersistenceCAPI->ghostify((cPersistentObject*)(O)))

/* If the object is sticky, make it non-sticky, so that it can be ghostified.
   The value is not meaningful
*/
#define PER_ALLOW_DEACTIVATION(O) ((O)->state==cPersistent_STICKY_STATE && ((O)->state=cPersistent_UPTODATE_STATE))

#define PER_PREVENT_DEACTIVATION(O)  ((O)->state==cPersistent_UPTODATE_STATE && ((O)->state=cPersistent_STICKY_STATE))

/*
   Make a persistent object usable from C by:

   - Making sure it is not a ghost

   - Making it sticky.

   IMPORTANT: If you call this and don't call PER_ALLOW_DEACTIVATION,
              your object will not be ghostified.

   PER_USE returns a 1 on success and 0 failure, where failure means
   error.
*/
#define PER_USE(O) \
(((O)->state != cPersistent_GHOST_STATE \
  || (cPersistenceCAPI->setstate((PyObject*)(O)) >= 0)) \
 ? (((O)->state==cPersistent_UPTODATE_STATE) \
    ? ((O)->state=cPersistent_STICKY_STATE) : 1) : 0)

#define PER_ACCESSED(O)  (cPersistenceCAPI->accessed((cPersistentObject*)(O)))

#endif
/* NOTE: tar archive member boundary (extraction residue) -- the source file
   persistent/cPickleCache.c follows. */
persistent-4.2.2/persistent/cPickleCache.c0000644000076600000240000012151612577543740020500 0ustar jimstaff00000000000000/*****************************************************************************

  Copyright (c) 2001, 2002 Zope Foundation and Contributors.
  All Rights Reserved.

  This software is subject to the provisions of the Zope Public License,
  Version 2.1 (ZPL).  A copy of the ZPL should accompany this distribution.
  THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
  WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
  WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
  FOR A PARTICULAR PURPOSE

 ****************************************************************************/

/*

Objects are stored under three different regimes:

Regime 1: Persistent Classes

Persistent Classes are part of ZClasses. They are stored in the
self->data dictionary, and are never garbage collected.

The klass_items() method returns a sequence of (oid,object) tuples for
every Persistent Class, which should make it possible to implement
garbage collection in Python if necessary.
Regime 2: Ghost Objects

There is no benefit to keeping a ghost object which has no external
references; therefore a weak reference scheme is used to ensure that
ghost objects are removed from memory as soon as possible, when the
last external reference is lost.

Ghost objects are stored in the self->data dictionary. Normally a
dictionary keeps a strong reference on its values; however, this
reference count is 'stolen'.

This weak reference scheme leaves a dangling reference, in the
dictionary, when the last external reference is lost. To clean up this
dangling reference the persistent object dealloc function calls
self->cache->_oid_unreferenced(self->oid). The cache looks up the oid
in the dictionary, ensures it points to an object whose reference
count is zero, then removes it from the dictionary. Before removing
the object from the dictionary it must temporarily resurrect the
object in much the same way that class instances are resurrected
before their __del__ is called.

Since ghost objects are stored under a different regime to non-ghost
objects, an extra ghostify function in cPersistenceAPI replaces
self->state=GHOST_STATE assignments that were common in other
persistent classes (such as BTrees).

Regime 3: Non-Ghost Objects

Non-ghost objects are stored in two data structures: the dictionary
mapping oids to objects and a doubly-linked list that encodes the
order in which the objects were accessed. The dictionary reference is
borrowed, as it is for ghosts. The list reference is a new reference;
the list stores recently used objects, even if they are otherwise
unreferenced, to avoid loading the object from the database again.

The doubly-linked-list nodes contain next and previous pointers
linking together the cache and all non-ghost persistent objects.

The node embedded in the cache is the home position. On every
attribute access a non-ghost object will relink itself just behind the
home position in the ring.
Objects accessed least recently will eventually find themselves
positioned after the home position. Occasionally other nodes are
temporarily inserted in the ring as position markers. The cache
contains a ring_lock flag which must be set and unset before and after
doing so. Only if the flag is unset can the cache assume that all
nodes are either its own home node, or nodes from persistent objects.
This assumption is useful during the garbage collection process.

The number of non-ghost objects is counted in self->non_ghost_count.
The garbage collection process consists of traversing the ring, and
deactivating (that is, turning into a ghost) every object until
self->non_ghost_count is down to the target size, or until it
reaches the home position again. Note that objects in the sticky or
changed states are still kept in the ring, however they cannot be
deactivated. The garbage collection process must skip such objects,
rather than deactivating them.

*/

static char cPickleCache_doc_string[] =
  "Defines the PickleCache used by ZODB Connection objects.\n"
  "\n"
  "$Id$\n";

#define DONT_USE_CPERSISTENCECAPI
#include "cPersistence.h"
#include "structmember.h"
/* NOTE(review): the two system #include lines below lost their <...> header
   names during text extraction (angle-bracketed content stripped); confirm
   the header names against the pristine upstream file. */
#include
#include
#undef Py_FindMethod

/* Python 2.4 backward compat */
#if PY_MAJOR_VERSION <= 2 && PY_MINOR_VERSION < 5
#define Py_ssize_t int
typedef Py_ssize_t (*lenfunc)(PyObject *);
#endif

/* Python string objects to speed lookups; set by module init. */
static PyObject *py__p_changed;
static PyObject *py__p_deactivate;
static PyObject *py__p_jar;
static PyObject *py__p_oid;

/* cPersistence's C API vtable, fetched when this module is initialized. */
static cPersistenceCAPIstruct *capi;

/* This object is the pickle cache.  The CACHE_HEAD macro guarantees
   that layout of this struct is the same as the start of
   ccobject_head in cPersistence.c */
typedef struct {
    CACHE_HEAD
    int klass_count;                    /* count of persistent classes */
    PyObject *data;                     /* oid -> object dict */
    PyObject *jar;                      /* Connection object */
    int cache_size;                     /* target number of items in cache */
    Py_ssize_t cache_size_bytes;        /* target total estimated size of
                                           items in cache */

    /* Most of the time the ring contains only:
       * many nodes corresponding to persistent objects
       * one 'home' node from the cache.

    In some cases it is handy to temporarily add other types
    of node into the ring as placeholders. 'ring_lock' is a boolean
    indicating that someone has already done this. Currently this
    is only used by the garbage collection code. */
    int ring_lock;

    /* 'cache_drain_resistance' controls how quickly the cache size will drop
    when it is smaller than the configured size. A value of zero means it
    will not drop below the configured size (suitable for most caches).
    Otherwise, it will remove cache_non_ghost_count/cache_drain_resistance
    items from the cache every time (suitable for rarely used caches, such
    as those associated with Zope versions. */
    int cache_drain_resistance;
} ccobject;

static int cc_ass_sub(ccobject *self, PyObject *key, PyObject *v);

/* ---------------------------------------------------------------- */

/* Recover the cPersistentObject that embeds ring node 'here'.
   NOTE(review): the HERE parameter is ignored; the expansion uses the
   caller-scope variable named 'here' -- every call site does pass a
   variable literally named 'here', so this works, but it is fragile. */
#define OBJECT_FROM_RING(SELF, HERE) \
  ((cPersistentObject *)(((char *)here) - offsetof(cPersistentObject, ring)))

/* Insert self into the ring, following after. */
static void
insert_after(CPersistentRing *self, CPersistentRing *after)
{
    assert(self != NULL);
    assert(after != NULL);
    self->r_prev = after;
    self->r_next = after->r_next;
    after->r_next->r_prev = self;
    after->r_next = self;
}

/* Remove self from the ring.
*/
static void
unlink_from_ring(CPersistentRing *self)
{
    assert(self != NULL);
    self->r_prev->r_next = self->r_next;
    self->r_next->r_prev = self->r_prev;
}

/* Ghostify up-to-date objects, least recently used first, until the cache
   holds at most 'target' non-ghosts (and, if target_bytes is non-zero, at
   most target_bytes of estimated size).  Returns 0 on success, -1 on
   error.  Must be called with the ring lock held. */
static int
scan_gc_items(ccobject *self, int target, Py_ssize_t target_bytes)
{
    /* This function must only be called with the ring lock held,
       because it places non-object placeholders in the ring.
    */
    cPersistentObject *object;
    CPersistentRing *here;
    CPersistentRing before_original_home;
    int result = -1;   /* guilty until proved innocent */

    /* Scan the ring, from least to most recently used, deactivating
     * up-to-date objects, until we either find the ring_home again or
     * or we've ghosted enough objects to reach the target size.
     * Tricky:  __getattr__ and __del__ methods can do anything, and in
     * particular if we ghostify an object with a __del__ method, that method
     * can load the object again, putting it back into the MRU part of the
     * ring.  Waiting to find ring_home again can thus cause an infinite
     * loop (Collector #1208).  So before_original_home records the MRU
     * position we start with, and we stop the scan when we reach that.
     */
    insert_after(&before_original_home, self->ring_home.r_prev);
    here = self->ring_home.r_next;   /* least recently used object */
    while (here != &before_original_home &&
           (self->non_ghost_count > target
            || (target_bytes && self->total_estimated_size > target_bytes)
            )
           )
    {
        assert(self->ring_lock);
        assert(here != &self->ring_home);

        /* At this point we know that the ring only contains nodes
           from persistent objects, plus our own home node.  We know
           this because the ring lock is held.  We can safely assume
           the current ring node is a persistent object now we know it
           is not the home */
        object = OBJECT_FROM_RING(self, here);

        if (object->state == cPersistent_UPTODATE_STATE)
        {
            CPersistentRing placeholder;
            PyObject *method;
            PyObject *temp;
            int error_occurred = 0;

            /* deactivate it. This is the main memory saver. */

            /* Add a placeholder, a dummy node in the ring.  We need
               to do this to mark our position in the ring.  It is
               possible that the PyObject_GetAttr() call below will
               invoke a __getattr__() hook in Python.  Also possible
               that deactivation will lead to a __del__ method call.
               So another thread might run, and mutate the ring as a side
               effect of object accesses.  There's no predicting then where
               in the ring here->next will point after that.  The
               placeholder won't move as a side effect of calling Python
               code.
            */
            insert_after(&placeholder, here);
            method = PyObject_GetAttr((PyObject *)object, py__p_deactivate);
            if (method == NULL)
                error_occurred = 1;
            else
            {
                temp = PyObject_CallObject(method, NULL);
                Py_DECREF(method);
                if (temp == NULL)
                    error_occurred = 1;
                else
                    Py_DECREF(temp);
            }

            /* Resume the scan from the placeholder, which Python code
               cannot have moved, then drop the placeholder. */
            here = placeholder.r_next;
            unlink_from_ring(&placeholder);
            if (error_occurred)
                goto Done;
        }
        else
            here = here->r_next;
    }
    result = 0;
Done:
    unlink_from_ring(&before_original_home);
    return result;
}

/* Run scan_gc_items() with the ring lock taken; silently returns None if
   the lock is already held (re-entrant GC).  Returns None, or NULL with an
   exception set. */
static PyObject *
lockgc(ccobject *self, int target_size, Py_ssize_t target_size_bytes)
{
    /* This is thread-safe because of the GIL, and there's nothing
     * in between checking the ring_lock and acquiring it that calls back
     * into Python.
     */
    if (self->ring_lock)
    {
        Py_INCREF(Py_None);
        return Py_None;
    }

    self->ring_lock = 1;
    if (scan_gc_items(self, target_size, target_size_bytes) < 0)
    {
        self->ring_lock = 0;
        return NULL;
    }
    self->ring_lock = 0;

    Py_INCREF(Py_None);
    return Py_None;
}

/* incrgc([ignored]) -- one incremental garbage-collection pass toward the
   configured cache_size / cache_size_bytes targets.  A drain-resistance
   setting can lower the effective target for rarely used caches. */
static PyObject *
cc_incrgc(ccobject *self, PyObject *args)
{
    int obsolete_arg = -999;
    int starting_size = self->non_ghost_count;
    int target_size = self->cache_size;
    Py_ssize_t target_size_bytes = self->cache_size_bytes;

    if (self->cache_drain_resistance >= 1)
    {
        /* This cache will gradually drain down to a small size. Check
           a (small) number of objects proportional to the current size */
        int target_size_2 = (starting_size - 1 -
                             starting_size / self->cache_drain_resistance);
        if (target_size_2 < target_size)
            target_size = target_size_2;
    }

    if (!PyArg_ParseTuple(args, "|i:incrgc", &obsolete_arg))
        return NULL;

    /* Any explicit argument is obsolete; warn but proceed. */
    if (obsolete_arg != -999
        &&
        (PyErr_Warn(PyExc_DeprecationWarning,
                    "No argument expected")
         < 0))
        return NULL;

    return lockgc(self, target_size, target_size_bytes);
}

/* full_sweep([dt]) -- with no (or default) argument, ghostify everything
   possible; with an argument, behave like incrgc. */
static PyObject *
cc_full_sweep(ccobject *self, PyObject *args)
{
    int dt = -999;

    /* TODO:  This should be deprecated;  */

    if (!PyArg_ParseTuple(args, "|i:full_sweep", &dt))
        return NULL;
    if (dt == -999)
        return lockgc(self, 0, 0);
    else
        return cc_incrgc(self, args);
}

/* minimize([ignored]) -- ghostify every object that can be ghosted. */
static PyObject *
cc_minimize(ccobject *self, PyObject *args)
{
    int ignored = -999;

    if (!PyArg_ParseTuple(args, "|i:minimize", &ignored))
        return NULL;

    if (ignored != -999
        &&
        (PyErr_Warn(PyExc_DeprecationWarning,
                    "No argument expected")
         < 0))
        return NULL;

    return lockgc(self, 0, 0);
}

/* Invalidate a single cache entry by oid.  Persistent classes with no
   outside references are simply dropped; other objects get their
   _p_invalidate() method called.  Returns 0 on success (including a cache
   miss), -1 on error. */
static int
_invalidate(ccobject *self, PyObject *key)
{
    static PyObject *_p_invalidate = NULL;
    PyObject *meth, *v;

    v = PyDict_GetItem(self->data, key);   /* borrowed reference */
    if (v == NULL)
        return 0;

    if (_p_invalidate == NULL)
    {
        _p_invalidate = INTERN("_p_invalidate");
        if (_p_invalidate == NULL)
        {
            /* It doesn't make any sense to ignore this error, but
               the caller ignores all errors.

               TODO: and why does it do that? This should be fixed
            */
            return -1;
        }
    }

    if (v->ob_refcnt <= 1 && PyType_Check(v))
    {
        /* This looks wrong, but it isn't. We use strong references to types
           because they don't have the ring members.

           The result is that we *never* remove classes unless
           they are modified.  We can fix this by using weakrefs uniformly.
        */
        self->klass_count--;
        return PyDict_DelItem(self->data, key);
    }

    meth = PyObject_GetAttr(v, _p_invalidate);
    if (meth == NULL)
        return -1;
    v = PyObject_CallObject(meth, NULL);
    Py_DECREF(meth);
    if (v == NULL)
        return -1;
    Py_DECREF(v);
    return 0;
}

/* invalidate(oids) -- invalidate one oid (bytes), many (any sequence, which
   is emptied as a side effect), or a dict of oids (cleared afterwards). */
static PyObject *
cc_invalidate(ccobject *self, PyObject *inv)
{
    PyObject *key, *v;
    Py_ssize_t i = 0;

    if (PyDict_Check(inv))
    {
        while (PyDict_Next(inv, &i, &key, &v))
        {
            if (_invalidate(self, key) < 0)
                return NULL;
        }
        PyDict_Clear(inv);
    }
    else
    {
        if (PyBytes_Check(inv))
        {
            if (_invalidate(self, inv) < 0)
                return NULL;
        }
        else
        {
            int l, r;

            l = PyObject_Length(inv);
            if (l < 0)
                return NULL;
            /* Iterate backwards so the trailing DelSlice below matches. */
            for (i=l; --i >= 0; )
            {
                key = PySequence_GetItem(inv, i);
                if (!key)
                    return NULL;
                r = _invalidate(self, key);
                Py_DECREF(key);
                if (r < 0)
                    return NULL;
            }
            /* Dubious:  modifying the input may be an unexpected side
               effect. */
            PySequence_DelSlice(inv, 0, l);
        }
    }

    Py_INCREF(Py_None);
    return Py_None;
}

/* get(key[, default]) -- dict-style lookup; default defaults to None. */
static PyObject *
cc_get(ccobject *self, PyObject *args)
{
    PyObject *r, *key, *d = NULL;

    if (!PyArg_ParseTuple(args, "O|O:get", &key, &d))
        return NULL;

    r = PyDict_GetItem(self->data, key);
    if (!r)
    {
        if (d)
            r = d;
        else
            r = Py_None;
    }
    Py_INCREF(r);
    return r;
}

/* items() -- delegate to the underlying oid->object dict. */
static PyObject *
cc_items(ccobject *self)
{
    return PyObject_CallMethod(self->data, "items", "");
}

/* klass_items() -- list of (oid, class) pairs for cached persistent
   classes only. */
static PyObject *
cc_klass_items(ccobject *self)
{
    PyObject *l,*k,*v;
    Py_ssize_t p = 0;

    l = PyList_New(0);
    if (l == NULL)
        return NULL;

    while (PyDict_Next(self->data, &p, &k, &v))
    {
        if(PyType_Check(v))
        {
            v = Py_BuildValue("OO", k, v);
            if (v == NULL)
            {
                Py_DECREF(l);
                return NULL;
            }
            if (PyList_Append(l, v) < 0)
            {
                Py_DECREF(v);
                Py_DECREF(l);
                return NULL;
            }
            Py_DECREF(v);
        }
    }

    return l;
}

/* debug_info() -- per-entry tuples of (oid, refcount[, type name[, state]])
   for everything in the cache; the shape varies by entry kind. */
static PyObject *
cc_debug_info(ccobject *self)
{
    PyObject *l,*k,*v;
    Py_ssize_t p = 0;

    l = PyList_New(0);
    if (l == NULL)
        return NULL;

    while (PyDict_Next(self->data, &p, &k, &v))
    {
        if (v->ob_refcnt <= 0)
            v = Py_BuildValue("Oi", k, v->ob_refcnt);

        else if (! PyType_Check(v) &&
                 (v->ob_type->tp_basicsize >= sizeof(cPersistentObject))
                 )
            v = Py_BuildValue("Oisi",
                              k, v->ob_refcnt, v->ob_type->tp_name,
                              ((cPersistentObject*)v)->state);
        else
            v = Py_BuildValue("Ois", k, v->ob_refcnt, v->ob_type->tp_name);

        if (v == NULL)
            goto err;

        /* NOTE(review): unlike cc_klass_items() above, the new reference
           returned by Py_BuildValue() is never released after a successful
           append -- this looks like a reference leak of each tuple; confirm
           against upstream before relying on it. */
        if (PyList_Append(l, v) < 0)
            goto err;
    }

    return l;

err:
    Py_DECREF(l);
    return NULL;
}

/* lru_items() -- (oid, object) pairs in ring (LRU-first) order.  Refused
   while the ring lock is held, because placeholder nodes may be present. */
static PyObject *
cc_lru_items(ccobject *self)
{
    PyObject *l;
    CPersistentRing *here;

    if (self->ring_lock)
    {
        /* When the ring lock is held, we have no way of knowing which
           ring nodes belong to persistent objects, and which are
           placeholders. */
        PyErr_SetString(PyExc_ValueError,
                        ".lru_items() is unavailable during garbage collection");
        return NULL;
    }

    l = PyList_New(0);
    if (l == NULL)
        return NULL;

    here = self->ring_home.r_next;
    while (here != &self->ring_home)
    {
        PyObject *v;
        cPersistentObject *object = OBJECT_FROM_RING(self, here);

        if (object == NULL)
        {
            Py_DECREF(l);
            return NULL;
        }
        v = Py_BuildValue("OO", object->oid, object);
        if (v == NULL)
        {
            Py_DECREF(l);
            return NULL;
        }
        if (PyList_Append(l, v) < 0)
        {
            Py_DECREF(v);
            Py_DECREF(l);
            return NULL;
        }
        Py_DECREF(v);
        here = here->r_next;
    }

    return l;
}

static void
cc_oid_unreferenced(ccobject *self, PyObject *oid)
{
    /* This is called by the persistent object deallocation function
       when the reference count on a persistent object reaches
       zero. We need to fix up our dictionary; its reference is now
       dangling because we stole its reference count. Be careful to
       not release the global interpreter lock until this is
       complete. */

    PyObject *v;

    /* If the cache has been cleared by GC, data will be NULL. */
    if (!self->data)
        return;

    v = PyDict_GetItem(self->data, oid);
    assert(v);
    assert(v->ob_refcnt == 0);
    /* Need to be very hairy here because a dictionary is about
       to decref an already deleted object.
    */

#ifdef Py_TRACE_REFS
    /* This is called from the deallocation function after the
       interpreter has untracked the reference. Track it again.
    */
    _Py_NewReference(v);
    /* Don't increment total refcount as a result of the
       shenanigans played in this function.  The _Py_NewReference()
       call above creates artificial references to v.
    */
    _Py_RefTotal--;
    assert(v->ob_type);
#else
    Py_INCREF(v);
#endif
    assert(v->ob_refcnt == 1);
    /* Increment the refcount again, because delitem is going to
       DECREF it.  If its refcount reached zero again, we'd call back to
       the dealloc function that called us.
    */
    Py_INCREF(v);

    /* TODO:  Should we call _Py_ForgetReference() on error exit? */
    if (PyDict_DelItem(self->data, oid) < 0)
        return;
    /* Drop the object's back-reference to this cache. */
    Py_DECREF((ccobject *)((cPersistentObject *)v)->cache);
    ((cPersistentObject *)v)->cache = NULL;

    assert(v->ob_refcnt == 1);

    /* Undo the temporary resurrection.
       Don't DECREF the object, because this function is called from
       the object's dealloc function. If the refcnt reaches zero, it
       will all be invoked recursively.
    */
    _Py_ForgetReference(v);
}

/* ringlen() -- count of nodes (non-ghost objects) currently in the ring. */
static PyObject *
cc_ringlen(ccobject *self)
{
    CPersistentRing *here;
    int c = 0;

    for (here = self->ring_home.r_next; here != &self->ring_home;
         here = here->r_next)
        c++;
    return INT_FROM_LONG(c);
}

/* update_object_size_estimation(oid, new_size) -- adjust the cache's
   total_estimated_size for an object it knows about.  The object's own
   estimated_size field is updated by Connection, not here. */
static PyObject *
cc_update_object_size_estimation(ccobject *self, PyObject *args)
{
    PyObject *oid;
    cPersistentObject *v;
    unsigned int new_size;

    if (!PyArg_ParseTuple(args, "OI:updateObjectSizeEstimation",
                          &oid, &new_size))
        return NULL;
    /* Note: reference borrowed */
    v = (cPersistentObject *)PyDict_GetItem(self->data, oid);
    if (v)
    {
        /* we know this object -- update our "total_size_estimation"
           we must only update when the object is in the ring
        */
        if (v->ring.r_next)
        {
            self->total_estimated_size += _estimated_size_in_bytes(
              (int)(_estimated_size_in_24_bits(new_size)) - (int)(v->estimated_size)
              );
            /* we do this in "Connection" as we need it even when the
               object is not in the cache (or not the ring)
            */
            /* v->estimated_size = new_size; */
        }
    }
    Py_RETURN_NONE;
}

/* new_ghost(oid, obj) -- adopt obj into the cache as a ghost: wires up
   _p_jar/_p_oid, stores it under oid (uncounted reference for instances),
   and sets ghost state.  obj must be a persistent class or instance with
   no oid, jar, or cache yet. */
static PyObject*
cc_new_ghost(ccobject *self, PyObject *args)
{
    PyObject *tmp, *key, *v;

    if (!PyArg_ParseTuple(args, "OO:new_ghost", &key, &v))
        return NULL;

    /* Sanity check the value given to make sure it is allowed in the cache */
    if (PyType_Check(v))
    {
        /* It's a persistent class, such as a ZClass. That's ok. */
    }
    else if (v->ob_type->tp_basicsize < sizeof(cPersistentObject))
    {
        /* If it's not an instance of a persistent class, (ie Python
           classes that derive from persistent.Persistent, BTrees,
           etc), report an error.

           TODO:  checking sizeof() seems a poor test.
        */
        PyErr_SetString(PyExc_TypeError,
                        "Cache values must be persistent objects.");
        return NULL;
    }

    /* Can't access v->oid directly because the object might be a
     *  persistent class.
     */
    tmp = PyObject_GetAttr(v, py__p_oid);
    if (tmp == NULL)
        return NULL;
    Py_DECREF(tmp);
    /* Pointer comparison after the DECREF is safe: Py_None is permanent. */
    if (tmp != Py_None)
    {
        PyErr_SetString(PyExc_AssertionError,
                        "New ghost object must not have an oid");
        return NULL;
    }

    /* useful sanity check, but not strictly an invariant of this class */
    tmp = PyObject_GetAttr(v, py__p_jar);
    if (tmp == NULL)
        return NULL;
    Py_DECREF(tmp);
    if (tmp != Py_None)
    {
        PyErr_SetString(PyExc_AssertionError,
                        "New ghost object must not have a jar");
        return NULL;
    }

    tmp = PyDict_GetItem(self->data, key);
    if (tmp)
    {
        /* NOTE(review): PyDict_GetItem returns a *borrowed* reference, so
           this Py_DECREF looks like an over-release -- confirm against
           upstream. */
        Py_DECREF(tmp);
        PyErr_SetString(PyExc_AssertionError,
                        "The given oid is already in the cache");
        return NULL;
    }

    if (PyType_Check(v))
    {
        /* Classes keep a counted dict reference and are never ghosted. */
        if (PyObject_SetAttr(v, py__p_jar, self->jar) < 0)
            return NULL;
        if (PyObject_SetAttr(v, py__p_oid, key) < 0)
            return NULL;
        if (PyDict_SetItem(self->data, key, v) < 0)
            return NULL;
        /* Keep the oid->object dict untracked (it holds uncounted refs). */
        PyObject_GC_UnTrack((void *)self->data);
        self->klass_count++;
    }
    else
    {
        cPersistentObject *p = (cPersistentObject *)v;

        if(p->cache != NULL)
        {
            PyErr_SetString(PyExc_AssertionError, "Already in a cache");
            return NULL;
        }

        if (PyDict_SetItem(self->data, key, v) < 0)
            return NULL;
        /* the dict should have a borrowed reference */
        PyObject_GC_UnTrack((void *)self->data);
        Py_DECREF(v);

        Py_INCREF(self);
        p->cache = (PerCache *)self;
        Py_INCREF(self->jar);
        p->jar = self->jar;
        Py_INCREF(key);
        p->oid = key;
        p->state = cPersistent_GHOST_STATE;
    }

    Py_RETURN_NONE;
}

static struct PyMethodDef cc_methods[] =
{
    {"items", (PyCFunction)cc_items, METH_NOARGS,
     "Return list of oid, object pairs for all items in cache."},

    {"lru_items", (PyCFunction)cc_lru_items, METH_NOARGS,
     "List (oid, object) pairs from the lru list, as 2-tuples."},

    {"klass_items", (PyCFunction)cc_klass_items, METH_NOARGS,
     "List (oid, object) pairs of cached persistent classes."},

    {"full_sweep", (PyCFunction)cc_full_sweep, METH_VARARGS,
     "full_sweep() -- Perform a full sweep of the cache."},

    {"minimize", (PyCFunction)cc_minimize, METH_VARARGS,
     "minimize([ignored]) -- Remove as many objects as possible\n\n"
     "Ghostify all objects that are not modified. Takes an optional\n"
     "argument, but ignores it."},

    {"incrgc", (PyCFunction)cc_incrgc, METH_VARARGS,
     "incrgc() -- Perform incremental garbage collection\n\n"
     "This method had been depricated!"
     "Some other implementations support an optional parameter 'n' which\n"
     "indicates a repetition count; this value is ignored."},

    {"invalidate", (PyCFunction)cc_invalidate, METH_O,
     "invalidate(oids) -- invalidate one, many, or all ids"},

    {"get", (PyCFunction)cc_get, METH_VARARGS,
     "get(key [, default]) -- get an item, or a default"},

    {"ringlen", (PyCFunction)cc_ringlen, METH_NOARGS,
     "ringlen() -- Returns number of non-ghost items in cache."},

    {"debug_info", (PyCFunction)cc_debug_info, METH_NOARGS,
     "debug_info() -- Returns debugging data about objects in the cache."},

    {"update_object_size_estimation",
     (PyCFunction)cc_update_object_size_estimation, METH_VARARGS,
     "update_object_size_estimation(oid, new_size) -- "
     "update the caches size estimation for *oid* "
     "(if this is known to the cache)."},

    {"new_ghost", (PyCFunction)cc_new_ghost, METH_VARARGS,
     "new_ghost() -- Initialize a ghost and add it to the cache."},

    {NULL, NULL} /* sentinel */
};

/* tp_init: PickleCache(jar[, cache_size[, cache_size_bytes]]). */
static int
cc_init(ccobject *self, PyObject *args, PyObject *kwds)
{
    int cache_size = 100;
    Py_ssize_t cache_size_bytes = 0;
    PyObject *jar;

    if (!PyArg_ParseTuple(args, "O|in", &jar,
                          &cache_size, &cache_size_bytes))
        return -1;

    self->jar = NULL;
    self->data = PyDict_New();
    if (self->data == NULL)
    {
        /* NOTE(review): decrefing self inside tp_init is unusual -- the
           caller normally owns that reference; confirm against upstream. */
        Py_DECREF(self);
        return -1;
    }
    /* Untrack the dict mapping oids to objects.

    The dict contains uncounted references to ghost objects, so it
    isn't safe for GC to visit it.  If GC finds an object with more
    referents that refcounts, it will die with an assertion failure.

    When the cache participates in GC, it will need to traverse the
    objects in the doubly-linked list, which will account for all the
    non-ghost objects.
    */
    PyObject_GC_UnTrack((void *)self->data);
    self->jar = jar;
    Py_INCREF(jar);
    self->cache_size = cache_size;
    self->cache_size_bytes = cache_size_bytes;
    self->non_ghost_count = 0;
    self->total_estimated_size = 0;
    self->klass_count = 0;
    self->cache_drain_resistance = 0;
    self->ring_lock = 0;
    self->ring_home.r_next = &self->ring_home;
    self->ring_home.r_prev = &self->ring_home;
    return 0;
}

static void
cc_dealloc(ccobject *self)
{
    Py_XDECREF(self->data);
    Py_XDECREF(self->jar);
    PyObject_GC_Del(self);
}

/* tp_clear (continues past this chunk). */
static int
cc_clear(ccobject *self)
{
    Py_ssize_t pos = 0;
    PyObject *k, *v;
    /* Clearing the cache is delicate.

    A non-ghost object will show up in the ring and in the dict.  If
    we deallocate the dict before clearing the ring, the GC will
    decref each object in the dict.  Since the dict references are
    uncounted, this will lead to objects having negative refcounts.

    Freeing the non-ghost objects should eliminate many objects from
    the cache, but there may still be ghost objects left.  It's
    not safe to decref the dict until it's empty, so we need to manually
    clear those out of the dict, too.  We accomplish that by replacing
    all the ghost objects with None.
    */

    /* We don't need to lock the ring, because the cache is unreachable.
       It should be impossible for anyone to be modifying the cache.
    */
    assert(!
self->ring_lock);

    /* Phase 1: empty the ring.  Every object in the ring is non-ghost,
       so the dict holds an uncounted reference to it; take a real
       reference before deleting its dict entry. */
    while (self->ring_home.r_next != &self->ring_home)
    {
        CPersistentRing *here = self->ring_home.r_next;
        cPersistentObject *o = OBJECT_FROM_RING(self, here);

        if (o->cache)
        {
            Py_INCREF(o); /* account for uncounted reference */
            if (PyDict_DelItem(self->data, o->oid) < 0)
                return -1;
        }
        o->cache = NULL;
        Py_DECREF(self);
        self->ring_home.r_next = here->r_next;
        o->ring.r_prev = NULL;
        o->ring.r_next = NULL;
        Py_DECREF(o);
        here = here->r_next;
    }
    Py_XDECREF(self->jar);

    /* Phase 2: only ghosts remain in the dict (uncounted references).
       Replace each value with None; the INCREF balances the DECREF
       PyDict_SetItem performs on the displaced, uncounted value. */
    while (PyDict_Next(self->data, &pos, &k, &v))
    {
        Py_INCREF(v);
        if (PyDict_SetItem(self->data, k, Py_None) < 0)
            return -1;
    }

    Py_XDECREF(self->data);
    self->data = NULL;
    self->jar = NULL;
    return 0;
}

/* tp_traverse: visit the jar and the non-ghost objects in the ring. */
static int
cc_traverse(ccobject *self, visitproc visit, void *arg)
{
    int err;
    CPersistentRing *here;

    /* If we're in the midst of cleaning up old objects, the ring contains
     * assorted junk we must not pass on to the visit() callback.  This
     * should be rare (our cleanup code would need to have called back
     * into Python, which in turn triggered Python's gc).  When it happens,
     * simply don't chase any pointers.  The cache will appear to be a
     * source of external references then, and at worst we miss cleaning
     * up a dead cycle until the next time Python's gc runs.
     */
    if (self->ring_lock)
        return 0;

#define VISIT(SLOT) \
    if (SLOT) { \
        err = visit((PyObject *)(SLOT), arg); \
        if (err) \
            return err; \
    }

    VISIT(self->jar);

    here = self->ring_home.r_next;

    /* It is possible that an object is traversed after it is cleared.
       In that case, there is no ring.
    */
    if (!here)
        return 0;

    while (here != &self->ring_home)
    {
        cPersistentObject *o = OBJECT_FROM_RING(self, here);
        VISIT(o);
        here = here->r_next;
    }
#undef VISIT

    return 0;
}

/* mp_length: number of cached oids (ghosts and non-ghosts alike). */
static Py_ssize_t
cc_length(ccobject *self)
{
    return PyObject_Length(self->data);
}

/* mp_subscript: cache[oid] -> object; KeyError if absent. */
static PyObject *
cc_subscript(ccobject *self, PyObject *key)
{
    PyObject *r;

    r = PyDict_GetItem(self->data, key);
    if (r == NULL)
    {
        PyErr_SetObject(PyExc_KeyError, key);
        return NULL;
    }
    Py_INCREF(r);

    return r;
}

/* __setitem__ helper: register v in the cache under key, which must
   equal v._p_oid.  Non-ghosts are also linked into the LRU ring. */
static int
cc_add_item(ccobject *self, PyObject *key, PyObject *v)
{
    int result;
    PyObject *oid, *object_again, *jar;
    cPersistentObject *p;

    /* Sanity check the value given to make sure it is allowed in the cache */
    if (PyType_Check(v))
    {
        /* Its a persistent class, such as a ZClass. Thats ok. */
    }
    else if (v->ob_type->tp_basicsize < sizeof(cPersistentObject))
    {
        /* If it's not an instance of a persistent class, (ie Python
           classes that derive from persistent.Persistent, BTrees,
           etc), report an error.

           TODO:  checking sizeof() seems a poor test.
        */
        PyErr_SetString(PyExc_TypeError,
                        "Cache values must be persistent objects.");
        return -1;
    }

    /* Can't access v->oid directly because the object might be a
     * persistent class.
     */
    oid = PyObject_GetAttr(v, py__p_oid);
    if (oid == NULL)
        return -1;
    if (! PyBytes_Check(oid))
    {
        /* NOTE(review): 'oid' is DECREF'ed before oid->ob_type is read
           by PyErr_Format below; if that DECREF dropped the last
           reference this touches freed memory -- confirm and consider
           moving the DECREF after the format call. */
        Py_DECREF(oid);
        PyErr_Format(PyExc_TypeError,
                     "Cached object oid must be bytes, not a %s",
                     oid->ob_type->tp_name);
        return -1;
    }

    /* we know they are both strings.
     * now check if they are the same string.
     */
    result = PyObject_RichCompareBool(key, oid, Py_NE);
    Py_DECREF(oid);
    if (result < 0)
    {
        return -1;
    }
    if (result)
    {
        PyErr_SetString(PyExc_ValueError, "Cache key does not match oid");
        return -1;
    }

    /* useful sanity check, but not strictly an invariant of this class */
    jar = PyObject_GetAttr(v, py__p_jar);
    if (jar == NULL)
        return -1;
    if (jar==Py_None)
    {
        Py_DECREF(jar);
        PyErr_SetString(PyExc_ValueError, "Cached object jar missing");
        return -1;
    }
    Py_DECREF(jar);

    object_again = PyDict_GetItem(self->data, key);
    if (object_again)
    {
        if (object_again != v)
        {
            PyErr_SetString(PyExc_ValueError,
                            "A different object already has the same oid");
            return -1;
        }
        else
        {
            /* re-register under the same oid - no work needed */
            return 0;
        }
    }

    if (PyType_Check(v))
    {
        /* Persistent classes are counted but never enter the ring. */
        if (PyDict_SetItem(self->data, key, v) < 0)
            return -1;
        PyObject_GC_UnTrack((void *)self->data);
        self->klass_count++;
        return 0;
    }
    else
    {
        PerCache *cache = ((cPersistentObject *)v)->cache;
        if (cache)
        {
            if (cache != (PerCache *)self)
                /* This object is already in a different cache. */
                PyErr_SetString(PyExc_ValueError,
                                "Cache values may only be in one cache.");
            /* NOTE(review): when cache == self (same object, different
               key) this returns -1 WITHOUT setting an exception --
               confirm whether that is intended. */
            return -1;
        }
        /* else:

           This object is already one of ours, which is ok.  It
           would be very strange if someone was trying to register
           the same object under a different key.
        */
    }

    if (PyDict_SetItem(self->data, key, v) < 0)
        return -1;
    /* the dict should have a borrowed reference */
    PyObject_GC_UnTrack((void *)self->data);
    Py_DECREF(v);

    p = (cPersistentObject *)v;
    Py_INCREF(self);
    p->cache = (PerCache *)self;
    if (p->state >= 0)
    {
        /* insert this non-ghost object into the ring just
           behind the home position.
        */
        self->non_ghost_count++;
        ring_add(&self->ring_home, &p->ring);
        /* this list should have a new reference to the object */
        Py_INCREF(v);
    }
    return 0;
}

/* __delitem__ helper: drop the object stored under key, unlinking it
   from the ring first when it is a non-ghost. */
static int
cc_del_item(ccobject *self, PyObject *key)
{
    PyObject *v;
    cPersistentObject *p;

    /* unlink this item from the ring */
    v = PyDict_GetItem(self->data, key);
    if (v == NULL)
    {
        PyErr_SetObject(PyExc_KeyError, key);
        return -1;
    }

    if (PyType_Check(v))
    {
        self->klass_count--;
    }
    else
    {
        p = (cPersistentObject *)v;
        if (p->state >= 0)
        {
            self->non_ghost_count--;
            ring_del(&p->ring);
            /* The DelItem below will account for the reference
               held by the list.
            */
        }
        else
        {
            /* This is a ghost object, so we haven't kept a reference
               count on it.  For it to have stayed alive this long
               someone else must be keeping a reference to
               it.  Therefore we need to temporarily give it back a
               reference count before calling DelItem below.
            */
            Py_INCREF(v);
        }

        Py_DECREF((PyObject *)p->cache);
        p->cache = NULL;
    }

    if (PyDict_DelItem(self->data, key) < 0)
    {
        PyErr_SetString(PyExc_RuntimeError,
                        "unexpectedly couldn't remove key in cc_ass_sub");
        return -1;
    }

    return 0;
}

/* mp_ass_subscript: cache[key] = v adds, del cache[key] removes.
   Keys must be bytes oids. */
static int
cc_ass_sub(ccobject *self, PyObject *key, PyObject *v)
{
    if (!PyBytes_Check(key))
    {
        PyErr_Format(PyExc_TypeError,
                     "cPickleCache key must be bytes, not a %s",
                     key->ob_type->tp_name);
        return -1;
    }
    if (v)
        return cc_add_item(self, key, v);
    else
        return cc_del_item(self, key);
}

static PyMappingMethods cc_as_mapping =
{
    (lenfunc)cc_length,                 /* mp_length */
    (binaryfunc)cc_subscript,           /* mp_subscript */
    (objobjargproc)cc_ass_sub,          /* mp_ass_subscript */
};

/* 'cache_data' property: a snapshot copy of the oid -> object dict. */
static PyObject *
cc_cache_data(ccobject *self, void *context)
{
    return PyDict_Copy(self->data);
}

static PyGetSetDef cc_getsets[] =
{
    {"cache_data", (getter)cc_cache_data},
    {NULL}
};

static PyMemberDef cc_members[] =
{
    {"cache_size", T_INT, offsetof(ccobject, cache_size)},
    {"cache_size_bytes", T_PYSSIZET, offsetof(ccobject, cache_size_bytes)},
    {"total_estimated_size", T_PYSSIZET,
     offsetof(ccobject, total_estimated_size), READONLY},
{"cache_drain_resistance", T_INT, offsetof(ccobject, cache_drain_resistance)}, {"cache_non_ghost_count", T_INT, offsetof(ccobject, non_ghost_count), READONLY}, {"cache_klass_count", T_INT, offsetof(ccobject, klass_count), READONLY}, {NULL} }; /* This module is compiled as a shared library. Some compilers don't allow addresses of Python objects defined in other libraries to be used in static initializers here. The DEFERRED_ADDRESS macro is used to tag the slots where such addresses appear; the module init function must fill in the tagged slots at runtime. The argument is for documentation -- the macro ignores it. */ #define DEFERRED_ADDRESS(ADDR) 0 static PyTypeObject Cctype = { PyVarObject_HEAD_INIT(DEFERRED_ADDRESS(&PyType_Type), 0) "persistent.PickleCache", /* tp_name */ sizeof(ccobject), /* tp_basicsize */ 0, /* tp_itemsize */ (destructor)cc_dealloc, /* tp_dealloc */ 0, /* tp_print */ 0, /* tp_getattr */ 0, /* tp_setattr */ 0, /* tp_compare */ 0, /* tp_repr */ 0, /* tp_as_number */ 0, /* tp_as_sequence */ &cc_as_mapping, /* tp_as_mapping */ 0, /* tp_hash */ 0, /* tp_call */ 0, /* tp_str */ 0, /* tp_getattro */ 0, /* tp_setattro */ 0, /* tp_as_buffer */ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_GC, /* tp_flags */ 0, /* tp_doc */ (traverseproc)cc_traverse, /* tp_traverse */ (inquiry)cc_clear, /* tp_clear */ 0, /* tp_richcompare */ 0, /* tp_weaklistoffset */ 0, /* tp_iter */ 0, /* tp_iternext */ cc_methods, /* tp_methods */ cc_members, /* tp_members */ cc_getsets, /* tp_getset */ 0, /* tp_base */ 0, /* tp_dict */ 0, /* tp_descr_get */ 0, /* tp_descr_set */ 0, /* tp_dictoffset */ (initproc)cc_init, /* tp_init */ }; #ifdef PY3K static struct PyModuleDef moduledef = { PyModuleDef_HEAD_INIT, "cPickleCache", /* m_name */ cPickleCache_doc_string, /* m_doc */ -1, /* m_size */ NULL, /* m_methods */ NULL, /* m_reload */ NULL, /* m_traverse */ NULL, /* m_clear */ NULL, /* m_free */ }; #endif static PyObject* module_init(void) { PyObject *module; #ifdef PY3K 
((PyObject*)&Cctype)->ob_type = &PyType_Type; #else Cctype.ob_type = &PyType_Type; #endif Cctype.tp_new = &PyType_GenericNew; if (PyType_Ready(&Cctype) < 0) { return NULL; } #ifdef PY3K module = PyModule_Create(&moduledef); #else module = Py_InitModule3("cPickleCache", NULL, cPickleCache_doc_string); #endif #ifdef PY3K capi = (cPersistenceCAPIstruct *)PyCapsule_Import(CAPI_CAPSULE_NAME, 0); #else capi = (cPersistenceCAPIstruct *)PyCObject_Import( "persistent.cPersistence", "CAPI"); #endif if (!capi) return NULL; capi->percachedel = (percachedelfunc)cc_oid_unreferenced; py__p_changed = INTERN("_p_changed"); if (!py__p_changed) return NULL; py__p_deactivate = INTERN("_p_deactivate"); if (!py__p_deactivate) return NULL; py__p_jar = INTERN("_p_jar"); if (!py__p_jar) return NULL; py__p_oid = INTERN("_p_oid"); if (!py__p_oid) return NULL; if (PyModule_AddStringConstant(module, "cache_variant", "stiff/c") < 0) return NULL; /* This leaks a reference to Cctype, but it doesn't matter. */ if (PyModule_AddObject(module, "PickleCache", (PyObject *)&Cctype) < 0) return NULL; return module; } #ifdef PY3K PyMODINIT_FUNC PyInit_cPickleCache(void) { return module_init(); } #else PyMODINIT_FUNC initcPickleCache(void) { module_init(); } #endif persistent-4.2.2/persistent/dict.py0000644000076600000240000000135712577543740017343 0ustar jimstaff00000000000000############################################################################## # # Copyright Zope Foundation and Contributors. # All Rights Reserved. # # This software is subject to the provisions of the Zope Public License, # Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. # THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED # WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS # FOR A PARTICULAR PURPOSE. 
# ############################################################################## # persistent.dict is deprecated. Use persistent.mapping from persistent.mapping import PersistentMapping as PersistentDict persistent-4.2.2/persistent/interfaces.py0000644000076600000240000004254012577543740020542 0ustar jimstaff00000000000000############################################################################## # # Copyright (c) 2001, 2002 Zope Foundation and Contributors. # All Rights Reserved. # # This software is subject to the provisions of the Zope Public License, # Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. # THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED # WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS # FOR A PARTICULAR PURPOSE. # ############################################################################## """Persistence Interfaces """ from zope.interface import Interface from zope.interface import Attribute # Allowed values for _p_state try: from persistent.cPersistence import GHOST from persistent.cPersistence import UPTODATE from persistent.cPersistence import CHANGED from persistent.cPersistence import STICKY except ImportError: #pragma NO COVER GHOST = -1 UPTODATE = 0 CHANGED = 1 STICKY = 2 OID_TYPE = SERIAL_TYPE = bytes class IPersistent(Interface): """Python persistent interface A persistent object can be in one of several states: - Unsaved The object has been created but not saved in a data manager. In this state, the _p_changed attribute is non-None and false and the _p_jar attribute is None. - Saved The object has been saved and has not been changed since it was saved. In this state, the _p_changed attribute is non-None and false and the _p_jar attribute is set to a data manager. - Sticky This state is identical to the saved state except that the object cannot transition to the ghost state. 
This is a special state used by C methods of persistent objects to make sure that state is not unloaded in the middle of computation. In this state, the _p_changed attribute is non-None and false and the _p_jar attribute is set to a data manager. There is no Python API for detecting whether an object is in the sticky state. - Changed The object has been changed. In this state, the _p_changed attribute is true and the _p_jar attribute is set to a data manager. - Ghost the object is in memory but its state has not been loaded from the database (or its state has been unloaded). In this state, the object doesn't contain any application data. In this state, the _p_changed attribute is None, and the _p_jar attribute is set to the data manager from which the object was obtained. In all the above, _p_oid (the persistent object id) is set when _p_jar first gets set. The following state transitions are possible: - Unsaved -> Saved This transition occurs when an object is saved in the database. This usually happens when an unsaved object is added to (e.g. as an attribute or item of) a saved (or changed) object and the transaction is committed. - Saved -> Changed Sticky -> Changed Ghost -> Changed This transition occurs when someone sets an attribute or sets _p_changed to a true value on a saved, sticky or ghost object. When the transition occurs, the persistent object is required to call the register() method on its data manager, passing itself as the only argument. Prior to ZODB 3.6, setting _p_changed to a true value on a ghost object was ignored (the object remained a ghost, and getting its _p_changed attribute continued to return None). - Saved -> Sticky This transition occurs when C code marks the object as sticky to prevent its deactivation. - Saved -> Ghost This transition occurs when a saved object is deactivated or invalidated. See discussion below. - Sticky -> Saved This transition occurs when C code unmarks the object as sticky to allow its deactivation. 
- Changed -> Saved This transition occurs when a transaction is committed. After saving the state of a changed object during transaction commit, the data manager sets the object's _p_changed to a non-None false value. - Changed -> Ghost This transition occurs when a transaction is aborted. All changed objects are invalidated by the data manager by an abort. - Ghost -> Saved This transition occurs when an attribute or operation of a ghost is accessed and the object's state is loaded from the database. Note that there is a separate C API that is not included here. The C API requires a specific data layout and defines the sticky state. About Invalidation, Deactivation and the Sticky & Ghost States The sticky state is intended to be a short-lived state, to prevent an object's state from being discarded while we're in C routines. It is an error to invalidate an object in the sticky state. Deactivation is a request that an object discard its state (become a ghost). Deactivation is an optimization, and a request to deactivate may be ignored. There are two equivalent ways to request deactivation: - call _p_deactivate() - set _p_changed to None There are two ways to invalidate an object: call the _p_invalidate() method (preferred) or delete its _p_changed attribute. This cannot be ignored, and is used when semantics require invalidation. Normally, an invalidated object transitions to the ghost state. However, some objects cannot be ghosts. When these objects are invalidated, they immediately reload their state from their data manager, and are then in the saved state. """ _p_jar = Attribute( """The data manager for the object. The data manager should implement IPersistentDataManager (note that this constraint is not enforced). If there is no data manager, then this is None. Once assigned to a data manager, an object cannot be re-assigned to another. """) _p_oid = Attribute( """The object id. It is up to the data manager to assign this. 
The special value None is reserved to indicate that an object id has not been assigned. Non-None object ids must be non-empty strings. The 8-byte string consisting of 8 NUL bytes ('\x00\x00\x00\x00\x00\x00\x00\x00') is reserved to identify the database root object. Once assigned an OID, an object cannot be re-assigned another. """) _p_changed = Attribute( """The persistent state of the object. This is one of: None -- The object is a ghost. false but not None -- The object is saved (or has never been saved). true -- The object has been modified since it was last saved. The object state may be changed by assigning or deleting this attribute; however, assigning None is ignored if the object is not in the saved state, and may be ignored even if the object is in the saved state. At and after ZODB 3.6, setting _p_changed to a true value for a ghost object activates the object; prior to 3.6, setting _p_changed to a true value on a ghost object was ignored. Note that an object can transition to the changed state only if it has a data manager. When such a state change occurs, the 'register' method of the data manager must be called, passing the persistent object. Deleting this attribute forces invalidation independent of existing state, although it is an error if the sticky state is current. """) _p_serial = Attribute( """The object serial number. This member is used by the data manager to distiguish distinct revisions of a given persistent object. This is an 8-byte string (not Unicode). """) _p_mtime = Attribute( """The object's modification time (read-only). This is a float, representing seconds since the epoch (as returned by time.time). """) _p_state = Attribute( """The object's persistence state token. Must be one of GHOST, UPTODATE, CHANGED, or STICKY. """) _p_estimated_size = Attribute( """An estimate of the object's size in bytes. May be set by the data manager. 
""") # Attribute access protocol def __getattribute__(name): """ Handle activating ghosts before returning an attribute value. "Special" attributes and '_p_*' attributes don't require activation. """ def __setattr__(name, value): """ Handle activating ghosts before setting an attribute value. "Special" attributes and '_p_*' attributes don't require activation. """ def __delattr__(name): """ Handle activating ghosts before deleting an attribute value. "Special" attributes and '_p_*' attributes don't require activation. """ # Pickling protocol. def __getstate__(): """Get the object data. The state should not include persistent attributes ("_p_name"). The result must be picklable. """ def __setstate__(state): """Set the object data. """ def __reduce__(): """Reduce an object to contituent parts for serialization. """ # Custom methods def _p_activate(): """Activate the object. Change the object to the saved state if it is a ghost. """ def _p_deactivate(): """Deactivate the object. Possibly change an object in the saved state to the ghost state. It may not be possible to make some persistent objects ghosts, and, for optimization reasons, the implementation may choose to keep an object in the saved state. """ def _p_invalidate(): """Invalidate the object. Invalidate the object. This causes any data to be thrown away, even if the object is in the changed state. The object is moved to the ghost state; further accesses will cause object data to be reloaded. """ def _p_getattr(name): """Test whether the base class must handle the name The method unghostifies the object, if necessary. The method records the object access, if necessary. This method should be called by subclass __getattribute__ implementations before doing anything else. If the method returns True, then __getattribute__ implementations must delegate to the base class, Persistent. 
""" def _p_setattr(name, value): """Save persistent meta data This method should be called by subclass __setattr__ implementations before doing anything else. If it returns true, then the attribute was handled by the base class. The method unghostifies the object, if necessary. The method records the object access, if necessary. """ def _p_delattr(name): """Delete persistent meta data This method should be called by subclass __delattr__ implementations before doing anything else. If it returns true, then the attribute was handled by the base class. The method unghostifies the object, if necessary. The method records the object access, if necessary. """ # TODO: document conflict resolution. class IPersistentDataManager(Interface): """Provide services for managing persistent state. This interface is used by a persistent object to interact with its data manager in the context of a transaction. """ _cache = Attribute("The pickle cache associated with this connection.") def setstate(object): """Load the state for the given object. The object should be in the ghost state. The object's state will be set and the object will end up in the saved state. The object must provide the IPersistent interface. """ def oldstate(obj, tid): """Return copy of 'obj' that was written by transaction 'tid'. The returned object does not have the typical metadata (_p_jar, _p_oid, _p_serial) set. I'm not sure how references to other peristent objects are handled. Parameters obj: a persistent object from this Connection. tid: id of a transaction that wrote an earlier revision. Raises KeyError if tid does not exist or if tid deleted a revision of obj. """ def register(object): """Register an IPersistent with the current transaction. This method must be called when the object transitions to the changed state. A subclass could override this method to customize the default policy of one transaction manager for each thread. 
""" # Maybe later: ## def mtime(object): ## """Return the modification time of the object. ## The modification time may not be known, in which case None ## is returned. If non-None, the return value is the kind of ## timestamp supplied by Python's time.time(). ## """ class IPickleCache(Interface): """ API of the cache for a ZODB connection. """ def __getitem__(oid): """ -> the persistent object for OID. o Raise KeyError if not found. """ def __setitem__(oid, value): """ Save the persistent object under OID. o 'oid' must be a string, else raise ValueError. o Raise KeyError on duplicate """ def __delitem__(oid): """ Remove the persistent object for OID. o 'oid' must be a string, else raise ValueError. o Raise KeyError if not found. """ def get(oid, default=None): """ -> the persistent object for OID. o Return 'default' if not found. """ def mru(oid): """ Move the element corresonding to 'oid' to the head. o Raise KeyError if no element is found. """ def __len__(): """ -> the number of OIDs in the cache. """ def items(): """-> a sequence of tuples (oid, value) for cached objects. o Only includes items in 'data' (no p-classes). """ def ringlen(): """ -> the number of persistent objects in the ring. o Only includes items in the ring (no ghosts or p-classes). """ def lru_items(): """ -> a sequence of tuples (oid, value) for cached objects. o Tuples will be in LRU order. o Only includes items in the ring (no ghosts or p-classes). """ def klass_items(): """-> a sequence of tuples (oid, value) for cached p-classes. o Only includes persistent classes. """ def incrgc(): """ Perform an incremental garbage collection sweep. o Reduce number of non-ghosts to 'cache_size', if possible. o Ghostify in LRU order. o Skip dirty or sticky objects. o Quit once we get down to 'cache_size'. """ def full_sweep(): """ Perform a full garbage collection sweep. o Reduce number of non-ghosts to 0, if possible. o Ghostify all non-sticky / non-changed objecs. 
""" def minimize(): """ Alias for 'full_sweep'. o XXX? """ def new_ghost(oid, obj): """ Add the given (ghost) object to the cache. Also, set its _p_jar and _p_oid, and ensure it is in the GHOST state. If the object doesn't define '_p_oid' / '_p_jar', raise. If the object's '_p_oid' is not None, raise. If the object's '_p_jar' is not None, raise. If 'oid' is already in the cache, raise. """ def reify(to_reify): """ Reify the indicated objects. o If 'to_reify' is a string, treat it as an OID. o Otherwise, iterate over it as a sequence of OIDs. o For each OID, if present in 'data' and in GHOST state: o Call '_p_activate' on the object. o Add it to the ring. o If any OID is present but not in GHOST state, skip it. o Raise KeyErrory if any OID is not present. """ def invalidate(to_invalidate): """ Invalidate the indicated objects. o If 'to_invalidate' is a string, treat it as an OID. o Otherwise, iterate over it as a sequence of OIDs. o Any OID corresponding to a p-class will cause the corresponding p-class to be removed from the cache. o For all other OIDs, ghostify the corrsponding object and remove it from the ring. """ def debug_info(): """Return debugging data about objects in the cache. o Return a sequence of tuples, (oid, refcount, typename, state). """ def update_object_size_estimation(oid, new_size): """Update the cache's size estimation for 'oid', if known to the cache. 
""" cache_size = Attribute('Target size of the cache') cache_drain_resistance = Attribute('Factor for draining cache below ' 'target size') cache_non_ghost_count = Attribute('Number of non-ghosts in the cache ' '(XXX how is it different from ' 'ringlen?') cache_data = Attribute("Property: copy of our 'data' dict") cache_klass_count = Attribute("Property: len of 'persistent_classes'") persistent-4.2.2/persistent/list.py0000644000076600000240000000567012577543740017375 0ustar jimstaff00000000000000############################################################################## # # Copyright (c) 2001, 2002 Zope Foundation and Contributors. # All Rights Reserved. # # This software is subject to the provisions of the Zope Public License, # Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. # THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED # WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS # FOR A PARTICULAR PURPOSE # ############################################################################## """Python implementation of persistent list. 
$Id$""" import persistent from persistent._compat import UserList from persistent._compat import PYTHON2 class PersistentList(UserList, persistent.Persistent): __super_setitem = UserList.__setitem__ __super_delitem = UserList.__delitem__ if PYTHON2: # pragma: no cover __super_setslice = UserList.__setslice__ __super_delslice = UserList.__delslice__ __super_iadd = UserList.__iadd__ __super_imul = UserList.__imul__ __super_append = UserList.append __super_insert = UserList.insert __super_pop = UserList.pop __super_remove = UserList.remove __super_reverse = UserList.reverse __super_sort = UserList.sort __super_extend = UserList.extend def __setitem__(self, i, item): self.__super_setitem(i, item) self._p_changed = 1 def __delitem__(self, i): self.__super_delitem(i) self._p_changed = 1 def __setslice__(self, i, j, other): self.__super_setslice(i, j, other) self._p_changed = 1 def __delslice__(self, i, j): self.__super_delslice(i, j) self._p_changed = 1 def __iadd__(self, other): L = self.__super_iadd(other) self._p_changed = 1 return L def __imul__(self, n): L = self.__super_imul(n) self._p_changed = 1 return L def append(self, item): self.__super_append(item) self._p_changed = 1 def insert(self, i, item): self.__super_insert(i, item) self._p_changed = 1 def pop(self, i=-1): rtn = self.__super_pop(i) self._p_changed = 1 return rtn def remove(self, item): self.__super_remove(item) self._p_changed = 1 def reverse(self): self.__super_reverse() self._p_changed = 1 def sort(self, *args, **kwargs): self.__super_sort(*args, **kwargs) self._p_changed = 1 def extend(self, other): self.__super_extend(other) self._p_changed = 1 # This works around a bug in Python 2.1.x (up to 2.1.2 at least) where the # __cmp__ bogusly raises a RuntimeError, and because this is an extension # class, none of the rich comparison stuff works anyway. 
def __cmp__(self, other): return cmp(self.data, self._UserList__cast(other)) persistent-4.2.2/persistent/mapping.py0000644000076600000240000000657012577543740020055 0ustar jimstaff00000000000000############################################################################## # # Copyright (c) 2001, 2002 Zope Foundation and Contributors. # All Rights Reserved. # # This software is subject to the provisions of the Zope Public License, # Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. # THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED # WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS # FOR A PARTICULAR PURPOSE # ############################################################################## """Python implementation of persistent base types $Id$""" import persistent from persistent._compat import IterableUserDict class default(object): def __init__(self, func): self.func = func def __get__(self, inst, class_): if inst is None: return self return self.func(inst) class PersistentMapping(IterableUserDict, persistent.Persistent): """A persistent wrapper for mapping objects. This class allows wrapping of mapping objects so that object changes are registered. As a side effect, mapping objects may be subclassed. A subclass of PersistentMapping or any code that adds new attributes should not create an attribute named _container. This is reserved for backwards compatibility reasons. """ # UserDict provides all of the mapping behavior. The # PersistentMapping class is responsible marking the persistent # state as changed when a method actually changes the state. At # the mapping API evolves, we may need to add more methods here. 
__super_delitem = IterableUserDict.__delitem__ __super_setitem = IterableUserDict.__setitem__ __super_clear = IterableUserDict.clear __super_update = IterableUserDict.update __super_setdefault = IterableUserDict.setdefault __super_pop = IterableUserDict.pop __super_popitem = IterableUserDict.popitem def __delitem__(self, key): self.__super_delitem(key) self._p_changed = 1 def __setitem__(self, key, v): self.__super_setitem(key, v) self._p_changed = 1 def clear(self): self.__super_clear() self._p_changed = 1 def update(self, b): self.__super_update(b) self._p_changed = 1 def setdefault(self, key, failobj=None): # We could inline all of UserDict's implementation into the # method here, but I'd rather not depend at all on the # implementation in UserDict (simple as it is). if not key in self.data: self._p_changed = 1 return self.__super_setdefault(key, failobj) def pop(self, key, *args): self._p_changed = 1 return self.__super_pop(key, *args) def popitem(self): self._p_changed = 1 return self.__super_popitem() # Old implementations used _container rather than data. # Use a descriptor to provide data when we have _container instead @default def data(self): # We don't want to cause a write on read, so wer're careful not to # do anything that would cause us to become marked as changed, however, # if we're modified, then the saved record will have data, not # _container. data = self.__dict__.pop('_container') self.__dict__['data'] = data return data persistent-4.2.2/persistent/persistence.py0000644000076600000240000004670113017350405020726 0ustar jimstaff00000000000000############################################################################## # # Copyright (c) 2011 Zope Foundation and Contributors. # All Rights Reserved. # # This software is subject to the provisions of the Zope Public License, # Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED # WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS # FOR A PARTICULAR PURPOSE. # ############################################################################## import sys from zope.interface import implementer from persistent.interfaces import IPersistent from persistent.interfaces import GHOST from persistent.interfaces import UPTODATE from persistent.interfaces import CHANGED from persistent.interfaces import STICKY from persistent.interfaces import OID_TYPE from persistent.interfaces import SERIAL_TYPE from persistent.timestamp import TimeStamp from persistent.timestamp import _ZERO from persistent._compat import copy_reg from persistent._compat import intern from . import ring _INITIAL_SERIAL = _ZERO # Bitwise flags _CHANGED = 0x0001 _STICKY = 0x0002 _OGA = object.__getattribute__ _OSA = object.__setattr__ # These names can be used from a ghost without causing it to be # activated. These are standardized with the C implementation SPECIAL_NAMES = ('__class__', '__del__', '__dict__', '__of__', '__setstate__',) # And this is an implementation detail of this class; it holds # the standard names plus the slot names, allowing for just one # check in __getattribute__ _SPECIAL_NAMES = set(SPECIAL_NAMES) @implementer(IPersistent) class Persistent(object): """ Pure Python implmentation of Persistent base class """ __slots__ = ('__jar', '__oid', '__serial', '__flags', '__size', '__ring',) def __new__(cls, *args, **kw): inst = super(Persistent, cls).__new__(cls) # We bypass the __setattr__ implementation of this object # at __new__ time, just like the C implementation does. 
This # makes us compatible with subclasses that want to access # properties like _p_changed in their setattr implementation _OSA(inst, '_Persistent__jar', None) _OSA(inst, '_Persistent__oid', None) _OSA(inst, '_Persistent__serial', None) _OSA(inst, '_Persistent__flags', None) _OSA(inst, '_Persistent__size', 0) _OSA(inst, '_Persistent__ring', None) return inst # _p_jar: see IPersistent. def _get_jar(self): return _OGA(self, '_Persistent__jar') def _set_jar(self, value): jar = _OGA(self, '_Persistent__jar') if self._p_is_in_cache(jar) and value is not None and jar != value: # The C implementation only forbids changing the jar # if we're already in a cache. Match its error message raise ValueError('can not change _p_jar of cached object') if _OGA(self, '_Persistent__jar') != value: _OSA(self, '_Persistent__jar', value) _OSA(self, '_Persistent__flags', 0) def _del_jar(self): jar = _OGA(self, '_Persistent__jar') if jar is not None: if self._p_is_in_cache(jar): raise ValueError("can't delete _p_jar of cached object") _OSA(self, '_Persistent__jar', None) _OSA(self, '_Persistent__flags', None) _p_jar = property(_get_jar, _set_jar, _del_jar) # _p_oid: see IPersistent. def _get_oid(self): return _OGA(self, '_Persistent__oid') def _set_oid(self, value): if value == _OGA(self, '_Persistent__oid'): return # The C implementation allows *any* value to be # used as the _p_oid. 
#if value is not None: # if not isinstance(value, OID_TYPE): # raise ValueError('Invalid OID type: %s' % value) # The C implementation only forbids changing the OID # if we're in a cache, regardless of what the current # value or jar is if self._p_is_in_cache(): # match the C error message raise ValueError('can not change _p_oid of cached object') _OSA(self, '_Persistent__oid', value) def _del_oid(self): jar = _OGA(self, '_Persistent__jar') oid = _OGA(self, '_Persistent__oid') if jar is not None: if oid and jar._cache.get(oid): raise ValueError('Cannot delete _p_oid of cached object') _OSA(self, '_Persistent__oid', None) _p_oid = property(_get_oid, _set_oid, _del_oid) # _p_serial: see IPersistent. def _get_serial(self): serial = _OGA(self, '_Persistent__serial') if serial is not None: return serial return _INITIAL_SERIAL def _set_serial(self, value): if not isinstance(value, SERIAL_TYPE): raise ValueError('Invalid SERIAL type: %s' % value) if len(value) != 8: raise ValueError('SERIAL must be 8 octets') _OSA(self, '_Persistent__serial', value) def _del_serial(self): _OSA(self, '_Persistent__serial', None) _p_serial = property(_get_serial, _set_serial, _del_serial) # _p_changed: see IPersistent. def _get_changed(self): if _OGA(self, '_Persistent__jar') is None: return False flags = _OGA(self, '_Persistent__flags') if flags is None: # ghost return None return bool(flags & _CHANGED) def _set_changed(self, value): if _OGA(self, '_Persistent__flags') is None: if value: self._p_activate() self._p_set_changed_flag(value) else: if value is None: # -> ghost self._p_deactivate() else: self._p_set_changed_flag(value) def _del_changed(self): self._p_invalidate() _p_changed = property(_get_changed, _set_changed, _del_changed) # _p_mtime def _get_mtime(self): # The C implementation automatically unghostifies the object # when _p_mtime is accessed. 
self._p_activate() self._p_accessed() serial = _OGA(self, '_Persistent__serial') if serial is not None: ts = TimeStamp(serial) return ts.timeTime() _p_mtime = property(_get_mtime) # _p_state def _get_state(self): # Note the use of OGA and caching to avoid recursive calls to __getattribute__: # __getattribute__ calls _p_accessed calls cache.mru() calls _p_state if _OGA(self, '_Persistent__jar') is None: return UPTODATE flags = _OGA(self, '_Persistent__flags') if flags is None: return GHOST if flags & _CHANGED: result = CHANGED else: result = UPTODATE if flags & _STICKY: return STICKY return result _p_state = property(_get_state) # _p_estimated_size: XXX don't want to reserve the space? def _get_estimated_size(self): return _OGA(self, '_Persistent__size') * 64 def _set_estimated_size(self, value): if isinstance(value, int): if value < 0: raise ValueError('_p_estimated_size must not be negative') _OSA(self, '_Persistent__size', _estimated_size_in_24_bits(value)) else: raise TypeError("_p_estimated_size must be an integer") def _del_estimated_size(self): _OSA(self, '_Persistent__size', 0) _p_estimated_size = property( _get_estimated_size, _set_estimated_size, _del_estimated_size) # The '_p_sticky' property is not (yet) part of the API: for now, # it exists to simplify debugging and testing assertions. def _get_sticky(self): flags = _OGA(self, '_Persistent__flags') if flags is None: return False return bool(flags & _STICKY) def _set_sticky(self, value): flags = _OGA(self, '_Persistent__flags') if flags is None: raise ValueError('Ghost') if value: flags |= _STICKY else: flags &= ~_STICKY _OSA(self, '_Persistent__flags', flags) _p_sticky = property(_get_sticky, _set_sticky) # The '_p_status' property is not (yet) part of the API: for now, # it exists to simplify debugging and testing assertions. 
def _get_status(self): if _OGA(self, '_Persistent__jar') is None: return 'unsaved' flags = _OGA(self, '_Persistent__flags') if flags is None: return 'ghost' if flags & _STICKY: return 'sticky' if flags & _CHANGED: return 'changed' return 'saved' _p_status = property(_get_status) # Methods from IPersistent. def __getattribute__(self, name): """ See IPersistent. """ oga = _OGA if (not name.startswith('_p_') and name not in _SPECIAL_NAMES): if oga(self, '_Persistent__flags') is None: oga(self, '_p_activate')() oga(self, '_p_accessed')() return oga(self, name) def __setattr__(self, name, value): special_name = (name in _SPECIAL_NAMES or name.startswith('_p_')) volatile = name.startswith('_v_') if not special_name: if _OGA(self, '_Persistent__flags') is None: _OGA(self, '_p_activate')() if not volatile: _OGA(self, '_p_accessed')() _OSA(self, name, value) if (_OGA(self, '_Persistent__jar') is not None and _OGA(self, '_Persistent__oid') is not None and not special_name and not volatile): before = _OGA(self, '_Persistent__flags') after = before | _CHANGED if before != after: _OSA(self, '_Persistent__flags', after) _OGA(self, '_p_register')() def __delattr__(self, name): special_name = (name in _SPECIAL_NAMES or name.startswith('_p_')) if not special_name: if _OGA(self, '_Persistent__flags') is None: _OGA(self, '_p_activate')() _OGA(self, '_p_accessed')() before = _OGA(self, '_Persistent__flags') after = before | _CHANGED if before != after: _OSA(self, '_Persistent__flags', after) if (_OGA(self, '_Persistent__jar') is not None and _OGA(self, '_Persistent__oid') is not None): _OGA(self, '_p_register')() object.__delattr__(self, name) def _slotnames(self, _v_exclude=True): slotnames = copy_reg._slotnames(type(self)) return [x for x in slotnames if not x.startswith('_p_') and not (x.startswith('_v_') and _v_exclude) and not x.startswith('_Persistent__') and x not in Persistent.__slots__] def __getstate__(self): """ See IPersistent. 
""" idict = getattr(self, '__dict__', None) slotnames = self._slotnames() if idict is not None: d = dict([x for x in idict.items() if not x[0].startswith('_p_') and not x[0].startswith('_v_')]) else: d = None if slotnames: s = {} for slotname in slotnames: value = getattr(self, slotname, self) if value is not self: s[slotname] = value return d, s return d def __setstate__(self, state): """ See IPersistent. """ if isinstance(state,tuple): inst_dict, slots = state else: inst_dict, slots = state, () idict = getattr(self, '__dict__', None) if inst_dict is not None: if idict is None: raise TypeError('No instance dict') idict.clear() for k, v in inst_dict.items(): # Normally the keys for instance attributes are interned. # Do that here, but only if it is possible to do so. idict[intern(k) if type(k) is str else k] = v slotnames = self._slotnames() if slotnames: for k, v in slots.items(): setattr(self, k, v) def __reduce__(self): """ See IPersistent. """ gna = getattr(self, '__getnewargs__', lambda: ()) return (copy_reg.__newobj__, (type(self),) + gna(), self.__getstate__()) def _p_activate(self): """ See IPersistent. """ oga = _OGA before = oga(self, '_Persistent__flags') if before is None: # Only do this if we're a ghost # Begin by marking up-to-date in case we bail early _OSA(self, '_Persistent__flags', 0) jar = oga(self, '_Persistent__jar') if jar is None: return oid = oga(self, '_Persistent__oid') if oid is None: return # If we're actually going to execute a set-state, # mark as changed to prevent any recursive call # (actually, our earlier check that we're a ghost should # prevent this, but the C implementation sets it to changed # while calling jar.setstate, and this is observable to clients). # The main point of this is to prevent changes made during # setstate from registering the object with the jar. 
_OSA(self, '_Persistent__flags', CHANGED) try: jar.setstate(self) except: _OSA(self, '_Persistent__flags', before) raise else: # If we succeed, no matter what the implementation # of setstate did, mark ourself as up-to-date. The # C implementation unconditionally does this. _OSA(self, '_Persistent__flags', 0) # up-to-date # In the C implementation, _p_invalidate winds up calling # _p_deactivate. There are ZODB tests that depend on this; # it's not documented but there may be code in the wild # that does as well def _p_deactivate(self): """ See IPersistent. """ flags = _OGA(self, '_Persistent__flags') if flags is not None and not flags: self._p_invalidate_deactivate_helper() def _p_invalidate(self): """ See IPersistent. """ # If we think we have changes, we must pretend # like we don't so that deactivate does its job _OSA(self, '_Persistent__flags', 0) self._p_deactivate() def _p_invalidate_deactivate_helper(self): jar = _OGA(self, '_Persistent__jar') if jar is None: return if _OGA(self, '_Persistent__flags') is not None: _OSA(self, '_Persistent__flags', None) idict = getattr(self, '__dict__', None) if idict is not None: idict.clear() type_ = type(self) # ( for backward-compatibility reason we release __slots__ only if # class does not override __new__ ) if type_.__new__ is Persistent.__new__: for slotname in Persistent._slotnames(self, _v_exclude=False): try: getattr(type_, slotname).__delete__(self) except AttributeError: # AttributeError means slot variable was not initialized at all - # - we can simply skip its deletion. 
pass # Implementation detail: deactivating/invalidating # updates the size of the cache (if we have one) # by telling it this object no longer takes any bytes # (-1 is a magic number to compensate for the implementation, # which always adds one to the size given) try: cache = jar._cache except AttributeError: pass else: cache.update_object_size_estimation(_OGA(self, '_Persistent__oid'), -1) # See notes in PickleCache.sweep for why we have to do this cache._persistent_deactivate_ran = True def _p_getattr(self, name): """ See IPersistent. """ if name.startswith('_p_') or name in _SPECIAL_NAMES: return True self._p_activate() self._p_accessed() return False def _p_setattr(self, name, value): """ See IPersistent. """ if name.startswith('_p_'): setattr(self, name, value) return True self._p_activate() self._p_accessed() return False def _p_delattr(self, name): """ See IPersistent. """ if name.startswith('_p_'): delattr(self, name) return True self._p_activate() self._p_accessed() return False # Helper methods: not APIs: we name them with '_p_' to bypass # the __getattribute__ bit which bumps the cache. def _p_register(self): jar = _OGA(self, '_Persistent__jar') if jar is not None and _OGA(self, '_Persistent__oid') is not None: jar.register(self) def _p_set_changed_flag(self, value): if value: before = _OGA(self, '_Persistent__flags') after = before | _CHANGED if before != after: self._p_register() _OSA(self, '_Persistent__flags', after) else: flags = _OGA(self, '_Persistent__flags') flags &= ~_CHANGED _OSA(self, '_Persistent__flags', flags) def _p_accessed(self): # Notify the jar's pickle cache that we have been accessed. # This relies on what has been (until now) an implementation # detail, the '_cache' attribute of the jar. We made it a # private API to avoid the cycle of keeping a reference to # the cache on the persistent object. 
# The below is the equivalent of this, but avoids # several recursive through __getattribute__, especially for _p_state, # and benchmarks much faster # # if(self.__jar is None or # self.__oid is None or # self._p_state < 0 ): return oga = _OGA jar = oga(self, '_Persistent__jar') if jar is None: return oid = oga(self, '_Persistent__oid') if oid is None: return flags = oga(self, '_Persistent__flags') if flags is None: # ghost return # The KeyError arises in ZODB: ZODB.serialize.ObjectWriter # can assign a jar and an oid to newly seen persistent objects, # but because they are newly created, they aren't in the # pickle cache yet. There doesn't seem to be a way to distinguish # that at this level, all we can do is catch it. # The AttributeError arises in ZODB test cases try: jar._cache.mru(oid) except (AttributeError,KeyError): pass def _p_is_in_cache(self, jar=None): oid = _OGA(self, '_Persistent__oid') if not oid: return False jar = jar or _OGA(self, '_Persistent__jar') cache = getattr(jar, '_cache', None) if cache is not None: return cache.get(oid) is self def _estimated_size_in_24_bits(value): if value > 1073741696: return 16777215 return (value//64) + 1 _SPECIAL_NAMES.update([intern('_Persistent' + x) for x in Persistent.__slots__]) persistent-4.2.2/persistent/picklecache.py0000644000076600000240000003666112577543740020661 0ustar jimstaff00000000000000############################################################################## # # Copyright (c) 2009 Zope Foundation and Contributors. # All Rights Reserved. # # This software is subject to the provisions of the Zope Public License, # Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. # THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED # WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS # FOR A PARTICULAR PURPOSE. 
# ############################################################################## import gc import weakref import sys from zope.interface import implementer from persistent.interfaces import GHOST from persistent.interfaces import IPickleCache from persistent.interfaces import OID_TYPE from persistent.interfaces import UPTODATE from persistent import Persistent from persistent.persistence import _estimated_size_in_24_bits # Tests may modify this to add additional types _CACHEABLE_TYPES = (type, Persistent) _SWEEPABLE_TYPES = (Persistent,) # The Python PickleCache implementation keeps track of the objects it # is caching in a WeakValueDictionary. The number of objects in the # cache (in this dictionary) is exposed as the len of the cache. Under # non-refcounted implementations like PyPy, the weak references in # this dictionary are only cleared when the garbage collector runs. # Thus, after an incrgc, the len of the cache is incorrect for some # period of time unless we ask the GC to run. # Furthermore, evicted objects can stay in the dictionary and be returned # from __getitem__ or possibly conflict with a new item in __setitem__. # We determine whether or not we need to do the GC based on the ability # to get a reference count: PyPy and Jython don't use refcounts and don't # expose this; this is safer than blacklisting specific platforms (e.g., # what about IronPython?). On refcounted platforms, we don't want to # run a GC to avoid possible performance regressions (e.g., it may # block all threads). 
# Tests may modify this _SWEEP_NEEDS_GC = not hasattr(sys, 'getrefcount') # On Jython, we need to explicitly ask it to monitor # objects if we want a more deterministic GC if hasattr(gc, 'monitorObject'): # pragma: no cover _gc_monitor = gc.monitorObject else: def _gc_monitor(o): pass _OGA = object.__getattribute__ def _sweeping_ring(f): # A decorator for functions in the PickleCache # that are sweeping the entire ring (mutating it); # serves as a pseudo-lock to not mutate the ring further # in other functions def locked(self, *args, **kwargs): self._is_sweeping_ring = True try: return f(self, *args, **kwargs) finally: self._is_sweeping_ring = False return locked from .ring import Ring @implementer(IPickleCache) class PickleCache(object): total_estimated_size = 0 cache_size_bytes = 0 # Set by functions that sweep the entire ring (via _sweeping_ring) # Serves as a pseudo-lock _is_sweeping_ring = False def __init__(self, jar, target_size=0, cache_size_bytes=0): # TODO: forward-port Dieter's bytes stuff self.jar = jar # We expect the jars to be able to have a pointer to # us; this is a reference cycle, but certain # aspects of invalidation and accessing depend on it. # The actual Connection objects we're used with do set this # automatically, but many test objects don't. # TODO: track this on the persistent objects themself? try: jar._cache = self except AttributeError: # Some ZODB tests pass in an object that cannot have an _cache pass self.cache_size = target_size self.drain_resistance = 0 self.non_ghost_count = 0 self.persistent_classes = {} self.data = weakref.WeakValueDictionary() self.ring = Ring() self.cache_size_bytes = cache_size_bytes # IPickleCache API def __len__(self): """ See IPickleCache. """ return (len(self.persistent_classes) + len(self.data)) def __getitem__(self, oid): """ See IPickleCache. """ value = self.data.get(oid) if value is not None: return value return self.persistent_classes[oid] def __setitem__(self, oid, value): """ See IPickleCache. 
""" # The order of checks matters for C compatibility; # the ZODB tests depend on this # The C impl requires either a type or a Persistent subclass if not isinstance(value, _CACHEABLE_TYPES): raise TypeError("Cache values must be persistent objects.") value_oid = value._p_oid if not isinstance(oid, OID_TYPE) or not isinstance(value_oid, OID_TYPE): raise TypeError('OID must be %s: key=%s _p_oid=%s' % (OID_TYPE, oid, value_oid)) if value_oid != oid: raise ValueError("Cache key does not match oid") # XXX if oid in self.persistent_classes or oid in self.data: # Have to be careful here, a GC might have just run # and cleaned up the object existing_data = self.get(oid) if existing_data is not None and existing_data is not value: # Raise the same type of exception as the C impl with the same # message. raise ValueError('A different object already has the same oid') # Match the C impl: it requires a jar jar = getattr(value, '_p_jar', None) if jar is None and not isinstance(value, type): raise ValueError("Cached object jar missing") # It also requires that it cannot be cached more than one place existing_cache = getattr(jar, '_cache', None) if (existing_cache is not None and existing_cache is not self and existing_cache.data.get(oid) is not None): raise ValueError("Object already in another cache") if isinstance(value, type): # ZODB.persistentclass.PersistentMetaClass self.persistent_classes[oid] = value else: self.data[oid] = value _gc_monitor(value) if _OGA(value, '_p_state') != GHOST and value not in self.ring: self.ring.add(value) self.non_ghost_count += 1 def __delitem__(self, oid): """ See IPickleCache. """ if not isinstance(oid, OID_TYPE): raise TypeError('OID must be %s: %s' % (OID_TYPE, oid)) if oid in self.persistent_classes: del self.persistent_classes[oid] else: value = self.data.pop(oid) self.ring.delete(value) def get(self, oid, default=None): """ See IPickleCache. 
""" value = self.data.get(oid, self) if value is not self: return value return self.persistent_classes.get(oid, default) def mru(self, oid): """ See IPickleCache. """ if self._is_sweeping_ring: # accessess during sweeping, such as with an # overridden _p_deactivate, don't mutate the ring # because that could leave it inconsistent return False # marker return for tests value = self.data[oid] was_in_ring = value in self.ring if not was_in_ring: if _OGA(value, '_p_state') != GHOST: self.ring.add(value) self.non_ghost_count += 1 else: self.ring.move_to_head(value) def ringlen(self): """ See IPickleCache. """ return len(self.ring) def items(self): """ See IPickleCache. """ return self.data.items() def lru_items(self): """ See IPickleCache. """ result = [] for obj in self.ring: result.append((obj._p_oid, obj)) return result def klass_items(self): """ See IPickleCache. """ return self.persistent_classes.items() def incrgc(self, ignored=None): """ See IPickleCache. """ target = self.cache_size if self.drain_resistance >= 1: size = self.non_ghost_count target2 = size - 1 - (size // self.drain_resistance) if target2 < target: target = target2 # return value for testing return self._sweep(target, self.cache_size_bytes) def full_sweep(self, target=None): """ See IPickleCache. """ # return value for testing return self._sweep(0) minimize = full_sweep def new_ghost(self, oid, obj): """ See IPickleCache. """ if obj._p_oid is not None: raise ValueError('Object already has oid') if obj._p_jar is not None: raise ValueError('Object already has jar') if oid in self.persistent_classes or oid in self.data: raise KeyError('Duplicate OID: %s' % oid) obj._p_oid = oid obj._p_jar = self.jar if not isinstance(obj, type): if obj._p_state != GHOST: # The C implementation sets this stuff directly, # but we delegate to the class. However, we must be # careful to avoid broken _p_invalidate and _p_deactivate # that don't call the super class. 
See ZODB's # testConnection.doctest_proper_ghost_initialization_with_empty__p_deactivate obj._p_invalidate_deactivate_helper() self[oid] = obj def reify(self, to_reify): """ See IPickleCache. """ if isinstance(to_reify, OID_TYPE): #bytes to_reify = [to_reify] for oid in to_reify: value = self[oid] if value._p_state == GHOST: value._p_activate() self.non_ghost_count += 1 self.mru(oid) def invalidate(self, to_invalidate): """ See IPickleCache. """ if isinstance(to_invalidate, OID_TYPE): self._invalidate(to_invalidate) else: for oid in to_invalidate: self._invalidate(oid) def debug_info(self): result = [] for oid, klass in self.persistent_classes.items(): result.append((oid, len(gc.get_referents(klass)), type(klass).__name__, klass._p_state, )) for oid, value in self.data.items(): result.append((oid, len(gc.get_referents(value)), type(value).__name__, value._p_state, )) return result def update_object_size_estimation(self, oid, new_size): """ See IPickleCache. """ value = self.data.get(oid) if value is not None: # Recall that while the argument is given in bytes, # we have to work with 64-block chunks (plus one) # to match the C implementation. Hence the convoluted # arithmetic new_size_in_24 = _estimated_size_in_24_bits(new_size) p_est_size_in_24 = value._Persistent__size new_est_size_in_bytes = (new_size_in_24 - p_est_size_in_24) * 64 self.total_estimated_size += new_est_size_in_bytes cache_drain_resistance = property(lambda self: self.drain_resistance) cache_non_ghost_count = property(lambda self: self.non_ghost_count) cache_data = property(lambda self: dict(self.data.items())) cache_klass_count = property(lambda self: len(self.persistent_classes)) # Helpers # Set to true when a deactivation happens in our code. For # compatibility with the C implementation, we can only remove the # node and decrement our non-ghost count if our implementation # actually runs (broken subclasses can forget to call super; ZODB # has tests for this). 
This gets set to false everytime we examine # a node and checked afterwards. The C implementation has a very # incestuous relationship between cPickleCache and cPersistence: # the pickle cache calls _p_deactivate, which is responsible for # both decrementing the non-ghost count and removing its node from # the cache ring (and, if it gets deallocated, from the pickle # cache's dictionary). We're trying to keep that to a minimum, but # there's no way around it if we want full compatibility. _persistent_deactivate_ran = False @_sweeping_ring def _sweep(self, target, target_size_bytes=0): # To avoid mutating datastructures in place or making a copy, # and to work efficiently with both the CFFI ring and the # deque-based ring, we collect the objects and their indexes # up front and then hand them off for ejection. # We don't use enumerate because that's slow under PyPy i = -1 to_eject = [] for value in self.ring: if self.non_ghost_count <= target and (self.total_estimated_size <= target_size_bytes or not target_size_bytes): break i += 1 if value._p_state == UPTODATE: # The C implementation will only evict things that are specifically # in the up-to-date state self._persistent_deactivate_ran = False # sweeping an object out of the cache should also # ghost it---that's what C does. This winds up # calling `update_object_size_estimation`. # Also in C, if this was the last reference to the object, # it removes itself from the `data` dictionary. # If we're under PyPy or Jython, we need to run a GC collection # to make this happen...this is only noticeable though, when # we eject objects. Also, note that we can only take any of these # actions if our _p_deactivate ran, in case of buggy subclasses. 
# see _persistent_deactivate_ran value._p_deactivate() if (self._persistent_deactivate_ran # Test-cases sneak in non-Persistent objects, sigh, so naturally # they don't cooperate (without this check a bunch of test_picklecache # breaks) or not isinstance(value, _SWEEPABLE_TYPES)): to_eject.append((i, value)) self.non_ghost_count -= 1 ejected = len(to_eject) if ejected: self.ring.delete_all(to_eject) del to_eject # Got to clear our local if we want the GC to get the weak refs if ejected and _SWEEP_NEEDS_GC: # See comments on _SWEEP_NEEDS_GC gc.collect() return ejected @_sweeping_ring def _invalidate(self, oid): value = self.data.get(oid) if value is not None and value._p_state != GHOST: value._p_invalidate() was_in_ring = self.ring.delete(value) self.non_ghost_count -= 1 elif oid in self.persistent_classes: persistent_class = self.persistent_classes.pop(oid) try: # ZODB.persistentclass.PersistentMetaClass objects # have this method and it must be called for transaction abort # and other forms of invalidation to work persistent_class._p_invalidate() except AttributeError: pass persistent-4.2.2/persistent/ring.c0000644000076600000240000000341312577543740017144 0ustar jimstaff00000000000000/***************************************************************************** Copyright (c) 2003 Zope Foundation and Contributors. All Rights Reserved. This software is subject to the provisions of the Zope Public License, Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS FOR A PARTICULAR PURPOSE ****************************************************************************/ #define RING_C "$Id$\n" /* Support routines for the doubly-linked list of cached objects. 
The cache stores a doubly-linked list of persistent objects, with space for the pointers allocated in the objects themselves. The cache stores the distinguished head of the list, which is not a valid persistent object. The next pointers traverse the ring in order starting with the least recently used object. The prev pointers traverse the ring in order starting with the most recently used object. */ #include "Python.h" #include "ring.h" void ring_add(CPersistentRing *ring, CPersistentRing *elt) { assert(!elt->r_next); elt->r_next = ring; elt->r_prev = ring->r_prev; ring->r_prev->r_next = elt; ring->r_prev = elt; } void ring_del(CPersistentRing *elt) { elt->r_next->r_prev = elt->r_prev; elt->r_prev->r_next = elt->r_next; elt->r_next = NULL; elt->r_prev = NULL; } void ring_move_to_head(CPersistentRing *ring, CPersistentRing *elt) { elt->r_prev->r_next = elt->r_next; elt->r_next->r_prev = elt->r_prev; elt->r_next = ring; elt->r_prev = ring->r_prev; ring->r_prev->r_next = elt; ring->r_prev = elt; } persistent-4.2.2/persistent/ring.h0000644000076600000240000000511712577543740017154 0ustar jimstaff00000000000000/***************************************************************************** Copyright (c) 2003 Zope Foundation and Contributors. All Rights Reserved. This software is subject to the provisions of the Zope Public License, Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS FOR A PARTICULAR PURPOSE ****************************************************************************/ /* Support routines for the doubly-linked list of cached objects. The cache stores a headed, doubly-linked, circular list of persistent objects, with space for the pointers allocated in the objects themselves. 
The cache stores the distinguished head of the list, which is not a valid persistent object. The other list members are non-ghost persistent objects, linked in LRU (least-recently used) order. The r_next pointers traverse the ring starting with the least recently used object. The r_prev pointers traverse the ring starting with the most recently used object. Obscure: While each object is pointed at twice by list pointers (once by its predecessor's r_next, again by its successor's r_prev), the refcount on the object is bumped only by 1. This leads to some possibly surprising sequences of incref and decref code. Note that since the refcount is bumped at least once, the list does hold a strong reference to each object in it. */ typedef struct CPersistentRing_struct { struct CPersistentRing_struct *r_prev; struct CPersistentRing_struct *r_next; } CPersistentRing; /* The list operations here take constant time independent of the * number of objects in the list: */ /* Add elt as the most recently used object. elt must not already be * in the list, although this isn't checked. */ void ring_add(CPersistentRing *ring, CPersistentRing *elt); /* Remove elt from the list. elt must already be in the list, although * this isn't checked. */ void ring_del(CPersistentRing *elt); /* elt must already be in the list, although this isn't checked. It's * unlinked from its current position, and relinked into the list as the * most recently used object (which is arguably the tail of the list * instead of the head -- but the name of this function could be argued * either way). This is equivalent to * * ring_del(elt); * ring_add(ring, elt); * * but may be a little quicker. 
*/ void ring_move_to_head(CPersistentRing *ring, CPersistentRing *elt); persistent-4.2.2/persistent/ring.py0000644000076600000240000001632612577543740017361 0ustar jimstaff00000000000000# -*- coding: utf-8 -*- ############################################################################## # # Copyright (c) 2015 Zope Foundation and Contributors. # All Rights Reserved. # # This software is subject to the provisions of the Zope Public License, # Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. # THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED # WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS # FOR A PARTICULAR PURPOSE. # ############################################################################## #pylint: disable=W0212,E0211,W0622,E0213,W0221,E0239 from zope.interface import Interface from zope.interface import implementer class IRing(Interface): """Conceptually, a doubly-linked list for efficiently keeping track of least- and most-recently used :class:`persistent.interfaces.IPersistent` objects. This is meant to be used by the :class:`persistent.picklecache.PickleCache` and should not be considered a public API. This interface documentation exists to assist development of the picklecache and alternate implementations by explaining assumptions and performance requirements. """ def __len__(): """Return the number of persistent objects stored in the ring. Should be constant time. """ def __contains__(object): """Answer whether the given persistent object is found in the ring. Must not rely on object equality or object hashing, but only identity or the `_p_oid`. Should be constant time. """ def add(object): """Add the persistent object to the ring as most-recently used. When an object is in the ring, the ring holds a strong reference to it so it can be deactivated later by the pickle cache. Should be constant time. 
The object should not already be in the ring, but this is not necessarily enforced. """ def delete(object): """Remove the object from the ring if it is present. Returns a true value if it was present and a false value otherwise. An ideal implementation should be constant time, but linear time is allowed. """ def move_to_head(object): """Place the object as the most recently used object in the ring. The object should already be in the ring, but this is not necessarily enforced, and attempting to move an object that is not in the ring has undefined consequences. An ideal implementation should be constant time, but linear time is allowed. """ def delete_all(indexes_and_values): """Given a sequence of pairs (index, object), remove all of them from the ring. This should be equivalent to calling :meth:`delete` for each value, but allows for a more efficient bulk deletion process. If the index and object pairs do not match with the actual state of the ring, this operation is undefined. Should be at least linear time (not quadratic). """ def __iter__(): """Iterate over each persistent object in the ring, in the order of least recently used to most recently used. Mutating the ring while an iteration is in progress has undefined consequences. """ from collections import deque @implementer(IRing) class _DequeRing(object): """A ring backed by the :class:`collections.deque` class. Operations are a mix of constant and linear time. It is available on all platforms. 
""" __slots__ = ('ring', 'ring_oids') def __init__(self): self.ring = deque() self.ring_oids = set() def __len__(self): return len(self.ring) def __contains__(self, pobj): return pobj._p_oid in self.ring_oids def add(self, pobj): self.ring.append(pobj) self.ring_oids.add(pobj._p_oid) def delete(self, pobj): # Note that we do not use self.ring.remove() because that # uses equality semantics and we don't want to call the persistent # object's __eq__ method (which might wake it up just after we # tried to ghost it) for i, o in enumerate(self.ring): if o is pobj: del self.ring[i] self.ring_oids.discard(pobj._p_oid) return 1 def move_to_head(self, pobj): self.delete(pobj) self.add(pobj) def delete_all(self, indexes_and_values): for ix, value in reversed(indexes_and_values): del self.ring[ix] self.ring_oids.discard(value._p_oid) def __iter__(self): return iter(self.ring) try: from cffi import FFI except ImportError: # pragma: no cover _CFFIRing = None else: import os this_dir = os.path.dirname(os.path.abspath(__file__)) ffi = FFI() with open(os.path.join(this_dir, 'ring.h')) as f: ffi.cdef(f.read()) _FFI_RING = ffi.verify(""" #include "ring.c" """, include_dirs=[this_dir]) _OGA = object.__getattribute__ _OSA = object.__setattr__ #pylint: disable=E1101 @implementer(IRing) class _CFFIRing(object): """A ring backed by a C implementation. All operations are constant time. It is only available on platforms with ``cffi`` installed. """ __slots__ = ('ring_home', 'ring_to_obj') def __init__(self): node = self.ring_home = ffi.new("CPersistentRing*") node.r_next = node node.r_prev = node # In order for the CFFI objects to stay alive, we must keep # a strong reference to them, otherwise they get freed. 
We must # also keep strong references to the objects so they can be deactivated self.ring_to_obj = dict() def __len__(self): return len(self.ring_to_obj) def __contains__(self, pobj): return getattr(pobj, '_Persistent__ring', self) in self.ring_to_obj def add(self, pobj): node = ffi.new("CPersistentRing*") _FFI_RING.ring_add(self.ring_home, node) self.ring_to_obj[node] = pobj _OSA(pobj, '_Persistent__ring', node) def delete(self, pobj): its_node = getattr(pobj, '_Persistent__ring', None) our_obj = self.ring_to_obj.pop(its_node, None) if its_node is not None and our_obj is not None and its_node.r_next: _FFI_RING.ring_del(its_node) return 1 def move_to_head(self, pobj): node = _OGA(pobj, '_Persistent__ring') _FFI_RING.ring_move_to_head(self.ring_home, node) def delete_all(self, indexes_and_values): for _, value in indexes_and_values: self.delete(value) def iteritems(self): head = self.ring_home here = head.r_next while here != head: yield here here = here.r_next def __iter__(self): ring_to_obj = self.ring_to_obj for node in self.iteritems(): yield ring_to_obj[node] # Export the best available implementation Ring = _CFFIRing if _CFFIRing else _DequeRing persistent-4.2.2/persistent/tests/0000755000076600000240000000000013017351012017155 5ustar jimstaff00000000000000persistent-4.2.2/persistent/tests/__init__.py0000644000076600000240000000001212577543740021304 0ustar jimstaff00000000000000# package persistent-4.2.2/persistent/tests/attrhooks.py0000644000076600000240000001016512577543740021575 0ustar jimstaff00000000000000############################################################################## # # Copyright (c) 2004 Zope Foundation and Contributors. # All Rights Reserved. # # This software is subject to the provisions of the Zope Public License, # Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED # WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS # FOR A PARTICULAR PURPOSE. # ############################################################################## """Overriding attr methods Examples for overriding attribute access methods. """ from persistent import Persistent def _resettingJar(): from persistent.tests.utils import ResettingJar return ResettingJar() def _rememberingJar(): from persistent.tests.utils import RememberingJar return RememberingJar() class OverridesGetattr(Persistent): """Example of overriding __getattr__ """ def __getattr__(self, name): """Get attributes that can't be gotten the usual way """ # Don't pretend we have any special attributes. if name.startswith("__") and name.endswrith("__"): raise AttributeError(name) else: return name.upper(), self._p_changed class VeryPrivate(Persistent): """Example of overriding __getattribute__, __setattr__, and __delattr__ """ def __init__(self, **kw): self.__dict__['__secret__'] = kw.copy() def __getattribute__(self, name): """Get an attribute value See the very important note in the comment below! """ ################################################################# # IMPORTANT! READ THIS! 8-> # # We *always* give Persistent a chance first. # Persistent handles certain special attributes, like _p_ # attributes. In particular, the base class handles __dict__ # and __class__. # # We call _p_getattr. If it returns True, then we have to # use Persistent.__getattribute__ to get the value. 
# ################################################################# if Persistent._p_getattr(self, name): return Persistent.__getattribute__(self, name) # Data should be in our secret dictionary: secret = self.__dict__['__secret__'] if name in secret: return secret[name] # Maybe it's a method: meth = getattr(self.__class__, name, None) if meth is None: raise AttributeError(name) return meth.__get__(self, self.__class__) def __setattr__(self, name, value): """Set an attribute value """ ################################################################# # IMPORTANT! READ THIS! 8-> # # We *always* give Persistent a chance first. # Persistent handles certain special attributes, like _p_ # attributes. # # We call _p_setattr. If it returns True, then we are done. # It has already set the attribute. # ################################################################# if Persistent._p_setattr(self, name, value): return self.__dict__['__secret__'][name] = value if not name.startswith('tmp_'): self._p_changed = 1 def __delattr__(self, name): """Delete an attribute value """ ################################################################# # IMPORTANT! READ THIS! 8-> # # We *always* give Persistent a chance first. # Persistent handles certain special attributes, like _p_ # attributes. # # We call _p_delattr. If it returns True, then we are done. # It has already deleted the attribute. # ################################################################# if Persistent._p_delattr(self, name): return del self.__dict__['__secret__'][name] if not name.startswith('tmp_'): self._p_changed = 1 persistent-4.2.2/persistent/tests/cucumbers.py0000644000076600000240000000551712577543740021554 0ustar jimstaff00000000000000############################################################################## # # Copyright (c) 2003 Zope Foundation and Contributors. # All Rights Reserved. # # This software is subject to the provisions of the Zope Public License, # Version 2.1 (ZPL). 
A copy of the ZPL should accompany this distribution. # THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED # WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS # FOR A PARTICULAR PURPOSE. # ############################################################################## # Example objects for pickling. from persistent import Persistent from persistent._compat import PYTHON2 def print_dict(d): d = d.items() d.sort() print('{%s}' % (', '.join( [('%r: %r' % (k, v)) for (k, v) in d] ))) def cmpattrs(self, other, *attrs): for attr in attrs: if attr[:3] in ('_v_', '_p_'): continue lhs, rhs = getattr(self, attr, None), getattr(other, attr, None) if PYTHON2: c = cmp(lhs, rhs) if c: return c else: if lhs != rhs: return 1 return 0 class Simple(Persistent): def __init__(self, name, **kw): self.__name__ = name self.__dict__.update(kw) self._v_favorite_color = 'blue' self._p_foo = 'bar' @property def _attrs(self): return list(self.__dict__.keys()) def __eq__(self, other): return cmpattrs(self, other, '__class__', *self._attrs) == 0 class Custom(Simple): def __new__(cls, x, y): r = Persistent.__new__(cls) r.x, r.y = x, y return r def __init__(self, x, y): self.a = 42 def __getnewargs__(self): return self.x, self.y def __getstate__(self): return self.a def __setstate__(self, a): self.a = a class Slotted(Persistent): __slots__ = 's1', 's2', '_p_splat', '_v_eek' def __init__(self, s1, s2): self.s1, self.s2 = s1, s2 self._v_eek = 1 self._p_splat = 2 @property def _attrs(self): return list(self.__dict__.keys()) def __eq__(self, other): return cmpattrs(self, other, '__class__', *self._attrs) == 0 class SubSlotted(Slotted): __slots__ = 's3', 's4' def __init__(self, s1, s2, s3): Slotted.__init__(self, s1, s2) self.s3 = s3 @property def _attrs(self): return ('s1', 's2', 's3', 's4') class SubSubSlotted(SubSlotted): def __init__(self, s1, s2, s3, **kw): SubSlotted.__init__(self, s1, 
s2, s3) self.__dict__.update(kw) self._v_favorite_color = 'blue' self._p_foo = 'bar' @property def _attrs(self): return ['s1', 's2', 's3', 's4'] + list(self.__dict__.keys()) persistent-4.2.2/persistent/tests/test_list.py0000644000076600000240000001476312577543740021601 0ustar jimstaff00000000000000############################################################################## # # Copyright (c) 2001, 2002 Zope Foundation and Contributors. # All Rights Reserved. # # This software is subject to the provisions of the Zope Public License, # Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. # THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED # WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS # FOR A PARTICULAR PURPOSE. # ############################################################################## """Tests for PersistentList """ import unittest l0 = [] l1 = [0] l2 = [0, 1] class OtherList: def __init__(self, initlist): self.__data = initlist def __len__(self): return len(self.__data) def __getitem__(self, i): return self.__data[i] class TestPList(unittest.TestCase): def _getTargetClass(self): from persistent.list import PersistentList return PersistentList def test_volatile_attributes_not_persisted(self): # http://www.zope.org/Collectors/Zope/2052 m = self._getTargetClass()() m.foo = 'bar' m._v_baz = 'qux' state = m.__getstate__() self.assertTrue('foo' in state) self.assertFalse('_v_baz' in state) def testTheWorld(self): from persistent._compat import PYTHON2 # Test constructors pl = self._getTargetClass() u = pl() u0 = pl(l0) u1 = pl(l1) u2 = pl(l2) uu = pl(u) uu0 = pl(u0) uu1 = pl(u1) uu2 = pl(u2) v = pl(tuple(u)) v0 = pl(OtherList(u0)) vv = pl("this is also a sequence") # Test __repr__ eq = self.assertEqual eq(str(u0), str(l0), "str(u0) == str(l0)") eq(repr(u1), repr(l1), "repr(u1) == repr(l1)") # Test __cmp__ and __len__ if PYTHON2: 
def mycmp(a, b): r = cmp(a, b) if r < 0: return -1 if r > 0: return 1 return r all = [l0, l1, l2, u, u0, u1, u2, uu, uu0, uu1, uu2] for a in all: for b in all: eq(mycmp(a, b), mycmp(len(a), len(b)), "mycmp(a, b) == mycmp(len(a), len(b))") # Test __getitem__ for i in range(len(u2)): eq(u2[i], i, "u2[i] == i") # Test __setitem__ uu2[0] = 0 uu2[1] = 100 try: uu2[2] = 200 except IndexError: pass else: raise TestFailed("uu2[2] shouldn't be assignable") # Test __delitem__ del uu2[1] del uu2[0] try: del uu2[0] except IndexError: pass else: raise TestFailed("uu2[0] shouldn't be deletable") # Test __getslice__ for i in range(-3, 4): eq(u2[:i], l2[:i], "u2[:i] == l2[:i]") eq(u2[i:], l2[i:], "u2[i:] == l2[i:]") for j in range(-3, 4): eq(u2[i:j], l2[i:j], "u2[i:j] == l2[i:j]") # Test __setslice__ for i in range(-3, 4): u2[:i] = l2[:i] eq(u2, l2, "u2 == l2") u2[i:] = l2[i:] eq(u2, l2, "u2 == l2") for j in range(-3, 4): u2[i:j] = l2[i:j] eq(u2, l2, "u2 == l2") uu2 = u2[:] uu2[:0] = [-2, -1] eq(uu2, [-2, -1, 0, 1], "uu2 == [-2, -1, 0, 1]") uu2[0:] = [] eq(uu2, [], "uu2 == []") # Test __contains__ for i in u2: self.assertTrue(i in u2, "i in u2") for i in min(u2)-1, max(u2)+1: self.assertTrue(i not in u2, "i not in u2") # Test __delslice__ uu2 = u2[:] del uu2[1:2] del uu2[0:1] eq(uu2, [], "uu2 == []") uu2 = u2[:] del uu2[1:] del uu2[:1] eq(uu2, [], "uu2 == []") # Test __add__, __radd__, __mul__ and __rmul__ #self.assertTrue(u1 + [] == [] + u1 == u1, "u1 + [] == [] + u1 == u1") self.assertTrue(u1 + [1] == u2, "u1 + [1] == u2") #self.assertTrue([-1] + u1 == [-1, 0], "[-1] + u1 == [-1, 0]") self.assertTrue(u2 == u2*1 == 1*u2, "u2 == u2*1 == 1*u2") self.assertTrue(u2+u2 == u2*2 == 2*u2, "u2+u2 == u2*2 == 2*u2") self.assertTrue(u2+u2+u2 == u2*3 == 3*u2, "u2+u2+u2 == u2*3 == 3*u2") # Test append u = u1[:] u.append(1) eq(u, u2, "u == u2") # Test insert u = u2[:] u.insert(0, -1) eq(u, [-1, 0, 1], "u == [-1, 0, 1]") # Test pop u = pl([0, -1, 1]) u.pop() eq(u, [0, -1], "u == [0, -1]") 
u.pop(0) eq(u, [-1], "u == [-1]") # Test remove u = u2[:] u.remove(1) eq(u, u1, "u == u1") # Test count u = u2*3 eq(u.count(0), 3, "u.count(0) == 3") eq(u.count(1), 3, "u.count(1) == 3") eq(u.count(2), 0, "u.count(2) == 0") # Test index eq(u2.index(0), 0, "u2.index(0) == 0") eq(u2.index(1), 1, "u2.index(1) == 1") try: u2.index(2) except ValueError: pass else: raise TestFailed("expected ValueError") # Test reverse u = u2[:] u.reverse() eq(u, [1, 0], "u == [1, 0]") u.reverse() eq(u, u2, "u == u2") # Test sort u = pl([1, 0]) u.sort() eq(u, u2, "u == u2") # Test keyword arguments to sort if PYTHON2: u.sort(cmp=lambda x,y: cmp(y, x)) eq(u, [1, 0], "u == [1, 0]") u.sort(key=lambda x:-x) eq(u, [1, 0], "u == [1, 0]") u.sort(reverse=True) eq(u, [1, 0], "u == [1, 0]") # Passing any other keyword arguments results in a TypeError try: u.sort(blah=True) except TypeError: pass else: raise TestFailed("expected TypeError") # Test extend u = u1[:] u.extend(u2) eq(u, u1 + u2, "u == u1 + u2") # Test iadd u = u1[:] u += u2 eq(u, u1 + u2, "u == u1 + u2") # Test imul u = u1[:] u *= 3 eq(u, u1 + u1 + u1, "u == u1 + u1 + u1") def test_suite(): return unittest.makeSuite(TestPList) if __name__ == "__main__": loader = unittest.TestLoader() unittest.main(testLoader=loader) persistent-4.2.2/persistent/tests/test_mapping.py0000644000076600000240000001571612577543740022260 0ustar jimstaff00000000000000############################################################################## # # Copyright (c) Zope Foundation and Contributors. # All Rights Reserved. # # This software is subject to the provisions of the Zope Public License, # Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. # THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED # WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS # FOR A PARTICULAR PURPOSE. 
# ############################################################################## import unittest class Test_default(unittest.TestCase): def _getTargetClass(self): from persistent.mapping import default return default def _makeOne(self, func): return self._getTargetClass()(func) def test___get___from_class(self): _called_with = [] def _test(inst): _called_with.append(inst) return '_test' descr = self._makeOne(_test) class Foo(object): testing = descr self.assertTrue(Foo.testing is descr) self.assertEqual(_called_with, []) def test___get___from_instance(self): _called_with = [] def _test(inst): _called_with.append(inst) return 'TESTING' descr = self._makeOne(_test) class Foo(object): testing = descr foo = Foo() self.assertEqual(foo.testing, 'TESTING') self.assertEqual(_called_with, [foo]) class PersistentMappingTests(unittest.TestCase): def _getTargetClass(self): from persistent.mapping import PersistentMapping return PersistentMapping def _makeOne(self, *args, **kw): return self._getTargetClass()(*args, **kw) def test_volatile_attributes_not_persisted(self): # http://www.zope.org/Collectors/Zope/2052 m = self._makeOne() m.foo = 'bar' m._v_baz = 'qux' state = m.__getstate__() self.assertTrue('foo' in state) self.assertFalse('_v_baz' in state) def testTheWorld(self): from persistent._compat import PYTHON2 # Test constructors l0 = {} l1 = {0:0} l2 = {0:0, 1:1} u = self._makeOne() u0 = self._makeOne(l0) u1 = self._makeOne(l1) u2 = self._makeOne(l2) uu = self._makeOne(u) uu0 = self._makeOne(u0) uu1 = self._makeOne(u1) uu2 = self._makeOne(u2) class OtherMapping(dict): def __init__(self, initmapping): self.__data = initmapping def items(self): return self.__data.items() v0 = self._makeOne(OtherMapping(u0)) vv = self._makeOne([(0, 0), (1, 1)]) # Test __repr__ eq = self.assertEqual eq(str(u0), str(l0), "str(u0) == str(l0)") eq(repr(u1), repr(l1), "repr(u1) == repr(l1)") # Test __cmp__ and __len__ if PYTHON2: def mycmp(a, b): r = cmp(a, b) if r < 0: return -1 if r > 0: return 
1 return r all = [l0, l1, l2, u, u0, u1, u2, uu, uu0, uu1, uu2] for a in all: for b in all: eq(mycmp(a, b), mycmp(len(a), len(b)), "mycmp(a, b) == mycmp(len(a), len(b))") # Test __getitem__ for i in range(len(u2)): eq(u2[i], i, "u2[i] == i") # Test get for i in range(len(u2)): eq(u2.get(i), i, "u2.get(i) == i") eq(u2.get(i, 5), i, "u2.get(i, 5) == i") for i in min(u2)-1, max(u2)+1: eq(u2.get(i), None, "u2.get(i) == None") eq(u2.get(i, 5), 5, "u2.get(i, 5) == 5") # Test __setitem__ uu2[0] = 0 uu2[1] = 100 uu2[2] = 200 # Test __delitem__ del uu2[1] del uu2[0] try: del uu2[0] except KeyError: pass else: raise TestFailed("uu2[0] shouldn't be deletable") # Test __contains__ for i in u2: self.assertTrue(i in u2, "i in u2") for i in min(u2)-1, max(u2)+1: self.assertTrue(i not in u2, "i not in u2") # Test update l = {"a":"b"} u = self._makeOne(l) u.update(u2) for i in u: self.assertTrue(i in l or i in u2, "i in l or i in u2") for i in l: self.assertTrue(i in u, "i in u") for i in u2: self.assertTrue(i in u, "i in u") # Test setdefault x = u2.setdefault(0, 5) eq(x, 0, "u2.setdefault(0, 5) == 0") x = u2.setdefault(5, 5) eq(x, 5, "u2.setdefault(5, 5) == 5") self.assertTrue(5 in u2, "5 in u2") # Test pop x = u2.pop(1) eq(x, 1, "u2.pop(1) == 1") self.assertTrue(1 not in u2, "1 not in u2") try: u2.pop(1) except KeyError: pass else: self.fail("1 should not be poppable from u2") x = u2.pop(1, 7) eq(x, 7, "u2.pop(1, 7) == 7") # Test popitem items = list(u2.items()) key, value = u2.popitem() self.assertTrue((key, value) in items, "key, value in items") self.assertTrue(key not in u2, "key not in u2") # Test clear u2.clear() eq(u2, {}, "u2 == {}") def test___repr___converts_legacy_container_attr(self): # In the past, PM used a _container attribute. For some time, the # implementation continued to use a _container attribute in pickles # (__get/setstate__) to be compatible with older releases. This isn't # really necessary any more. 
In fact, releases for which this might # matter can no longer share databases with current releases. Because # releases as recent as 3.9.0b5 still use _container in saved state, we # need to accept such state, but we stop producing it. pm = self._makeOne() self.assertEqual(pm.__dict__, {'data': {}}) # Make it look like an older instance pm.__dict__.clear() pm.__dict__['_container'] = {'a': 1} self.assertEqual(pm.__dict__, {'_container': {'a': 1}}) pm._p_changed = 0 self.assertEqual(repr(pm), "{'a': 1}") self.assertEqual(pm.__dict__, {'data': {'a': 1}}) self.assertEqual(pm.__getstate__(), {'data': {'a': 1}}) class Test_legacy_PersistentDict(unittest.TestCase): def _getTargetClass(self): from persistent.dict import PersistentDict return PersistentDict def test_PD_is_alias_to_PM(self): from persistent.mapping import PersistentMapping self.assertTrue(self._getTargetClass() is PersistentMapping) def test_suite(): return unittest.TestSuite(( unittest.makeSuite(Test_default), unittest.makeSuite(PersistentMappingTests), unittest.makeSuite(Test_legacy_PersistentDict), )) persistent-4.2.2/persistent/tests/test_persistence.py0000644000076600000240000017524213017350405023132 0ustar jimstaff00000000000000############################################################################## # # Copyright (c) 2011 Zope Foundation and Contributors. # All Rights Reserved. # # This software is subject to the provisions of the Zope Public License, # Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. # THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED # WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS # FOR A PARTICULAR PURPOSE. 
# ############################################################################## import os import unittest import platform import sys py_impl = getattr(platform, 'python_implementation', lambda: None) _is_pypy3 = py_impl() == 'PyPy' and sys.version_info[0] > 2 _is_jython = py_impl() == 'Jython' #pylint: disable=R0904,W0212,E1101 class _Persistent_Base(object): def _getTargetClass(self): # concrete testcase classes must override raise NotImplementedError() def _makeCache(self, jar): # concrete testcase classes must override raise NotImplementedError() def _makeOne(self, *args, **kw): return self._getTargetClass()(*args, **kw) def _makeJar(self): from zope.interface import implementer from persistent.interfaces import IPersistentDataManager @implementer(IPersistentDataManager) class _Jar(object): _cache = None # Set this to a value to have our `setstate` # pass it through to the object's __setstate__ setstate_calls_object = None # Set this to a value to have our `setstate` # set the _p_serial of the object setstate_sets_serial = None def __init__(self): self._loaded = [] self._registered = [] def setstate(self, obj): self._loaded.append(obj._p_oid) if self.setstate_calls_object is not None: obj.__setstate__(self.setstate_calls_object) if self.setstate_sets_serial is not None: obj._p_serial = self.setstate_sets_serial def register(self, obj): self._registered.append(obj._p_oid) jar = _Jar() jar._cache = self._makeCache(jar) return jar def _makeBrokenJar(self): from zope.interface import implementer from persistent.interfaces import IPersistentDataManager @implementer(IPersistentDataManager) class _BrokenJar(object): def __init__(self): self.called = 0 def register(self,ob): self.called += 1 raise NotImplementedError def setstate(self,ob): raise NotImplementedError jar = _BrokenJar() jar._cache = self._makeCache(jar) return jar def _makeOneWithJar(self, klass=None): from persistent.timestamp import _makeOctets OID = _makeOctets('\x01' * 8) if klass is not None: inst = 
klass() else: inst = self._makeOne() jar = self._makeJar() jar._cache.new_ghost(OID, inst) # assigns _p_jar, _p_oid return inst, jar, OID def test_class_conforms_to_IPersistent(self): from zope.interface.verify import verifyClass from persistent.interfaces import IPersistent verifyClass(IPersistent, self._getTargetClass()) def test_instance_conforms_to_IPersistent(self): from zope.interface.verify import verifyObject from persistent.interfaces import IPersistent verifyObject(IPersistent, self._makeOne()) def test_ctor(self): from persistent.persistence import _INITIAL_SERIAL inst = self._makeOne() self.assertEqual(inst._p_jar, None) self.assertEqual(inst._p_oid, None) self.assertEqual(inst._p_serial, _INITIAL_SERIAL) self.assertEqual(inst._p_changed, False) self.assertEqual(inst._p_sticky, False) self.assertEqual(inst._p_status, 'unsaved') def test_del_jar_no_jar(self): inst = self._makeOne() del inst._p_jar # does not raise self.assertEqual(inst._p_jar, None) def test_del_jar_while_in_cache(self): inst, _, OID = self._makeOneWithJar() def _test(): del inst._p_jar self.assertRaises(ValueError, _test) def test_del_jar_like_ZODB_abort(self): # When a ZODB connection aborts, it removes registered objects from # the cache, deletes their jar, deletes their OID, and finally sets # p_changed to false inst, jar, OID = self._makeOneWithJar() del jar._cache[OID] del inst._p_jar self.assertEqual(inst._p_jar, None) def test_del_jar_of_inactive_object_that_has_no_state(self): # If an object is ghosted, and we try to delete its # jar, we shouldn't activate the object. 
# Simulate a POSKeyError on _p_activate; this can happen aborting # a transaction using ZEO broken_jar = self._makeBrokenJar() inst = self._makeOne() inst._p_oid = 42 inst._p_jar = broken_jar # make it inactive inst._p_deactivate() self.assertEqual(inst._p_status, "ghost") # delete the jar; if we activated the object, the broken # jar would raise NotImplementedError del inst._p_jar def test_assign_p_jar_w_new_jar(self): inst, jar, OID = self._makeOneWithJar() new_jar = self._makeJar() try: inst._p_jar = new_jar except ValueError as e: self.assertEqual(str(e), "can not change _p_jar of cached object") else: self.fail("Should raise ValueError") def test_assign_p_jar_w_valid_jar(self): jar = self._makeJar() inst = self._makeOne() inst._p_jar = jar self.assertEqual(inst._p_status, 'saved') self.assertTrue(inst._p_jar is jar) inst._p_jar = jar # reassign only to same DM def test_assign_p_jar_not_in_cache_allowed(self): jar = self._makeJar() inst = self._makeOne() inst._p_jar = jar # Both of these are allowed inst._p_jar = self._makeJar() inst._p_jar = None self.assertEqual(inst._p_jar, None) def test_assign_p_oid_w_invalid_oid(self): inst, jar, OID = self._makeOneWithJar() try: inst._p_oid = object() except ValueError as e: self.assertEqual(str(e), 'can not change _p_oid of cached object') else: self.fail("Should raise value error") def test_assign_p_oid_w_valid_oid(self): from persistent.timestamp import _makeOctets OID = _makeOctets('\x01' * 8) inst = self._makeOne() inst._p_oid = OID self.assertEqual(inst._p_oid, OID) inst._p_oid = OID # reassign only same OID def test_assign_p_oid_w_new_oid_wo_jar(self): from persistent.timestamp import _makeOctets OID1 = _makeOctets('\x01' * 8) OID2 = _makeOctets('\x02' * 8) inst = self._makeOne() inst._p_oid = OID1 inst._p_oid = OID2 self.assertEqual(inst._p_oid, OID2) def test_assign_p_oid_w_None_wo_jar(self): from persistent.timestamp import _makeOctets OID1 = _makeOctets('\x01' * 8) inst = self._makeOne() inst._p_oid = OID1 
inst._p_oid = None self.assertEqual(inst._p_oid, None) def test_assign_p_oid_w_new_oid_w_jar(self): from persistent.timestamp import _makeOctets inst, jar, OID = self._makeOneWithJar() new_OID = _makeOctets('\x02' * 8) def _test(): inst._p_oid = new_OID self.assertRaises(ValueError, _test) def test_assign_p_oid_not_in_cache_allowed(self): jar = self._makeJar() inst = self._makeOne() inst._p_jar = jar inst._p_oid = 1 # anything goes inst._p_oid = 42 self.assertEqual(inst._p_oid, 42) def test_delete_p_oid_wo_jar(self): from persistent.timestamp import _makeOctets OID = _makeOctets('\x01' * 8) inst = self._makeOne() inst._p_oid = OID del inst._p_oid self.assertEqual(inst._p_oid, None) def test_delete_p_oid_w_jar(self): inst, jar, OID = self._makeOneWithJar() def _test(): del inst._p_oid self.assertRaises(ValueError, _test) def test_del_oid_like_ZODB_abort(self): # When a ZODB connection aborts, it removes registered objects from # the cache, deletes their jar, deletes their OID, and finally sets # p_changed to false inst, jar, OID = self._makeOneWithJar() del jar._cache[OID] del inst._p_oid self.assertEqual(inst._p_oid, None) def test_assign_p_serial_w_invalid_type(self): inst = self._makeOne() def _test(): inst._p_serial = object() self.assertRaises(ValueError, _test) def test_assign_p_serial_w_None(self): inst = self._makeOne() def _test(): inst._p_serial = None self.assertRaises(ValueError, _test) def test_assign_p_serial_too_short(self): inst = self._makeOne() def _test(): inst._p_serial = '\x01\x02\x03' self.assertRaises(ValueError, _test) def test_assign_p_serial_too_long(self): inst = self._makeOne() def _test(): inst._p_serial = '\x01\x02\x03' * 3 self.assertRaises(ValueError, _test) def test_assign_p_serial_w_valid_serial(self): from persistent.timestamp import _makeOctets SERIAL = _makeOctets('\x01' * 8) inst = self._makeOne() inst._p_serial = SERIAL self.assertEqual(inst._p_serial, SERIAL) def test_delete_p_serial(self): from persistent.timestamp import 
_makeOctets from persistent.persistence import _INITIAL_SERIAL SERIAL = _makeOctets('\x01' * 8) inst = self._makeOne() inst._p_serial = SERIAL self.assertEqual(inst._p_serial, SERIAL) del(inst._p_serial) self.assertEqual(inst._p_serial, _INITIAL_SERIAL) def test_query_p_changed_unsaved(self): inst = self._makeOne() self.assertEqual(inst._p_changed, False) def test_query_p_changed_ghost(self): inst, jar, OID = self._makeOneWithJar() inst._p_deactivate() self.assertEqual(inst._p_changed, None) def test_query_p_changed_saved(self): inst, jar, OID = self._makeOneWithJar() inst._p_activate() self.assertEqual(inst._p_changed, False) def test_query_p_changed_changed(self): inst, jar, OID = self._makeOneWithJar() inst._p_activate() inst._p_changed = True self.assertEqual(inst._p_changed, True) def test_assign_p_changed_none_from_unsaved(self): inst = self._makeOne() inst._p_changed = None self.assertEqual(inst._p_status, 'unsaved') def test_assign_p_changed_true_from_unsaved(self): inst = self._makeOne() inst._p_changed = True self.assertEqual(inst._p_status, 'unsaved') def test_assign_p_changed_false_from_unsaved(self): inst = self._makeOne() inst._p_changed = False self.assertEqual(inst._p_status, 'unsaved') def test_assign_p_changed_none_from_ghost(self): inst, jar, OID = self._makeOneWithJar() inst._p_deactivate() inst._p_changed = None self.assertEqual(inst._p_status, 'ghost') self.assertEqual(list(jar._loaded), []) self.assertEqual(list(jar._registered), []) def test_assign_p_changed_true_from_ghost(self): inst, jar, OID = self._makeOneWithJar() inst._p_deactivate() inst._p_changed = True self.assertEqual(inst._p_status, 'changed') self.assertEqual(list(jar._loaded), [OID]) self.assertEqual(list(jar._registered), [OID]) def test_assign_p_changed_false_from_ghost(self): inst, jar, OID = self._makeOneWithJar() inst._p_deactivate() inst._p_changed = False self.assertEqual(inst._p_status, 'ghost') # ??? 
this is what C does self.assertEqual(list(jar._loaded), []) self.assertEqual(list(jar._registered), []) def test_assign_p_changed_none_from_saved(self): inst, jar, OID = self._makeOneWithJar() inst._p_activate() jar._loaded = [] inst._p_changed = None self.assertEqual(inst._p_status, 'ghost') self.assertEqual(list(jar._loaded), []) self.assertEqual(list(jar._registered), []) def test_assign_p_changed_true_from_saved(self): inst, jar, OID = self._makeOneWithJar() inst._p_activate() # XXX jar._loaded[:] = [] inst._p_changed = True self.assertEqual(inst._p_status, 'changed') self.assertEqual(list(jar._loaded), []) self.assertEqual(list(jar._registered), [OID]) def test_assign_p_changed_false_from_saved(self): inst, jar, OID = self._makeOneWithJar() inst._p_activate() jar._loaded = [] inst._p_changed = False self.assertEqual(inst._p_status, 'saved') self.assertEqual(list(jar._loaded), []) self.assertEqual(list(jar._registered), []) def test_assign_p_changed_none_from_changed(self): inst, jar, OID = self._makeOneWithJar() inst._p_activate() inst._p_changed = True jar._loaded = [] jar._registered = [] inst._p_changed = None # assigning None is ignored when dirty self.assertEqual(inst._p_status, 'changed') self.assertEqual(list(jar._loaded), []) self.assertEqual(list(jar._registered), []) def test_assign_p_changed_true_from_changed(self): inst, jar, OID = self._makeOneWithJar() inst._p_activate() inst._p_changed = True jar._loaded = [] jar._registered = [] inst._p_changed = True self.assertEqual(inst._p_status, 'changed') self.assertEqual(list(jar._loaded), []) self.assertEqual(list(jar._registered), []) def test_assign_p_changed_false_from_changed(self): inst, jar, OID = self._makeOneWithJar() inst._p_activate() inst._p_changed = True jar._loaded = [] jar._registered = [] inst._p_changed = False self.assertEqual(inst._p_status, 'saved') self.assertEqual(list(jar._loaded), []) self.assertEqual(list(jar._registered), []) def test_assign_p_changed_none_when_sticky(self): 
inst, jar, OID = self._makeOneWithJar() inst._p_activate() # XXX inst._p_changed = False inst._p_sticky = True inst._p_changed = None self.assertEqual(inst._p_status, 'sticky') self.assertEqual(inst._p_changed, False) self.assertEqual(inst._p_sticky, True) def test_delete_p_changed_from_unsaved(self): inst = self._makeOne() del inst._p_changed self.assertEqual(inst._p_status, 'unsaved') def test_delete_p_changed_from_unsaved_w_dict(self): class Derived(self._getTargetClass()): pass inst = Derived() inst.foo = 'bar' del inst._p_changed self.assertEqual(inst._p_status, 'unsaved') self.assertEqual(inst.foo, 'bar') def test_delete_p_changed_from_ghost(self): inst, jar, OID = self._makeOneWithJar() inst._p_deactivate() del inst._p_changed self.assertEqual(inst._p_status, 'ghost') self.assertEqual(list(jar._loaded), []) self.assertEqual(list(jar._registered), []) def test_delete_p_changed_from_saved(self): inst, jar, OID = self._makeOneWithJar() inst._p_activate() jar._loaded = [] jar._registered = [] del inst._p_changed self.assertEqual(inst._p_status, 'ghost') self.assertEqual(list(jar._loaded), []) self.assertEqual(list(jar._registered), []) def test_delete_p_changed_from_changed(self): inst, jar, OID = self._makeOneWithJar() inst._p_activate() inst._p_changed = True jar._loaded = [] jar._registered = [] del inst._p_changed self.assertEqual(inst._p_status, 'ghost') self.assertEqual(list(jar._loaded), []) self.assertEqual(list(jar._registered), []) def test_delete_p_changed_when_sticky(self): inst, jar, OID = self._makeOneWithJar() inst._p_activate() # XXX inst._p_changed = False inst._p_sticky = True del inst._p_changed self.assertEqual(inst._p_status, 'ghost') self.assertEqual(inst._p_changed, None) self.assertEqual(inst._p_sticky, False) def test_assign_p_sticky_true_when_ghost(self): inst, jar, OID = self._makeOneWithJar() inst._p_deactivate() # XXX def _test(): inst._p_sticky = True self.assertRaises(ValueError, _test) def 
test_assign_p_sticky_false_when_ghost(self): inst, jar, OID = self._makeOneWithJar() inst._p_deactivate() # XXX def _test(): inst._p_sticky = False self.assertRaises(ValueError, _test) def test_assign_p_sticky_true_non_ghost(self): inst, jar, OID = self._makeOneWithJar() inst._p_activate() # XXX inst._p_changed = False inst._p_sticky = True self.assertTrue(inst._p_sticky) def test_assign_p_sticky_false_non_ghost(self): inst, jar, OID = self._makeOneWithJar() inst._p_activate() # XXX inst._p_changed = False inst._p_sticky = False self.assertFalse(inst._p_sticky) def test__p_status_unsaved(self): inst = self._makeOne() self.assertEqual(inst._p_status, 'unsaved') def test__p_status_ghost(self): inst, jar, OID = self._makeOneWithJar() inst._p_deactivate() self.assertEqual(inst._p_status, 'ghost') def test__p_status_changed(self): inst, jar, OID = self._makeOneWithJar() inst._p_changed = True self.assertEqual(inst._p_status, 'changed') def test__p_status_changed_sticky(self): # 'sticky' is not a state, but a separate flag. inst, jar, OID = self._makeOneWithJar() inst._p_activate() inst._p_changed = True inst._p_sticky = True self.assertEqual(inst._p_status, 'sticky') def test__p_status_saved(self): inst, jar, OID = self._makeOneWithJar() inst._p_activate() # XXX inst._p_changed = False self.assertEqual(inst._p_status, 'saved') def test__p_status_saved_sticky(self): # 'sticky' is not a state, but a separate flag. 
inst, jar, OID = self._makeOneWithJar() inst._p_activate() inst._p_changed = False inst._p_sticky = True self.assertEqual(inst._p_status, 'sticky') def test__p_mtime_no_serial(self): inst = self._makeOne() self.assertEqual(inst._p_mtime, None) def test__p_mtime_w_serial(self): from persistent.timestamp import TimeStamp WHEN_TUPLE = (2011, 2, 15, 13, 33, 27.5) ts = TimeStamp(*WHEN_TUPLE) inst, jar, OID = self._makeOneWithJar() inst._p_serial = ts.raw() self.assertEqual(inst._p_mtime, ts.timeTime()) def test__p_mtime_activates_object(self): # Accessing _p_mtime implicitly unghostifies the object from persistent.timestamp import TimeStamp WHEN_TUPLE = (2011, 2, 15, 13, 33, 27.5) ts = TimeStamp(*WHEN_TUPLE) inst, jar, OID = self._makeOneWithJar() jar.setstate_sets_serial = ts.raw() inst._p_invalidate() self.assertEqual(inst._p_status, 'ghost') self.assertEqual(inst._p_mtime, ts.timeTime()) self.assertEqual(inst._p_status, 'saved') def test__p_state_unsaved(self): inst = self._makeOne() inst._p_changed = True self.assertEqual(inst._p_state, 0) def test__p_state_ghost(self): inst, jar, OID = self._makeOneWithJar() inst._p_deactivate() self.assertEqual(inst._p_state, -1) def test__p_state_changed(self): inst, jar, OID = self._makeOneWithJar() inst._p_changed = True self.assertEqual(inst._p_state, 1) def test__p_state_changed_sticky(self): # 'sticky' is not a state, but a separate flag. inst, jar, OID = self._makeOneWithJar() inst._p_activate() inst._p_changed = True inst._p_sticky = True self.assertEqual(inst._p_state, 2) def test__p_state_saved(self): inst, jar, OID = self._makeOneWithJar() inst._p_activate() # XXX inst._p_changed = False self.assertEqual(inst._p_state, 0) def test__p_state_saved_sticky(self): # 'sticky' is not a state, but a separate flag. 
inst, jar, OID = self._makeOneWithJar() inst._p_activate() inst._p_changed = False inst._p_sticky = True self.assertEqual(inst._p_state, 2) def test_query_p_estimated_size_new(self): inst = self._makeOne() self.assertEqual(inst._p_estimated_size, 0) def test_query_p_estimated_size_del(self): inst = self._makeOne() inst._p_estimated_size = 123 self.assertEqual(inst._p_estimated_size, 128) del inst._p_estimated_size self.assertEqual(inst._p_estimated_size, 0) def test_assign_p_estimated_size_wrong_type(self): inst = self._makeOne() self.assertRaises(TypeError, lambda : setattr(inst, '_p_estimated_size', None)) try: long except NameError: pass else: self.assertRaises(TypeError, lambda : setattr(inst, '_p_estimated_size', long(1))) def test_assign_p_estimated_size_negative(self): inst = self._makeOne() def _test(): inst._p_estimated_size = -1 self.assertRaises(ValueError, _test) def test_assign_p_estimated_size_small(self): inst = self._makeOne() inst._p_estimated_size = 123 self.assertEqual(inst._p_estimated_size, 128) def test_assign_p_estimated_size_just_over_threshold(self): inst = self._makeOne() inst._p_estimated_size = 1073741697 self.assertEqual(inst._p_estimated_size, 16777215 * 64) def test_assign_p_estimated_size_bigger(self): inst = self._makeOne() inst._p_estimated_size = 1073741697 * 2 self.assertEqual(inst._p_estimated_size, 16777215 * 64) def test___getattribute___p__names(self): NAMES = ['_p_jar', '_p_oid', '_p_changed', '_p_serial', '_p_state', '_p_estimated_size', '_p_sticky', '_p_status', ] inst, jar, OID = self._makeOneWithJar() self._clearMRU(jar) for name in NAMES: getattr(inst, name) self._checkMRU(jar, []) # _p_mtime is special, it activates the object getattr(inst, '_p_mtime') self._checkMRU(jar, [OID]) def test___getattribute__special_name(self): from persistent.persistence import SPECIAL_NAMES inst, jar, OID = self._makeOneWithJar() self._clearMRU(jar) for name in SPECIAL_NAMES: getattr(inst, name, None) self._checkMRU(jar, []) def 
test___getattribute__normal_name_from_unsaved(self): class Derived(self._getTargetClass()): normal = 'value' inst = Derived() self.assertEqual(getattr(inst, 'normal', None), 'value') def test___getattribute__normal_name_from_ghost(self): class Derived(self._getTargetClass()): normal = 'value' inst, jar, OID = self._makeOneWithJar(Derived) inst._p_deactivate() self._clearMRU(jar) self.assertEqual(getattr(inst, 'normal', None), 'value') self._checkMRU(jar, [OID]) def test___getattribute__normal_name_from_saved(self): class Derived(self._getTargetClass()): normal = 'value' inst, jar, OID = self._makeOneWithJar(Derived) inst._p_changed = False self._clearMRU(jar) self.assertEqual(getattr(inst, 'normal', None), 'value') self._checkMRU(jar, [OID]) def test___getattribute__normal_name_from_changed(self): class Derived(self._getTargetClass()): normal = 'value' inst, jar, OID = self._makeOneWithJar(Derived) inst._p_changed = True self._clearMRU(jar) self.assertEqual(getattr(inst, 'normal', None), 'value') self._checkMRU(jar, [OID]) def test___getattribute___non_cooperative(self): # Getting attributes is NOT cooperative with the superclass. # This comes from the C implementation and is maintained # for backwards compatibility. (For example, Persistent and # ExtensionClass.Base/Acquisition take special care to mix together.) 
class Base(object): def __getattribute__(self, name): if name == 'magic': return 42 return super(Base,self).__getattribute__(name) self.assertEqual(getattr(Base(), 'magic'), 42) class Derived(self._getTargetClass(), Base): pass self.assertRaises(AttributeError, getattr, Derived(), 'magic') def test___setattr___p__names(self): from persistent.timestamp import _makeOctets SERIAL = _makeOctets('\x01' * 8) inst, jar, OID = self._makeOneWithJar() inst._p_activate() NAMES = [('_p_jar', jar), ('_p_oid', OID), ('_p_changed', False), ('_p_serial', SERIAL), ('_p_estimated_size', 0), ('_p_sticky', False), ] self._clearMRU(jar) for name, value in NAMES: setattr(inst, name, value) self._checkMRU(jar, []) def test___setattr___v__name(self): class Derived(self._getTargetClass()): pass inst, jar, OID = self._makeOneWithJar(Derived) self._clearMRU(jar) inst._v_foo = 'bar' self.assertEqual(inst._p_status, 'saved') self._checkMRU(jar, []) def test___setattr__normal_name_from_unsaved(self): class Derived(self._getTargetClass()): normal = 'before' inst = Derived() setattr(inst, 'normal', 'after') self.assertEqual(getattr(inst, 'normal', None), 'after') self.assertEqual(inst._p_status, 'unsaved') def test___setattr__normal_name_from_ghost(self): class Derived(self._getTargetClass()): normal = 'before' inst, jar, OID = self._makeOneWithJar(Derived) inst._p_deactivate() self._clearMRU(jar) setattr(inst, 'normal', 'after') self._checkMRU(jar, [OID]) self.assertEqual(jar._registered, [OID]) self.assertEqual(getattr(inst, 'normal', None), 'after') self.assertEqual(inst._p_status, 'changed') def test___setattr__normal_name_from_saved(self): class Derived(self._getTargetClass()): normal = 'before' inst, jar, OID = self._makeOneWithJar(Derived) inst._p_changed = False self._clearMRU(jar) setattr(inst, 'normal', 'after') self._checkMRU(jar, [OID]) self.assertEqual(jar._registered, [OID]) self.assertEqual(getattr(inst, 'normal', None), 'after') self.assertEqual(inst._p_status, 'changed') def 
test___setattr__normal_name_from_changed(self): class Derived(self._getTargetClass()): normal = 'before' inst, jar, OID = self._makeOneWithJar(Derived) inst._p_changed = True self._clearMRU(jar) jar._registered = [] setattr(inst, 'normal', 'after') self._checkMRU(jar, [OID]) self.assertEqual(jar._registered, []) self.assertEqual(getattr(inst, 'normal', None), 'after') self.assertEqual(inst._p_status, 'changed') def test___delattr___p__names(self): NAMES = ['_p_changed', '_p_serial', ] inst, jar, OID = self._makeOneWithJar() self._clearMRU(jar) jar._registered = [] for name in NAMES: delattr(inst, name) self._checkMRU(jar, []) self.assertEqual(jar._registered, []) def test___delattr__normal_name_from_unsaved(self): class Derived(self._getTargetClass()): normal = 'before' def __init__(self): self.__dict__['normal'] = 'after' inst = Derived() delattr(inst, 'normal') self.assertEqual(getattr(inst, 'normal', None), 'before') def test___delattr__normal_name_from_ghost(self): class Derived(self._getTargetClass()): normal = 'before' inst, jar, OID = self._makeOneWithJar(Derived) inst._p_deactivate() self._clearMRU(jar) jar._registered = [] def _test(): delattr(inst, 'normal') self.assertRaises(AttributeError, _test) self.assertEqual(inst._p_status, 'changed') # ??? 
this is what C does self._checkMRU(jar, [OID]) self.assertEqual(jar._registered, [OID]) self.assertEqual(getattr(inst, 'normal', None), 'before') def test___delattr__normal_name_from_saved(self): class Derived(self._getTargetClass()): normal = 'before' def __init__(self): self.__dict__['normal'] = 'after' inst, jar, OID = self._makeOneWithJar(Derived) inst._p_changed = False self._clearMRU(jar) jar._registered = [] delattr(inst, 'normal') self._checkMRU(jar, [OID]) self.assertEqual(jar._registered, [OID]) self.assertEqual(getattr(inst, 'normal', None), 'before') def test___delattr__normal_name_from_changed(self): class Derived(self._getTargetClass()): normal = 'before' def __init__(self): self.__dict__['normal'] = 'after' inst, jar, OID = self._makeOneWithJar(Derived) inst._p_changed = True self._clearMRU(jar) jar._registered = [] delattr(inst, 'normal') self._checkMRU(jar, [OID]) self.assertEqual(jar._registered, []) self.assertEqual(getattr(inst, 'normal', None), 'before') def test___getstate__(self): inst = self._makeOne() self.assertEqual(inst.__getstate__(), None) def test___getstate___derived_w_dict(self): class Derived(self._getTargetClass()): pass inst = Derived() inst.foo = 'bar' inst._p_baz = 'bam' inst._v_qux = 'spam' self.assertEqual(inst.__getstate__(), {'foo': 'bar'}) def test___getstate___derived_w_slots(self): class Derived(self._getTargetClass()): __slots__ = ('foo', 'baz', '_p_baz', '_v_qux') inst = Derived() inst.foo = 'bar' inst._p_baz = 'bam' inst._v_qux = 'spam' self.assertEqual(inst.__getstate__(), (None, {'foo': 'bar'})) def test___getstate___derived_w_slots_in_base_and_derived(self): class Base(self._getTargetClass()): __slots__ = ('foo',) class Derived(Base): __slots__ = ('baz', 'qux',) inst = Derived() inst.foo = 'bar' inst.baz = 'bam' inst.qux = 'spam' self.assertEqual(inst.__getstate__(), (None, {'foo': 'bar', 'baz': 'bam', 'qux': 'spam'})) def test___getstate___derived_w_slots_in_base_but_not_derived(self): class 
Base(self._getTargetClass()): __slots__ = ('foo',) class Derived(Base): pass inst = Derived() inst.foo = 'bar' inst.baz = 'bam' inst.qux = 'spam' self.assertEqual(inst.__getstate__(), ({'baz': 'bam', 'qux': 'spam'}, {'foo': 'bar'})) def test___setstate___empty(self): inst = self._makeOne() inst.__setstate__(None) # doesn't raise, but doesn't change anything def test___setstate___nonempty(self): from persistent.persistence import _INITIAL_SERIAL inst = self._makeOne() self.assertRaises((ValueError, TypeError), inst.__setstate__, {'bogus': 1}) self.assertEqual(inst._p_jar, None) self.assertEqual(inst._p_oid, None) self.assertEqual(inst._p_serial, _INITIAL_SERIAL) self.assertEqual(inst._p_changed, False) self.assertEqual(inst._p_sticky, False) def test___setstate___nonempty_derived_w_dict(self): class Derived(self._getTargetClass()): pass inst = Derived() inst.foo = 'bar' inst.__setstate__({'baz': 'bam'}) self.assertEqual(inst.__dict__, {'baz': 'bam'}) def test___setstate___nonempty_derived_w_dict_w_two_keys(self): class Derived(self._getTargetClass()): pass inst = Derived() inst.foo = 'bar' inst.__setstate__({'baz': 'bam', 'biz': 'boz'}) self.assertEqual(inst.__dict__, {'baz': 'bam', 'biz': 'boz'}) def test___setstate___derived_w_slots(self): class Derived(self._getTargetClass()): __slots__ = ('foo', '_p_baz', '_v_qux') inst = Derived() inst.__setstate__((None, {'foo': 'bar'})) self.assertEqual(inst.foo, 'bar') def test___setstate___derived_w_slots_in_base_classes(self): class Base(self._getTargetClass()): __slots__ = ('foo',) class Derived(Base): __slots__ = ('baz', 'qux',) inst = Derived() inst.__setstate__((None, {'foo': 'bar', 'baz': 'bam', 'qux': 'spam'})) self.assertEqual(inst.foo, 'bar') self.assertEqual(inst.baz, 'bam') self.assertEqual(inst.qux, 'spam') def test___setstate___derived_w_slots_in_base_but_not_derived(self): class Base(self._getTargetClass()): __slots__ = ('foo',) class Derived(Base): pass inst = Derived() inst.__setstate__(({'baz': 'bam', 
'qux': 'spam'}, {'foo': 'bar'})) self.assertEqual(inst.foo, 'bar') self.assertEqual(inst.baz, 'bam') self.assertEqual(inst.qux, 'spam') if not _is_pypy3 and not _is_jython: def test___setstate___interns_dict_keys(self): class Derived(self._getTargetClass()): pass inst1 = Derived() inst2 = Derived() key1 = 'key' key2 = 'ke'; key2 += 'y' # construct in a way that won't intern the literal self.assertFalse(key1 is key2) inst1.__setstate__({key1: 1}) inst2.__setstate__({key2: 2}) key1 = list(inst1.__dict__.keys())[0] key2 = list(inst2.__dict__.keys())[0] self.assertTrue(key1 is key2) def test___setstate___doesnt_fail_on_non_string_keys(self): class Derived(self._getTargetClass()): pass inst1 = Derived() inst1.__setstate__({1: 2}) self.assertTrue(1 in inst1.__dict__) class MyStr(str): pass mystr = MyStr('mystr') inst1.__setstate__({mystr: 2}) self.assertTrue(mystr in inst1.__dict__) def test___reduce__(self): from persistent._compat import copy_reg inst = self._makeOne() first, second, third = inst.__reduce__() self.assertTrue(first is copy_reg.__newobj__) self.assertEqual(second, (self._getTargetClass(),)) self.assertEqual(third, None) def test___reduce__w_subclass_having_getnewargs(self): from persistent._compat import copy_reg class Derived(self._getTargetClass()): def __getnewargs__(self): return ('a', 'b') inst = Derived() first, second, third = inst.__reduce__() self.assertTrue(first is copy_reg.__newobj__) self.assertEqual(second, (Derived, 'a', 'b')) self.assertEqual(third, {}) def test___reduce__w_subclass_having_getstate(self): from persistent._compat import copy_reg class Derived(self._getTargetClass()): def __getstate__(self): return {} inst = Derived() first, second, third = inst.__reduce__() self.assertTrue(first is copy_reg.__newobj__) self.assertEqual(second, (Derived,)) self.assertEqual(third, {}) def test___reduce__w_subclass_having_getnewargs_and_getstate(self): from persistent._compat import copy_reg class Derived(self._getTargetClass()): def 
__getnewargs__(self): return ('a', 'b') def __getstate__(self): return {'foo': 'bar'} inst = Derived() first, second, third = inst.__reduce__() self.assertTrue(first is copy_reg.__newobj__) self.assertEqual(second, (Derived, 'a', 'b')) self.assertEqual(third, {'foo': 'bar'}) def test_pickle_roundtrip_simple(self): import pickle # XXX s.b. 'examples' from persistent.tests.cucumbers import Simple inst = Simple('testing') copy = pickle.loads(pickle.dumps(inst)) self.assertEqual(copy, inst) for protocol in 0, 1, 2: copy = pickle.loads(pickle.dumps(inst, protocol)) self.assertEqual(copy, inst) def test_pickle_roundtrip_w_getnewargs_and_getstate(self): import pickle # XXX s.b. 'examples' from persistent.tests.cucumbers import Custom inst = Custom('x', 'y') copy = pickle.loads(pickle.dumps(inst)) self.assertEqual(copy, inst) for protocol in 0, 1, 2: copy = pickle.loads(pickle.dumps(inst, protocol)) self.assertEqual(copy, inst) def test_pickle_roundtrip_w_slots_missing_slot(self): import pickle # XXX s.b. 'examples' from persistent.tests.cucumbers import SubSlotted inst = SubSlotted('x', 'y', 'z') copy = pickle.loads(pickle.dumps(inst)) self.assertEqual(copy, inst) for protocol in 0, 1, 2: copy = pickle.loads(pickle.dumps(inst, protocol)) self.assertEqual(copy, inst) def test_pickle_roundtrip_w_slots_filled_slot(self): import pickle # XXX s.b. 'examples' from persistent.tests.cucumbers import SubSlotted inst = SubSlotted('x', 'y', 'z') inst.s4 = 'a' copy = pickle.loads(pickle.dumps(inst)) self.assertEqual(copy, inst) for protocol in 0, 1, 2: copy = pickle.loads(pickle.dumps(inst, protocol)) self.assertEqual(copy, inst) def test_pickle_roundtrip_w_slots_and_empty_dict(self): import pickle # XXX s.b. 
'examples' from persistent.tests.cucumbers import SubSubSlotted inst = SubSubSlotted('x', 'y', 'z') copy = pickle.loads(pickle.dumps(inst)) self.assertEqual(copy, inst) for protocol in 0, 1, 2: copy = pickle.loads(pickle.dumps(inst, protocol)) self.assertEqual(copy, inst) def test_pickle_roundtrip_w_slots_and_filled_dict(self): import pickle # XXX s.b. 'examples' from persistent.tests.cucumbers import SubSubSlotted inst = SubSubSlotted('x', 'y', 'z', foo='bar', baz='bam') inst.s4 = 'a' copy = pickle.loads(pickle.dumps(inst)) self.assertEqual(copy, inst) for protocol in 0, 1, 2: copy = pickle.loads(pickle.dumps(inst, protocol)) self.assertEqual(copy, inst) def test__p_activate_from_unsaved(self): inst = self._makeOne() inst._p_activate() # noop w/o jar self.assertEqual(inst._p_status, 'unsaved') def test__p_activate_from_ghost(self): inst, jar, OID = self._makeOneWithJar() inst._p_deactivate() inst._p_activate() self.assertEqual(inst._p_status, 'saved') def test__p_activate_from_saved(self): inst, jar, OID = self._makeOneWithJar() inst._p_changed = False inst._p_activate() # noop from 'saved' state self.assertEqual(inst._p_status, 'saved') def test__p_activate_only_sets_state_once(self): inst, jar, OID = self._makeOneWithJar() # No matter how many times we call _p_activate, it # only sets state once, the first time inst._p_invalidate() # make it a ghost self.assertEqual(list(jar._loaded), []) inst._p_activate() self.assertEqual(list(jar._loaded), [OID]) inst._p_activate() self.assertEqual(list(jar._loaded), [OID]) def test__p_activate_leaves_object_in_saved_even_if_object_mutated_self(self): # If the object's __setstate__ set's attributes # when called by p_activate, the state is still # 'saved' when done. 
Furthemore, the object is not # registered with the jar class WithSetstate(self._getTargetClass()): state = None def __setstate__(self, state): self.state = state inst, jar, OID = self._makeOneWithJar(klass=WithSetstate) inst._p_invalidate() # make it a ghost self.assertEqual(inst._p_status, 'ghost') jar.setstate_calls_object = 42 inst._p_activate() # It get loaded self.assertEqual(list(jar._loaded), [OID]) # and __setstate__ got called to mutate the object self.assertEqual(inst.state, 42) # but it's still in the saved state self.assertEqual(inst._p_status, 'saved') # and it is not registered as changed by the jar self.assertEqual(list(jar._registered), []) def test__p_deactivate_from_unsaved(self): inst = self._makeOne() inst._p_deactivate() self.assertEqual(inst._p_status, 'unsaved') def test__p_deactivate_from_unsaved_w_dict(self): class Derived(self._getTargetClass()): normal = 'before' def __init__(self): self.__dict__['normal'] = 'after' inst = Derived() inst._p_changed = True inst._p_deactivate() self.assertEqual(inst._p_status, 'unsaved') self.assertEqual(inst.__dict__, {'normal': 'after'}) def test__p_deactivate_from_ghost(self): inst, jar, OID = self._makeOneWithJar() inst._p_deactivate() self.assertEqual(inst._p_status, 'ghost') self.assertEqual(list(jar._loaded), []) self.assertEqual(list(jar._registered), []) def test__p_deactivate_from_saved(self): inst, jar, OID = self._makeOneWithJar() inst._p_activate() jar._loaded = [] inst._p_deactivate() self.assertEqual(inst._p_status, 'ghost') self.assertEqual(list(jar._loaded), []) self.assertEqual(list(jar._registered), []) def test__p_deactivate_from_saved_w_dict(self): class Derived(self._getTargetClass()): normal = 'before' def __init__(self): self.__dict__['normal'] = 'after' inst, jar, OID = self._makeOneWithJar(Derived) inst._p_activate() jar._loaded = [] inst._p_deactivate() self.assertEqual(inst._p_status, 'ghost') self.assertEqual(inst.__dict__, {}) self.assertEqual(list(jar._loaded), []) 
self.assertEqual(list(jar._registered), []) def test__p_deactivate_from_changed(self): class Derived(self._getTargetClass()): normal = 'before' inst, jar, OID = self._makeOneWithJar(Derived) inst.normal = 'after' jar._loaded = [] jar._registered = [] inst._p_deactivate() # assigning None is ignored when dirty self.assertEqual(inst._p_status, 'changed') self.assertEqual(inst.__dict__, {'normal': 'after'}) self.assertEqual(list(jar._loaded), []) self.assertEqual(list(jar._registered), []) def test__p_deactivate_from_changed_w_dict(self): inst, jar, OID = self._makeOneWithJar() inst._p_activate() inst._p_changed = True jar._loaded = [] jar._registered = [] inst._p_deactivate() # assigning None is ignored when dirty self.assertEqual(inst._p_status, 'changed') self.assertEqual(list(jar._loaded), []) self.assertEqual(list(jar._registered), []) def test__p_deactivate_when_sticky(self): inst, jar, OID = self._makeOneWithJar() inst._p_activate() # XXX inst._p_changed = False inst._p_sticky = True inst._p_deactivate() self.assertEqual(inst._p_status, 'sticky') self.assertEqual(inst._p_changed, False) self.assertEqual(inst._p_sticky, True) def test__p_invalidate_from_unsaved(self): inst = self._makeOne() inst._p_invalidate() self.assertEqual(inst._p_status, 'unsaved') def test__p_invalidate_from_unsaved_w_dict(self): class Derived(self._getTargetClass()): normal = 'before' def __init__(self): self.__dict__['normal'] = 'after' inst = Derived() inst._p_invalidate() self.assertEqual(inst._p_status, 'unsaved') self.assertEqual(inst.__dict__, {'normal': 'after'}) def test__p_invalidate_from_ghost(self): inst, jar, OID = self._makeOneWithJar() inst._p_deactivate() inst._p_invalidate() self.assertEqual(inst._p_status, 'ghost') self.assertEqual(list(jar._loaded), []) self.assertEqual(list(jar._registered), []) def test__p_invalidate_from_saved(self): inst, jar, OID = self._makeOneWithJar() inst._p_activate() jar._loaded = [] jar._registered = [] inst._p_invalidate() 
self.assertEqual(inst._p_status, 'ghost') self.assertEqual(list(jar._loaded), []) self.assertEqual(list(jar._registered), []) def test__p_invalidate_from_saved_w_dict(self): class Derived(self._getTargetClass()): normal = 'before' def __init__(self): self.__dict__['normal'] = 'after' inst, jar, OID = self._makeOneWithJar(Derived) inst._p_activate() jar._loaded = [] jar._registered = [] inst._p_invalidate() self.assertEqual(inst._p_status, 'ghost') self.assertEqual(inst.__dict__, {}) self.assertEqual(list(jar._loaded), []) self.assertEqual(list(jar._registered), []) def test__p_invalidate_from_changed(self): inst, jar, OID = self._makeOneWithJar() inst._p_activate() inst._p_changed = True jar._loaded = [] jar._registered = [] inst._p_invalidate() self.assertEqual(inst._p_status, 'ghost') self.assertEqual(list(jar._loaded), []) self.assertEqual(list(jar._registered), []) def test__p_invalidate_from_changed_w_dict(self): class Derived(self._getTargetClass()): normal = 'before' def __init__(self): self.__dict__['normal'] = 'after' inst, jar, OID = self._makeOneWithJar(Derived) inst._p_activate() inst._p_changed = True jar._loaded = [] jar._registered = [] inst._p_invalidate() self.assertEqual(inst._p_status, 'ghost') self.assertEqual(inst.__dict__, {}) self.assertEqual(list(jar._loaded), []) self.assertEqual(list(jar._registered), []) def test__p_invalidate_from_changed_w_slots(self): class Derived(self._getTargetClass()): __slots__ = ('myattr1', 'myattr2', 'unset') def __init__(self): self.myattr1 = 'value1' self.myattr2 = 'value2' inst, jar, OID = self._makeOneWithJar(Derived) inst._p_activate() inst._p_changed = True jar._loaded = [] jar._registered = [] self.assertEqual(Derived.myattr1.__get__(inst), 'value1') self.assertEqual(Derived.myattr2.__get__(inst), 'value2') inst._p_invalidate() self.assertEqual(inst._p_status, 'ghost') self.assertEqual(list(jar._loaded), []) self.assertRaises(AttributeError, lambda: Derived.myattr1.__get__(inst)) 
self.assertRaises(AttributeError, lambda: Derived.myattr2.__get__(inst)) self.assertEqual(list(jar._loaded), []) self.assertEqual(list(jar._registered), []) def test__p_invalidate_from_changed_w_slots_compat(self): # check that (for backward-compatibility reason) slots are not released # for classes where __new__ is overwritten. Attributes in __dict__ # should be always released. class Derived(self._getTargetClass()): __slots__ = ('myattr1', 'myattr2', '__dict__') def __new__(cls): obj = cls.__base__.__new__(cls) obj.myattr1 = 'value1' obj.myattr2 = 'value2' obj.foo = 'foo1' # .foo & .bar are in __dict__ obj.bar = 'bar2' return obj inst, jar, OID = self._makeOneWithJar(Derived) inst._p_activate() inst._p_changed = True jar._loaded = [] jar._registered = [] self.assertEqual(Derived.myattr1.__get__(inst), 'value1') self.assertEqual(Derived.myattr2.__get__(inst), 'value2') self.assertEqual(inst.__dict__, {'foo': 'foo1', 'bar': 'bar2'}) inst._p_invalidate() self.assertEqual(inst._p_status, 'ghost') self.assertEqual(list(jar._loaded), []) self.assertEqual(Derived.myattr1.__get__(inst), 'value1') self.assertEqual(Derived.myattr2.__get__(inst), 'value2') self.assertEqual(inst.__dict__, {}) self.assertEqual(list(jar._loaded), []) self.assertEqual(list(jar._registered), []) def test__p_invalidate_from_sticky(self): inst, jar, OID = self._makeOneWithJar() inst._p_activate() # XXX inst._p_changed = False inst._p_sticky = True self.assertEqual(inst._p_status, 'sticky') inst._p_invalidate() self.assertEqual(inst._p_status, 'ghost') self.assertEqual(inst._p_changed, None) self.assertEqual(inst._p_sticky, False) def test__p_invalidate_from_sticky_w_dict(self): class Derived(self._getTargetClass()): def __init__(self): self.normal = 'value' inst, jar, OID = self._makeOneWithJar(Derived) inst._p_activate() # XXX inst._p_changed = False inst._p_sticky = True inst._p_invalidate() self.assertEqual(inst._p_status, 'ghost') self.assertEqual(inst._p_changed, None) 
self.assertEqual(inst._p_sticky, False) self.assertEqual(inst.__dict__, {}) def test__p_getattr_w__p__names(self): NAMES = ['_p_jar', '_p_oid', '_p_changed', '_p_serial', '_p_mtime', '_p_state', '_p_estimated_size', '_p_sticky', '_p_status', ] inst, jar, OID = self._makeOneWithJar() inst._p_deactivate() for name in NAMES: self.assertTrue(inst._p_getattr(name)) self.assertEqual(inst._p_status, 'ghost') self.assertEqual(list(jar._loaded), []) self._checkMRU(jar, []) def test__p_getattr_w_special_names(self): from persistent.persistence import SPECIAL_NAMES inst, jar, OID = self._makeOneWithJar() inst._p_deactivate() for name in SPECIAL_NAMES: self.assertTrue(inst._p_getattr(name)) self.assertEqual(inst._p_status, 'ghost') self.assertEqual(list(jar._loaded), []) self._checkMRU(jar, []) def test__p_getattr_w_normal_name(self): inst, jar, OID = self._makeOneWithJar() inst._p_deactivate() self.assertFalse(inst._p_getattr('normal')) self.assertEqual(inst._p_status, 'saved') self.assertEqual(list(jar._loaded), [OID]) self._checkMRU(jar, [OID]) def test__p_setattr_w__p__name(self): from persistent.timestamp import _makeOctets SERIAL = _makeOctets('\x01' * 8) inst, jar, OID = self._makeOneWithJar() inst._p_deactivate() self.assertTrue(inst._p_setattr('_p_serial', SERIAL)) self.assertEqual(inst._p_status, 'ghost') self.assertEqual(inst._p_serial, SERIAL) self.assertEqual(list(jar._loaded), []) self._checkMRU(jar, []) def test__p_setattr_w_normal_name(self): inst, jar, OID = self._makeOneWithJar() inst._p_deactivate() self.assertFalse(inst._p_setattr('normal', 'value')) # _p_setattr doesn't do the actual write for normal names self.assertEqual(inst._p_status, 'saved') self.assertEqual(list(jar._loaded), [OID]) self._checkMRU(jar, [OID]) def test__p_delattr_w__p__names(self): NAMES = ['_p_changed', '_p_serial', ] inst, jar, OID = self._makeOneWithJar() inst._p_changed = True jar._loaded = [] for name in NAMES: self.assertTrue(inst._p_delattr(name)) 
self.assertEqual(inst._p_status, 'ghost') self.assertEqual(inst._p_changed, None) self.assertEqual(list(jar._loaded), []) self._checkMRU(jar, []) def test__p_delattr_w_normal_name(self): class Derived(self._getTargetClass()): normal = 'value' inst, jar, OID = self._makeOneWithJar(Derived) inst._p_deactivate() self.assertFalse(inst._p_delattr('normal')) # _p_delattr doesn't do the actual delete for normal names self.assertEqual(inst._p_status, 'saved') self.assertEqual(list(jar._loaded), [OID]) self._checkMRU(jar, [OID]) def test_set__p_changed_w_broken_jar(self): # When an object is modified, it registers with its data manager. # If that registration fails, the exception is propagated and the # object stays in the up-to-date state. # It shouldn't change to the modified state, because it won't # be saved when the transaction commits. from persistent._compat import _b class P(self._getTargetClass()): def __init__(self): self.x = 0 def inc(self): self.x += 1 p = P() p._p_oid = _b('1') p._p_jar = self._makeBrokenJar() self.assertEqual(p._p_state, 0) self.assertEqual(p._p_jar.called, 0) def _try(): p._p_changed = 1 self.assertRaises(NotImplementedError, _try) self.assertEqual(p._p_jar.called, 1) self.assertEqual(p._p_state, 0) def test__p_activate_w_broken_jar(self): # Make sure that exceptions that occur inside the data manager's # ``setstate()`` method propagate out to the caller. from persistent._compat import _b class P(self._getTargetClass()): def __init__(self): self.x = 0 def inc(self): self.x += 1 p = P() p._p_oid = _b('1') p._p_jar = self._makeBrokenJar() p._p_deactivate() self.assertEqual(p._p_state, -1) self.assertRaises(NotImplementedError, p._p_activate) self.assertEqual(p._p_state, -1) def test__ancient_dict_layout_bug(self): # We once had a bug in the `Persistent` class that calculated an # incorrect offset for the ``__dict__`` attribute. It assigned # ``__dict__`` and ``_p_jar`` to the same location in memory. 
# This is a simple test to make sure they have different locations. class P(self._getTargetClass()): def __init__(self): self.x = 0 def inc(self): self.x += 1 p = P() p.inc() p.inc() self.assertTrue('x' in p.__dict__) self.assertTrue(p._p_jar is None) def test_w_diamond_inheritance(self): class A(self._getTargetClass()): pass class B(self._getTargetClass()): pass class C(A, B): pass class D(object): pass class E(D, B): pass # no raise A(), B(), C(), D(), E() def test_w_alternate_metaclass(self): class alternateMeta(type): pass class alternate(object): __metaclass__ = alternateMeta class mixedMeta(alternateMeta, type): pass # no raise class mixed1(alternate, self._getTargetClass()): pass class mixed2(self._getTargetClass(), alternate): pass def test_setattr_in_subclass_is_not_called_creating_an_instance(self): class subclass(self._getTargetClass()): _v_setattr_called = False def __setattr__(self, name, value): object.__setattr__(self, '_v_setattr_called', True) super(subclass,self).__setattr__(name, value) inst = subclass() self.assertEqual(object.__getattribute__(inst,'_v_setattr_called'), False) def test_can_set__p_attrs_if_subclass_denies_setattr(self): from persistent._compat import _b # ZODB defines a PersistentBroken subclass that only lets us # set things that start with _p, so make sure we can do that class Broken(self._getTargetClass()): def __setattr__(self, name, value): if name.startswith('_p_'): super(Broken,self).__setattr__(name, value) else: raise TypeError("Can't change broken objects") KEY = _b('123') jar = self._makeJar() broken = Broken() broken._p_oid = KEY broken._p_jar = jar broken._p_changed = True broken._p_changed = 0 def test_p_invalidate_calls_p_deactivate(self): class P(self._getTargetClass()): deactivated = False def _p_deactivate(self): self.deactivated = True p = P() p._p_invalidate() self.assertTrue(p.deactivated) class PyPersistentTests(unittest.TestCase, _Persistent_Base): def _getTargetClass(self): from persistent.persistence 
import Persistent return Persistent def _makeCache(self, jar): class _Cache(object): def __init__(self, jar): self._jar = jar self._mru = [] self._data = {} def mru(self, oid): self._mru.append(oid) def new_ghost(self, oid, obj): obj._p_jar = self._jar obj._p_oid = oid self._data[oid] = obj def get(self, oid): return self._data.get(oid) def __delitem__(self, oid): del self._data[oid] def update_object_size_estimation(self, oid, new_size): pass return _Cache(jar) def _checkMRU(self, jar, value): self.assertEqual(list(jar._cache._mru), value) def _clearMRU(self, jar): jar._cache._mru[:] = [] def test_accessed_with_jar_and_oid_but_not_in_cache(self): # This scenario arises in ZODB: ZODB.serialize.ObjectWriter # can assign a jar and an oid to newly seen persistent objects, # but because they are newly created, they aren't in the # pickle cache yet. # Nothing should blow up when this happens from persistent._compat import _b KEY = _b('123') jar = self._makeJar() c1 = self._makeOne() c1._p_oid = KEY c1._p_jar = jar orig_mru = jar._cache.mru def mru(oid): # Mimic what the real cache does if oid not in jar._cache._mru: raise KeyError(oid) orig_mru(oid) jar._cache.mru = mru c1._p_accessed() self._checkMRU(jar, []) def test_accessed_invalidated_with_jar_and_oid_but_no_cache(self): # This scenario arises in ZODB tests where the jar is faked from persistent._compat import _b KEY = _b('123') class Jar(object): accessed = False def __getattr__(self, name): if name == '_cache': self.accessed = True raise AttributeError(name) def register(self, *args): pass c1 = self._makeOne() c1._p_oid = KEY c1._p_jar = Jar() c1._p_changed = True self.assertEqual(c1._p_state, 1) c1._p_accessed() self.assertTrue(c1._p_jar.accessed) c1._p_jar.accessed = False c1._p_invalidate_deactivate_helper() self.assertTrue(c1._p_jar.accessed) c1._p_jar.accessed = False c1._Persistent__flags = None # coverage c1._p_invalidate_deactivate_helper() self.assertTrue(c1._p_jar.accessed) def 
test_p_activate_with_jar_without_oid(self): # Works, but nothing happens inst = self._makeOne() inst._p_jar = object() inst._p_oid = None object.__setattr__(inst, '_Persistent__flags', None) inst._p_activate() def test_p_accessed_with_jar_without_oid(self): # Works, but nothing happens inst = self._makeOne() inst._p_jar = object() inst._p_accessed() def test_p_accessed_with_jar_with_oid_as_ghost(self): # Works, but nothing happens inst = self._makeOne() inst._p_jar = object() inst._p_oid = 42 inst._Persistent__flags = None inst._p_accessed() _add_to_suite = [PyPersistentTests] if not os.environ.get('PURE_PYTHON'): try: from persistent import cPersistence except ImportError: pass else: class CPersistentTests(unittest.TestCase, _Persistent_Base): def _getTargetClass(self): from persistent.cPersistence import Persistent return Persistent def _checkMRU(self, jar, value): pass # Figure this out later def _clearMRU(self, jar): pass # Figure this out later def _makeCache(self, jar): from persistent.cPickleCache import PickleCache return PickleCache(jar) _add_to_suite.append(CPersistentTests) class Test_simple_new(unittest.TestCase): def _callFUT(self, x): from persistent.cPersistence import simple_new return simple_new(x) def test_w_non_type(self): self.assertRaises(TypeError, self._callFUT, '') def test_w_type(self): import sys TO_CREATE = [type, list, tuple, object] # Python 3.3 segfaults when destroying a dict created via # PyType_GenericNew. 
See http://bugs.python.org/issue16676 if sys.version_info < (3, 3): TO_CREATE.append(dict) for typ in TO_CREATE: self.assertTrue(isinstance(self._callFUT(typ), typ)) _add_to_suite.append(Test_simple_new) def test_suite(): return unittest.TestSuite([unittest.makeSuite(x) for x in _add_to_suite]) persistent-4.2.2/persistent/tests/test_picklecache.py0000644000076600000240000011225312577543740023052 0ustar jimstaff00000000000000############################################################################## # # Copyright (c) 2009 Zope Foundation and Contributors. # All Rights Reserved. # # This software is subject to the provisions of the Zope Public License, # Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. # THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED # WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS # FOR A PARTICULAR PURPOSE. # ############################################################################## import gc import os import platform import sys import unittest _py_impl = getattr(platform, 'python_implementation', lambda: None) _is_pypy = _py_impl() == 'PyPy' _is_jython = 'java' in sys.platform _marker = object() class PickleCacheTests(unittest.TestCase): def setUp(self): import persistent.picklecache self.orig_types = persistent.picklecache._CACHEABLE_TYPES persistent.picklecache._CACHEABLE_TYPES += (DummyPersistent,) self.orig_sweep_gc = persistent.picklecache._SWEEP_NEEDS_GC persistent.picklecache._SWEEP_NEEDS_GC = True # coverage def tearDown(self): import persistent.picklecache persistent.picklecache._CACHEABLE_TYPES = self.orig_types persistent.picklecache._SWEEP_NEEDS_GC = self.orig_sweep_gc def _getTargetClass(self): from persistent.picklecache import PickleCache return PickleCache def _makeOne(self, jar=None, target_size=10): if jar is None: jar = DummyConnection() return self._getTargetClass()(jar, 
target_size) def _makePersist(self, state=None, oid='foo', jar=_marker): from persistent.interfaces import GHOST from persistent._compat import _b if state is None: state = GHOST if jar is _marker: jar = DummyConnection() persist = DummyPersistent() persist._p_state = state persist._p_oid = _b(oid) persist._p_jar = jar return persist def test_class_conforms_to_IPickleCache(self): from zope.interface.verify import verifyClass from persistent.interfaces import IPickleCache verifyClass(IPickleCache, self._getTargetClass()) def test_instance_conforms_to_IPickleCache(self): from zope.interface.verify import verifyObject from persistent.interfaces import IPickleCache verifyObject(IPickleCache, self._makeOne()) def test_empty(self): cache = self._makeOne() self.assertEqual(len(cache), 0) self.assertEqual(_len(cache.items()), 0) self.assertEqual(_len(cache.klass_items()), 0) self.assertEqual(cache.ringlen(), 0) self.assertEqual(len(cache.lru_items()), 0) self.assertEqual(cache.cache_size, 10) self.assertEqual(cache.cache_drain_resistance, 0) self.assertEqual(cache.cache_non_ghost_count, 0) self.assertEqual(dict(cache.cache_data), {}) self.assertEqual(cache.cache_klass_count, 0) def test___getitem___nonesuch_raises_KeyError(self): cache = self._makeOne() self.assertRaises(KeyError, lambda: cache['nonesuch']) def test_get_nonesuch_no_default(self): cache = self._makeOne() self.assertEqual(cache.get('nonesuch'), None) def test_get_nonesuch_w_default(self): cache = self._makeOne() default = object self.assertTrue(cache.get('nonesuch', default) is default) def test___setitem___non_string_oid_raises_TypeError(self): cache = self._makeOne() try: cache[object()] = self._makePersist() except TypeError: pass else: self.fail("Didn't raise ValueError with non-string OID.") def test___setitem___duplicate_oid_same_obj(self): from persistent._compat import _b KEY = _b('original') cache = self._makeOne() original = self._makePersist(oid=KEY) cache[KEY] = original cache[KEY] = original def 
test___setitem___duplicate_oid_raises_ValueError(self): from persistent._compat import _b KEY = _b('original') cache = self._makeOne() original = self._makePersist(oid=KEY) cache[KEY] = original duplicate = self._makePersist(oid=KEY) try: cache[KEY] = duplicate except ValueError: pass else: self.fail("Didn't raise KeyError with duplicate OID.") def test___setitem___ghost(self): from persistent.interfaces import GHOST from persistent._compat import _b KEY = _b('ghost') cache = self._makeOne() ghost = self._makePersist(state=GHOST, oid=KEY) cache[KEY] = ghost self.assertEqual(len(cache), 1) items = list(cache.items()) self.assertEqual(len(items), 1) self.assertEqual(_len(cache.klass_items()), 0) self.assertEqual(items[0][0], KEY) self.assertEqual(cache.ringlen(), 0) self.assertTrue(items[0][1] is ghost) self.assertTrue(cache[KEY] is ghost) def test___setitem___mismatch_key_oid(self): from persistent.interfaces import UPTODATE from persistent._compat import _b KEY = _b('uptodate') cache = self._makeOne() uptodate = self._makePersist(state=UPTODATE) try: cache[KEY] = uptodate except ValueError: pass else: self.fail("Didn't raise ValueError when the key didn't match the OID") def test___setitem___non_ghost(self): from persistent.interfaces import UPTODATE from persistent._compat import _b KEY = _b('uptodate') cache = self._makeOne() uptodate = self._makePersist(state=UPTODATE, oid=KEY) cache[KEY] = uptodate self.assertEqual(len(cache), 1) items = list(cache.items()) self.assertEqual(len(items), 1) self.assertEqual(_len(cache.klass_items()), 0) self.assertEqual(items[0][0], KEY) self.assertEqual(cache.ringlen(), 1) self.assertTrue(items[0][1] is uptodate) self.assertTrue(cache[KEY] is uptodate) self.assertTrue(cache.get(KEY) is uptodate) def test___setitem___persistent_class(self): from persistent._compat import _b KEY = _b('pclass') class pclass(object): _p_oid = KEY cache = self._makeOne() cache[KEY] = pclass kitems = list(cache.klass_items()) 
self.assertEqual(len(cache), 1) self.assertEqual(_len(cache.items()), 0) self.assertEqual(len(kitems), 1) self.assertEqual(kitems[0][0], KEY) self.assertTrue(kitems[0][1] is pclass) self.assertTrue(cache[KEY] is pclass) self.assertTrue(cache.get(KEY) is pclass) def test___delitem___non_string_oid_raises_TypeError(self): cache = self._makeOne() try: del cache[object()] except TypeError: pass else: self.fail("Didn't raise ValueError with non-string OID.") def test___delitem___nonesuch_raises_KeyError(self): from persistent._compat import _b cache = self._makeOne() original = self._makePersist() try: del cache[_b('nonesuch')] except KeyError: pass else: self.fail("Didn't raise KeyError with nonesuch OID.") def test___delitem___w_persistent_class(self): from persistent._compat import _b KEY = _b('pclass') cache = self._makeOne() class pclass(object): _p_oid = KEY cache = self._makeOne() cache[KEY] = pclass del cache[KEY] self.assertTrue(cache.get(KEY, self) is self) self.assertFalse(KEY in cache.persistent_classes) self.assertEqual(cache.ringlen(), 0) def test___delitem___w_normal_object(self): from persistent.interfaces import UPTODATE from persistent._compat import _b KEY = _b('uptodate') cache = self._makeOne() uptodate = self._makePersist(state=UPTODATE, oid=KEY) cache[KEY] = uptodate del cache[KEY] self.assertTrue(cache.get(KEY, self) is self) def test___delitem___w_ghost(self): from persistent.interfaces import GHOST from persistent._compat import _b cache = self._makeOne() KEY = _b('ghost') ghost = self._makePersist(state=GHOST, oid=KEY) cache[KEY] = ghost del cache[KEY] self.assertTrue(cache.get(KEY, self) is self) def test___delitem___w_remaining_object(self): from persistent.interfaces import UPTODATE from persistent._compat import _b cache = self._makeOne() REMAINS = _b('remains') UPTODATE = _b('uptodate') remains = self._makePersist(state=UPTODATE, oid=REMAINS) uptodate = self._makePersist(state=UPTODATE, oid=UPTODATE) cache[REMAINS] = remains 
cache[UPTODATE] = uptodate del cache[UPTODATE] self.assertTrue(cache.get(UPTODATE, self) is self) self.assertTrue(cache.get(REMAINS, self) is remains) def test_lruitems(self): from persistent.interfaces import UPTODATE from persistent._compat import _b cache = self._makeOne() ONE = _b('one') TWO = _b('two') THREE = _b('three') cache[ONE] = self._makePersist(oid='one', state=UPTODATE) cache[TWO] = self._makePersist(oid='two', state=UPTODATE) cache[THREE] = self._makePersist(oid='three', state=UPTODATE) items = cache.lru_items() self.assertEqual(_len(items), 3) self.assertEqual(items[0][0], ONE) self.assertEqual(items[1][0], TWO) self.assertEqual(items[2][0], THREE) def test_mru_nonesuch_raises_KeyError(self): cache = self._makeOne() from persistent._compat import _b self.assertRaises(KeyError, cache.mru, _b('nonesuch')) def test_mru_normal(self): from persistent.interfaces import UPTODATE from persistent._compat import _b ONE = _b('one') TWO = _b('two') THREE = _b('three') cache = self._makeOne() cache[ONE] = self._makePersist(oid='one', state=UPTODATE) cache[TWO] = self._makePersist(oid='two', state=UPTODATE) cache[THREE] = self._makePersist(oid='three', state=UPTODATE) cache.mru(TWO) self.assertEqual(cache.ringlen(), 3) items = cache.lru_items() self.assertEqual(_len(items), 3) self.assertEqual(items[0][0], ONE) self.assertEqual(items[1][0], THREE) self.assertEqual(items[2][0], TWO) def test_mru_ghost(self): from persistent.interfaces import UPTODATE from persistent.interfaces import GHOST from persistent._compat import _b ONE = _b('one') TWO = _b('two') THREE = _b('three') cache = self._makeOne() cache[ONE] = self._makePersist(oid='one', state=UPTODATE) two = cache[TWO] = self._makePersist(oid='two', state=GHOST) cache[THREE] = self._makePersist(oid='three', state=UPTODATE) cache.mru(TWO) self.assertEqual(cache.ringlen(), 2) items = cache.lru_items() self.assertEqual(_len(items), 2) self.assertEqual(items[0][0], ONE) self.assertEqual(items[1][0], THREE) def 
test_mru_was_ghost_now_active(self): from persistent.interfaces import UPTODATE from persistent.interfaces import GHOST from persistent._compat import _b ONE = _b('one') TWO = _b('two') THREE = _b('three') cache = self._makeOne() cache[ONE] = self._makePersist(oid='one', state=UPTODATE) two = cache[TWO] = self._makePersist(oid='two', state=GHOST) cache[THREE] = self._makePersist(oid='three', state=UPTODATE) two._p_state = UPTODATE cache.mru(TWO) self.assertEqual(cache.ringlen(), 3) items = cache.lru_items() self.assertEqual(_len(items), 3) self.assertEqual(items[0][0], ONE) self.assertEqual(items[1][0], THREE) self.assertEqual(items[2][0], TWO) def test_mru_first(self): from persistent.interfaces import UPTODATE from persistent._compat import _b ONE = _b('one') TWO = _b('two') THREE = _b('three') cache = self._makeOne() cache[ONE] = self._makePersist(oid='one', state=UPTODATE) cache[TWO] = self._makePersist(oid='two', state=UPTODATE) cache[THREE] = self._makePersist(oid='three', state=UPTODATE) cache.mru(ONE) self.assertEqual(cache.ringlen(), 3) items = cache.lru_items() self.assertEqual(_len(items), 3) self.assertEqual(items[0][0], TWO) self.assertEqual(items[1][0], THREE) self.assertEqual(items[2][0], ONE) def test_mru_last(self): from persistent.interfaces import UPTODATE from persistent._compat import _b ONE = _b('one') TWO = _b('two') THREE = _b('three') cache = self._makeOne() cache[ONE] = self._makePersist(oid='one', state=UPTODATE) cache[TWO] = self._makePersist(oid='two', state=UPTODATE) cache[THREE] = self._makePersist(oid='three', state=UPTODATE) cache.mru(THREE) self.assertEqual(cache.ringlen(), 3) items = cache.lru_items() self.assertEqual(_len(items), 3) self.assertEqual(items[0][0], ONE) self.assertEqual(items[1][0], TWO) self.assertEqual(items[2][0], THREE) def test_incrgc_simple(self): import gc from persistent.interfaces import UPTODATE from persistent._compat import _b cache = self._makeOne() oids = [] for i in range(100): oid = _b('oid_%04d' % 
i) oids.append(oid) cache[oid] = self._makePersist(oid=oid, state=UPTODATE) self.assertEqual(cache.cache_non_ghost_count, 100) cache.incrgc() gc.collect() # banish the ghosts who are no longer in the ring self.assertEqual(cache.cache_non_ghost_count, 10) items = cache.lru_items() self.assertEqual(_len(items), 10) self.assertEqual(items[0][0], _b('oid_0090')) self.assertEqual(items[1][0], _b('oid_0091')) self.assertEqual(items[2][0], _b('oid_0092')) self.assertEqual(items[3][0], _b('oid_0093')) self.assertEqual(items[4][0], _b('oid_0094')) self.assertEqual(items[5][0], _b('oid_0095')) self.assertEqual(items[6][0], _b('oid_0096')) self.assertEqual(items[7][0], _b('oid_0097')) self.assertEqual(items[8][0], _b('oid_0098')) self.assertEqual(items[9][0], _b('oid_0099')) for oid in oids[:90]: self.assertTrue(cache.get(oid) is None) for oid in oids[90:]: self.assertFalse(cache.get(oid) is None) def test_incrgc_w_smaller_drain_resistance(self): from persistent.interfaces import UPTODATE from persistent._compat import _b cache = self._makeOne() cache.drain_resistance = 2 oids = [] for i in range(100): oid = _b('oid_%04d' % i) oids.append(oid) cache[oid] = self._makePersist(oid=oid, state=UPTODATE) self.assertEqual(cache.cache_non_ghost_count, 100) cache.incrgc() self.assertEqual(cache.cache_non_ghost_count, 10) def test_incrgc_w_larger_drain_resistance(self): from persistent.interfaces import UPTODATE from persistent._compat import _b cache = self._makeOne() cache.drain_resistance = 2 cache.cache_size = 90 oids = [] for i in range(100): oid = _b('oid_%04d' % i) oids.append(oid) cache[oid] = self._makePersist(oid=oid, state=UPTODATE) self.assertEqual(cache.cache_non_ghost_count, 100) cache.incrgc() self.assertEqual(cache.cache_non_ghost_count, 49) def test_full_sweep(self): import gc from persistent.interfaces import UPTODATE from persistent._compat import _b cache = self._makeOne() oids = [] for i in range(100): oid = _b('oid_%04d' % i) oids.append(oid) cache[oid] = 
self._makePersist(oid=oid, state=UPTODATE) self.assertEqual(cache.cache_non_ghost_count, 100) cache.full_sweep() gc.collect() # banish the ghosts who are no longer in the ring self.assertEqual(cache.cache_non_ghost_count, 0) for oid in oids: self.assertTrue(cache.get(oid) is None) def test_full_sweep_w_sticky(self): import gc from persistent.interfaces import UPTODATE from persistent.interfaces import STICKY from persistent._compat import _b cache = self._makeOne() oids = [] for i in range(100): oid = _b('oid_%04d' % i) oids.append(oid) state = UPTODATE if i > 0 else STICKY cache[oid] = self._makePersist(oid=oid, state=state) self.assertEqual(cache.cache_non_ghost_count, 100) cache.full_sweep() gc.collect() # banish the ghosts who are no longer in the ring self.assertEqual(cache.cache_non_ghost_count, 1) self.assertTrue(cache.get(oids[0]) is not None) for oid in oids[1:]: self.assertTrue(cache.get(oid) is None) def test_full_sweep_w_changed(self): import gc from persistent.interfaces import UPTODATE from persistent.interfaces import CHANGED from persistent._compat import _b cache = self._makeOne() oids = [] for i in range(100): oid = _b('oid_%04d' % i) oids.append(oid) state = UPTODATE if i > 0 else CHANGED cache[oid] = self._makePersist(oid=oid, state=state) self.assertEqual(cache.cache_non_ghost_count, 100) cache.full_sweep() gc.collect() # banish the ghosts who are no longer in the ring self.assertEqual(cache.cache_non_ghost_count, 1) self.assertTrue(cache.get(oids[0]) is not None) for oid in oids[1:]: self.assertTrue(cache.get(oid) is None) def test_minimize(self): import gc from persistent.interfaces import UPTODATE from persistent._compat import _b cache = self._makeOne() oids = [] for i in range(100): oid = _b('oid_%04d' % i) oids.append(oid) cache[oid] = self._makePersist(oid=oid, state=UPTODATE) self.assertEqual(cache.cache_non_ghost_count, 100) cache.minimize() gc.collect() # banish the ghosts who are no longer in the ring 
self.assertEqual(cache.cache_non_ghost_count, 0) for oid in oids: self.assertTrue(cache.get(oid) is None) def test_minimize_turns_into_ghosts(self): import gc from persistent.interfaces import UPTODATE from persistent.interfaces import GHOST from persistent._compat import _b cache = self._makeOne() oid = _b('oid_%04d' % 1) obj = cache[oid] = self._makePersist(oid=oid, state=UPTODATE) self.assertEqual(cache.cache_non_ghost_count, 1) cache.minimize() gc.collect() # banish the ghosts who are no longer in the ring self.assertEqual(cache.cache_non_ghost_count, 0) self.assertEqual(obj._p_state, GHOST) def test_new_ghost_non_persistent_object(self): from persistent._compat import _b cache = self._makeOne() self.assertRaises(AttributeError, cache.new_ghost, _b('123'), object()) def test_new_ghost_obj_already_has_oid(self): from persistent._compat import _b from persistent.interfaces import GHOST candidate = self._makePersist(oid=_b('123'), state=GHOST) cache = self._makeOne() self.assertRaises(ValueError, cache.new_ghost, _b('123'), candidate) def test_new_ghost_obj_already_has_jar(self): from persistent._compat import _b class Dummy(object): _p_oid = None _p_jar = object() cache = self._makeOne() candidate = self._makePersist(oid=None, jar=object()) self.assertRaises(ValueError, cache.new_ghost, _b('123'), candidate) def test_new_ghost_obj_already_in_cache(self): from persistent._compat import _b KEY = _b('123') cache = self._makeOne() candidate = self._makePersist(oid=KEY) cache[KEY] = candidate # Now, normally we can't get in the cache without an oid and jar # (the C implementation doesn't allow it), so if we try to create # a ghost, we get the value error self.assertRaises(ValueError, cache.new_ghost, KEY, candidate) candidate._p_oid = None self.assertRaises(ValueError, cache.new_ghost, KEY, candidate) # if we're sneaky and remove the OID and jar, then we get the duplicate # key error candidate._p_jar = None self.assertRaises(KeyError, cache.new_ghost, KEY, candidate) 
def test_new_ghost_success_already_ghost(self): from persistent.interfaces import GHOST from persistent._compat import _b KEY = _b('123') cache = self._makeOne() candidate = self._makePersist(oid=None, jar=None) cache.new_ghost(KEY, candidate) self.assertTrue(cache.get(KEY) is candidate) self.assertEqual(candidate._p_oid, KEY) self.assertEqual(candidate._p_jar, cache.jar) self.assertEqual(candidate._p_state, GHOST) def test_new_ghost_success_not_already_ghost(self): from persistent.interfaces import GHOST from persistent.interfaces import UPTODATE from persistent._compat import _b KEY = _b('123') cache = self._makeOne() candidate = self._makePersist(oid=None, jar=None, state=UPTODATE) cache.new_ghost(KEY, candidate) self.assertTrue(cache.get(KEY) is candidate) self.assertEqual(candidate._p_oid, KEY) self.assertEqual(candidate._p_jar, cache.jar) self.assertEqual(candidate._p_state, GHOST) def test_new_ghost_w_pclass_non_ghost(self): from persistent._compat import _b KEY = _b('123') class Pclass(object): _p_oid = None _p_jar = None cache = self._makeOne() cache.new_ghost(KEY, Pclass) self.assertTrue(cache.get(KEY) is Pclass) self.assertTrue(cache.persistent_classes[KEY] is Pclass) self.assertEqual(Pclass._p_oid, KEY) self.assertEqual(Pclass._p_jar, cache.jar) def test_new_ghost_w_pclass_ghost(self): from persistent._compat import _b KEY = _b('123') class Pclass(object): _p_oid = None _p_jar = None cache = self._makeOne() cache.new_ghost(KEY, Pclass) self.assertTrue(cache.get(KEY) is Pclass) self.assertTrue(cache.persistent_classes[KEY] is Pclass) self.assertEqual(Pclass._p_oid, KEY) self.assertEqual(Pclass._p_jar, cache.jar) def test_reify_miss_single(self): from persistent._compat import _b KEY = _b('123') cache = self._makeOne() self.assertRaises(KeyError, cache.reify, KEY) def test_reify_miss_multiple(self): from persistent._compat import _b KEY = _b('123') KEY2 = _b('456') cache = self._makeOne() self.assertRaises(KeyError, cache.reify, [KEY, KEY2]) def 
test_reify_hit_single_ghost(self): from persistent.interfaces import GHOST from persistent._compat import _b KEY = _b('123') from persistent.interfaces import UPTODATE cache = self._makeOne() candidate = self._makePersist(oid=KEY, jar=cache.jar, state=GHOST) cache[KEY] = candidate self.assertEqual(cache.ringlen(), 0) cache.reify(KEY) self.assertEqual(cache.ringlen(), 1) items = cache.lru_items() self.assertEqual(items[0][0], KEY) self.assertTrue(items[0][1] is candidate) self.assertEqual(candidate._p_state, UPTODATE) def test_reify_hit_single_non_ghost(self): from persistent.interfaces import UPTODATE from persistent._compat import _b KEY = _b('123') cache = self._makeOne() candidate = self._makePersist(oid=KEY, jar=cache.jar, state=UPTODATE) cache[KEY] = candidate self.assertEqual(cache.ringlen(), 1) cache.reify(KEY) self.assertEqual(cache.ringlen(), 1) self.assertEqual(candidate._p_state, UPTODATE) def test_reify_hit_multiple_mixed(self): from persistent.interfaces import GHOST from persistent.interfaces import UPTODATE from persistent._compat import _b KEY = _b('123') KEY2 = _b('456') cache = self._makeOne() c1 = self._makePersist(oid=KEY, jar=cache.jar, state=GHOST) cache[KEY] = c1 c2 = self._makePersist(oid=KEY2, jar=cache.jar, state=UPTODATE) cache[KEY2] = c2 self.assertEqual(cache.ringlen(), 1) cache.reify([KEY, KEY2]) self.assertEqual(cache.ringlen(), 2) self.assertEqual(c1._p_state, UPTODATE) self.assertEqual(c2._p_state, UPTODATE) def test_invalidate_miss_single(self): from persistent._compat import _b KEY = _b('123') cache = self._makeOne() cache.invalidate(KEY) # doesn't raise def test_invalidate_miss_multiple(self): from persistent._compat import _b KEY = _b('123') KEY2 = _b('456') cache = self._makeOne() cache.invalidate([KEY, KEY2]) # doesn't raise def test_invalidate_hit_single_ghost(self): from persistent.interfaces import GHOST from persistent._compat import _b KEY = _b('123') cache = self._makeOne() candidate = self._makePersist(oid='123', 
jar=cache.jar, state=GHOST) cache[KEY] = candidate self.assertEqual(cache.ringlen(), 0) cache.invalidate(KEY) self.assertEqual(cache.ringlen(), 0) self.assertEqual(candidate._p_state, GHOST) def test_invalidate_hit_single_non_ghost(self): from persistent.interfaces import GHOST from persistent.interfaces import UPTODATE from persistent._compat import _b KEY = _b('123') cache = self._makeOne() candidate = self._makePersist(oid='123', jar=cache.jar, state=UPTODATE) cache[KEY] = candidate self.assertEqual(cache.ringlen(), 1) cache.invalidate(KEY) self.assertEqual(cache.ringlen(), 0) self.assertEqual(candidate._p_state, GHOST) def test_invalidate_hit_multiple_mixed(self): from persistent.interfaces import GHOST from persistent.interfaces import UPTODATE from persistent._compat import _b KEY = _b('123') KEY2 = _b('456') cache = self._makeOne() c1 = self._makePersist(oid=KEY, jar=cache.jar, state=GHOST) cache[KEY] = c1 c2 = self._makePersist(oid=KEY2, jar=cache.jar, state=UPTODATE) cache[KEY2] = c2 self.assertEqual(cache.ringlen(), 1) cache.invalidate([KEY, KEY2]) self.assertEqual(cache.ringlen(), 0) self.assertEqual(c1._p_state, GHOST) self.assertEqual(c2._p_state, GHOST) def test_invalidate_hit_multiple_non_ghost(self): from persistent.interfaces import UPTODATE from persistent.interfaces import GHOST from persistent._compat import _b KEY = _b('123') KEY2 = _b('456') cache = self._makeOne() c1 = self._makePersist(oid=KEY, jar=cache.jar, state=UPTODATE) cache[KEY] = c1 c2 = self._makePersist(oid=KEY2, jar=cache.jar, state=UPTODATE) cache[KEY2] = c2 self.assertEqual(cache.ringlen(), 2) # These should be in the opposite order of how they were # added to the ring to ensure ring traversal works cache.invalidate([KEY2, KEY]) self.assertEqual(cache.ringlen(), 0) self.assertEqual(c1._p_state, GHOST) self.assertEqual(c2._p_state, GHOST) def test_invalidate_hit_pclass(self): from persistent._compat import _b KEY = _b('123') class Pclass(object): _p_oid = KEY _p_jar = None cache 
= self._makeOne() cache[KEY] = Pclass self.assertTrue(cache.persistent_classes[KEY] is Pclass) cache.invalidate(KEY) self.assertFalse(KEY in cache.persistent_classes) def test_debug_info_w_persistent_class(self): import gc from persistent.interfaces import UPTODATE from persistent._compat import _b KEY = _b('pclass') class pclass(object): _p_oid = KEY cache = self._makeOne() pclass._p_state = UPTODATE cache[KEY] = pclass gc.collect() # pypy vs. refcounting info = cache.debug_info() self.assertEqual(len(info), 1) oid, refc, typ, state = info[0] self.assertEqual(oid, KEY) self.assertEqual(refc, len(gc.get_referents(pclass))) self.assertEqual(typ, 'type') self.assertEqual(state, UPTODATE) def test_debug_info_w_normal_object(self): import gc from persistent.interfaces import UPTODATE from persistent._compat import _b KEY = _b('uptodate') cache = self._makeOne() uptodate = self._makePersist(state=UPTODATE, oid=KEY) cache[KEY] = uptodate gc.collect() # pypy vs. refcounting info = cache.debug_info() self.assertEqual(len(info), 1) oid, refc, typ, state = info[0] self.assertEqual(oid, KEY) self.assertEqual(refc, len(gc.get_referents(uptodate))) self.assertEqual(typ, 'DummyPersistent') self.assertEqual(state, UPTODATE) def test_debug_info_w_ghost(self): import gc from persistent.interfaces import GHOST from persistent._compat import _b KEY = _b('ghost') cache = self._makeOne() ghost = self._makePersist(state=GHOST, oid=KEY) cache[KEY] = ghost gc.collect() # pypy vs. 
refcounting info = cache.debug_info() self.assertEqual(len(info), 1) oid, refc, typ, state = info[0] self.assertEqual(oid, KEY) self.assertEqual(refc, len(gc.get_referents(ghost))) self.assertEqual(typ, 'DummyPersistent') self.assertEqual(state, GHOST) def test_init_with_cacheless_jar(self): # Sometimes ZODB tests pass objects that don't # have a _cache class Jar(object): was_set = False def __setattr__(self, name, value): if name == '_cache': object.__setattr__(self, 'was_set', True) raise AttributeError(name) jar = Jar() self._makeOne(jar) self.assertTrue(jar.was_set) def test_setting_non_persistent_item(self): cache = self._makeOne() try: cache[None] = object() except TypeError as e: self.assertEqual(str(e), "Cache values must be persistent objects.") else: self.fail("Should raise TypeError") def test_setting_without_jar(self): cache = self._makeOne() p = self._makePersist(jar=None) try: cache[p._p_oid] = p except ValueError as e: self.assertEqual(str(e), "Cached object jar missing") else: self.fail("Should raise ValueError") def test_setting_already_cached(self): cache1 = self._makeOne() p = self._makePersist(jar=cache1.jar) cache1[p._p_oid] = p cache2 = self._makeOne() try: cache2[p._p_oid] = p except ValueError as e: self.assertEqual(str(e), "Object already in another cache") else: self.fail("Should raise value error") def test_cannot_update_mru_while_already_locked(self): cache = self._makeOne() cache._is_sweeping_ring = True updated = cache.mru(None) self.assertFalse(updated) def test_update_object_size_estimation_simple(self): cache = self._makeOne() p = self._makePersist(jar=cache.jar) cache[p._p_oid] = p # The cache accesses the private attribute directly to bypass # the bit conversion. 
# Note that the _p_estimated_size is set *after* # the update call is made in ZODB's serialize p._Persistent__size = 0 cache.update_object_size_estimation(p._p_oid, 2) self.assertEqual(cache.total_estimated_size, 64) # A missing object does nothing cache.update_object_size_estimation(None, 2) self.assertEqual(cache.total_estimated_size, 64) def test_cache_size(self): size = 42 cache = self._makeOne(target_size=size) self.assertEqual(cache.cache_size, size) cache.cache_size = 64 self.assertEqual(cache.cache_size, 64) def test_sweep_empty(self): cache = self._makeOne() self.assertEqual(cache.incrgc(), 0) def test_sweep_of_non_deactivating_object(self): cache = self._makeOne() p = self._makePersist(jar=cache.jar) p._p_state = 0 # non-ghost, get in the ring cache[p._p_oid] = p def bad_deactivate(): "Doesn't call super, for it's own reasons, so can't be ejected" return p._p_deactivate = bad_deactivate import persistent.picklecache sweep_types = persistent.picklecache._SWEEPABLE_TYPES persistent.picklecache._SWEEPABLE_TYPES = DummyPersistent try: self.assertEqual(cache.full_sweep(), 0) finally: persistent.picklecache._SWEEPABLE_TYPES = sweep_types del p._p_deactivate self.assertEqual(cache.full_sweep(), 1) if _is_jython: def with_deterministic_gc(f): def test(self): old_flags = gc.getMonitorGlobal() gc.setMonitorGlobal(True) try: f(self, force_collect=True) finally: gc.setMonitorGlobal(old_flags) return test else: def with_deterministic_gc(f): return f @with_deterministic_gc def test_cache_garbage_collection_bytes_also_deactivates_object(self, force_collect=False): from persistent.interfaces import UPTODATE from persistent._compat import _b cache = self._makeOne() cache.cache_size = 1000 oids = [] for i in range(100): oid = _b('oid_%04d' % i) oids.append(oid) o = cache[oid] = self._makePersist(oid=oid, state=UPTODATE) o._Persistent__size = 0 # must start 0, ZODB sets it AFTER updating the size cache.update_object_size_estimation(oid, 64) o._Persistent__size = 2 # mimic 
what the real persistent object does to update the cache # size; if we don't get deactivated by sweeping, the cache size # won't shrink so this also validates that _p_deactivate gets # called when ejecting an object. o._p_deactivate = lambda: cache.update_object_size_estimation(oid, -1) self.assertEqual(cache.cache_non_ghost_count, 100) # A GC at this point does nothing cache.incrgc() self.assertEqual(cache.cache_non_ghost_count, 100) self.assertEqual(len(cache), 100) # Now if we set a byte target: cache.cache_size_bytes = 1 # verify the change worked as expected self.assertEqual(cache.cache_size_bytes, 1) # verify our entrance assumption is fulfilled self.assertTrue(cache.cache_size > 100) self.assertTrue(cache.total_estimated_size > 1) # A gc shrinks the bytes cache.incrgc() self.assertEqual(cache.total_estimated_size, 0) # It also shrank the measured size of the cache; # this would fail under PyPy if _SWEEP_NEEDS_GC was False if force_collect: gc.collect() self.assertEqual(len(cache), 1) def test_invalidate_persistent_class_calls_p_invalidate(self): from persistent._compat import _b KEY = _b('pclass') class pclass(object): _p_oid = KEY invalidated = False @classmethod def _p_invalidate(cls): cls.invalidated = True cache = self._makeOne() cache[KEY] = pclass cache.invalidate(KEY) self.assertTrue(pclass.invalidated) def test_ring_impl(self): from .. 
import ring if _is_pypy or os.getenv('USING_CFFI'): self.assertTrue(ring.Ring is ring._CFFIRing) else: self.assertTrue(ring.Ring is ring._DequeRing) class DummyPersistent(object): def _p_invalidate(self): from persistent.interfaces import GHOST self._p_state = GHOST _p_deactivate = _p_invalidate _p_invalidate_deactivate_helper = _p_invalidate def _p_activate(self): from persistent.interfaces import UPTODATE self._p_state = UPTODATE class DummyConnection: pass def _len(seq): return len(list(seq)) def test_suite(): return unittest.TestSuite(( unittest.makeSuite(PickleCacheTests), )) if __name__ == '__main__': unittest.main() persistent-4.2.2/persistent/tests/test_ring.py0000644000076600000240000000764512577543740021566 0ustar jimstaff00000000000000############################################################################## # # Copyright (c) 2015 Zope Foundation and Contributors. # All Rights Reserved. # # This software is subject to the provisions of the Zope Public License, # Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. # THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED # WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS # FOR A PARTICULAR PURPOSE. # ############################################################################## import unittest from .. 
import ring #pylint: disable=R0904,W0212,E1101 class DummyPersistent(object): _p_oid = None __next_oid = 0 @classmethod def _next_oid(cls): cls.__next_oid += 1 return cls.__next_oid def __init__(self, oid=None): if oid is None: self._p_oid = self._next_oid() def __repr__(self): return "" % self._p_oid class _Ring_Base(object): def _getTargetClass(self): """Return the type of the ring to test""" raise NotImplementedError() def _makeOne(self): return self._getTargetClass()() def test_empty_len(self): self.assertEqual(0, len(self._makeOne())) def test_empty_contains(self): r = self._makeOne() self.assertFalse(DummyPersistent() in r) def test_empty_iter(self): self.assertEqual([], list(self._makeOne())) def test_add_one_len1(self): r = self._makeOne() p = DummyPersistent() r.add(p) self.assertEqual(1, len(r)) def test_add_one_contains(self): r = self._makeOne() p = DummyPersistent() r.add(p) self.assertTrue(p in r) def test_delete_one_len0(self): r = self._makeOne() p = DummyPersistent() r.add(p) r.delete(p) self.assertEqual(0, len(r)) def test_delete_one_multiple(self): r = self._makeOne() p = DummyPersistent() r.add(p) r.delete(p) self.assertEqual(0, len(r)) self.assertFalse(p in r) r.delete(p) self.assertEqual(0, len(r)) self.assertFalse(p in r) def test_delete_from_wrong_ring(self): r1 = self._makeOne() r2 = self._makeOne() p1 = DummyPersistent() p2 = DummyPersistent() r1.add(p1) r2.add(p2) r2.delete(p1) self.assertEqual(1, len(r1)) self.assertEqual(1, len(r2)) self.assertEqual([p1], list(r1)) self.assertEqual([p2], list(r2)) def test_move_to_head(self): r = self._makeOne() p1 = DummyPersistent() p2 = DummyPersistent() p3 = DummyPersistent() r.add(p1) r.add(p2) r.add(p3) self.assertEqual([p1, p2, p3], list(r)) self.assertEqual(3, len(r)) r.move_to_head(p1) self.assertEqual([p2, p3, p1], list(r)) r.move_to_head(p3) self.assertEqual([p2, p1, p3], list(r)) r.move_to_head(p3) self.assertEqual([p2, p1, p3], list(r)) def test_delete_all(self): r = self._makeOne() p1 = 
DummyPersistent() p2 = DummyPersistent() p3 = DummyPersistent() r.add(p1) r.add(p2) r.add(p3) self.assertEqual([p1, p2, p3], list(r)) r.delete_all([(0, p1), (2, p3)]) self.assertEqual([p2], list(r)) self.assertEqual(1, len(r)) class DequeRingTests(unittest.TestCase, _Ring_Base): def _getTargetClass(self): return ring._DequeRing _add_to_suite = [DequeRingTests] if ring._CFFIRing: class CFFIRingTests(unittest.TestCase, _Ring_Base): def _getTargetClass(self): return ring._CFFIRing _add_to_suite.append(CFFIRingTests) def test_suite(): return unittest.TestSuite([unittest.makeSuite(x) for x in _add_to_suite]) persistent-4.2.2/persistent/tests/test_timestamp.py0000644000076600000240000003514413017331206022603 0ustar jimstaff00000000000000############################################################################## # # Copyright (c) 2011 Zope Foundation and Contributors. # All Rights Reserved. # # This software is subject to the provisions of the Zope Public License, # Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. # THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED # WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS # FOR A PARTICULAR PURPOSE. 
# ############################################################################## import unittest MAX_32_BITS = 2 ** 31 - 1 MAX_64_BITS = 2 ** 63 - 1 class Test__UTC(unittest.TestCase): def _getTargetClass(self): from persistent.timestamp import _UTC return _UTC def _makeOne(self, *args, **kw): return self._getTargetClass()(*args, **kw) def test_tzname(self): utc = self._makeOne() self.assertEqual(utc.tzname(), 'UTC') def test_utcoffset(self): from datetime import timedelta utc = self._makeOne() self.assertEqual(utc.utcoffset(object()), timedelta(0)) def test_dst(self): utc = self._makeOne() self.assertEqual(utc.dst(), 0) def test_fromutc(self): source = object() utc = self._makeOne() self.assertTrue(utc.fromutc(source) is source) class pyTimeStampTests(unittest.TestCase): def _getTargetClass(self): from persistent.timestamp import pyTimeStamp return pyTimeStamp def _makeOne(self, *args, **kw): return self._getTargetClass()(*args, **kw) def test_ctor_invalid_arglist(self): BAD_ARGS = [(), (1,), (1, 2), (1, 2, 3), (1, 2, 3, 4), (1, 2, 3, 4, 5), ('1', '2', '3', '4', '5', '6'), (1, 2, 3, 4, 5, 6, 7), ] for args in BAD_ARGS: self.assertRaises((TypeError, ValueError), self._makeOne, *args) def test_ctor_from_invalid_strings(self): BAD_ARGS = ['' '\x00', '\x00' * 2, '\x00' * 3, '\x00' * 4, '\x00' * 5, '\x00' * 7, ] for args in BAD_ARGS: self.assertRaises((TypeError, ValueError), self._makeOne, *args) def test_ctor_from_string(self): from persistent.timestamp import _makeOctets from persistent.timestamp import _makeUTC ZERO = _makeUTC(1900, 1, 1, 0, 0, 0) EPOCH = _makeUTC(1970, 1, 1, 0, 0, 0) DELTA = ZERO - EPOCH DELTA_SECS = DELTA.days * 86400 + DELTA.seconds SERIAL = _makeOctets('\x00' * 8) ts = self._makeOne(SERIAL) self.assertEqual(ts.raw(), SERIAL) self.assertEqual(ts.year(), 1900) self.assertEqual(ts.month(), 1) self.assertEqual(ts.day(), 1) self.assertEqual(ts.hour(), 0) self.assertEqual(ts.minute(), 0) self.assertEqual(ts.second(), 0.0) 
self.assertEqual(ts.timeTime(), DELTA_SECS) def test_ctor_from_string_non_zero(self): before = self._makeOne(2011, 2, 16, 14, 37, 22.80544) after = self._makeOne(before.raw()) self.assertEqual(before.raw(), after.raw()) self.assertEqual(before.timeTime(), 1297867042.80544) def test_ctor_from_elements(self): from persistent.timestamp import _makeOctets from persistent.timestamp import _makeUTC ZERO = _makeUTC(1900, 1, 1, 0, 0, 0) EPOCH = _makeUTC(1970, 1, 1, 0, 0, 0) DELTA = ZERO - EPOCH DELTA_SECS = DELTA.days * 86400 + DELTA.seconds SERIAL = _makeOctets('\x00' * 8) ts = self._makeOne(1900, 1, 1, 0, 0, 0.0) self.assertEqual(ts.raw(), SERIAL) self.assertEqual(ts.year(), 1900) self.assertEqual(ts.month(), 1) self.assertEqual(ts.day(), 1) self.assertEqual(ts.hour(), 0) self.assertEqual(ts.minute(), 0) self.assertEqual(ts.second(), 0.0) self.assertEqual(ts.timeTime(), DELTA_SECS) def test_laterThan_invalid(self): from persistent.timestamp import _makeOctets ERRORS = (ValueError, TypeError) SERIAL = _makeOctets('\x01' * 8) ts = self._makeOne(SERIAL) self.assertRaises(ERRORS, ts.laterThan, None) self.assertRaises(ERRORS, ts.laterThan, '') self.assertRaises(ERRORS, ts.laterThan, ()) self.assertRaises(ERRORS, ts.laterThan, []) self.assertRaises(ERRORS, ts.laterThan, {}) self.assertRaises(ERRORS, ts.laterThan, object()) def test_laterThan_self_is_earlier(self): from persistent.timestamp import _makeOctets SERIAL1 = _makeOctets('\x01' * 8) SERIAL2 = _makeOctets('\x02' * 8) ts1 = self._makeOne(SERIAL1) ts2 = self._makeOne(SERIAL2) later = ts1.laterThan(ts2) self.assertEqual(later.raw(), _makeOctets('\x02' * 7 + '\x03')) def test_laterThan_self_is_later(self): from persistent.timestamp import _makeOctets SERIAL1 = _makeOctets('\x01' * 8) SERIAL2 = _makeOctets('\x02' * 8) ts1 = self._makeOne(SERIAL1) ts2 = self._makeOne(SERIAL2) later = ts2.laterThan(ts1) self.assertTrue(later is ts2) def test_repr(self): from persistent.timestamp import _makeOctets SERIAL = _makeOctets('\x01' 
* 8) ts = self._makeOne(SERIAL) self.assertEqual(repr(ts), repr(SERIAL)) def test_comparisons_to_non_timestamps(self): import operator from persistent._compat import PYTHON2 # Check the corner cases when comparing non-comparable types ts = self._makeOne(2011, 2, 16, 14, 37, 22.0) def check_py2(op, passes): if passes == 'neither': self.assertFalse(op(ts, None)) self.assertFalse(op(None, ts)) elif passes == 'both': self.assertTrue(op(ts, None)) self.assertTrue(op(None, ts)) elif passes == 'first': self.assertTrue(op(ts, None)) self.assertFalse(op(None, ts)) else: self.assertFalse(op(ts, None)) self.assertTrue(op(None, ts)) def check_py3(op, passes): if passes == 'neither': self.assertFalse(op(ts, None)) self.assertFalse(op(None, ts)) elif passes == 'both': self.assertTrue(op(ts, None)) self.assertTrue(op(None, ts)) else: self.assertRaises(TypeError, op, ts, None) self.assertRaises(TypeError, op, None, ts) check = check_py2 if PYTHON2 else check_py3 for op_name, passes in (('lt', 'second'), ('gt', 'first'), ('le', 'second'), ('ge', 'first'), ('eq', 'neither'), ('ne', 'both')): op = getattr(operator, op_name) check(op, passes) class TimeStampTests(pyTimeStampTests): def _getTargetClass(self): from persistent.timestamp import TimeStamp return TimeStamp class PyAndCComparisonTests(unittest.TestCase): """ Compares C and Python implementations. 
""" # A particular instant in time now = 1229959248.3 # That instant in time split as the result of this expression: # (time.gmtime(now)[:5] + (now % 60,)) now_ts_args = (2008, 12, 22, 15, 20, 48.299999952316284) def _make_many_instants(self): # Given the above data, return many slight variations on # it to test matching yield self.now_ts_args for i in range(2000): yield self.now_ts_args[:-1] + (self.now_ts_args[-1] + (i % 60.0)/100.0 , ) def _makeC(self, *args, **kwargs): from persistent.timestamp import TimeStamp return TimeStamp(*args, **kwargs) def _makePy(self, *args, **kwargs): from persistent.timestamp import pyTimeStamp return pyTimeStamp(*args, **kwargs) @property def _is_jython(self): import platform py_impl = getattr(platform, 'python_implementation', lambda: None) return py_impl() == 'Jython' def _make_C_and_Py(self, *args, **kwargs): return self._makeC(*args, **kwargs), self._makePy(*args, **kwargs) def test_reprs_equal(self): for args in self._make_many_instants(): c, py = self._make_C_and_Py(*args) self.assertEqual(repr(c), repr(py)) def test_strs_equal(self): for args in self._make_many_instants(): c, py = self._make_C_and_Py(*args) self.assertEqual(str(c), str(py)) def test_raw_equal(self): c, py = self._make_C_and_Py(*self.now_ts_args) self.assertEqual(c.raw(), py.raw()) def test_equal(self): c, py = self._make_C_and_Py(*self.now_ts_args) self.assertEqual(c, py) def test_hash_equal(self): c, py = self._make_C_and_Py(*self.now_ts_args) self.assertEqual(hash(c), hash(py)) def test_py_hash_32_64_bit(self): # We happen to know that on a 32-bit platform, the hashcode # of the c version should be exactly # -1419374591 # and the 64-bit should be exactly: # -3850693964765720575 # Fake out the python version to think it's on a 32-bit # platform and test the same; also verify 64 bit from persistent import timestamp as MUT bit_32_hash = -1419374591 bit_64_hash = -3850693964765720575 orig_maxint = MUT._MAXINT try: MUT._MAXINT = MAX_32_BITS py = 
self._makePy(*self.now_ts_args) self.assertEqual(hash(py), bit_32_hash) MUT._MAXINT = int(2 ** 63 - 1) # call __hash__ directly to avoid interpreter truncation # in hash() on 32-bit platforms if not self._is_jython: self.assertEqual(py.__hash__(), bit_64_hash) else: # Jython 2.7's ctypes module doesn't properly # implement the 'value' attribute by truncating. # (It does for native calls, but not visibly to Python). # Therefore we get back the full python long. The actual # hash() calls are correct, though, because the JVM uses # 32-bit ints for its hashCode methods. self.assertEqual( py.__hash__(), 384009219096809580920179179233996861765753210540033) finally: MUT._MAXINT = orig_maxint # These are *usually* aliases, but aren't required # to be (and aren't under Jython 2.7). if orig_maxint == MAX_32_BITS: self.assertEqual(py.__hash__(), bit_32_hash) elif orig_maxint == MAX_64_BITS: self.assertEqual(py.__hash__(), bit_64_hash) def test_hash_equal_constants(self): # The simple constants make it easier to diagnose # a difference in algorithms import persistent.timestamp as MUT # We get 32-bit hash values of 32-bit platforms, or on the JVM is_32_bit = MUT._MAXINT == (2**31 - 1) or self._is_jython c, py = self._make_C_and_Py(b'\x00\x00\x00\x00\x00\x00\x00\x00') self.assertEqual(c.__hash__(), 8) self.assertEqual(hash(c), hash(py)) c, py = self._make_C_and_Py(b'\x00\x00\x00\x00\x00\x00\x00\x01') self.assertEqual(c.__hash__(), 9) self.assertEqual(hash(c), hash(py)) c, py = self._make_C_and_Py(b'\x00\x00\x00\x00\x00\x00\x01\x00') self.assertEqual(c.__hash__(), 1000011) self.assertEqual(hash(c), hash(py)) # overflow kicks in here on 32-bit platforms c, py = self._make_C_and_Py(b'\x00\x00\x00\x00\x00\x01\x00\x00') if is_32_bit: self.assertEqual(c.__hash__(), -721379967) else: self.assertEqual(c.__hash__(), 1000006000001) self.assertEqual(hash(c), hash(py)) c, py = self._make_C_and_Py(b'\x00\x00\x00\x00\x01\x00\x00\x00') if is_32_bit: self.assertEqual(c.__hash__(), 583896275) 
else: self.assertEqual(c.__hash__(), 1000009000027000019) self.assertEqual(hash(c), hash(py)) # Overflow kicks in at this point on 64-bit platforms c, py = self._make_C_and_Py(b'\x00\x00\x00\x01\x00\x00\x00\x00') if is_32_bit: self.assertEqual(c.__hash__(), 1525764953) else: self.assertEqual(c.__hash__(), -4442925868394654887) self.assertEqual(hash(c), hash(py)) c, py = self._make_C_and_Py(b'\x00\x00\x01\x00\x00\x00\x00\x00') if is_32_bit: self.assertEqual(c.__hash__(), -429739973) else: self.assertEqual(c.__hash__(), -3993531167153147845) self.assertEqual(hash(c), hash(py)) c, py = self._make_C_and_Py(b'\x01\x00\x00\x00\x00\x00\x00\x00') if is_32_bit: self.assertEqual(c.__hash__(), 263152323) else: self.assertEqual(c.__hash__(), -3099646879006235965) self.assertEqual(hash(c), hash(py)) def test_ordering(self): small_c = self._makeC(b'\x00\x00\x00\x00\x00\x00\x00\x01') big_c = self._makeC(b'\x01\x00\x00\x00\x00\x00\x00\x00') small_py = self._makePy(b'\x00\x00\x00\x00\x00\x00\x00\x01') big_py = self._makePy(b'\x01\x00\x00\x00\x00\x00\x00\x00') self.assertTrue(small_py < big_py) self.assertTrue(small_py <= big_py) self.assertTrue(small_py < big_c) self.assertTrue(small_py <= big_c) self.assertTrue(small_py <= small_c) self.assertTrue(small_c < big_c) self.assertTrue(small_c <= big_c) self.assertTrue(small_c <= big_py) self.assertTrue(big_c > small_py) self.assertTrue(big_c >= big_py) self.assertFalse(big_c == small_py) self.assertFalse(small_py == big_c) self.assertTrue(big_c != small_py) self.assertTrue(small_py != big_c) def test_suite(): suite = [ unittest.makeSuite(Test__UTC), unittest.makeSuite(pyTimeStampTests), unittest.makeSuite(TimeStampTests), ] try: from persistent.timestamp import pyTimeStamp from persistent.timestamp import TimeStamp except ImportError: pass else: if pyTimeStamp != TimeStamp: # We have both implementations available suite.append(unittest.makeSuite(PyAndCComparisonTests)) return unittest.TestSuite(suite) 
persistent-4.2.2/persistent/tests/test_wref.py0000644000076600000240000002654712712637364021571 0ustar jimstaff00000000000000############################################################################## # # Copyright (c) 2003 Zope Foundation and Contributors. # All Rights Reserved. # # This software is subject to the provisions of the Zope Public License, # Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. # THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED # WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS # FOR A PARTICULAR PURPOSE. # ############################################################################## import unittest class WeakRefTests(unittest.TestCase): def _getTargetClass(self): from persistent.wref import WeakRef return WeakRef def _makeOne(self, ob): return self._getTargetClass()(ob) def test_ctor_target_wo_jar(self): from persistent._compat import _b target = _makeTarget() wref = self._makeOne(target) self.assertTrue(wref._v_ob is target) self.assertEqual(wref.oid, _b('OID')) self.assertTrue(wref.dm is None) self.assertFalse('database_name' in wref.__dict__) def test_ctor_target_w_jar(self): from persistent._compat import _b target = _makeTarget() target._p_jar = jar = _makeJar() wref = self._makeOne(target) self.assertTrue(wref._v_ob is target) self.assertEqual(wref.oid, _b('OID')) self.assertTrue(wref.dm is jar) self.assertEqual(wref.database_name, 'testing') def test___call___target_in_volatile(self): target = _makeTarget() target._p_jar = jar = _makeJar() wref = self._makeOne(target) self.assertTrue(wref() is target) def test___call___target_in_jar(self): target = _makeTarget() target._p_jar = jar = _makeJar() jar[target._p_oid] = target wref = self._makeOne(target) del wref._v_ob self.assertTrue(wref() is target) def test___call___target_not_in_jar(self): target = _makeTarget() target._p_jar = jar = 
_makeJar() wref = self._makeOne(target) del wref._v_ob self.assertTrue(wref() is None) def test___hash___w_target(self): target = _makeTarget() target._p_jar = jar = _makeJar() wref = self._makeOne(target) self.assertEqual(hash(wref), hash(target)) def test___hash___wo_target(self): target = _makeTarget() target._p_jar = jar = _makeJar() wref = self._makeOne(target) del wref._v_ob self.assertRaises(TypeError, hash, wref) def test___eq___w_non_weakref(self): target = _makeTarget() lhs = self._makeOne(target) self.assertNotEqual(lhs, object()) # Test belt-and-suspenders directly self.assertFalse(lhs.__eq__(object())) def test___eq___w_both_same_target(self): target = _makeTarget() lhs = self._makeOne(target) rhs_target = _makeTarget() rhs = self._makeOne(target) self.assertEqual(lhs, rhs) def test___eq___w_both_different_targets(self): lhs_target = _makeTarget(oid='LHS') lhs = self._makeOne(lhs_target) rhs_target = _makeTarget(oid='RHS') rhs = self._makeOne(rhs_target) self.assertNotEqual(lhs, rhs) def test___eq___w_lhs_gone_target_not_in_jar(self): target = _makeTarget() target._p_jar = jar = _makeJar() lhs = self._makeOne(target) del lhs._v_ob rhs = self._makeOne(target) self.assertRaises(TypeError, lambda: lhs == rhs) def test___eq___w_lhs_gone_target_in_jar(self): target = _makeTarget() target._p_jar = jar = _makeJar() jar[target._p_oid] = target lhs = self._makeOne(target) del lhs._v_ob rhs_target = _makeTarget() rhs = self._makeOne(target) self.assertEqual(lhs, rhs) def test___eq___w_rhs_gone_target_not_in_jar(self): target = _makeTarget() target._p_jar = jar = _makeJar() lhs = self._makeOne(target) rhs = self._makeOne(target) del rhs._v_ob self.assertRaises(TypeError, lambda: lhs == rhs) def test___eq___w_rhs_gone_target_in_jar(self): target = _makeTarget() target._p_jar = jar = _makeJar() jar[target._p_oid] = target lhs = self._makeOne(target) rhs = self._makeOne(target) del rhs._v_ob self.assertEqual(lhs, rhs) class 
PersistentWeakKeyDictionaryTests(unittest.TestCase):
    # Unit tests for persistent.wref.PersistentWeakKeyDictionary.
    # Keys are wrapped in WeakRef; the stub jar from _makeJar provides
    # oid -> object lookup for reloading dead references.

    def _getTargetClass(self):
        from persistent.wref import PersistentWeakKeyDictionary
        return PersistentWeakKeyDictionary

    def _makeOne(self, adict, **kw):
        return self._getTargetClass()(adict, **kw)

    def test_ctor_w_adict_none_no_kwargs(self):
        pwkd = self._makeOne(None)
        self.assertEqual(pwkd.data, {})

    def test_ctor_w_adict_as_dict(self):
        jar = _makeJar()
        key = jar['key'] = _makeTarget(oid='KEY')
        key._p_jar = jar
        value = jar['value'] = _makeTarget(oid='VALUE')
        value._p_jar = jar
        pwkd = self._makeOne({key: value})
        self.assertTrue(pwkd[key] is value)

    def test_ctor_w_adict_as_items(self):
        # The constructor also accepts a sequence of (key, value) pairs.
        jar = _makeJar()
        key = jar['key'] = _makeTarget(oid='KEY')
        key._p_jar = jar
        value = jar['value'] = _makeTarget(oid='VALUE')
        value._p_jar = jar
        pwkd = self._makeOne([(key, value)])
        self.assertTrue(pwkd[key] is value)

    def test___getstate___empty(self):
        pwkd = self._makeOne(None)
        self.assertEqual(pwkd.__getstate__(), {'data': []})

    def test___getstate___filled(self):
        # Pickled state stores items as a list of (WeakRef, value) pairs.
        from persistent.wref import WeakRef
        jar = _makeJar()
        key = jar['key'] = _makeTarget(oid='KEY')
        key._p_jar = jar
        value = jar['value'] = _makeTarget(oid='VALUE')
        value._p_jar = jar
        pwkd = self._makeOne([(key, value)])
        self.assertEqual(pwkd.__getstate__(),
                         {'data': [(WeakRef(key), value)]})

    def test___setstate___empty(self):
        # __setstate__ must drop entries whose key is no longer
        # reachable (kref2), but keep entries whose key can still be
        # reloaded from the jar (kref3).
        from persistent.wref import WeakRef
        from persistent._compat import _b
        jar = _makeJar()
        KEY = _b('KEY')
        KEY2 = _b('KEY2')
        KEY3 = _b('KEY3')
        VALUE = _b('VALUE')
        VALUE2 = _b('VALUE2')
        VALUE3 = _b('VALUE3')
        key = jar[KEY] = _makeTarget(oid=KEY)
        key._p_jar = jar
        kref = WeakRef(key)
        value = jar[VALUE] = _makeTarget(oid=VALUE)
        value._p_jar = jar
        key2 = _makeTarget(oid=KEY2)
        key2._p_jar = jar    # not findable
        kref2 = WeakRef(key2)
        del kref2._v_ob      # force a miss
        value2 = jar[VALUE2] = _makeTarget(oid=VALUE2)
        value2._p_jar = jar
        key3 = jar[KEY3] = _makeTarget(oid=KEY3) # findable
        key3._p_jar = jar
        kref3 = WeakRef(key3)
        del kref3._v_ob      # force a miss, but win in the lookup
        value3 = jar[VALUE3] = _makeTarget(oid=VALUE3)
        value3._p_jar = jar
        pwkd = self._makeOne(None)
        pwkd.__setstate__({'data':
                           [(kref, value), (kref2, value2), (kref3, value3)]})
        self.assertTrue(pwkd[key] is value)
        self.assertTrue(pwkd.get(key2) is None)
        self.assertTrue(pwkd[key3] is value3)

    def test___setitem__(self):
        jar = _makeJar()
        key = jar['key'] = _makeTarget(oid='KEY')
        key._p_jar = jar
        value = jar['value'] = _makeTarget(oid='VALUE')
        value._p_jar = jar
        pwkd = self._makeOne(None)
        pwkd[key] = value
        self.assertTrue(pwkd[key] is value)

    def test___getitem___miss(self):
        jar = _makeJar()
        key = jar['key'] = _makeTarget(oid='KEY')
        key._p_jar = jar
        value = jar['value'] = _makeTarget(oid='VALUE')
        value._p_jar = jar
        pwkd = self._makeOne(None)
        def _try():
            return pwkd[key]
        self.assertRaises(KeyError, _try)

    def test___delitem__(self):
        jar = _makeJar()
        key = jar['key'] = _makeTarget(oid='KEY')
        key._p_jar = jar
        value = jar['value'] = _makeTarget(oid='VALUE')
        value._p_jar = jar
        pwkd = self._makeOne([(key, value)])
        del pwkd[key]
        self.assertTrue(pwkd.get(key) is None)

    def test___delitem___miss(self):
        jar = _makeJar()
        key = jar['key'] = _makeTarget(oid='KEY')
        key._p_jar = jar
        value = jar['value'] = _makeTarget(oid='VALUE')
        value._p_jar = jar
        pwkd = self._makeOne(None)
        def _try():
            del pwkd[key]
        self.assertRaises(KeyError, _try)

    def test_get_miss_w_explicit_default(self):
        jar = _makeJar()
        key = jar['key'] = _makeTarget(oid='KEY')
        key._p_jar = jar
        value = jar['value'] = _makeTarget(oid='VALUE')
        value._p_jar = jar
        pwkd = self._makeOne(None)
        self.assertTrue(pwkd.get(key, value) is value)

    def test___contains___miss(self):
        jar = _makeJar()
        key = jar['key'] = _makeTarget(oid='KEY')
        key._p_jar = jar
        pwkd = self._makeOne(None)
        self.assertFalse(key in pwkd)

    def test___contains___hit(self):
        jar = _makeJar()
        key = jar['key'] = _makeTarget(oid='KEY')
        key._p_jar = jar
        value = jar['value'] = _makeTarget(oid='VALUE')
        value._p_jar = jar
        pwkd = self._makeOne([(key, value)])
        self.assertTrue(key in pwkd)
    def test___iter___empty(self):
        jar = _makeJar()
        pwkd = self._makeOne(None)
        self.assertEqual(list(pwkd), [])

    def test___iter___filled(self):
        # Iteration yields the *unwrapped* keys, not the WeakRefs.
        jar = _makeJar()
        key = jar['key'] = _makeTarget(oid='KEY')
        key._p_jar = jar
        value = jar['value'] = _makeTarget(oid='VALUE')
        value._p_jar = jar
        pwkd = self._makeOne([(key, value)])
        self.assertEqual(list(pwkd), [key])

    def test_update_w_other_pwkd(self):
        jar = _makeJar()
        key = jar['key'] = _makeTarget(oid='KEY')
        key._p_jar = jar
        value = jar['value'] = _makeTarget(oid='VALUE')
        value._p_jar = jar
        source = self._makeOne([(key, value)])
        target = self._makeOne(None)
        target.update(source)
        self.assertTrue(target[key] is value)

    def test_update_w_dict(self):
        jar = _makeJar()
        key = jar['key'] = _makeTarget(oid='KEY')
        key._p_jar = jar
        value = jar['value'] = _makeTarget(oid='VALUE')
        value._p_jar = jar
        source = dict([(key, value)])
        target = self._makeOne(None)
        target.update(source)
        self.assertTrue(target[key] is value)


def _makeTarget(oid='OID', **kw):
    # Build a Persistent subclass instance with a fixed oid; hash and
    # equality are defined on _p_oid so WeakRef wrappers compare sanely.
    from persistent import Persistent
    from persistent._compat import _b
    class Derived(Persistent):
        def __hash__(self):
            return hash(self._p_oid)
        def __eq__(self, other):
            return self._p_oid == other._p_oid
        def __repr__(self):
            return 'Derived: %s' % self._p_oid
    derived = Derived()
    for k, v in kw.items():
        setattr(derived, k, v)
    derived._p_oid = _b(oid)
    return derived


def _makeJar():
    # Minimal stand-in for a data manager: a dict mapping oid -> object,
    # whose db() reports database_name 'testing'.
    class _DB(object):
        database_name = 'testing'
    class _Jar(dict):
        db = lambda self: _DB()
    return _Jar()


def test_suite():
    return unittest.TestSuite((
        unittest.makeSuite(WeakRefTests),
        unittest.makeSuite(PersistentWeakKeyDictionaryTests),
    ))
persistent-4.2.2/persistent/tests/utils.py0000644000076600000240000000455712577543740020717 0ustar jimstaff00000000000000
class ResettingJar(object):
    """Testing stub for _p_jar attribute.
    """
    def __init__(self):
        from persistent import PickleCache # XXX stub it!
        from persistent.interfaces import IPersistentDataManager
        from zope.interface import directlyProvides
        self.cache = self._cache = PickleCache(self)
        self.oid = 1                    # next oid to hand out in add()
        self.registered = {}            # objects that called register()
        directlyProvides(self, IPersistentDataManager)

    def add(self, obj):
        # Assign a sequential 8-byte big-endian oid, adopt the object
        # into this jar, and cache it.
        import struct
        obj._p_oid = struct.pack(">Q", self.oid)
        self.oid += 1
        obj._p_jar = self
        self.cache[obj._p_oid] = obj

    def close(self):
        pass

    # the following methods must be implemented to be a jar

    def setklassstate(self):
        # I don't know what this method does, but the pickle cache
        # constructor calls it.
        pass

    def register(self, obj):
        self.registered[obj] = 1

    def setstate(self, obj):
        # Trivial setstate() implementation that just re-initializes
        # the object.  This isn't what setstate() is supposed to do,
        # but it suffices for the tests.
        obj.__class__.__init__(obj)


class RememberingJar(object):
    """Testing stub for _p_jar attribute.

    Unlike ResettingJar, it snapshots the object's state at add() time
    (and at fake_commit()) so setstate() can restore it.
    """
    def __init__(self):
        from persistent import PickleCache # XXX stub it!
        self.cache = PickleCache(self)
        self.oid = 1
        self.registered = {}

    def add(self, obj):
        import struct
        obj._p_oid = struct.pack(">Q", self.oid)
        self.oid += 1
        obj._p_jar = self
        self.cache[obj._p_oid] = obj
        # Remember object's state for later.
        self.obj = obj
        self.remembered = obj.__getstate__()

    def close(self):
        pass

    def fake_commit(self):
        # Re-snapshot the state and mark the object clean.
        self.remembered = self.obj.__getstate__()
        self.obj._p_changed = 0

    # the following methods must be implemented to be a jar

    def setklassstate(self):
        # I don't know what this method does, but the pickle cache
        # constructor calls it.
        pass

    def register(self, obj):
        self.registered[obj] = 1

    def setstate(self, obj):
        # Trivial setstate() implementation that resets the object's
        # state as of the time it was added to the jar.
        # This isn't what setstate() is supposed to do,
        # but it suffices for the tests.
        obj.__setstate__(self.remembered)
persistent-4.2.2/persistent/timestamp.py0000644000076600000240000001334713017331206020403 0ustar jimstaff00000000000000##############################################################################
#
# Copyright (c) 2011 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
__all__ = ('TimeStamp',)

import datetime
import math
import struct
import sys

# Raw timestamps are 8-byte bytestrings.
_RAWTYPE = bytes
# Used by _wraparound() below to emulate fixed-width C integer overflow
# without ctypes (see 4.2.2 changelog).
_MAXINT = sys.maxsize

def _makeOctets(s):
    # Python 2/3 bytes construction: on Py3, str must be encoded.
    if sys.version_info < (3,):
        return bytes(s)
    return bytes(s, 'ascii') #pragma NO COVERAGE

# Eight zero octets; presumably the "zero" timestamp — confirm usage
# elsewhere in the package.
_ZERO = _makeOctets('\x00' * 8)

def _wraparound(x):
    # Make sure to overflow and wraparound just
    # like the C code does.
return int(((x + (_MAXINT + 1)) & ((_MAXINT << 1) + 1)) - (_MAXINT + 1)) class _UTC(datetime.tzinfo): def tzname(self): return 'UTC' def utcoffset(self, when): return datetime.timedelta(0, 0, 0) def dst(self): return 0 def fromutc(self, dt): return dt def _makeUTC(y, mo, d, h, mi, s): usec, sec = math.modf(s) sec = int(sec) usec = int(usec * 1e6) return datetime.datetime(y, mo, d, h, mi, sec, usec, tzinfo=_UTC()) _EPOCH = _makeUTC(1970, 1, 1, 0, 0, 0) _SCONV = 60.0 / (1<<16) / (1<<16) def _makeRaw(year, month, day, hour, minute, second): a = (((year - 1900) * 12 + month - 1) * 31 + day - 1) a = (a * 24 + hour) * 60 + minute b = int(second / _SCONV) # Don't round() this; the C version does simple truncation return struct.pack('>II', a, b) def _parseRaw(octets): a, b = struct.unpack('>II', octets) minute = a % 60 hour = a // 60 % 24 day = a // (60 * 24) % 31 + 1 month = a // (60 * 24 * 31) % 12 + 1 year = a // (60 * 24 * 31 * 12) + 1900 second = round(b * _SCONV, 6) #microsecond precision return (year, month, day, hour, minute, second) class pyTimeStamp(object): __slots__ = ('_raw', '_elements') def __init__(self, *args): if len(args) == 1: raw = args[0] if not isinstance(raw, _RAWTYPE): raise TypeError('Raw octets must be of type: %s' % _RAWTYPE) if len(raw) != 8: raise TypeError('Raw must be 8 octets') self._raw = raw self._elements = _parseRaw(raw) elif len(args) == 6: self._raw = _makeRaw(*args) self._elements = args else: raise TypeError('Pass either a single 8-octet arg ' 'or 5 integers and a float') def raw(self): return self._raw def __repr__(self): return repr(self._raw) def __str__(self): return "%4.4d-%2.2d-%2.2d %2.2d:%2.2d:%09.6f" % ( self.year(), self.month(), self.day(), self.hour(), self.minute(), self.second()) def year(self): return self._elements[0] def month(self): return self._elements[1] def day(self): return self._elements[2] def hour(self): return self._elements[3] def minute(self): return self._elements[4] def second(self): return 
self._elements[5] def timeTime(self): """ -> seconds since epoch, as a float. """ delta = _makeUTC(*self._elements) - _EPOCH return delta.days * 86400 + delta.seconds + delta.microseconds / 1e6 def laterThan(self, other): """ Return a timestamp instance which is later than 'other'. If self already qualifies, return self. Otherwise, return a new instance one moment later than 'other'. """ if not isinstance(other, self.__class__): raise ValueError() if self._raw > other._raw: return self a, b = struct.unpack('>II', other._raw) later = struct.pack('>II', a, b + 1) return self.__class__(later) def __eq__(self, other): try: return self.raw() == other.raw() except AttributeError: return NotImplemented def __ne__(self, other): try: return self.raw() != other.raw() except AttributeError: return NotImplemented def __hash__(self): # Match the C implementation a = bytearray(self._raw) x = a[0] << 7 for i in a: x = (1000003 * x) ^ i x ^= 8 x = _wraparound(x) if x == -1: #pragma: no cover # The C version has this condition, but it's not clear # why; it's also not immediately obvious what bytestring # would generate this---hence the no-cover x = -2 return x # Now the rest of the comparison operators # Sigh. 
    # Python 2.6 doesn't have functools.total_ordering
    # so we have to do it by hand
    def __lt__(self, other):
        try:
            return self.raw() < other.raw()
        except AttributeError:
            return NotImplemented

    def __gt__(self, other):
        try:
            return self.raw() > other.raw()
        except AttributeError:
            return NotImplemented

    def __le__(self, other):
        try:
            return self.raw() <= other.raw()
        except AttributeError:
            return NotImplemented

    def __ge__(self, other):
        try:
            return self.raw() >= other.raw()
        except AttributeError:
            return NotImplemented


# Prefer the C extension when importable; otherwise fall back to the
# pure-Python reference implementation above.
try:
    from persistent._timestamp import TimeStamp
except ImportError: #pragma NO COVER
    TimeStamp = pyTimeStamp
persistent-4.2.2/persistent/wref.py0000644000076600000240000001000112577543740017345 0ustar jimstaff00000000000000##############################################################################
#
# Copyright (c) 2003 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""ZODB-based persistent weakrefs
"""

from persistent import Persistent

# Sentinel used as WeakRef._p_oid (see comment on the class below).
WeakRefMarker = object()


class WeakRef(object):
    """Persistent weak references

    Persistent weak references are used much like Python weak
    references.  The major difference is that you can't specify an
    object to be called when the object is removed from the database.
    """

    # We set _p_oid to a marker so that the serialization system can
    # provide special handling of weakrefs.
    _p_oid = WeakRefMarker

    def __init__(self, ob):
        # Cache the target in a volatile attribute (never persisted) and
        # remember enough (oid + data manager) to reload it later.
        self._v_ob = ob
        self.oid = ob._p_oid
        self.dm = ob._p_jar
        if self.dm is not None:
            self.database_name = self.dm.db().database_name

    def __call__(self):
        """Return the referenced object, or None if it can't be loaded."""
        try:
            return self._v_ob
        except AttributeError:
            # Volatile cache gone (e.g. after ghostification/unpickling):
            # try to reload via the data manager by oid.
            try:
                self._v_ob = self.dm[self.oid]
            except (KeyError, AttributeError):
                return None
            return self._v_ob

    def __hash__(self):
        # Rebinds 'self' to the *target*: hash of the wref is the hash of
        # the referenced object.  Raises TypeError once the target is gone.
        self = self()
        if self is None:
            raise TypeError('Weakly-referenced object has gone away')
        return hash(self)

    def __eq__(self, other):
        # NOTE(review): returns False (rather than NotImplemented) for
        # non-WeakRef operands; the test suite pins this behavior.
        if not isinstance(other, WeakRef):
            return False
        # As in __hash__, rebind both sides to their targets and compare
        # the targets; a dead reference makes equality undefined.
        self = self()
        if self is None:
            raise TypeError('Weakly-referenced object has gone away')
        other = other()
        if other is None:
            raise TypeError('Weakly-referenced object has gone away')
        return self == other


class PersistentWeakKeyDictionary(Persistent):
    """Persistent weak key dictionary

    This is akin to WeakKeyDictionaries. Note, however, that removal
    of items is extremely lazy.
    """
    # TODO:  It's expensive trying to load dead objects from the
    # database.  It would be helpful if the data manager/connection
    # cached these.

    def __init__(self, adict=None, **kwargs):
        # self.data maps WeakRef(key) -> value.
        self.data = {}
        if adict is not None:
            keys = getattr(adict, "keys", None)
            if keys is None:
                adict = dict(adict)     # accept an item sequence too
            self.update(adict)
        # XXX 'kwargs' is pointless, because keys must be strings, but we
        # are going to try (and fail) to wrap a WeakRef around them.
        if kwargs: #pragma NO COVER
            self.update(kwargs)

    def __getstate__(self):
        # Persist the mapping as a list of (WeakRef, value) pairs.
        state = Persistent.__getstate__(self)
        state['data'] = list(state['data'].items())
        return state

    def __setstate__(self, state):
        # Drop entries whose key is dead (WeakRef() -> None) while
        # rebuilding the mapping.
        state['data'] = dict([
            (k, v) for (k, v)
            in state['data']
            if k() is not None
            ])
        Persistent.__setstate__(self, state)

    def __setitem__(self, key, value):
        self.data[WeakRef(key)] = value

    def __getitem__(self, key):
        return self.data[WeakRef(key)]

    def __delitem__(self, key):
        del self.data[WeakRef(key)]

    def get(self, key, default=None):
        """D.get(k[, d]) -> D[k] if k in D, else d.
""" return self.data.get(WeakRef(key), default) def __contains__(self, key): return WeakRef(key) in self.data def __iter__(self): for k in self.data: yield k() def update(self, adict): if isinstance(adict, PersistentWeakKeyDictionary): self.data.update(adict.data) else: for k, v in adict.items(): self.data[WeakRef(k)] = v # TODO: May need more methods and tests. persistent-4.2.2/persistent.egg-info/0000755000076600000240000000000013017351012017505 5ustar jimstaff00000000000000persistent-4.2.2/persistent.egg-info/dependency_links.txt0000644000076600000240000000000113017351012023553 0ustar jimstaff00000000000000 persistent-4.2.2/persistent.egg-info/entry_points.txt0000644000076600000240000000000613017351012022777 0ustar jimstaff00000000000000 persistent-4.2.2/persistent.egg-info/not-zip-safe0000644000076600000240000000000112712637774021762 0ustar jimstaff00000000000000 persistent-4.2.2/persistent.egg-info/PKG-INFO0000644000076600000240000002274113017351012020610 0ustar jimstaff00000000000000Metadata-Version: 1.1 Name: persistent Version: 4.2.2 Summary: Translucent persistent objects Home-page: http://www.zope.org/Products/ZODB Author: Zope Corporation Author-email: zodb-dev@zope.org License: ZPL 2.1 Description: ``persistent``: automatic persistence for Python objects ========================================================= .. image:: https://travis-ci.org/zopefoundation/persistent.png?branch=master :target: https://travis-ci.org/zopefoundation/persistent .. image:: https://readthedocs.org/projects/persistent/badge/?version=latest :target: http://persistent.readthedocs.org/en/latest/ :alt: Documentation Status .. image:: https://img.shields.io/pypi/v/persistent.svg :target: https://pypi.python.org/pypi/persistent :alt: PyPI .. image:: https://img.shields.io/pypi/pyversions/persistent.svg :target: https://pypi.python.org/pypi/persistent :alt: Python versions This package contains a generic persistence implementation for Python. 
It forms the core protocol for making objects interact "transparently" with a database such as the ZODB. Please see the Sphinx documentation (``docs/index.rst``) for further information. .. note:: Use of this standalone ``persistent`` release is not recommended or supported with ZODB < 3.11. ZODB 3.10 and earlier bundle their own version of the ``persistent`` package. ``persistent`` Changelog ======================== 4.2.2 (2016-11-29) ------------------ - Drop use of ``ctypes`` for determining maximum integer size, to increase pure-Python compatibility. - Ensure that ``__slots__`` attributes are cleared when a persistent object is ghostified. (This excluses classes that override ``__new__``. See https://github.com/zopefoundation/persistent/wiki/Notes_on_state_new_and_slots if you're curious.) 4.2.1 (2016-05-26) ------------------ - Fix the hashcode of C ``TimeStamp`` objects on 64-bit Python 3 on Windows. 4.2.0 (2016-05-05) ------------------ - Fixed the Python(/PYPY) implementation ``TimeStamp.timeTime`` method to have subsecond precision. - When testing ``PURE_PYTHON`` environments under ``tox``, avoid poisoning the user's global wheel cache. - Add support for Python 3.5. - Drop support for Python 2.6 and 3.2. 4.1.1 (2015-06-02) ------------------ - Fix manifest and re-upload to fix stray files included in 4.1.0. 4.1.0 (2015-05-19) ------------------ - Make the Python implementation of ``Persistent`` and ``PickleCache`` behave more similarly to the C implementation. In particular, the Python version can now run the complete ZODB and ZEO test suites. - Fix the hashcode of the Python ``TimeStamp`` on 32-bit platforms. 4.0.9 (2015-04-08) ------------------ - Make the C and Python ``TimeStamp`` objects behave more alike. The Python version now produces the same ``repr`` and ``.raw()`` output as the C version, and has the same hashcode. In addition, the Python version is now supports ordering and equality like the C version. 
- Intern keys of object state in ``__setstate__`` to reduce memory usage when unpickling multiple objects with the same attributes. - Add support for PyPy3. - 100% branch coverage. 4.0.8 (2014-03-20) ------------------ - Add support for Python 3.4. - In pure-Python ``Persistent``, avoid loading state in ``_p_activate`` for non-ghost objects (which could corrupt their state). (PR #9) - In pure-Python, and don't throw ``POSKeyError`` if ``_p_activate`` is called on an object that has never been committed. (PR #9) - In pure-Python ``Persistent``, avoid calling a subclass's ``__setattr__`` at instance creation time. (PR #8) - Make it possible to delete ``_p_jar`` / ``_p_oid`` of a pure-Python ``Persistent`` object which has been removed from the jar's cache (fixes aborting a ZODB Connection that has added objects). (PR #7) 4.0.7 (2014-02-20) ------------------ - Avoid a KeyError from ``_p_accessed()`` on newly-created objects under pure-Python: these objects may be assigned to a jar, but not yet added to its cache. (PR #6) - Avoid a failure in ``Persistent.__setstate__`` when the state dict contains exactly two keys. (PR #5) - Fix a hang in ``picklecache`` invalidation if OIDs are manually passed out-of-order. (PR #4) - Add ``PURE_PYTHON`` environment variable support: if set, the C extensions will not be built, imported, or tested. 4.0.6 (2013-01-03) ------------------ - Updated Trove classifiers. 4.0.5 (2012-12-14) ------------------ - Fixed the C-extensions under Py3k (previously they compiled but were not importable). 4.0.4 (2012-12-11) ------------------ - Added support for Python 3.3. - C extenstions now build under Python 3.2, passing the same tests as the pure-Python reference implementation. 4.0.3 (2012-11-19) ------------------ - Fixed: In the C implimentation, an integer was compared with a pointer, with undefined results and a compiler warning. - Fixed: the Python implementation of the ``_p_estimated_size`` propety didn't support deletion. 
- Simplified implementation of the ``_p_estimated_size`` property to only accept integers. A TypeError is raised if an incorrect type is provided. 4.0.2 (2012-08-27) ------------------ - Correct initialization functions in renamed ``_timestamp`` extension. 4.0.1 (2012-08-26) ------------------ - Worked around test failure due to overflow to long on 32-bit systems. - Renamed ``TimeStamp`` extension module to avoid clash with pure-Python ``timestamp`` module on case-insensitive filesystems. N.B: the canonical way to import the ``TimeStamp`` class is now:: from persistent.timestamp import TimeStamp which will yield the class from the extension module (if available), falling back to the pure-Python reference implementation. 4.0.0 (2012-08-11) ------------------ Platform Changes ################ - Added explicit support for Python 3.2 and PyPy. - Note that the C implementations of Persistent, PickleCache, and Timestamp are not built (yet) on these platforms. - Dropped support for Python < 2.6. Testing Changes ############### - 100% unit test coverage. - Removed all ``ZODB``-dependent tests: - Rewrote some to avoid the dependency - Cloned the remainder into new ``ZODB.tests`` modules. - Refactored some doctests refactored as unittests. - Completed pure-Python reference implementations of 'Persistent', 'PickleCache', and 'TimeStamp'. - All covered platforms tested under ``tox``. - Added support for continuous integration using ``tox`` and ``jenkins``. - Added ``setup.py dev`` alias (installs ``nose`` and ``coverage``). - Dropped dependency on ``zope.testing`` / ``zope.testrunner``: tests now run with ``setup.py test``. Documentation Changes ##################### - Refactored many Doctests as Sphinx documentation (snippets are exercised via 'tox'). - Added ``setup.py docs`` alias (installs ``Sphinx`` and ``repoze.sphinx.autointerface``). 
Platform: any Classifier: Development Status :: 6 - Mature Classifier: License :: OSI Approved :: Zope Public License Classifier: Programming Language :: Python Classifier: Programming Language :: Python :: 2 Classifier: Programming Language :: Python :: 2.7 Classifier: Programming Language :: Python :: 3 Classifier: Programming Language :: Python :: 3.3 Classifier: Programming Language :: Python :: 3.4 Classifier: Programming Language :: Python :: 3.5 Classifier: Programming Language :: Python :: Implementation :: CPython Classifier: Programming Language :: Python :: Implementation :: PyPy Classifier: Framework :: ZODB Classifier: Topic :: Database Classifier: Topic :: Software Development :: Libraries :: Python Modules Classifier: Operating System :: Microsoft :: Windows Classifier: Operating System :: Unix persistent-4.2.2/persistent.egg-info/requires.txt0000644000076600000240000000013313017351012022102 0ustar jimstaff00000000000000zope.interface [docs] Sphinx repoze.sphinx.autointerface [test] [testing] nose coverage persistent-4.2.2/persistent.egg-info/SOURCES.txt0000644000076600000240000000245513017351012021377 0ustar jimstaff00000000000000.coveragerc .travis.yml CHANGES.rst COPYRIGHT.txt LICENSE.txt MANIFEST.in README.rst bootstrap.py buildout.cfg rtd.txt setup.cfg setup.py tox.ini docs/Makefile docs/api.rst docs/conf.py docs/glossary.rst docs/index.rst docs/make.bat docs/using.rst docs/api/attributes.rst docs/api/cache.rst docs/api/interfaces.rst docs/api/pickling.rst persistent/__init__.py persistent/_compat.h persistent/_compat.py persistent/_timestamp.c persistent/cPersistence.c persistent/cPersistence.h persistent/cPickleCache.c persistent/dict.py persistent/interfaces.py persistent/list.py persistent/mapping.py persistent/persistence.py persistent/picklecache.py persistent/ring.c persistent/ring.h persistent/ring.py persistent/timestamp.py persistent/wref.py persistent.egg-info/PKG-INFO persistent.egg-info/SOURCES.txt 
persistent.egg-info/dependency_links.txt persistent.egg-info/entry_points.txt persistent.egg-info/not-zip-safe persistent.egg-info/requires.txt persistent.egg-info/top_level.txt persistent/tests/__init__.py persistent/tests/attrhooks.py persistent/tests/cucumbers.py persistent/tests/test_list.py persistent/tests/test_mapping.py persistent/tests/test_persistence.py persistent/tests/test_picklecache.py persistent/tests/test_ring.py persistent/tests/test_timestamp.py persistent/tests/test_wref.py persistent/tests/utils.pypersistent-4.2.2/persistent.egg-info/top_level.txt0000644000076600000240000000001313017351012022231 0ustar jimstaff00000000000000persistent persistent-4.2.2/PKG-INFO0000644000076600000240000002274113017351012014716 0ustar jimstaff00000000000000Metadata-Version: 1.1 Name: persistent Version: 4.2.2 Summary: Translucent persistent objects Home-page: http://www.zope.org/Products/ZODB Author: Zope Corporation Author-email: zodb-dev@zope.org License: ZPL 2.1 Description: ``persistent``: automatic persistence for Python objects ========================================================= .. image:: https://travis-ci.org/zopefoundation/persistent.png?branch=master :target: https://travis-ci.org/zopefoundation/persistent .. image:: https://readthedocs.org/projects/persistent/badge/?version=latest :target: http://persistent.readthedocs.org/en/latest/ :alt: Documentation Status .. image:: https://img.shields.io/pypi/v/persistent.svg :target: https://pypi.python.org/pypi/persistent :alt: PyPI .. image:: https://img.shields.io/pypi/pyversions/persistent.svg :target: https://pypi.python.org/pypi/persistent :alt: Python versions This package contains a generic persistence implementation for Python. It forms the core protocol for making objects interact "transparently" with a database such as the ZODB. Please see the Sphinx documentation (``docs/index.rst``) for further information. .. 
note:: Use of this standalone ``persistent`` release is not recommended or supported with ZODB < 3.11. ZODB 3.10 and earlier bundle their own version of the ``persistent`` package. ``persistent`` Changelog ======================== 4.2.2 (2016-11-29) ------------------ - Drop use of ``ctypes`` for determining maximum integer size, to increase pure-Python compatibility. - Ensure that ``__slots__`` attributes are cleared when a persistent object is ghostified. (This excluses classes that override ``__new__``. See https://github.com/zopefoundation/persistent/wiki/Notes_on_state_new_and_slots if you're curious.) 4.2.1 (2016-05-26) ------------------ - Fix the hashcode of C ``TimeStamp`` objects on 64-bit Python 3 on Windows. 4.2.0 (2016-05-05) ------------------ - Fixed the Python(/PYPY) implementation ``TimeStamp.timeTime`` method to have subsecond precision. - When testing ``PURE_PYTHON`` environments under ``tox``, avoid poisoning the user's global wheel cache. - Add support for Python 3.5. - Drop support for Python 2.6 and 3.2. 4.1.1 (2015-06-02) ------------------ - Fix manifest and re-upload to fix stray files included in 4.1.0. 4.1.0 (2015-05-19) ------------------ - Make the Python implementation of ``Persistent`` and ``PickleCache`` behave more similarly to the C implementation. In particular, the Python version can now run the complete ZODB and ZEO test suites. - Fix the hashcode of the Python ``TimeStamp`` on 32-bit platforms. 4.0.9 (2015-04-08) ------------------ - Make the C and Python ``TimeStamp`` objects behave more alike. The Python version now produces the same ``repr`` and ``.raw()`` output as the C version, and has the same hashcode. In addition, the Python version is now supports ordering and equality like the C version. - Intern keys of object state in ``__setstate__`` to reduce memory usage when unpickling multiple objects with the same attributes. - Add support for PyPy3. - 100% branch coverage. 
4.0.8 (2014-03-20) ------------------ - Add support for Python 3.4. - In pure-Python ``Persistent``, avoid loading state in ``_p_activate`` for non-ghost objects (which could corrupt their state). (PR #9) - In pure-Python, and don't throw ``POSKeyError`` if ``_p_activate`` is called on an object that has never been committed. (PR #9) - In pure-Python ``Persistent``, avoid calling a subclass's ``__setattr__`` at instance creation time. (PR #8) - Make it possible to delete ``_p_jar`` / ``_p_oid`` of a pure-Python ``Persistent`` object which has been removed from the jar's cache (fixes aborting a ZODB Connection that has added objects). (PR #7) 4.0.7 (2014-02-20) ------------------ - Avoid a KeyError from ``_p_accessed()`` on newly-created objects under pure-Python: these objects may be assigned to a jar, but not yet added to its cache. (PR #6) - Avoid a failure in ``Persistent.__setstate__`` when the state dict contains exactly two keys. (PR #5) - Fix a hang in ``picklecache`` invalidation if OIDs are manually passed out-of-order. (PR #4) - Add ``PURE_PYTHON`` environment variable support: if set, the C extensions will not be built, imported, or tested. 4.0.6 (2013-01-03) ------------------ - Updated Trove classifiers. 4.0.5 (2012-12-14) ------------------ - Fixed the C-extensions under Py3k (previously they compiled but were not importable). 4.0.4 (2012-12-11) ------------------ - Added support for Python 3.3. - C extenstions now build under Python 3.2, passing the same tests as the pure-Python reference implementation. 4.0.3 (2012-11-19) ------------------ - Fixed: In the C implimentation, an integer was compared with a pointer, with undefined results and a compiler warning. - Fixed: the Python implementation of the ``_p_estimated_size`` propety didn't support deletion. - Simplified implementation of the ``_p_estimated_size`` property to only accept integers. A TypeError is raised if an incorrect type is provided. 
4.0.2 (2012-08-27) ------------------ - Correct initialization functions in renamed ``_timestamp`` extension. 4.0.1 (2012-08-26) ------------------ - Worked around test failure due to overflow to long on 32-bit systems. - Renamed ``TimeStamp`` extension module to avoid clash with pure-Python ``timestamp`` module on case-insensitive filesystems. N.B: the canonical way to import the ``TimeStamp`` class is now:: from persistent.timestamp import TimeStamp which will yield the class from the extension module (if available), falling back to the pure-Python reference implementation. 4.0.0 (2012-08-11) ------------------ Platform Changes ################ - Added explicit support for Python 3.2 and PyPy. - Note that the C implementations of Persistent, PickleCache, and Timestamp are not built (yet) on these platforms. - Dropped support for Python < 2.6. Testing Changes ############### - 100% unit test coverage. - Removed all ``ZODB``-dependent tests: - Rewrote some to avoid the dependency - Cloned the remainder into new ``ZODB.tests`` modules. - Refactored some doctests refactored as unittests. - Completed pure-Python reference implementations of 'Persistent', 'PickleCache', and 'TimeStamp'. - All covered platforms tested under ``tox``. - Added support for continuous integration using ``tox`` and ``jenkins``. - Added ``setup.py dev`` alias (installs ``nose`` and ``coverage``). - Dropped dependency on ``zope.testing`` / ``zope.testrunner``: tests now run with ``setup.py test``. Documentation Changes ##################### - Refactored many Doctests as Sphinx documentation (snippets are exercised via 'tox'). - Added ``setup.py docs`` alias (installs ``Sphinx`` and ``repoze.sphinx.autointerface``). 
Platform: any Classifier: Development Status :: 6 - Mature Classifier: License :: OSI Approved :: Zope Public License Classifier: Programming Language :: Python Classifier: Programming Language :: Python :: 2 Classifier: Programming Language :: Python :: 2.7 Classifier: Programming Language :: Python :: 3 Classifier: Programming Language :: Python :: 3.3 Classifier: Programming Language :: Python :: 3.4 Classifier: Programming Language :: Python :: 3.5 Classifier: Programming Language :: Python :: Implementation :: CPython Classifier: Programming Language :: Python :: Implementation :: PyPy Classifier: Framework :: ZODB Classifier: Topic :: Database Classifier: Topic :: Software Development :: Libraries :: Python Modules Classifier: Operating System :: Microsoft :: Windows Classifier: Operating System :: Unix persistent-4.2.2/README.rst0000644000076600000240000000223112712637364015322 0ustar jimstaff00000000000000``persistent``: automatic persistence for Python objects ========================================================= .. image:: https://travis-ci.org/zopefoundation/persistent.png?branch=master :target: https://travis-ci.org/zopefoundation/persistent .. image:: https://readthedocs.org/projects/persistent/badge/?version=latest :target: http://persistent.readthedocs.org/en/latest/ :alt: Documentation Status .. image:: https://img.shields.io/pypi/v/persistent.svg :target: https://pypi.python.org/pypi/persistent :alt: PyPI .. image:: https://img.shields.io/pypi/pyversions/persistent.svg :target: https://pypi.python.org/pypi/persistent :alt: Python versions This package contains a generic persistence implementation for Python. It forms the core protocol for making objects interact "transparently" with a database such as the ZODB. Please see the Sphinx documentation (``docs/index.rst``) for further information. .. note:: Use of this standalone ``persistent`` release is not recommended or supported with ZODB < 3.11. 
ZODB 3.10 and earlier bundle their own version of the ``persistent`` package. persistent-4.2.2/rtd.txt0000644000076600000240000000003412712637364015164 0ustar jimstaff00000000000000repoze.sphinx.autointerface persistent-4.2.2/setup.cfg0000644000076600000240000000047213017351012015437 0ustar jimstaff00000000000000[nosetests] nocapture = 1 cover-package = persistent cover-erase = 1 cover-branches = 1 cover-min-percentage = 100 with-doctest = 0 where = persistent [aliases] dev = develop easy_install persistent[testing] docs = develop easy_install persistent[docs] [egg_info] tag_build = tag_date = 0 tag_svn_revision = 0 persistent-4.2.2/setup.py0000644000076600000240000001031513017350643015336 0ustar jimstaff00000000000000############################################################################## # # Copyright (c) 2008 Zope Foundation and Contributors. # All Rights Reserved. # # This software is subject to the provisions of the Zope Public License, # Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. # THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED # WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS # FOR A PARTICULAR PURPOSE. 
# ############################################################################## __version__ = '4.2.2' import os import platform import sys from setuptools import Extension from setuptools import find_packages from setuptools import setup here = os.path.abspath(os.path.dirname(__file__)) def _read_file(filename): with open(os.path.join(here, filename)) as f: return f.read() README = (_read_file('README.rst') + '\n\n' + _read_file('CHANGES.rst')) py_impl = getattr(platform, 'python_implementation', lambda: None) is_pypy = py_impl() == 'PyPy' is_jython = 'java' in sys.platform is_pure = os.environ.get('PURE_PYTHON') # Jython cannot build the C optimizations, while on PyPy they are # anti-optimizations (the C extension compatibility layer is known-slow, # and defeats JIT opportunities). if is_pypy or is_jython or is_pure: ext_modules = headers = [] else: ext_modules = [Extension(name = 'persistent.cPersistence', sources= ['persistent/cPersistence.c', 'persistent/ring.c', ], depends = ['persistent/cPersistence.h', 'persistent/ring.h', 'persistent/ring.c', ] ), Extension(name = 'persistent.cPickleCache', sources= ['persistent/cPickleCache.c', 'persistent/ring.c' ], depends = ['persistent/cPersistence.h', 'persistent/ring.h', 'persistent/ring.c', ] ), Extension(name = 'persistent._timestamp', sources= ['persistent/_timestamp.c', ], ), ] headers = ['persistent/cPersistence.h', 'persistent/ring.h'] setup(name='persistent', version=__version__, description='Translucent persistent objects', long_description=README, classifiers=[ "Development Status :: 6 - Mature", "License :: OSI Approved :: Zope Public License", "Programming Language :: Python", 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: 
Python :: Implementation :: PyPy", "Framework :: ZODB", "Topic :: Database", "Topic :: Software Development :: Libraries :: Python Modules", "Operating System :: Microsoft :: Windows", "Operating System :: Unix", ], author="Zope Corporation", author_email="zodb-dev@zope.org", url="http://www.zope.org/Products/ZODB", license="ZPL 2.1", platforms=["any"], packages=find_packages(), include_package_data=True, zip_safe=False, ext_modules = ext_modules, headers = headers, extras_require = { 'test': (), 'testing': ['nose', 'coverage'], 'docs': ['Sphinx', 'repoze.sphinx.autointerface'], }, test_suite="persistent.tests", install_requires=[ 'zope.interface', ], entry_points = """\ """ ) persistent-4.2.2/tox.ini0000644000076600000240000000237612712637364015160 0ustar jimstaff00000000000000[tox] envlist = # Jython 2.7rc2 does work, but unfortunately has an issue running # with Tox 1.9.2 (http://bugs.jython.org/issue2325) # py27,py27-pure,pypy,py33,py34,pypy3,jython,coverage,docs py27,py27-pure,py27-pure-cffi,pypy,py33,py34,py35,pypy3,coverage,docs [testenv] deps = zope.interface commands = python setup.py -q test -q [testenv:jython] commands = jython setup.py -q test -q [testenv:py27-pure] basepython = python2.7 setenv = PURE_PYTHON = 1 PIP_CACHE_DIR = {envdir}/.cache deps = {[testenv]deps} commands = python setup.py -q test -q [testenv:py27-pure-cffi] basepython = python2.7 setenv = PURE_PYTHON = 1 PIP_CACHE_DIR = {envdir}/.cache USING_CFFI = 1 deps = {[testenv]deps} cffi commands = python setup.py -q test -q [testenv:coverage] basepython = python2.7 setenv = USING_CFFI = 1 commands = nosetests --with-xunit --with-xcoverage deps = zope.interface nose coverage nosexcover cffi [testenv:docs] basepython = python2.7 commands = sphinx-build -b html -d docs/_build/doctrees docs docs/_build/html sphinx-build -b doctest -d docs/_build/doctrees docs docs/_build/doctest deps = zope.interface Sphinx repoze.sphinx.autointerface