AddOns-0.7/peak/util/__init__.py:

__import__('pkg_resources').declare_namespace(__name__)

AddOns-0.7/peak/util/addons.py:

from peak.util.decorators import decorate, decorate_class, enclosing_frame, classy
from weakref import ref
import sys

__all__ = ['AddOn', 'ClassAddOn', 'Registry', 'addons_for']

_addons = {}

def addons_for(ob):
    """Get the dictionary that should contain add-ons for `ob`"""
    try:
        d = ob.__dict__
        sd = d.setdefault
        return d
    except (AttributeError, TypeError):
        r = ref(ob)
        try:
            return _addons[r]
        except KeyError:
            return _addons.setdefault(ref(ob, _addons.__delitem__), {})

def additional_tests():
    import doctest
    return doctest.DocFileSuite(
        'README.txt', package='__main__',
        optionflags=doctest.ELLIPSIS|doctest.NORMALIZE_WHITESPACE,
    )

class AddOn(classy):
    """Attach extra state to (almost) any object"""

    __slots__ = ()

    decorate(classmethod)
    def __class_call__(cls, ob, *data):
        a = addons_for(ob)
        addon_key = cls.addon_key(*data)
        try:
            return a[addon_key]
        except KeyError:
            # Use setdefault() to prevent race conditions
            ob = a.setdefault(addon_key, super(AddOn, cls).__class_call__(ob, *data))
            return ob

    decorate(classmethod)
    def addon_key(cls, *args):
        if args:
            return (cls,)+args
        return cls

    decorate(classmethod)
    def exists_for(cls, ob, *key):
        """Does an aspect of this type for the given key exist?"""
        return cls.addon_key(*key) in addons_for(ob)

    decorate(classmethod)
    def delete_from(cls, ob, *key):
        """Ensure an aspect of this type for the given key does not exist"""
        a = addons_for(ob)
        try:
            del a[cls.addon_key(*key)]
        except KeyError:
            pass

    def __init__(self, subject):
        pass

class ClassAddOn(AddOn):
    """Attachment/annotation for classes and types"""

    __slots__ = ()

    decorate(classmethod)
    def __class_call__(cls, ob, *data):
        addon_key = cls.addon_key(*data)
        d = ob.__dict__
        if addon_key in d:
            return d[addon_key]
        d2 = addons_for(ob)
        try:
            return d2[addon_key]
        except KeyError:
            # Use setdefault() to prevent race conditions
            ob = d2.setdefault(
                addon_key, super(ClassAddOn, cls).__class_call__(ob, *data)
            )
            return ob

    decorate(classmethod)
    def for_enclosing_class(cls, *args, **kw):
        if 'frame' in kw:
            frame = kw.pop('frame')
        else:
            if 'level' in kw:
                level = kw.pop('level')
            else:
                level = 2
            frame = sys._getframe(level)
        if kw:
            raise TypeError("Unexpected keyword arguments", kw)
        return cls.for_frame(frame, *args)

    decorate(classmethod)
    def for_frame(cls, frame, *args):
        a = enclosing_frame(frame).f_locals
        addon_key = cls.addon_key(*args)
        try:
            return a[addon_key]
        except KeyError:
            # Use setdefault() to prevent race conditions
            ob = a.setdefault(addon_key, type.__call__(cls, None, *args))
            # we use a lambda here so that if we are a registry, Python 2.5
            # won't consider our method equal to some other registry's method
            decorate_class(lambda c: ob.__decorate(c), frame=frame)
            return ob

    decorate(classmethod)
    def exists_for(cls, ob, *key):
        """Does an aspect of this type for the given key exist?"""
        addon_key = cls.addon_key(*key)
        return addon_key in ob.__dict__ or addon_key in addons_for(ob)

    decorate(classmethod)
    def delete_from(cls, ob, *key):
        """Class AddOns are not deletable!"""
        raise TypeError("ClassAddOns cannot be deleted")

    def __decorate(self, cls):
        self.created_for(cls)
        return cls

    def created_for(self, cls):
        """Override to access the decorated class, as soon as it's known"""

    def __init__(self, subject):
        """Ensure ``created_for()`` is called, if class already exists"""
        if subject is not None:
            self.created_for(subject)

class Registry(ClassAddOn, dict):
    """ClassAddOn that's a dictionary with mro-based inheritance"""

    __slots__ = ()

    def __new__(cls, subject):
        if cls is Registry:
            raise TypeError("You must subclass Registry to use it")
        return super(Registry, cls).__new__(cls)

    def __init__(self, subject):
        dict.__init__(self)
        super(Registry, self).__init__(subject)

    def created_for(self, cls):
        """Inherit the contents of base classes"""
        try:
            mro = cls.__mro__[::-1]
        except AttributeError:
            mro = type(cls.__name__, (cls,object), {}).__mro__[1:][::-1]
        data = {}
        self.defined_in_class = dict(self)
        mytype = type(self)
        for base in mro[:-1]:
            data.update(mytype(base))
        data.update(self)
        self.update(data)

    def set(self, key, value):
        if key in self and self[key]!=value:
            raise ValueError("%s[%r] already contains %r; can't set to %r"
                % (self.__class__.__name__, key, self[key], value)
            )
        self[key] = value

AddOns-0.7/peak/__init__.py:

__import__('pkg_resources').declare_namespace(__name__)
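The add-on machinery above is easiest to see in a short usage sketch. The
following example is illustrative only and is not a file in this
distribution: it assumes AddOns and its DecoratorTools dependency are
installed, the names ``Settings``, ``Application``, and ``app`` are made up,
and it exercises only the public API defined in ``addons.py`` (``AddOn``,
``exists_for()``, ``delete_from()``). Like the package itself, it is written
for Python 2::

    # Illustrative sketch -- ``Settings`` and ``app`` are hypothetical names,
    # not part of the AddOns distribution.
    from peak.util.addons import AddOn

    class Settings(AddOn):
        """Configuration attached to an object from the outside"""
        verbose = False

        def __init__(self, subject):
            # ``subject`` is the object being extended; it need not be stored
            # (and isn't here, to avoid creating a reference cycle).
            self.overrides = {}

    class Application:
        pass

    app = Application()

    print Settings.exists_for(app)    # False -- nothing attached yet
    Settings(app).verbose = True      # first access creates the add-on
    print Settings(app).verbose       # True -- the same instance is reused
    print Settings.exists_for(app)    # True
    Settings.delete_from(app)         # detach the add-on again
    print Settings.exists_for(app)    # False

Because the add-on is keyed by its class in ``app``'s own ``__dict__``, every
``Settings(app)`` call returns the same instance until it is deleted.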
""" import sys DEFAULT_VERSION = "0.6c9" DEFAULT_URL = "http://pypi.python.org/packages/%s/s/setuptools/" % sys.version[:3] md5_data = { 'setuptools-0.6b1-py2.3.egg': '8822caf901250d848b996b7f25c6e6ca', 'setuptools-0.6b1-py2.4.egg': 'b79a8a403e4502fbb85ee3f1941735cb', 'setuptools-0.6b2-py2.3.egg': '5657759d8a6d8fc44070a9d07272d99b', 'setuptools-0.6b2-py2.4.egg': '4996a8d169d2be661fa32a6e52e4f82a', 'setuptools-0.6b3-py2.3.egg': 'bb31c0fc7399a63579975cad9f5a0618', 'setuptools-0.6b3-py2.4.egg': '38a8c6b3d6ecd22247f179f7da669fac', 'setuptools-0.6b4-py2.3.egg': '62045a24ed4e1ebc77fe039aa4e6f7e5', 'setuptools-0.6b4-py2.4.egg': '4cb2a185d228dacffb2d17f103b3b1c4', 'setuptools-0.6c1-py2.3.egg': 'b3f2b5539d65cb7f74ad79127f1a908c', 'setuptools-0.6c1-py2.4.egg': 'b45adeda0667d2d2ffe14009364f2a4b', 'setuptools-0.6c2-py2.3.egg': 'f0064bf6aa2b7d0f3ba0b43f20817c27', 'setuptools-0.6c2-py2.4.egg': '616192eec35f47e8ea16cd6a122b7277', 'setuptools-0.6c3-py2.3.egg': 'f181fa125dfe85a259c9cd6f1d7b78fa', 'setuptools-0.6c3-py2.4.egg': 'e0ed74682c998bfb73bf803a50e7b71e', 'setuptools-0.6c3-py2.5.egg': 'abef16fdd61955514841c7c6bd98965e', 'setuptools-0.6c4-py2.3.egg': 'b0b9131acab32022bfac7f44c5d7971f', 'setuptools-0.6c4-py2.4.egg': '2a1f9656d4fbf3c97bf946c0a124e6e2', 'setuptools-0.6c4-py2.5.egg': '8f5a052e32cdb9c72bcf4b5526f28afc', 'setuptools-0.6c5-py2.3.egg': 'ee9fd80965da04f2f3e6b3576e9d8167', 'setuptools-0.6c5-py2.4.egg': 'afe2adf1c01701ee841761f5bcd8aa64', 'setuptools-0.6c5-py2.5.egg': 'a8d3f61494ccaa8714dfed37bccd3d5d', 'setuptools-0.6c6-py2.3.egg': '35686b78116a668847237b69d549ec20', 'setuptools-0.6c6-py2.4.egg': '3c56af57be3225019260a644430065ab', 'setuptools-0.6c6-py2.5.egg': 'b2f8a7520709a5b34f80946de5f02f53', 'setuptools-0.6c7-py2.3.egg': '209fdf9adc3a615e5115b725658e13e2', 'setuptools-0.6c7-py2.4.egg': '5a8f954807d46a0fb67cf1f26c55a82e', 'setuptools-0.6c7-py2.5.egg': '45d2ad28f9750e7434111fde831e8372', 'setuptools-0.6c8-py2.3.egg': '50759d29b349db8cfd807ba8303f1902', 'setuptools-0.6c8-py2.4.egg': 'cba38d74f7d483c06e9daa6070cce6de', 'setuptools-0.6c8-py2.5.egg': '1721747ee329dc150590a58b3e1ac95b', 'setuptools-0.6c9-py2.3.egg': 'a83c4020414807b496e4cfbe08507c03', 'setuptools-0.6c9-py2.4.egg': '260a2be2e5388d66bdaee06abec6342a', 'setuptools-0.6c9-py2.5.egg': 'fe67c3e5a17b12c0e7c541b7ea43a8e6', 'setuptools-0.6c9-py2.6.egg': 'ca37b1ff16fa2ede6e19383e7b59245a', } import sys, os try: from hashlib import md5 except ImportError: from md5 import md5 def _validate_md5(egg_name, data): if egg_name in md5_data: digest = md5(data).hexdigest() if digest != md5_data[egg_name]: print >>sys.stderr, ( "md5 validation of %s failed! (Possible download problem?)" % egg_name ) sys.exit(2) return data def use_setuptools( version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=os.curdir, download_delay=15 ): """Automatically find/download setuptools and make it available on sys.path `version` should be a valid setuptools version number that is available as an egg for download under the `download_base` URL (which should end with a '/'). `to_dir` is the directory where setuptools will be downloaded, if it is not already available. If `download_delay` is specified, it should be the number of seconds that will be paused before initiating a download, should one be required. If an older version of setuptools is installed, this routine will print a message to ``sys.stderr`` and raise SystemExit in an attempt to abort the calling script. 
""" was_imported = 'pkg_resources' in sys.modules or 'setuptools' in sys.modules def do_download(): egg = download_setuptools(version, download_base, to_dir, download_delay) sys.path.insert(0, egg) import setuptools; setuptools.bootstrap_install_from = egg try: import pkg_resources except ImportError: return do_download() try: pkg_resources.require("setuptools>="+version); return except pkg_resources.VersionConflict, e: if was_imported: print >>sys.stderr, ( "The required version of setuptools (>=%s) is not available, and\n" "can't be installed while this script is running. Please install\n" " a more recent version first, using 'easy_install -U setuptools'." "\n\n(Currently using %r)" ) % (version, e.args[0]) sys.exit(2) else: del pkg_resources, sys.modules['pkg_resources'] # reload ok return do_download() except pkg_resources.DistributionNotFound: return do_download() def download_setuptools( version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=os.curdir, delay = 15 ): """Download setuptools from a specified location and return its filename `version` should be a valid setuptools version number that is available as an egg for download under the `download_base` URL (which should end with a '/'). `to_dir` is the directory where the egg will be downloaded. `delay` is the number of seconds to pause before an actual download attempt. """ import urllib2, shutil egg_name = "setuptools-%s-py%s.egg" % (version,sys.version[:3]) url = download_base + egg_name saveto = os.path.join(to_dir, egg_name) src = dst = None if not os.path.exists(saveto): # Avoid repeated downloads try: from distutils import log if delay: log.warn(""" --------------------------------------------------------------------------- This script requires setuptools version %s to run (even to display help). I will attempt to download it for you (from %s), but you may need to enable firewall access for this script first. I will start the download in %d seconds. (Note: if this machine does not have network access, please obtain the file %s and place it in this directory before rerunning this script.) ---------------------------------------------------------------------------""", version, download_base, delay, url ); from time import sleep; sleep(delay) log.warn("Downloading %s", url) src = urllib2.urlopen(url) # Read/write all in one block, so we don't create a corrupt file # if the download is interrupted. data = _validate_md5(egg_name, src.read()) dst = open(saveto,"wb"); dst.write(data) finally: if src: src.close() if dst: dst.close() return os.path.realpath(saveto) def main(argv, version=DEFAULT_VERSION): """Install or upgrade setuptools and EasyInstall""" try: import setuptools except ImportError: egg = None try: egg = download_setuptools(version, delay=0) sys.path.insert(0,egg) from setuptools.command.easy_install import main return main(list(argv)+[egg]) # we're done here finally: if egg and os.path.exists(egg): os.unlink(egg) else: if setuptools.__version__ == '0.0.1': print >>sys.stderr, ( "You have an obsolete version of setuptools installed. Please\n" "remove it from your system entirely before rerunning this script." 
AddOns-0.7/ez_setup/README.txt:

This directory exists so that Subversion-based projects can share a single
copy of the ``ez_setup`` bootstrap module for ``setuptools``, and have it
automatically updated in their projects when ``setuptools`` is updated.

For your convenience, you may use the following svn:externals definition::

    ez_setup svn://svn.eby-sarna.com/svnroot/ez_setup

You can set this by executing this command in your project directory::

    svn propedit svn:externals .

And then adding the line shown above to the file that comes up for editing.

Then, whenever you update your project, ``ez_setup`` will be updated as well.
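As a concrete illustration of the arrangement described above, a project that
carries this shared ``ez_setup`` directory calls its bootstrap function at the
top of ``setup.py``, before importing ``setuptools`` itself; the AddOns
``setup.py`` later in this archive does exactly that. The sketch below is
hypothetical -- the ``MyPackage`` metadata is a placeholder -- and assumes the
``ez_setup`` module sits next to ``setup.py``::

    # Hypothetical setup.py for a project using the shared ez_setup bootstrap;
    # the package name, version, and packages list are placeholders.
    import ez_setup
    ez_setup.use_setuptools()      # download/activate setuptools if missing

    from setuptools import setup

    setup(
        name="MyPackage",
        version="0.1",
        packages=["mypackage"],
    )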
AddOns-0.7/setup.py:

#!/usr/bin/env python
"""Distutils setup file"""

import ez_setup
ez_setup.use_setuptools()
from setuptools import setup

# Metadata
PACKAGE_NAME = "AddOns"
PACKAGE_VERSION = "0.7"
PACKAGES = ['peak', 'peak.util']

def get_description():
    # Get our long description from the documentation
    f = file('README.txt')
    lines = []
    for line in f:
        if not line.strip():
            break   # skip to first blank line
    for line in f:
        if line.startswith('.. contents::'):
            break   # read to table of contents
        lines.append(line)
    f.close()
    return ''.join(lines)

setup(
    name=PACKAGE_NAME,
    version=PACKAGE_VERSION,
    description="Dynamically extend other objects with AddOns (formerly ObjectRoles)",
    long_description = open('README.txt').read(), # get_description(),
    install_requires=['DecoratorTools>=1.6'],
    author="Phillip J. Eby",
    author_email="peak@eby-sarna.com",
    license="PSF or ZPL",
    url="http://pypi.python.org/pypi/AddOns",
    test_suite = 'peak.util.addons',
    packages = PACKAGES,
    namespace_packages = PACKAGES,
)

AddOns-0.7/setup.cfg:

[egg_info]
tag_build = 
tag_date = 0
tag_svn_revision = 0

AddOns-0.7/README.txt:

========================================
Separating Concerns Using Object Add-Ons
========================================

(NEW in version 0.6: the ``Registry`` base class, and the
``ClassAddOn.for_frame()`` classmethod.)

In any sufficiently-sized application or framework, it's common to end up
lumping a lot of different concerns into the same class.  For example, you may
have business logic, persistence code, and UI all jammed into a single class.
Attribute and method names for all sorts of different operations get shoved
into a single namespace -- even when using mixin classes.  Separating concerns
into different objects, however, makes it easier to write reusable and
separately-testable components.

The AddOns package (``peak.util.addons``) lets you manage concerns using
``AddOn`` classes.  ``AddOn`` classes are like dynamic mixins, but with their
own private attribute and method namespaces.  A concern implemented using
add-ons can be added at runtime to any object that either has a writable
``__dict__`` attribute, or is weak-referenceable.

``AddOn`` classes are also like adapters, but rather than creating a new
instance each time you ask for one, an existing instance is returned if
possible.  In this way, add-ons can keep track of ongoing state.  For example,
a ``Persistence`` add-on might keep track of whether its subject has been
saved to disk yet:: >>> from peak.util.addons import AddOn >>> class Persistence(AddOn): ... saved = True ... def changed(self): ... self.saved = False ... def save_if_needed(self): ... if not self.saved: ... print "saving" ...
self.saved = True >>> class Thing: pass >>> aThing = Thing() >>> Persistence(aThing).saved True >>> Persistence(aThing).changed() >>> Persistence(aThing).saved False >>> Persistence(aThing).save_if_needed() saving >>> Persistence(aThing).save_if_needed() # no action taken This makes it easy for us to, for example, write a loop that saves a bunch of objects, because we don't need to concern ourselves with initializing the state of the persistence add-on. A class doesn't need to inherit from a special base in order to be able to have this state tracked, and it doesn't need to know *how* to initialize it, either. Of course, in the case of persistence, a class does need to know *when* to call the persistence methods, to indicate changedness and to request saving. However, a library providing such an add-on can also provide decorators and other tools to make this easier, while still remaining largely independent of the objects involved. Indeed, the AddOns library was actually created to make it easier to implement functionality using function or method decorators. For example, one can create a ``@synchronized`` decorator that safely locks an object -- see the example below under `Threading Concerns`_. In summary, the AddOns library provides you with a basic form of AOP, that lets you attach (or "introduce", in AspectJ terminology) additional attributes and methods to an object, using a private namespace. (If you also want to do AspectJ-style "advice", the PEAK-Rules package can be used to do "before", "after", and "around" advice in combination with add-ons.) .. contents:: **Table of Contents** Basic API --------- If you need to, you can query for the existence of an add-on:: >>> Persistence.exists_for(aThing) True And by default, it won't exist:: >>> anotherThing = Thing() >>> Persistence.exists_for(anotherThing) False Until you refer to it directly, e.g.:: >>> Persistence(aThing) is Persistence(anotherThing) False At which point it will of course exist:: >>> Persistence.exists_for(anotherThing) True And maintain its state, linked to its subject:: >>> Persistence(anotherThing) is Persistence(anotherThing) True Until/unless you delete it (or its subject is garbage collected):: >>> Persistence.delete_from(anotherThing) >>> Persistence.exists_for(anotherThing) False AddOn Keys and Instances ------------------------ Add-ons are stored either in their subject's ``__dict__``, or if it does not have one (or is a type object with a read-only ``__dict__``), they are stored in a special dictionary linked to the subject via a weak reference. By default, the dictionary key is the add-on class, so there is exactly one add-on instance per subject:: >>> aThing.__dict__ {: } But in some cases, you may wish to have more than one instance of a given add-on class for a subject. (For example, PEAK-Rules uses add-ons to represent indexes on different expressions contained within rules.) For this purpose, you can redefine your AddOn's ``__init__`` method to accept additional arguments besides its subject. The additional arguments become part of the key that instances are stored under, such that more than one add-on instance can exist for a given object:: >>> class Index(AddOn, dict): ... def __init__(self, subject, expression): ... 
self.expression = expression >>> something = Thing() >>> Index(something, "x>y")["a"] = "b" >>> dir(something) ['__doc__', '__module__', (, 'x>y')] >>> "a" in Index(something, "z<22") False >>> Index(something, "x>y") {'a': 'b'} >>> Index(something, "x>y").expression 'x>y' >>> dir(something) ['__doc__', '__module__', (, 'x>y'), (, 'z<22')] >>> Index.exists_for(something, 'x>y') True >>> Index.exists_for(anotherThing, 'q==42') False By default, an add-on class' key is either the class by itself, or a tuple containing the class, followed by any arguments that appeared in the constructor call after the add-on's subject. However, you can redefine the ``addon_key()`` classmethod in your subclass, and change it to do something different. For example, you could make different add-on classes generate overlapping keys, or you could use attributes of the arguments to generate the key. You could even generate a string key, to cause the add-on to be attached as an attribute!:: >>> class Leech(AddOn): ... def addon_key(cls): ... return "__leech__" ... addon_key = classmethod(addon_key) >>> something = Thing() >>> Leech(something) is something.__leech__ True The ``addon_key`` method only receives the arguments that appear *after* the subject in the constructor call. So, in the case above, it receives no arguments. Had we called it with additional arguments, we'd have gotten an error:: >>> Leech(something, 42) Traceback (most recent call last): ... TypeError: addon_key() takes exactly 1 argument (2 given) Naturally, your ``addon_key()`` and ``__init__()`` (and/or ``__new__()``) methods should also agree on how many arguments there can be, and what they mean! In general, you should include your add-on class (or some add-on class) as part of your key, so as to make collisions with other people's add-on classes impossible. Keys should also be designed for thread-safety, where applicable. (See the section below on `Threading Concerns`_ for more details.) Role Storage and Garbage Collection ----------------------------------- By the way, the approach above of using an string as an add-on key won't always make the add-on into an attribute of the subject! If an object doesn't have a ``__dict__``, or that ``__dict__`` isn't writable (as in the case of type objects), then the add-on is stored in a weakly-keyed dictionary, maintained elsewhere:: >>> class NoDict(object): ... __slots__ = '__weakref__' >>> dictless = NoDict() >>> Leech(dictless) >>> dictless.__leech__ Traceback (most recent call last): ... AttributeError: 'NoDict' object has no attribute '__leech__' Of course, if an object doesn't have a dictionary *and* isn't weak-referenceable, there's simply no way to store an add-on for it:: >>> ob = object() >>> Leech(ob) Traceback (most recent call last): ... TypeError: cannot create weak reference to 'object' object However, there is an ``addons_for()`` function in the ``peak.util.addons`` module that you can extend using PEAK-Rules advice. Once you add a method to support a type that otherwise can't be used with add-ons, you should be able to use any and all kinds of add-on objects with that type. (Assuming, of course, that you can implement a suitable storage mechanism!) Finally, a few words regarding garbage collection. If you don't want to create a reference cycle, don't store a reference to your subject in your add-on. Even though the ``__init__`` and ``__new__`` messages get the subject passed in, you are not under any obligation to *store* the subject, and often won't need to. 
Usually, the code that is accessing the add-on knows what subject is in use, and can pass the subject to the add-on's methods if needed. It's rare that the add-on really needs to keep a reference to the subject past the ``__new__()`` and ``__init__()`` calls. Add-on instances will usually be garbage collected at the same time as their subject, unless there is some other reference to them. If they keep a reference to their subject, their garbage collection may be delayed until Python's cycle collector is run. But if they don't keep a reference, they will usually be deleted as soon as the subject is:: >>> def deleting(r): ... print "deleting", r >>> from weakref import ref >>> r = ref(Leech(something), deleting) >>> del something deleting (Add-ons that are stored outside the instance dictionary of their subject, however, may take slightly longer, as Python processes weak reference callbacks.) It is also *not* recommended that you have ``__del__`` methods on your add-on objects, especially if you keep a reference to your subject. In such a case, garbage collection may become impossible, and both the add-on and its subject would "leak" (i.e., take up memory forever without being recoverable). Class Add-Ons ------------- Sometimes, it's useful to attach add-ons to classes instead of instances. You could use normal ``AddOn`` classes, of course, as they work just fine with both classic classes and new-style types -- even built-ins:: >>> Persistence.exists_for(int) False >>> Persistence(int) is Persistence(int) True >>> Persistence.exists_for(int) True >>> class X: pass >>> Persistence.exists_for(X) False >>> Persistence(X) is Persistence(X) True >>> Persistence.exists_for(X) True But, sometimes you have add-ons that are specifically intended for adding metadata to classes -- perhaps by way of class or method decorators. In such a case, you need a way to access the add-on *before* its subject even exists! The ``ClassAddOn`` base class provides a mechanism for this. It adds an extra classmethod, ``for_enclosing_class()``, that you can use to access the add-on for the class that is currently being defined in the scope that invoked the caller. For example, suppose we want to have a method decorator that adds the method to some class-level registry:: >>> from peak.util.addons import ClassAddOn >>> class SpecialMethodRegistry(ClassAddOn): ... def __init__(self, subject): ... self.special_methods = {} ... super(SpecialMethodRegistry, self).__init__(subject) >>> def specialmethod(func): ... smr = SpecialMethodRegistry.for_enclosing_class() ... smr.special_methods[func.__name__] = func ... return func >>> class Demo: ... def dummy(self, foo): ... pass ... dummy = specialmethod(dummy) >>> SpecialMethodRegistry(Demo).special_methods {'dummy': } >>> class Demo2(object): ... def dummy(self, foo): ... pass ... dummy = specialmethod(dummy) >>> SpecialMethodRegistry(Demo2).special_methods {'dummy': } You can of course use the usual add-on API for class add-ons:: >>> SpecialMethodRegistry.exists_for(int) False >>> SpecialMethodRegistry(int).special_methods['x'] = 123 >>> SpecialMethodRegistry.exists_for(int) True Except that you cannot explicitly delete them, they must be garbage collected naturally:: >>> SpecialMethodRegistry.delete_from(Demo) Traceback (most recent call last): ... TypeError: ClassAddOns cannot be deleted Delayed Initialization ~~~~~~~~~~~~~~~~~~~~~~ When a class add-on is initialized, the class may not exist yet. 
In this case, ``None`` is passed as the first argument to the ``__new__`` and ``__init__`` methods. You must be able to handle this case correctly, if your add-on will be accessed inside a class definition with ``for_enclosing_class()``. You can, however, define a ``created_for()`` instance method that will be called as soon as the actual class is available. It is also called by the default ``__init__`` method, if the add-on is initially created for a class that already exists. Either way, the ``created_for()`` method should be called at most once for any given add-on instance. For example:: >>> class SpecialMethodRegistry(ClassAddOn): ... def __init__(self, subject): ... print "init called for", subject ... self.special_methods = {} ... super(SpecialMethodRegistry, self).__init__(subject) ... ... def created_for(self, cls): ... print "created for", cls.__name__ >>> class Demo: ... def dummy(self, foo): ... pass ... dummy = specialmethod(dummy) init called for None created for Demo Above, ``__init__`` was called with ``None`` since the type didn't exist yet. However, accessing the add-on for an existing type (that doesn't have the add- on yet) will call ``__init__`` with the type, and the default implementation of ``ClassAddOn.__init__`` will also call ``created_for()`` for us, when it sees the subject is not ``None``:: >>> SpecialMethodRegistry(float) init called for created for float >>> SpecialMethodRegistry(float) # created_for doesn't get called again One of the most useful features of having this ``created_for()`` method is that it allows you to set up class-level metadata that involves inherited settings from base classes. In ``created_for()``, you have access to the class' ``__bases__`` and or ``__mro__``, and you can just ask for an instance of the same add-on for those base classes, then incorporate their data into your own instance as appropriate. You are guaranteed that any such add-ons you access will already be initialized, including having their ``created_for()`` method called. Since this works recursively, and because class add-ons can be attached even to built-in types like ``object``, the work of creating a correct class metadata registry is immensely simplified, compared to having to special case such base classes, check for bases where no metadata was added or defined, etc. Instead, classes that didn't define any metadata will just have an add-on instance containing whatever was setup by your add-on's ``__init__()`` method, plus whatever additional data was added by its ``created_for()`` method. Thus, metadata accumulation using class add-ons can actually be simpler than doing the same things with metaclasses, since metaclasses can't be retroactively added to existing classes. Of course, class add-ons can't entirely replace metaclasses or base class mixins, but for the things they *can* do, they are much easier to implement correctly. Keys, Decoration, and ``for_enclosing_class()`` ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Class add-ons can have add-on keys, just like regular add-ons, and they're implemented in the same way. And, you can pass the extra arguments as positional arguments to ``for_enclosing_class()``. For example:: >>> class Index(ClassAddOn): ... def __init__(self, subject, expr): ... self.expr = expr ... self.funcs = [] ... super(Index, self).__init__(subject) >>> def indexedmethod(expr): ... def decorate(func): ... Index.for_enclosing_class(expr).funcs.append(func) ... return func ... return decorate >>> class Demo: ... def dummy(self, foo): ... 
pass ... dummy = indexedmethod("x*y")(dummy) >>> Index(Demo, "x*y").funcs [] >>> Index(Demo, "y+z").funcs [] Note, by the way, that you do not need to use a function decorator to add metadata to a class. You just need to be calling ``for_enclosing_class()`` in a function called directly from the class body:: >>> def special_methods(**kw): ... smr = SpecialMethodRegistry.for_enclosing_class() ... smr.special_methods.update(kw) >>> class Demo: ... special_methods(x=23, y=55) init called for None created for Demo >>> SpecialMethodRegistry(Demo).special_methods {'y': 55, 'x': 23} By default, the ``for_enclosing_class()`` method assumes is it being called by a function that is being called directly from the class suite, such as a method decorator, or a standalone function call as shown above. But if you make a call from somewhere else, such as outside a class statement, you will get an error:: >>> special_methods(z=42) Traceback (most recent call last): ... SyntaxError: Class decorators may only be used inside a class statement Similarly, if you have a function that calls ``for_enclosing_class()``, but then you call that function from another function, it will still fail:: >>> def sm(**kw): ... special_methods(**kw) >>> class Demo: ... sm(x=23, y=55) Traceback (most recent call last): ... SyntaxError: Class decorators may only be used inside a class statement This is because ``for_enclosing_class()`` assumes the class is being defined two stack levels above its frame. You can change this assumption, however, by using the ``level`` keyword argument:: >>> def special_methods(level=2, **kw): ... smr = SpecialMethodRegistry.for_enclosing_class(level=level) ... smr.special_methods.update(kw) >>> def sm(**kw): ... special_methods(level=3, **kw) >>> class Demo: ... sm(x=23) ... special_methods(y=55) init called for None created for Demo >>> SpecialMethodRegistry(Demo).special_methods {'y': 55, 'x': 23} Alternately, you can pass a specific Python frame object via the ``frame`` keyword argument to ``for_enclosing_class()``, or use the ``for_frame()`` classmethod instead. ``for_frame()`` takes a Python stack frame, followed by any extra positional arguments needed to create the key. Class Registries (NEW in version 0.6) ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ For many of common class add-on use cases, you just want a dict-like object with "inheritance" for the values in base classes. The ``Registry`` base class provides this behavior, by subclassing ``ClassAddOn`` and the Python ``dict`` builtin type, to create a class add-on that's also a dictionary. It then overrides the ``created_for()`` method to automatically populate itself with any inherited values from base classes. Let's define a ``MethodGoodness`` registry that will store a "goodness" rating for methods:: >>> from peak.util.addons import Registry >>> class MethodGoodness(Registry): ... """Dictionary of method goodness""" >>> def goodness(value): ... def decorate(func): ... MethodGoodness.for_enclosing_class()[func.__name__]=value ... return func ... return decorate >>> class Demo(object): ... def aMethod(self, foo): ... pass ... aMethod = goodness(17)(aMethod) ... def another_method(whinge, spam): ... woohoo ... another_method = goodness(-99)(another_method) >>> MethodGoodness(Demo) {'aMethod': 17, 'another_method': -99} So far, so good. Let's see what happens with a subclass:: >>> class Demo2(Demo): ... def another_method(self, fixed): ... pass ... 

Class Registries (NEW in version 0.6)
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

For many common class add-on use cases, you just want a dict-like object with
"inheritance" for the values in base classes.  The ``Registry`` base class
provides this behavior, by subclassing ``ClassAddOn`` and the Python ``dict``
builtin type, to create a class add-on that's also a dictionary.  It then
overrides the ``created_for()`` method to automatically populate itself with
any inherited values from base classes.

Let's define a ``MethodGoodness`` registry that will store a "goodness"
rating for methods::

    >>> from peak.util.addons import Registry

    >>> class MethodGoodness(Registry):
    ...     """Dictionary of method goodness"""

    >>> def goodness(value):
    ...     def decorate(func):
    ...         MethodGoodness.for_enclosing_class()[func.__name__]=value
    ...         return func
    ...     return decorate

    >>> class Demo(object):
    ...     def aMethod(self, foo):
    ...         pass
    ...     aMethod = goodness(17)(aMethod)
    ...     def another_method(whinge, spam):
    ...         woohoo
    ...     another_method = goodness(-99)(another_method)

    >>> MethodGoodness(Demo)
    {'aMethod': 17, 'another_method': -99}

So far, so good.  Let's see what happens with a subclass::

    >>> class Demo2(Demo):
    ...     def another_method(self, fixed):
    ...         pass
    ...     another_method = goodness(42)(another_method)

    >>> MethodGoodness(Demo2)
    {'another_method': 42, 'aMethod': 17}

Values set in base class registries are automatically added to the current
class' registry of the same type and key, if the current class doesn't have
an entry defined.  Python's new-style method resolution order is used to
determine the precedence of inherited attributes.  (For classic classes, a
temporary new-style class is created that inherits from the classic class, in
order to determine the resolution order, then discarded.)

Once the class in question has been created, the registry gets an extra
attribute, ``defined_in_class``, which is a dictionary listing the entries
that were actually defined in the corresponding class, e.g.::

    >>> MethodGoodness(Demo).defined_in_class
    {'aMethod': 17, 'another_method': -99}

    >>> MethodGoodness(Demo2).defined_in_class
    {'another_method': 42}

As you can see, this second dictionary contains only the values registered in
that class, and not any inherited values.

Finally, note that ``Registry`` objects have one additional method that can
be useful to call from a decorator: ``set(key, value)``.  This method will
raise an error if a different value already exists for the given key, and is
useful for catching errors in class definitions, e.g.::

    >>> def goodness(value):
    ...     def decorate(func):
    ...         MethodGoodness.for_enclosing_class().set(func.__name__, value)
    ...         return func
    ...     return decorate

    >>> class Demo3(object):
    ...     def aMethod(self, foo):
    ...         pass
    ...     aMethod = goodness(17)(aMethod)
    ...     def aMethod(self, foo):
    ...         pass
    ...     aMethod = goodness(27)(aMethod)
    Traceback (most recent call last):
      ...
    ValueError: MethodGoodness['aMethod'] already contains 17; can't set to 27
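
Since a registry is just a dictionary (plus the ``defined_in_class``
snapshot), code that consumes the metadata can read it with ordinary dict
operations; the ``well_rated()`` helper below is purely illustrative, not
additional API::

    >>> sorted(MethodGoodness(Demo2).items())
    [('aMethod', 17), ('another_method', 42)]

    >>> sorted(MethodGoodness(Demo2).defined_in_class.items())
    [('another_method', 42)]

    >>> def well_rated(cls, threshold):
    ...     # Hypothetical helper: method names whose (possibly inherited)
    ...     # rating is at least `threshold`
    ...     items = MethodGoodness(cls).items()
    ...     return sorted([name for name, value in items if value >= threshold])

    >>> well_rated(Demo2, 20)
    ['another_method']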

Threading Concerns
------------------

Add-on lookup and creation is thread-safe (i.e. race-condition free), so long
as the add-on key contains no objects whose ``__hash__`` or ``__eq__`` methods
involve any Python code (as opposed to being pure C code that doesn't call
any Python code).  So, unkeyed add-ons, or add-ons whose keys consist only of
instances of built-in types (recursively, in the case of tuples) or types
that inherit their ``__hash__`` and ``__eq__`` methods from built-in types,
can be initialized in a thread-safe manner.

This does *not* mean, however, that two or more add-on instances can't be
created for the same subject at the same time!  Code in an add-on class'
``__new__`` or ``__init__`` methods **must not** assume that it will in fact
be the only add-on instance attached to its subject, if you wish the code to
be thread-safe.  This is because the ``AddOn`` access machinery allows
multiple threads to *create* an add-on instance at the same time, but only
one of those objects will *win* the race to become "the" add-on instance, and
no thread can know in advance whether it will win.

Thus, if you wish your ``AddOn`` instances to do something *to* their
constructor arguments at initialization time, you must either give up on your
add-on being thread-safe, or use some other locking mechanism.

Of course, add-on initialization is only one small part of the overall
thread-safety puzzle.  Unless your add-on exists only to compute some
immutable metadata about its subject, the rest of your add-on's methods need
to be thread-safe also.  One way to do that is to use a ``@synchronized``
decorator, combined with a ``Locking`` add-on::

    >>> class Locking(AddOn):
    ...     def __init__(self, subject):
    ...         from threading import RLock
    ...         self.lock = RLock()
    ...     def acquire(self):
    ...         print "acquiring"
    ...         self.lock.acquire()
    ...     def release(self):
    ...         self.lock.release()
    ...         print "released"

    >>> def synchronized(func):
    ...     def wrapper(self, *__args,**__kw):
    ...         Locking(self).acquire()
    ...         try:
    ...             func(self, *__args,**__kw)
    ...         finally:
    ...             Locking(self).release()
    ...
    ...     from peak.util.decorators import rewrap
    ...     return rewrap(func, wrapper)

    >>> class AnotherThing:
    ...     def ping(self):
    ...         print "ping"
    ...     ping = synchronized(ping)

    >>> AnotherThing().ping()
    acquiring
    ping
    released

If the ``Locking()`` add-on constructor were not thread-safe, this decorator
would not be able to do its job correctly, because two threads accessing an
object that didn't *have* the add-on yet could end up locking two different
locks, and proceed to run the supposedly-"synchronized" method at the same
time!

(In general, thread-safety is harder than it looks.  But at least you don't
have to worry about this one tiny part of correctly implementing it.)
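
To make the earlier caveat about ``__init__`` side effects concrete, here is
a sketch of the kind of code to avoid in threaded programs; the ``AuditLog``
add-on and ``audit_trail`` list are invented for this illustration.  In a
single thread the side effect runs once, but in a creation race every
*losing* instance would run it too, even though only one instance becomes
"the" add-on::

    >>> audit_trail = []

    >>> class AuditLog(AddOn):
    ...     def __init__(self, subject):
    ...         # Don't rely on this running exactly once per subject in
    ...         # threaded code: instances that lose the creation race still
    ...         # execute their __init__.
    ...         audit_trail.append("started auditing")

    >>> class Job: pass
    >>> job = Job()

    >>> AuditLog(job) is AuditLog(job)
    True
    >>> audit_trail
    ['started auditing']

If a side effect like this matters, move it out of ``__init__`` (or guard it
with a lock of your own), just as the ``Locking`` example above protects
ordinary method calls.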

Of course, synchronized methods will be slower than normal methods, which is
why AddOns doesn't do anything besides that one small part of the
thread-safety puzzle, to avoid penalizing non-threaded code.  As the PEAK
motto says, STASCTAP!  (Simple Things Are Simple, Complex Things Are
Possible.)

Mailing List
------------

Questions, discussion, and bug reports for this software should be directed
to the PEAK mailing list; see http://www.eby-sarna.com/mailman/listinfo/PEAK/
for details.

AddOns-0.7/PKG-INFO
Metadata-Version: 1.0
Name: AddOns
Version: 0.7
Summary: Dynamically extend other objects with AddOns (formerly ObjectRoles)
Home-page: http://pypi.python.org/pypi/AddOns
Author: Phillip J. Eby
Author-email: peak@eby-sarna.com
License: PSF or ZPL
Description: (same text as README.txt)
Platform: UNKNOWN