Epsilon-0.7.1/0000755000175000017500000000000012606214253015111 5ustar mithrandimithrandi00000000000000Epsilon-0.7.1/Epsilon.egg-info/0000755000175000017500000000000012606214253020214 5ustar mithrandimithrandi00000000000000Epsilon-0.7.1/Epsilon.egg-info/PKG-INFO0000644000175000017500000000105712606214253021314 0ustar mithrandimithrandi00000000000000Metadata-Version: 1.1 Name: Epsilon Version: 0.7.1 Summary: A set of utility modules used by Divmod projects Home-page: https://github.com/twisted/epsilon Author: UNKNOWN Author-email: UNKNOWN License: MIT Description: UNKNOWN Platform: any Classifier: Development Status :: 5 - Production/Stable Classifier: Framework :: Twisted Classifier: Intended Audience :: Developers Classifier: License :: OSI Approved :: MIT License Classifier: Programming Language :: Python Classifier: Topic :: Internet Classifier: Topic :: Security Classifier: Topic :: Utilities Epsilon-0.7.1/Epsilon.egg-info/SOURCES.txt0000644000175000017500000000343112606214253022101 0ustar mithrandimithrandi00000000000000LICENSE MANIFEST.in NAME.txt NEWS.txt README setup.cfg setup.py versioneer.py Epsilon.egg-info/PKG-INFO Epsilon.egg-info/SOURCES.txt Epsilon.egg-info/dependency_links.txt Epsilon.egg-info/requires.txt Epsilon.egg-info/top_level.txt bin/benchmark bin/certcreate epsilon/__init__.py epsilon/_version.py epsilon/ampauth.py epsilon/amprouter.py epsilon/asplode.py epsilon/caseless.py epsilon/cooperator.py epsilon/descriptor.py epsilon/expose.py epsilon/extime.py epsilon/hotfix.py epsilon/iepsilon.py epsilon/juice.py epsilon/liner.py epsilon/modal.py epsilon/pending.py epsilon/process.py epsilon/react.py epsilon/remember.py epsilon/setuphelper.py epsilon/spewer.py epsilon/structlike.py epsilon/unrepr.py epsilon/view.py epsilon/hotfixes/__init__.py epsilon/hotfixes/deferredgenerator_tfailure.py epsilon/hotfixes/delayedcall_seconds.py epsilon/hotfixes/filepath_copyTo.py epsilon/hotfixes/internet_task_clock.py epsilon/hotfixes/loopbackasync_reentrancy.py 
epsilon/hotfixes/plugin_package_paths.py epsilon/hotfixes/proto_helpers_stringtransport.py epsilon/hotfixes/timeoutmixin_calllater.py epsilon/hotfixes/trial_assertwarns.py epsilon/scripts/__init__.py epsilon/scripts/benchmark.py epsilon/scripts/certcreate.py epsilon/test/__init__.py epsilon/test/iosim.py epsilon/test/mantissa-structure.py epsilon/test/test_ampauth.py epsilon/test/test_amprouter.py epsilon/test/test_benchmark.py epsilon/test/test_caseless.py epsilon/test/test_descriptor.py epsilon/test/test_expose.py epsilon/test/test_extime.py epsilon/test/test_juice.py epsilon/test/test_modes.py epsilon/test/test_process.py epsilon/test/test_react.py epsilon/test/test_remember.py epsilon/test/test_setuphelper.py epsilon/test/test_structlike.py epsilon/test/test_unrepr.py epsilon/test/test_version.py epsilon/test/test_view.py epsilon/test/utils.pyEpsilon-0.7.1/Epsilon.egg-info/dependency_links.txt0000644000175000017500000000000112606214253024262 0ustar mithrandimithrandi00000000000000 Epsilon-0.7.1/Epsilon.egg-info/requires.txt0000644000175000017500000000004012606214253022606 0ustar mithrandimithrandi00000000000000Twisted>=13.2.0 PyOpenSSL>=0.13 Epsilon-0.7.1/Epsilon.egg-info/top_level.txt0000644000175000017500000000001012606214253022735 0ustar mithrandimithrandi00000000000000epsilon Epsilon-0.7.1/bin/0000755000175000017500000000000012606214253015661 5ustar mithrandimithrandi00000000000000Epsilon-0.7.1/bin/benchmark0000755000175000017500000000011212461004115017524 0ustar mithrandimithrandi00000000000000#!/usr/bin/python from epsilon.scripts import benchmark benchmark.main() Epsilon-0.7.1/bin/certcreate0000755000175000017500000000021012461004115017712 0ustar mithrandimithrandi00000000000000#!/usr/bin/python # Copyright 2005 Divmod, Inc. 
See LICENSE file for details from epsilon.scripts import certcreate certcreate.main() Epsilon-0.7.1/epsilon/0000755000175000017500000000000012606214253016562 5ustar mithrandimithrandi00000000000000Epsilon-0.7.1/epsilon/hotfixes/0000755000175000017500000000000012606214253020413 5ustar mithrandimithrandi00000000000000Epsilon-0.7.1/epsilon/hotfixes/__init__.py0000644000175000017500000000000012461004115022503 0ustar mithrandimithrandi00000000000000Epsilon-0.7.1/epsilon/hotfixes/deferredgenerator_tfailure.py0000644000175000017500000000357112461004115026346 0ustar mithrandimithrandi00000000000000 from twisted.python import failure from twisted.internet import defer def getResult(self): if isinstance(self.result, failure.Failure): self.result.raiseException() return self.result def _deferGenerator(g, deferred=None): """ See L{waitForDeferred}. """ result = None while 1: if deferred is None: deferred = defer.Deferred() try: result = g.next() except StopIteration: deferred.callback(result) return deferred except: deferred.errback() return deferred # Deferred.callback(Deferred) raises an error; we catch this case # early here and give a nicer error message to the user in case # they yield a Deferred. Perhaps eventually these semantics may # change. 
if isinstance(result, defer.Deferred): return defer.fail(TypeError("Yield waitForDeferred(d), not d!")) if isinstance(result, defer.waitForDeferred): waiting = [True, None] # Pass vars in so they don't get changed going around the loop def gotResult(r, waiting=waiting, result=result): result.result = r if waiting[0]: waiting[0] = False waiting[1] = r else: _deferGenerator(g, deferred) result.d.addBoth(gotResult) if waiting[0]: # Haven't called back yet, set flag so that we get reinvoked # and return from the loop waiting[0] = False return deferred result = None # waiting[1] def install(): getResult.__module__ = 'twisted.internet.defer' defer.waitForDeferred.getResult = getResult _deferGenerator.__module__ = 'twisted.internet.defer' defer._deferGenerator = _deferGenerator Epsilon-0.7.1/epsilon/hotfixes/delayedcall_seconds.py0000644000175000017500000001216212461004115024741 0ustar mithrandimithrandi00000000000000 import traceback from zope.interface import implements from twisted.persisted import styles from twisted.internet.interfaces import IDelayedCall from twisted.internet import error, base from twisted.python import reflect class DelayedCall(styles.Ephemeral): implements(IDelayedCall) # enable .debug to record creator call stack, and it will be logged if # an exception occurs while the function is being run debug = False _str = None def __init__(self, time, func, args, kw, cancel, reset, seconds=None): self.time, self.func, self.args, self.kw = time, func, args, kw self.resetter = reset self.canceller = cancel self.seconds = seconds self.cancelled = self.called = 0 self.delayed_time = 0 if self.debug: self.creator = traceback.format_stack()[:-2] def getTime(self): """Return the time at which this call will fire @rtype: C{float} @return: The number of seconds after the epoch at which this call is scheduled to be made. 
""" return self.time + self.delayed_time def cancel(self): """Unschedule this call @raise AlreadyCancelled: Raised if this call has already been unscheduled. @raise AlreadyCalled: Raised if this call has already been made. """ if self.cancelled: raise error.AlreadyCancelled elif self.called: raise error.AlreadyCalled else: self.canceller(self) self.cancelled = 1 if self.debug: self._str = str(self) del self.func, self.args, self.kw def reset(self, secondsFromNow): """Reschedule this call for a different time @type secondsFromNow: C{float} @param secondsFromNow: The number of seconds from the time of the C{reset} call at which this call will be scheduled. @raise AlreadyCancelled: Raised if this call has been cancelled. @raise AlreadyCalled: Raised if this call has already been made. """ if self.cancelled: raise error.AlreadyCancelled elif self.called: raise error.AlreadyCalled else: if self.seconds is None: new_time = base.seconds() + secondsFromNow else: new_time = self.seconds() + secondsFromNow if new_time < self.time: self.delayed_time = 0 self.time = new_time self.resetter(self) else: self.delayed_time = new_time - self.time def delay(self, secondsLater): """Reschedule this call for a later time @type secondsLater: C{float} @param secondsLater: The number of seconds after the originally scheduled time for which to reschedule this call. @raise AlreadyCancelled: Raised if this call has been cancelled. @raise AlreadyCalled: Raised if this call has already been made. """ if self.cancelled: raise error.AlreadyCancelled elif self.called: raise error.AlreadyCalled else: self.delayed_time += secondsLater if self.delayed_time < 0: self.activate_delay() self.resetter(self) def activate_delay(self): self.time += self.delayed_time self.delayed_time = 0 def active(self): """Determine whether this call is still pending @rtype: C{bool} @return: True if this call has not yet been made or cancelled, False otherwise. 
""" return not (self.cancelled or self.called) def __le__(self, other): return self.time <= other.time def __str__(self): if self._str is not None: return self._str if hasattr(self, 'func'): if hasattr(self.func, 'func_name'): func = self.func.func_name if hasattr(self.func, 'im_class'): func = self.func.im_class.__name__ + '.' + func else: func = reflect.safe_repr(self.func) else: func = None if self.seconds is None: now = base.seconds() else: now = self.seconds() L = ["') return "".join(L) def install(): global DelayedCall base.DelayedCall.__dict__ = DelayedCall.__dict__ base.DelayedCall.__dict__['__module__'] = 'twisted.internet.base' DelayedCall = base.DelayedCall Epsilon-0.7.1/epsilon/hotfixes/filepath_copyTo.py0000644000175000017500000003034612461004115024115 0ustar mithrandimithrandi00000000000000# -*- test-case-name: twisted.test.test_paths -*- # Copyright (c) 2001-2004 Twisted Matrix Laboratories. # See LICENSE for details. from __future__ import generators import os import errno import base64 import random import sha from os.path import isabs, exists, normpath, abspath, splitext from os.path import basename, dirname from os.path import join as joinpath from os import sep as slash from os import listdir, utime, stat from os import remove from stat import ST_MODE, ST_MTIME, ST_ATIME, ST_CTIME, ST_SIZE from stat import S_ISREG, S_ISDIR, S_ISLNK try: from os.path import islink except ImportError: def islink(path): return False try: from os import urandom as randomBytes except ImportError: def randomBytes(n): randomData = [random.randrange(256) for n in xrange(n)] return ''.join(map(chr, randomData)) try: from base64 import urlsafe_b64encode as armor except ImportError: def armor(s): return s.encode('hex') class InsecurePath(Exception): pass def _secureEnoughString(): """ Create a pseudorandom, 16-character string for use in secure filenames. 
""" return armor(sha.new(randomBytes(64)).digest())[:16] class FilePath: """I am a path on the filesystem that only permits 'downwards' access. Instantiate me with a pathname (for example, FilePath('/home/myuser/public_html')) and I will attempt to only provide access to files which reside inside that path. I may be a path to a file, a directory, or a file which does not exist. The correct way to use me is to instantiate me, and then do ALL filesystem access through me. In other words, do not import the 'os' module; if you need to open a file, call my 'open' method. If you need to list a directory, call my 'path' method. Even if you pass me a relative path, I will convert that to an absolute path internally. @type alwaysCreate: C{bool} @ivar alwaysCreate: When opening this file, only succeed if the file does not already exist. """ # __slots__ = 'path abs'.split() statinfo = None def __init__(self, path, alwaysCreate=False): self.path = abspath(path) self.alwaysCreate = alwaysCreate def __getstate__(self): d = self.__dict__.copy() if d.has_key('statinfo'): del d['statinfo'] return d def child(self, path): norm = normpath(path) if slash in norm: raise InsecurePath("%r contains one or more directory separators" % (path,)) newpath = abspath(joinpath(self.path, norm)) if not newpath.startswith(self.path): raise InsecurePath("%r is not a child of %s" % (newpath, self.path)) return self.clonePath(newpath) def preauthChild(self, path): """ Use me if `path' might have slashes in it, but you know they're safe. (NOT slashes at the beginning. It still needs to be a _child_). """ newpath = abspath(joinpath(self.path, normpath(path))) if not newpath.startswith(self.path): raise InsecurePath("%s is not a child of %s" % (newpath, self.path)) return self.clonePath(newpath) def childSearchPreauth(self, *paths): """Return my first existing child with a name in 'paths'. 
paths is expected to be a list of *pre-secured* path fragments; in most cases this will be specified by a system administrator and not an arbitrary user. If no appropriately-named children exist, this will return None. """ p = self.path for child in paths: jp = joinpath(p, child) if exists(jp): return self.clonePath(jp) def siblingExtensionSearch(self, *exts): """Attempt to return a path with my name, given multiple possible extensions. Each extension in exts will be tested and the first path which exists will be returned. If no path exists, None will be returned. If '' is in exts, then if the file referred to by this path exists, 'self' will be returned. The extension '*' has a magic meaning, which means "any path that begins with self.path+'.' is acceptable". """ p = self.path for ext in exts: if not ext and self.exists(): return self if ext == '*': basedot = basename(p)+'.' for fn in listdir(dirname(p)): if fn.startswith(basedot): return self.clonePath(joinpath(dirname(p), fn)) p2 = p + ext if exists(p2): return self.clonePath(p2) def siblingExtension(self, ext): return self.clonePath(self.path+ext) def open(self, mode='r'): if self.alwaysCreate: assert 'a' not in mode, "Appending not supported when alwaysCreate == True" return self.create() return open(self.path, mode+'b') # stat methods below def restat(self, reraise=True): try: self.statinfo = stat(self.path) except OSError: self.statinfo = 0 if reraise: raise def getsize(self): st = self.statinfo if not st: self.restat() st = self.statinfo return st[ST_SIZE] def getmtime(self): st = self.statinfo if not st: self.restat() st = self.statinfo return st[ST_MTIME] def getctime(self): st = self.statinfo if not st: self.restat() st = self.statinfo return st[ST_CTIME] def getatime(self): st = self.statinfo if not st: self.restat() st = self.statinfo return st[ST_ATIME] def exists(self): if self.statinfo: return True elif self.statinfo is None: self.restat(False) return self.exists() else: return False def 
isdir(self): st = self.statinfo if not st: self.restat(False) st = self.statinfo if not st: return False return S_ISDIR(st[ST_MODE]) def isfile(self): st = self.statinfo if not st: self.restat(False) st = self.statinfo if not st: return False return S_ISREG(st[ST_MODE]) def islink(self): st = self.statinfo if not st: self.restat(False) st = self.statinfo if not st: return False return S_ISLNK(st[ST_MODE]) def isabs(self): return isabs(self.path) def listdir(self): return listdir(self.path) def splitext(self): return splitext(self.path) def __repr__(self): return 'FilePath(%r)' % self.path def touch(self): try: self.open('a').close() except IOError: pass utime(self.path, None) def remove(self): if self.isdir(): for child in self.children(): child.remove() os.rmdir(self.path) else: os.remove(self.path) self.restat(False) def makedirs(self): return os.makedirs(self.path) def globChildren(self, pattern): """ Assuming I am representing a directory, return a list of FilePaths representing my children that match the given pattern. """ import glob path = self.path[-1] == '/' and self.path + pattern or slash.join([self.path, pattern]) return map(self.clonePath, glob.glob(path)) def basename(self): return basename(self.path) def dirname(self): return dirname(self.path) def parent(self): return self.clonePath(self.dirname()) def setContent(self, content, ext='.new'): sib = self.siblingExtension(ext) sib.open('w').write(content) os.rename(sib.path, self.path) def getContent(self): return self.open().read() # new in 2.2.0 def __cmp__(self, other): if not isinstance(other, FilePath): return NotImplemented return cmp(self.path, other.path) def createDirectory(self): os.mkdir(self.path) def requireCreate(self, val=1): self.alwaysCreate = val def create(self): """Exclusively create a file, only if this file previously did not exist. 
""" fdint = os.open(self.path, (os.O_EXCL | os.O_CREAT | os.O_RDWR)) # XXX TODO: 'name' attribute of returned files is not mutable or # settable via fdopen, so this file is slighly less functional than the # one returned from 'open' by default. send a patch to Python... return os.fdopen(fdint, 'w+b') def temporarySibling(self): """ Create a path naming a temporary sibling of this path in a secure fashion. """ sib = self.parent().child(_secureEnoughString() + self.basename()) sib.requireCreate() return sib def children(self): return map(self.child, self.listdir()) def walk(self): yield self if self.isdir(): for c in self.children(): for subc in c.walk(): yield subc _chunkSize = 2 ** 2 ** 2 ** 2 def copyTo(self, destination): # XXX TODO: *thorough* audit and documentation of the exact desired # semantics of this code. Right now the behavior of existent # destination symlinks is convenient, and quite possibly correct, but # its security properties need to be explained. if self.isdir(): if not destination.exists(): destination.createDirectory() for child in self.children(): destChild = destination.child(child.basename()) child.copyTo(destChild) elif self.isfile(): writefile = destination.open('w') readfile = self.open() while 1: # XXX TODO: optionally use os.open, os.read and O_DIRECT and # use os.fstatvfs to determine chunk sizes and make # *****sure**** copy is page-atomic; the following is good # enough for 99.9% of everybody and won't take a week to audit # though. chunk = readfile.read(self._chunkSize) writefile.write(chunk) if len(chunk) < self._chunkSize: break writefile.close() readfile.close() else: # If you see the following message because you want to copy # symlinks, fifos, block devices, character devices, or unix # sockets, please feel free to add support to do sensible things in # reaction to those types! 
raise NotImplementedError( "Only copying of files and directories supported") def moveTo(self, destination): try: os.rename(self.path, destination.path) self.restat(False) except OSError, ose: if ose.errno == errno.EXDEV: # man 2 rename, ubuntu linux 5.10 "breezy": # oldpath and newpath are not on the same mounted filesystem. # (Linux permits a filesystem to be mounted at multiple # points, but rename(2) does not work across different mount # points, even if the same filesystem is mounted on both.) # that means it's time to copy trees of directories! secsib = destination.secureSibling() self.copyTo(secsib) # slow secsib.moveTo(destination) # visible # done creating new stuff. let's clean me up. mysecsib = self.secureSibling() self.moveTo(mysecsib) # visible mysecsib.remove() # slow else: raise FilePath.clonePath = FilePath def install(): global FilePath from twisted.python import filepath filepath.FilePath.__dict__ = FilePath.__dict__ filepath.FilePath.__dict__['__module__'] = 'twisted.python.filepath' FilePath = filepath.FilePath Epsilon-0.7.1/epsilon/hotfixes/internet_task_clock.py0000644000175000017500000000202512461004115025002 0ustar mithrandimithrandi00000000000000""" Fix from Twisted r20480. """ from twisted.internet.task import Clock from twisted.internet import base def callLater(self, when, what, *a, **kw): """ Copied from twisted.internet.task.Clock, r20480. Fixes the bug where the wrong DelayedCall would sometimes be returned. """ dc = base.DelayedCall(self.seconds() + when, what, a, kw, self.calls.remove, lambda c: None, self.seconds) self.calls.append(dc) self.calls.sort(lambda a, b: cmp(a.getTime(), b.getTime())) return dc def clockIsBroken(): """ Returns whether twisted.internet.task.Clock has the bug that returns the wrong DelayedCall or not. """ clock = Clock() dc1 = clock.callLater(10, lambda: None) dc2 = clock.callLater(1, lambda: None) if dc1 is dc2: return True else: return False def install(): """ Insert the fixed callLater method. 
""" Clock.callLater = callLater Epsilon-0.7.1/epsilon/hotfixes/loopbackasync_reentrancy.py0000644000175000017500000000165312461004115026045 0ustar mithrandimithrandi00000000000000 """ Fix from Twisted r23970 """ from twisted.internet.task import deferLater from twisted.protocols.loopback import _loopbackAsyncBody def _loopbackAsyncContinue(ignored, server, serverToClient, client, clientToServer): # Clear the Deferred from each message queue, since it has already fired # and cannot be used again. clientToServer._notificationDeferred = serverToClient._notificationDeferred = None # Schedule some more byte-pushing to happen. This isn't done # synchronously because no actual transport can re-enter dataReceived as # a result of calling write, and doing this synchronously could result # in that. from twisted.internet import reactor return deferLater( reactor, 0, _loopbackAsyncBody, server, serverToClient, client, clientToServer) def install(): from twisted.protocols import loopback loopback._loopbackAsyncContinue = _loopbackAsyncContinue Epsilon-0.7.1/epsilon/hotfixes/plugin_package_paths.py0000644000175000017500000000263112461004115025130 0ustar mithrandimithrandi00000000000000# Copyright (c) 2007 Twisted Matrix Laboratories. # Copyright (c) 2008 Divmod. # See LICENSE for details. import sys, os def pluginPackagePaths(name): """ Return a list of additional directories which should be searched for modules to be included as part of the named plugin package. @type name: C{str} @param name: The fully-qualified Python name of a plugin package, eg C{'twisted.plugins'}. @rtype: C{list} of C{str} @return: The absolute paths to other directories which may contain plugin modules for the named plugin package. """ package = name.split('.') # Note that this may include directories which do not exist. It may be # preferable to remove such directories at this point, rather than allow # them to be searched later on. 
# # Note as well that only '__init__.py' will be considered to make a # directory a package (and thus exclude it from this list). This means # that if you create a master plugin package which has some other kind of # __init__ (eg, __init__.pyc) it will be incorrectly treated as a # supplementary plugin directory. return [ os.path.abspath(os.path.join(x, *package)) for x in sys.path if not os.path.exists(os.path.join(x, *package + ['__init__.py']))] def install(): import twisted.plugin twisted.plugin.pluginPackagePaths = pluginPackagePaths Epsilon-0.7.1/epsilon/hotfixes/proto_helpers_stringtransport.py0000644000175000017500000000052512461004115027210 0ustar mithrandimithrandi00000000000000from twisted.test import proto_helpers class StringTransport: def write(self, data): if isinstance(data, unicode): # no, really, I mean it raise TypeError("Data must not be unicode") self.io.write(data) def install(): proto_helpers.StringTransport.__dict__['write'] = StringTransport.__dict__['write'] Epsilon-0.7.1/epsilon/hotfixes/timeoutmixin_calllater.py0000644000175000017500000000333512461004115025540 0ustar mithrandimithrandi00000000000000 from twisted.internet import reactor class TimeoutMixin: """Mixin for protocols which wish to timeout connections @cvar timeOut: The number of seconds after which to timeout the connection. """ timeOut = None __timeoutCall = None def callLater(self, period, func): return reactor.callLater(period, func) def resetTimeout(self): """Reset the timeout count down""" if self.__timeoutCall is not None and self.timeOut is not None: self.__timeoutCall.reset(self.timeOut) def setTimeout(self, period): """Change the timeout period @type period: C{int} or C{NoneType} @param period: The period, in seconds, to change the timeout to, or C{None} to disable the timeout. 
""" prev = self.timeOut self.timeOut = period if self.__timeoutCall is not None: if period is None: self.__timeoutCall.cancel() self.__timeoutCall = None else: self.__timeoutCall.reset(period) elif period is not None: self.__timeoutCall = self.callLater(period, self.__timedOut) return prev def __timedOut(self): self.__timeoutCall = None self.timeoutConnection() def timeoutConnection(self): """Called when the connection times out. Override to define behavior other than dropping the connection. """ self.transport.loseConnection() def install(): global TimeoutMixin from twisted.protocols import policies policies.TimeoutMixin.__dict__ = TimeoutMixin.__dict__ policies.TimeoutMixin.__dict__['module'] = 'twisted.protocols.policies' TimeoutMixin = policies.TimeoutMixin Epsilon-0.7.1/epsilon/hotfixes/trial_assertwarns.py0000644000175000017500000000410412461004115024524 0ustar mithrandimithrandi00000000000000 """ failUnlessWarns assertion from twisted.trial in Twisted 8.0. """ import warnings def failUnlessWarns(self, category, message, filename, f, *args, **kwargs): """ Fail if the given function doesn't generate the specified warning when called. It calls the function, checks the warning, and forwards the result of the function if everything is fine. @param category: the category of the warning to check. @param message: the output message of the warning to check. @param filename: the filename where the warning should come from. @param f: the function which is supposed to generate the warning. @type f: any callable. @param args: the arguments to C{f}. @param kwargs: the keywords arguments to C{f}. @return: the result of the original function C{f}. 
""" warningsShown = [] def warnExplicit(*args): warningsShown.append(args) origExplicit = warnings.warn_explicit try: warnings.warn_explicit = warnExplicit result = f(*args, **kwargs) finally: warnings.warn_explicit = origExplicit if not warningsShown: self.fail("No warnings emitted") first = warningsShown[0] for other in warningsShown[1:]: if other[:2] != first[:2]: self.fail("Can't handle different warnings") gotMessage, gotCategory, gotFilename, lineno = first[:4] self.assertEqual(gotMessage, message) self.assertIdentical(gotCategory, category) # Use starts with because of .pyc/.pyo issues. self.failUnless( filename.startswith(gotFilename), 'Warning in %r, expected %r' % (gotFilename, filename)) # It would be nice to be able to check the line number as well, but # different configurations actually end up reporting different line # numbers (generally the variation is only 1 line, but that's enough # to fail the test erroneously...). # self.assertEqual(lineno, xxx) return result def install(): from twisted.trial.unittest import TestCase TestCase.failUnlessWarns = TestCase.assertWarns = failUnlessWarns Epsilon-0.7.1/epsilon/scripts/0000755000175000017500000000000012606214253020251 5ustar mithrandimithrandi00000000000000Epsilon-0.7.1/epsilon/scripts/__init__.py0000644000175000017500000000000012461004115022341 0ustar mithrandimithrandi00000000000000Epsilon-0.7.1/epsilon/scripts/benchmark.py0000644000175000017500000004401512606176061022565 0ustar mithrandimithrandi00000000000000# -*- test-case-name: epsilon.test.test_benchmark -*- """ Functions for running a Python file in a child process and recording resource usage information and other statistics about it. 
""" import os, time, sys, socket, StringIO, pprint, errno import twisted from twisted.python import log, filepath, failure, util from twisted.internet import reactor, protocol, error, defer from twisted.protocols import policies import epsilon from epsilon import structlike from epsilon import juice from epsilon.test import utils class diskstat(structlike.record( 'readCount mergedReadCount readSectorCount readMilliseconds ' 'writeCount mergedWriteCount writeSectorCount writeMilliseconds ' 'outstandingIOCount ioMilliseconds weightedIOMilliseconds')): """ Represent the I/O stats of a single device, as reported by Linux's disk stats. """ class partitionstat(structlike.record( 'readCount readSectorCount writeCount writeSectorCount')): """ Like diskstat, but for a partition. Less information is made available by Linux for partitions, so this has fewer attributes. """ def parseDiskStatLine(L): """ Parse a single line from C{/proc/diskstats} into a two-tuple of the name of the device to which it corresponds (ie 'hda') and an instance of the appropriate record type (either L{partitionstat} or L{diskstat}). """ parts = L.split() device = parts[2] if len(parts) == 7: factory = partitionstat else: factory = diskstat return device, factory(*map(int, parts[3:])) def parseDiskStats(fObj): """ Parse a file-like object containing lines formatted like those in C{/proc/diskstats}. Yield two-tuples of information for each line. """ for L in fObj: yield parseDiskStatLine(L) def captureStats(): """ Parse the current contents of C{/proc/diskstats} into a dict mapping device names to instances of the appropriate stat record. """ return dict(parseDiskStats(file('/proc/diskstats'))) class ResourceSnapshot(structlike.record('time disk partition size')): """ Represents the state of some resources on the system at a particular moment in time. @ivar time: The time at which the stats associated with this instance were recorded. 
@ivar disk: A C{diskstat} instance created from stats available that the given time. @ivar partition: A C{diskstat} instance created from stats available that the given time. @ivar size: Total size of all files beneath a particular directory. """ class ProcessDied(Exception): """ Encapsulates process state and failure mode. """ def __init__(self, exitCode, signal, status, output): self.exitCode = exitCode self.signal = signal self.status = status self.output = output Exception.__init__(self) class BasicProcess(protocol.ProcessProtocol, policies.TimeoutMixin): """ The simplest possible process protocol. It doesn't do anything except what is absolutely mandatory of any conceivable ProcessProtocol. """ timedOut = False BACKCHANNEL_OUT = 3 BACKCHANNEL_IN = 4 def __init__(self, whenFinished, path): self.whenFinished = whenFinished self.path = path self.output = [] def connectionMade(self): self.setTimeout(900.0) def timeoutConnection(self): self.timedOut = True self.transport.signalProcess('KILL') def childDataReceived(self, childFD, data): self.resetTimeout() self.output.append((childFD, data)) def childConnectionLost(self, childFD): self.resetTimeout() self.output.append((childFD, None)) def processEnded(self, reason): # XXX Okay, I'm a liar. This doesn't do everything. Strictly speaking # we shouldn't fire completion notification until the process has # terminated *and* the file descriptors have all been closed. We're # not supporting passing file descriptors from the child to a # grandchild here, though. Don't Do It. 
d, self.whenFinished = self.whenFinished, None o, self.output = self.output, None if reason.check(error.ProcessDone): d.callback((self, reason.value.status, o)) elif self.timedOut: d.errback(error.TimeoutError()) elif reason.check(error.ProcessTerminated): d.errback(failure.Failure(ProcessDied( reason.value.exitCode, reason.value.signal, reason.value.status, o))) else: d.errback(reason.value) self.setTimeout(None) def spawn(cls, executable, args, path, env, spawnProcess=None): """ Run an executable with some arguments in the given working directory with the given environment variables. Returns a Deferred which fires with a two-tuple of (exit status, output list) if the process terminates without timing out or being killed by a signal. Otherwise, the Deferred errbacks with either L{error.TimeoutError} if any 10 minute period passes with no events or L{ProcessDied} if it is killed by a signal. On success, the output list is of two-tuples of (file descriptor, bytes). """ d = defer.Deferred() proto = cls(d, filepath.FilePath(path)) if spawnProcess is None: spawnProcess = reactor.spawnProcess spawnProcess( proto, executable, [executable] + args, path=path, env=env, childFDs={0: 'w', 1: 'r', 2: 'r', cls.BACKCHANNEL_OUT: 'r', cls.BACKCHANNEL_IN: 'w'}) return d spawn = classmethod(spawn) class Change(object): """ Stores two ResourceSnapshots taken at two different times. """ def start(self, path, disk, partition): # Do these three things as explicit, separate statments to make sure # gathering disk stats isn't accidentally included in the duration. startSize = getSize(path) beforeDiskStats = captureStats() startTime = time.time() self.before = ResourceSnapshot( time=startTime, disk=beforeDiskStats.get(disk, None), partition=beforeDiskStats.get(partition, None), size=startSize) def stop(self, path, disk, partition): # Do these three things as explicit, separate statments to make sure # gathering disk stats isn't accidentally included in the duration. 
endTime = time.time() afterDiskStats = captureStats() endSize = getSize(path) self.after = ResourceSnapshot( time=endTime, disk=afterDiskStats.get(disk, None), partition=afterDiskStats.get(partition, None), size=endSize) class BenchmarkProcess(BasicProcess): START = '\0' STOP = '\1' def __init__(self, *a, **kw): BasicProcess.__init__(self, *a, **kw) # Figure out where the process is running. self.partition = discoverCurrentWorkingDevice().split('/')[-1] self.disk = self.partition.rstrip('0123456789') # Keep track of stats for the entire process run. self.overallChange = Change() self.overallChange.start(self.path, self.disk, self.partition) # Just keep track of stats between START and STOP events. self.benchmarkChange = Change() def connectionMade(self): return BasicProcess.connectionMade(self) def startTiming(self): self.benchmarkChange.start(self.path, self.disk, self.partition) self.transport.writeToChild(self.BACKCHANNEL_IN, self.START) def stopTiming(self): self.benchmarkChange.stop(self.path, self.disk, self.partition) self.transport.writeToChild(self.BACKCHANNEL_IN, self.STOP) def childDataReceived(self, childFD, data): if childFD == self.BACKCHANNEL_OUT: self.resetTimeout() for byte in data: if byte == self.START: self.startTiming() elif byte == self.STOP: self.stopTiming() else: self.transport.signalProcess('QUIT') else: return BasicProcess.childDataReceived(self, childFD, data) def processEnded(self, reason): self.overallChange.stop(self.path, self.disk, self.partition) return BasicProcess.processEnded(self, reason) STATS_VERSION = 0 class Results(juice.Command): commandName = 'Result' arguments = [ # Stats version - change this whenever the meaning of something changes # or a field is added or removed. ('version', juice.Integer()), # If an error occurred while collecting these stats - this probably # means they're bogus. 
('error', juice.Boolean()), # If a particular timeout (See BasicProcess.connectionMade) elapsed # with no events whatsoever from the benchmark process. ('timeout', juice.Boolean()), # A unique name identifying the benchmark for which these are stats. ('name', juice.Unicode()), # The name of the benchmark associated with these stats. ('host', juice.Unicode()), # The sector size of the disk on which these stats were collected # (sectors are a gross lie, this is really the block size, and # everything else that talks about sectors is really talking about # blocks). ('sector_size', juice.Integer()), # Hex version info for the Python which generated these stats. ('python_version', juice.Unicode()), # Twisted SVN revision number used to generate these stats. ('twisted_version', juice.Unicode()), # Divmod SVN revision number used to generate these stats. ('divmod_version', juice.Unicode()), # Number of seconds between process startup and termination. ('elapsed', juice.Float()), # Size, in bytes, of the directory in which the child process was run. ('filesystem_growth', juice.Integer()), # Number of reads issued on the partition over the lifetime of the # child process. This may include reads from other processes, if any # were active on the same disk when the stats were collected. ('read_count', juice.Integer(optional=True)), # Number of sectors which were read from the partition over the # lifetime of the child process. Same caveat as above. ('read_sectors', juice.Integer(optional=True)), # Number of writes issued to the partition over the lifetime of the # child process. Same caveat as above. ('write_count', juice.Integer(optional=True)), # Number of sectors which were written to the partition over the # lifetime of the child process. Same caveat as above. ('write_sectors', juice.Integer(optional=True)), # Number of milliseconds spent blocked on reading from the disk over # the lifetime of the child process. Same caveat as above. 
('read_ms', juice.Integer(optional=True)), # Number of milliseconds spent blocked on writing to the disk over the # lifetime of the child process. Same caveat as above. ('write_ms', juice.Integer(optional=True)), ] hostname = socket.gethostname() assert hostname != 'localhost', "Fix your computro." def formatResults(name, sectorSize, before, after, error, timeout): output = StringIO.StringIO() jj = juice.Juice(issueGreeting=False) tt = utils.FileWrapper(output) jj.makeConnection(tt) if after.partition is not None: read_count = after.partition.readCount - before.partition.readCount read_sectors = after.partition.readSectorCount - before.partition.readSectorCount write_count = after.partition.writeCount - before.partition.writeCount write_sectors = after.partition.writeSectorCount - before.partition.writeSectorCount else: read_count = None read_sectors = None write_count = None write_sectors = None if after.disk is not None: read_ms = after.disk.readMilliseconds - before.disk.readMilliseconds write_ms = after.disk.writeMilliseconds - before.disk.writeMilliseconds else: read_ms = None write_ms = None twisted_version = twisted.version._getSVNVersion() if twisted_version is None: twisted_version = twisted.version.short() epsilon_version = epsilon.version._getSVNVersion() if epsilon_version is None: epsilon_version = epsilon.version.short() Results( version=STATS_VERSION, error=error, timeout=timeout, name=name, host=hostname, elapsed=after.time - before.time, sector_size=sectorSize, read_count=read_count, read_sectors=read_sectors, read_ms=read_ms, write_count=write_count, write_sectors=write_sectors, write_ms=write_ms, filesystem_growth=after.size - before.size, python_version=unicode(sys.hexversion), twisted_version=twisted_version, divmod_version=epsilon_version, ).do(jj, requiresAnswer=False) return output.getvalue() def reportResults(results): print results print fObj = file('output', 'ab') fObj.write(results) fObj.close() def discoverCurrentWorkingDevice(): """ 
Return a short string naming the device which backs the current working directory, ie C{/dev/hda1}. """ possibilities = [] cwd = os.getcwd() for L in file('/proc/self/mounts'): parts = L.split() if cwd.startswith(parts[1]): possibilities.append((len(parts[1]), parts[0])) possibilities.sort() return possibilities[-1][-1] def getSize(p): """ @type p: L{twisted.python.filepath.FilePath} @return: The size, in bytes, of the given path and all its children. """ return sum(getOneSize(ch) for ch in p.walk()) def getOneSize(ch): """ @type ch: L{twisted.python.filepath.FilePath} @return: The size, in bytes, of the given path only. """ try: return ch.getsize() except OSError, e: if e.errno == errno.ENOENT: # XXX FilePath is broken if os.path.islink(ch.path): return len(os.readlink(ch.path)) else: raise else: raise def getSectorSize(p): return os.statvfs(p.path).f_bsize def _bench(name, workingPath, function): d = function() def later(result): err = timeout = False if isinstance(result, failure.Failure): err = True if result.check(error.TimeoutError): log.msg("Failing because timeout!") timeout = True elif result.check(ProcessDied): log.msg("Failing because Failure!") pprint.pprint(result.value.output) print result.value.exitCode, result.value.signal else: log.err(result) else: proto, status, output = result stderr = [bytes for (fd, bytes) in output if fd == 2] if status or stderr != [None]: err = True log.msg("Failing because stderr or bad status") pprint.pprint(result) for n, change in [(name + '-overall', proto.overallChange), (name + '-benchmark', proto.benchmarkChange)]: reportResults(formatResults( n, getSectorSize(workingPath), change.before, change.after, err, timeout)) return d.addBoth(later) def bench(name, path, func): log.startLogging(sys.stdout) log.msg("Running " + name) d = _bench(name, path, func) d.addErrback(log.err) d.addCallback(lambda ign: reactor.stop()) reactor.run() def makeBenchmarkRunner(path, args): """ Make a function that will run two Python 
processes serially: first one which calls the setup function from the given file, then one which calls the execute function from the given file. """ def runner(): return BenchmarkProcess.spawn( executable=sys.executable, args=['-Wignore'] + args, path=path.path, env=os.environ) return runner def start(): """ Start recording stats. Call this from a benchmark script when your setup is done. Call this at most once. @raise RuntimeError: Raised if the parent process responds with anything other than an acknowledgement of this message. """ os.write(BenchmarkProcess.BACKCHANNEL_OUT, BenchmarkProcess.START) response = util.untilConcludes(os.read, BenchmarkProcess.BACKCHANNEL_IN, 1) if response != BenchmarkProcess.START: raise RuntimeError( "Parent process responded with %r instead of START " % (response,)) def stop(): """ Stop recording stats. Call this from a benchmark script when the code you want benchmarked has finished. Call this exactly the same number of times you call L{start} and only after calling it. @raise RuntimeError: Raised if the parent process responds with anything other than an acknowledgement of this message. """ os.write(BenchmarkProcess.BACKCHANNEL_OUT, BenchmarkProcess.STOP) response = util.untilConcludes(os.read, BenchmarkProcess.BACKCHANNEL_IN, 1) if response != BenchmarkProcess.STOP: raise RuntimeError( "Parent process responded with %r instead of STOP" % (response,)) def main(): """ Run me with the filename of a benchmark script as an argument. I will time it and append the results to a file named output in the current working directory. """ name = sys.argv[1] path = filepath.FilePath('.stat').temporarySibling() path.makedirs() func = makeBenchmarkRunner(path, sys.argv[1:]) try: bench(name, path, func) finally: path.remove() if __name__ == '__main__': main() Epsilon-0.7.1/epsilon/scripts/certcreate.py0000644000175000017500000000314612461004115022741 0ustar mithrandimithrandi00000000000000# Copyright 2005-2008 Divmod, Inc. 
See LICENSE file for details import sys from twisted.python import usage from twisted.internet.ssl import KeyPair class Options(usage.Options): optParameters = [ ["country", "C", "US", None], ["state", "s", "New York", None], ["city", "c", "New York", None], ["organization", "o", "Divmod LLC", None], ["unit", "u", "Security", None], ["hostname", "h", "divmod.com", None], ["email", "e", "support@divmod.org", None], ["filename", "f", "server.pem", "Name of the file to which to write the PEM."], ["serial-number", "S", 1, None], ] optFlags = [ ['quiet', 'q'] ] def createSSLCertificate(opts): sslopt = {} for x, y in (('country','C'), ('state', 'ST'), ('city', 'L'), ('organization', 'O'), ('unit', 'OU'), ('hostname', 'CN'), ('email','emailAddress')): sslopt[y] = opts[x] serialNumber = int(opts['serial-number']) ssc = KeyPair.generate().selfSignedCert(serialNumber, **sslopt) file(opts['filename'], 'w').write(ssc.dumpPEM()) if not opts['quiet']: print 'Wrote SSL certificate:' print ssc.inspect() return ssc def main(args=None): """ Create a private key and a certificate and write them to a file. 
""" if args is None: args = sys.argv[1:] o = Options() try: o.parseOptions(args) except usage.UsageError, e: raise SystemExit(str(e)) else: return createSSLCertificate(o) Epsilon-0.7.1/epsilon/test/0000755000175000017500000000000012606214253017541 5ustar mithrandimithrandi00000000000000Epsilon-0.7.1/epsilon/test/__init__.py0000644000175000017500000000000012461004115021631 0ustar mithrandimithrandi00000000000000Epsilon-0.7.1/epsilon/test/iosim.py0000644000175000017500000000652612461004115021235 0ustar mithrandimithrandi00000000000000 """Utilities and helpers for simulating a network """ from cStringIO import StringIO from twisted.internet import error from epsilon.test import utils def readAndDestroy(iodata): try: iodata.seek(0) result = iodata.read() iodata.seek(0) iodata.truncate() except ValueError: print '' result = '' return result class IOPump: """Utility to pump data between clients and servers for protocol testing. Perhaps this is a utility worthy of being in protocol.py? """ def __init__(self, client, server, clientIO, serverIO, debug): self.client = client self.server = server self.clientIO = clientIO self.serverIO = serverIO self.debug = debug def flush(self, debug=False): """Pump until there is no more input or output. Returns whether any data was moved. """ result = False for x in range(1000): if self.pump(debug): result = True else: break else: assert 0, "Too long" return result def pump(self, debug=False): """Move data back and forth. Returns whether any data was moved. """ if self.debug or debug: print '-- GLUG --' sData = readAndDestroy(self.serverIO) cData = readAndDestroy(self.clientIO) self.client.transport._checkProducer() self.server.transport._checkProducer() if self.debug or debug: print '.' 
# XXX slightly buggy in the face of incremental output if cData: for line in cData.split('\r\n'): print 'C: '+line if sData: for line in sData.split('\r\n'): print 'S: '+line if cData: self.server.dataReceived(cData) if sData: self.client.dataReceived(sData) if cData or sData: return True if self.server.transport.disconnecting and not self.server.transport.disconnected: if self.debug or debug: print '* C' self.server.transport.disconnected = True self.client.transport.disconnecting = True self.client.connectionLost(error.ConnectionDone("Connection done")) return True if self.client.transport.disconnecting and not self.client.transport.disconnected: if self.debug or debug: print '* S' self.client.transport.disconnected = True self.server.transport.disconnecting = True self.server.connectionLost(error.ConnectionDone("Connection done")) return True return False def connectedServerAndClient(ServerClass, ClientClass, clientTransportWrapper=utils.FileWrapper, serverTransportWrapper=utils.FileWrapper, debug=False): """Returns a 3-tuple: (client, server, pump) """ c = ClientClass() s = ServerClass() cio = StringIO() sio = StringIO() c.makeConnection(clientTransportWrapper(cio)) s.makeConnection(serverTransportWrapper(sio)) pump = IOPump(c, s, cio, sio, debug) # kick off server greeting, etc pump.flush() return c, s, pump Epsilon-0.7.1/epsilon/test/mantissa-structure.py0000644000175000017500000000307112461004115023762 0ustar mithrandimithrandi00000000000000 import sys import os from os.path import join as opj projectName = sys.argv[1] topDir = projectName.capitalize() codeDir = projectName.lower() os.mkdir(topDir) os.mkdir(opj(topDir, codeDir)) file(opj(topDir, codeDir, '__init__.py'), 'w').write(""" # Don't put code here. 
from twisted.python.versions import Version version = Version(%r, 0, 0, 1) """ %(codeDir,)) file(opj(topDir, codeDir, codeDir+'_model.py'), 'w').write(""" from axiom.item import Item from axiom.attributes import text, bytes, integer, reference class %sStart(Item): schemaVersion = 1 # First version of this object. typeName = '%s_start' # Database table name. name = text() # We must have at least one attribute - model # objects must store data. def explode(self): raise Exception('these should fail until you write some tests!') """ % (topDir, codeDir)) os.mkdir(opj(topDir, codeDir, 'test')) file(opj(topDir, codeDir, 'test', '__init__.py'), 'w').write( "# Don't put code here.") file(opj(topDir, codeDir, 'test', 'test_'+codeDir+'.py'), 'w').write(""" from axiom.store import Store from twisted.trial import unittest from %s import %s_model class BasicTest(unittest.TestCase): def setUp(self): self.store = Store() def testUserWroteTests(self): o = %s_model.%sStart(store=self.store, name=u'Test Object') self.assertEquals(1, 0) o.explode() def tearDown(self): self.store.close() """ % (codeDir, codeDir, codeDir, topDir)) Epsilon-0.7.1/epsilon/test/test_ampauth.py0000644000175000017500000003455412461004115022615 0ustar mithrandimithrandi00000000000000# Copyright (c) 2008 Divmod. See LICENSE for details. """ Tests for L{epsilon.ampauth}. 
""" import epsilon.hotfix epsilon.hotfix.require('twisted', 'loopbackasync_reentrancy') from hashlib import sha1 from zope.interface import implements from zope.interface.verify import verifyObject from twisted.python.failure import Failure from twisted.internet.error import ConnectionDone from twisted.cred.error import UnauthorizedLogin from twisted.cred.checkers import InMemoryUsernamePasswordDatabaseDontUse from twisted.cred.credentials import UsernamePassword from twisted.cred.portal import Portal from twisted.protocols.amp import IBoxReceiver, BinaryBoxProtocol, CommandLocator, AMP from twisted.protocols.loopback import loopbackAsync from twisted.trial.unittest import TestCase from epsilon.ampauth import ( _AMPOneTimePad, _AMPUsernamePassword, _calcResponse, UnhandledCredentials, CredReceiver, PasswordLogin, OTPLogin, PasswordChallengeResponse, OneTimePadChecker, CredAMPServerFactory, login) __metaclass__ = type class StubRealm: def __init__(self, avatar): self.avatar = avatar self.loggedOut = 0 self.requests = [] def requestAvatar(self, avatarId, mind, *interfaces): self.requests.append((avatarId, mind, interfaces)) return interfaces[0], self.avatar, self.logout def logout(self): self.loggedOut += 1 class StubAvatar: """ An L{IBoxReceiver} implementation which can be used as an avatar by the L{CredReceiver} tests. """ implements(IBoxReceiver) def startReceivingBoxes(self, sender): self.boxSender = sender def ampBoxReceived(self, box): pass def stopReceivingBoxes(self, reason): pass verifyObject(IBoxReceiver, StubAvatar()) class CredReceiverTests(TestCase): """ Tests for L{CredReceiver}, an L{IBoxReceiver} which integrates with L{twisted.cred} to provide authentication and authorization of AMP connections. """ def setUp(self): """ Create a L{CredReceiver} hooked up to a fake L{IBoxSender} which records boxes sent through it. 
""" self.username = 'alice@example.com' self.password = 'foo bar baz' self.checker = InMemoryUsernamePasswordDatabaseDontUse() self.checker.addUser(self.username, self.password) self.avatar = StubAvatar() self.realm = StubRealm(self.avatar) self.portal = Portal(self.realm, [self.checker]) self.server = CredReceiver() self.server.portal = self.portal self.client = AMP() self.finished = loopbackAsync(self.server, self.client) def test_otpLogin(self): """ L{CredReceiver.otpLogin} returns without error if the pad is valid. """ PAD = 'test_otpLogin' self.portal.registerChecker(OneTimePadChecker({PAD: 'user'})) d = self.server.otpLogin(PAD) def cbLoggedIn(result): self.assertEqual(result, {}) d.addCallback(cbLoggedIn) return d def test_otpLoginUnauthorized(self): """ L{CredReceiver.otpLogin} should fail with L{UnauthorizedLogin} if an invalid pad is received. """ self.portal.registerChecker(OneTimePadChecker({})) return self.assertFailure( self.server.otpLogin('test_otpLoginUnauthorized'), UnauthorizedLogin) def test_otpLoginNotImplemented(self): """ L{CredReceiver.otpLogin} should fail with L{NotImplementedError} if the realm raises L{NotImplementedError} when asked for the avatar. """ def noAvatar(avatarId, mind, *interfaces): raise NotImplementedError() self.realm.requestAvatar = noAvatar PAD = 'test_otpLoginNotImplemented' self.portal.registerChecker(OneTimePadChecker({PAD: 'user'})) return self.assertFailure( self.server.otpLogin(PAD), NotImplementedError) def test_otpLoginResponder(self): """ L{CredReceiver} responds to the L{OTPLogin} command. """ PAD = 'test_otpLoginResponder' self.portal.registerChecker(OneTimePadChecker({PAD: 'user'})) d = self.client.callRemote(OTPLogin, pad=PAD) def cbLoggedIn(result): self.assertEqual(result, {}) d.addCallback(cbLoggedIn) return d def test_passwordLoginDifferentChallenges(self): """ L{CredReceiver.passwordLogin} returns a new challenge each time it is called. 
""" first = self.server.passwordLogin(self.username) second = self.server.passwordLogin(self.username) self.assertNotEqual(first['challenge'], second['challenge']) def test_passwordLoginResponder(self): """ L{CredReceiver} responds to the L{PasswordLogin} L{Command} with a challenge. """ d = self.client.callRemote(PasswordLogin, username=self.username) def cbLogin(result): self.assertIn('challenge', result) d.addCallback(cbLogin) return d def test_determineFromDifferentNonces(self): """ Each time L{PasswordChallengeResponse.determineFrom} is used, it generates a different C{cnonce} value. """ first = PasswordChallengeResponse.determineFrom('a', 'b') second = PasswordChallengeResponse.determineFrom('a', 'b') self.assertNotEqual(first['cnonce'], second['cnonce']) def test_passwordChallengeResponse(self): """ L{CredReceiver.passwordChallengeResponse} returns without error if the response is valid. """ challenge = self.server.passwordLogin(self.username)['challenge'] cnonce = '123abc' cleartext = '%s %s %s' % (challenge, cnonce, self.password) response = sha1(cleartext).digest() d = self.server.passwordChallengeResponse(cnonce, response) def cbLoggedIn(result): self.assertEqual(result, {}) d.addCallback(cbLoggedIn) return d def test_passwordChallengeResponseResponder(self): """ L{CredReceiver} responds to the L{PasswordChallengeResponse} L{Command} with an empty box if the response supplied is valid. """ challenge = self.server.passwordLogin(self.username)['challenge'] d = self.client.callRemote( PasswordChallengeResponse, **PasswordChallengeResponse.determineFrom( challenge, self.password)) def cbResponded(result): self.assertEqual(result, {}) d.addCallback(cbResponded) return d def test_response(self): """ L{PasswordChallengeResponse.determineFrom} generates the correct response to a challenge issued by L{CredReceiver.passwordLogin}. 
""" challenge = self.server.passwordLogin(self.username)['challenge'] result = PasswordChallengeResponse.determineFrom( challenge, self.password) d = self.server.passwordChallengeResponse(**result) def cbLoggedIn(ignored): [(avatarId, mind, interfaces)] = self.realm.requests self.assertEqual(avatarId, self.username) self.assertEqual(interfaces, (IBoxReceiver,)) # The avatar is now the protocol's box receiver. self.assertIdentical(self.server.boxReceiver, self.avatar) # And the avatar has been started up with the protocol's # IBoxSender. self.assertIdentical(self.avatar.boxSender, self.server.boxSender) # After the connection is lost, the logout function should be # called. self.assertEqual(self.realm.loggedOut, 0) self.server.connectionLost( Failure(ConnectionDone("test connection lost"))) self.assertEqual(self.realm.loggedOut, 1) d.addCallback(cbLoggedIn) return d def test_invalidResponse(self): """ L{CredReceiver.passwordChallengeResponse} returns a L{Deferred} which fails with L{UnauthorizedLogin} if it is passed a response which is not valid. """ challenge = self.server.passwordLogin(self.username)['challenge'] return self.assertFailure( self.server.passwordChallengeResponse(cnonce='bar', response='baz'), UnauthorizedLogin) def test_connectionLostWithoutAvatar(self): """ L{CredReceiver.connectionLost} does not raise an exception if no login has occurred when it is called. """ self.server.connectionLost( Failure(ConnectionDone("test connection lost"))) def test_unrecognizedCredentialsLogin(self): """ L{login} raises L{UnhandledCredentials} if passed a credentials object which provides no interface explicitly supported by that function, currently L{IUsernamePassword}. 
""" self.assertRaises(UnhandledCredentials, login, None, None) def test_passwordChallengeLogin(self): """ L{login} issues the commands necessary to authenticate against L{CredReceiver} when given an L{IUsernamePassword} provider with its C{username} and C{password} attributes set to valid credentials. """ loginDeferred = login( self.client, UsernamePassword(self.username, self.password)) def cbLoggedIn(clientAgain): self.assertIdentical(self.client, clientAgain) self.assertIdentical(self.server.boxReceiver, self.avatar) loginDeferred.addCallback(cbLoggedIn) return loginDeferred def test_passwordChallengeInvalid(self): """ L{login} returns a L{Deferred} which fires with L{UnauthorizedLogin} if the L{UsernamePassword} credentials object given does not contain valid authentication information. """ boxReceiver = self.server.boxReceiver loginDeferred = login( self.client, UsernamePassword(self.username + 'x', self.password)) self.assertFailure(loginDeferred, UnauthorizedLogin) def cbFailed(ignored): self.assertIdentical(self.server.boxReceiver, boxReceiver) loginDeferred.addCallback(cbFailed) return loginDeferred def test_noAvatar(self): """ L{login} returns a L{Deferred} which fires with L{NotImplementedError} if the realm raises L{NotImplementedError} when asked for the avatar. """ def noAvatar(avatarId, mind, *interfaces): raise NotImplementedError() self.realm.requestAvatar = noAvatar loginDeferred = login( self.client, UsernamePassword(self.username, self.password)) return self.assertFailure(loginDeferred, NotImplementedError) class AMPUsernamePasswordTests(TestCase): """ Tests for L{_AMPUsernamePasswordTests}, a credentials type which works with username/challenge/nonce/responses of the form used by L{PasswordLogin}. 
""" def setUp(self): self.username = 'user name' password = u'foo bar\N{LATIN SMALL LETTER E WITH ACUTE}' self.password = password.encode('utf-8') self.challenge = '123xyzabc789' self.nonce = '1 2 3 4 5' self.response = _calcResponse( self.challenge, self.nonce, self.password) self.credentials = _AMPUsernamePassword( self.username, self.challenge, self.nonce, self.response) def test_checkPasswordString(self): """ L{_AMPUsernamePassword} accepts a C{str} for the known correct password and returns C{True} if the response matches it. """ self.assertTrue(self.credentials.checkPassword(self.password)) def test_checkInvalidPasswordString(self): """ L{_AMPUsernamePassword} accepts a C{str} for the known correct password and returns C{False} if the response does not match it. """ self.assertFalse(self.credentials.checkPassword('quux')) def test_checkPasswordUnicode(self): """ L{_AMPUsernamePassword} accepts a C{unicode} for the known correct password and returns C{True} if the response matches the UTF-8 encoding of it. """ self.assertTrue( self.credentials.checkPassword(self.password.decode('utf-8'))) def test_checkInvalidPasswordUnicode(self): """ L{_AMPUsernamePassword} accepts a C{unicode} for the known correct password and returns C{False} if the response does not match the UTF-8 encoding of it. """ self.assertFalse( self.credentials.checkPassword( u'\N{LATIN SMALL LETTER E WITH ACUTE}')) class CredAMPServerFactoryTests(TestCase): """ Tests for L{CredAMPServerFactory}. """ def test_buildProtocol(self): """ L{CredAMPServerFactory.buildProtocol} returns a L{CredReceiver} instance with its C{portal} attribute set to the portal object passed to L{CredAMPServerFactory.__init__}. """ portal = object() factory = CredAMPServerFactory(portal) proto = factory.buildProtocol(None) self.assertIsInstance(proto, CredReceiver) self.assertIdentical(proto.portal, portal) class OneTimePadCheckerTests(TestCase): """ Tests for L{OneTimePadChecker}. 
""" def test_requestAvatarId(self): """ L{OneTimePadChecker.requestAvatarId} should return the username in the case the pad is valid. """ PAD = 'test_requestAvatarId' USERNAME = 'test_requestAvatarId username' checker = OneTimePadChecker({PAD: USERNAME}) self.assertEqual( checker.requestAvatarId(_AMPOneTimePad(PAD)), USERNAME) def test_requestAvatarIdUnauthorized(self): """ L{OneTimePadChecker.requestAvatarId} should throw L{UnauthorizedLogin} if an unknown pad is given. """ checker = OneTimePadChecker({}) self.assertRaises( UnauthorizedLogin, lambda: checker.requestAvatarId(_AMPOneTimePad(None))) def test_oneTimePad(self): """ L{OneTimePadChecker.requestAvatarId} should invalidate the pad if a login is successful. """ PAD = 'test_requestAvatarId' checker = OneTimePadChecker({PAD: 'username'}) checker.requestAvatarId(_AMPOneTimePad(PAD)) self.assertRaises( UnauthorizedLogin, lambda: checker.requestAvatarId(_AMPOneTimePad(PAD))) Epsilon-0.7.1/epsilon/test/test_amprouter.py0000644000175000017500000002313612461004115023166 0ustar mithrandimithrandi00000000000000# Copyright (c) 2008 Divmod. See LICENSE for details. """ Tests for L{epsilon.amprouter}. """ from zope.interface import implements from zope.interface.verify import verifyObject from twisted.python.failure import Failure from twisted.protocols.amp import IBoxReceiver, IBoxSender from twisted.trial.unittest import TestCase from epsilon.amprouter import _ROUTE, RouteNotConnected, Router class SomeReceiver: """ A stub AMP box receiver which just keeps track of whether it has been started or stopped and what boxes have been delivered to it. @ivar sender: C{None} until C{startReceivingBoxes} is called, then a reference to the L{IBoxSender} passed to that method. @ivar reason: C{None} until {stopReceivingBoxes} is called, then a reference to the L{Failure} passed to that method. @ivar started: C{False} until C{startReceivingBoxes} is called, then C{True}. 
@ivar stopped: C{False} until C{stopReceivingBoxes} is called, then C{True}. """ implements(IBoxReceiver) sender = None reason = None started = False stopped = False def __init__(self): self.boxes = [] def startReceivingBoxes(self, sender): self.started = True self.sender = sender def ampBoxReceived(self, box): if self.started and not self.stopped: self.boxes.append(box) def stopReceivingBoxes(self, reason): self.stopped = True self.reason = reason class CollectingSender: """ An L{IBoxSender} which collects and saves boxes and errors sent to it. """ implements(IBoxSender) def __init__(self): self.boxes = [] self.errors = [] def sendBox(self, box): """ Reject boxes with non-string keys or values; save all the rest in C{self.boxes}. """ for k, v in box.iteritems(): if not (isinstance(k, str) and isinstance(v, str)): raise TypeError("Cannot send boxes containing non-strings") self.boxes.append(box) def unhandledError(self, failure): self.errors.append(failure.getErrorMessage()) class RouteTests(TestCase): """ Tests for L{Route}, the L{IBoxSender} which handles adding routing information to outgoing boxes. """ def setUp(self): """ Create a route attached to a stub sender. """ self.receiver = SomeReceiver() self.sender = CollectingSender() self.localName = u"foo" self.remoteName = u"bar" self.router = Router() self.router.startReceivingBoxes(self.sender) self.route = self.router.bindRoute(self.receiver, self.localName) def test_interfaces(self): """ L{Route} instances provide L{IBoxSender}. """ self.assertTrue(verifyObject(IBoxSender, self.route)) def test_start(self): """ L{Route.start} starts its L{IBoxReceiver}. """ self.assertFalse(self.receiver.started) self.route.start() self.assertTrue(self.receiver.started) self.assertIdentical(self.receiver.sender, self.route) def test_stop(self): """ L{Route.stop} stops its L{IBoxReceiver}. 
""" self.route.start() self.assertFalse(self.receiver.stopped) self.route.stop(Failure(RuntimeError("foo"))) self.assertTrue(self.receiver.stopped) self.receiver.reason.trap(RuntimeError) def test_sendBox(self): """ L{Route.sendBox} adds the route name to the box before passing it on to the underlying sender. """ self.route.connectTo(self.remoteName) self.route.sendBox({"foo": "bar"}) self.assertEqual( self.sender.boxes, [{_ROUTE: self.remoteName, "foo": "bar"}]) def test_sendUnroutedBox(self): """ If C{Route.connectTo} is called with C{None}, no route name is added to the outgoing box. """ self.route.connectTo(None) self.route.sendBox({"foo": "bar"}) self.assertEqual( self.sender.boxes, [{"foo": "bar"}]) def test_sendBoxWithoutConnection(self): """ L{Route.sendBox} raises L{RouteNotConnected} if called before the L{Route} is connected to a remote route name. """ self.assertRaises( RouteNotConnected, self.route.sendBox, {'foo': 'bar'}) def test_unbind(self): """ L{Route.unbind} removes the route from its router. """ self.route.unbind() self.assertRaises( KeyError, self.router.ampBoxReceived, {_ROUTE: self.localName}) class RouterTests(TestCase): """ Tests for L{Router}, the L{IBoxReceiver} which directs routed AMP boxes to the right object. """ def setUp(self): """ Create sender, router, receiver, and route objects. """ self.sender = CollectingSender() self.router = Router() self.router.startReceivingBoxes(self.sender) self.receiver = SomeReceiver() self.route = self.router.bindRoute(self.receiver) self.route.connectTo(u"foo") def test_interfaces(self): """ L{Router} instances provide L{IBoxReceiver}. """ self.assertTrue(verifyObject(IBoxReceiver, self.router)) def test_uniqueRoutes(self): """ L{Router.createRouteIdentifier} returns a new, different route identifier on each call. 
""" identifiers = [self.router.createRouteIdentifier() for x in range(10)] self.assertEqual(len(set(identifiers)), len(identifiers)) def test_bind(self): """ L{Router.bind} returns a new L{Route} instance which will send boxes to the L{Route}'s L{IBoxSender} after adding a C{_ROUTE} key to them. """ self.route.sendBox({'foo': 'bar'}) self.assertEqual( self.sender.boxes, [{_ROUTE: self.route.remoteRouteName, 'foo': 'bar'}]) self.route.unhandledError(Failure(Exception("some test exception"))) self.assertEqual( self.sender.errors, ["some test exception"]) def test_bindBeforeStart(self): """ If a L{Route} is created with L{Router.bind} before the L{Router} is started with L{Router.startReceivingBoxes}, the L{Route} is created unstarted and only started when the L{Router} is started. """ router = Router() receiver = SomeReceiver() route = router.bindRoute(receiver) route.connectTo(u'quux') self.assertFalse(receiver.started) sender = CollectingSender() router.startReceivingBoxes(sender) self.assertTrue(receiver.started) route.sendBox({'foo': 'bar'}) self.assertEqual( sender.boxes, [{_ROUTE: route.remoteRouteName, 'foo': 'bar'}]) router.ampBoxReceived({_ROUTE: route.localRouteName, 'baz': 'quux'}) self.assertEqual(receiver.boxes, [{'baz': 'quux'}]) def test_bindBeforeStartFinishAfterStart(self): """ If a L{Route} is created with L{Router.connect} before the L{Router} is started with L{Router.startReceivingBoxes} but the Deferred returned by the connect thunk does not fire until after the router is started, the L{IBoxReceiver} associated with the route is not started until that Deferred fires and the route is associated with a remote route name. 
""" router = Router() receiver = SomeReceiver() route = router.bindRoute(receiver) sender = CollectingSender() router.startReceivingBoxes(sender) self.assertFalse(receiver.started) route.connectTo(u"remoteName") self.assertTrue(receiver.started) receiver.sender.sendBox({'foo': 'bar'}) self.assertEqual(sender.boxes, [{_ROUTE: 'remoteName', 'foo': 'bar'}]) def test_ampBoxReceived(self): """ L{Router.ampBoxReceived} passes on AMP boxes to the L{IBoxReceiver} identified by the route key in the box. """ firstReceiver = SomeReceiver() firstRoute = self.router.bindRoute(firstReceiver) firstRoute.start() secondReceiver = SomeReceiver() secondRoute = self.router.bindRoute(secondReceiver) secondRoute.start() self.router.ampBoxReceived( {_ROUTE: firstRoute.localRouteName, 'foo': 'bar'}) self.router.ampBoxReceived( {_ROUTE: secondRoute.localRouteName, 'baz': 'quux'}) self.assertEqual(firstReceiver.boxes, [{'foo': 'bar'}]) self.assertEqual(secondReceiver.boxes, [{'baz': 'quux'}]) def test_ampBoxReceivedDefaultRoute(self): """ L{Router.ampBoxReceived} delivers boxes with no route to the default box receiver. """ sender = CollectingSender() receiver = SomeReceiver() router = Router() router.startReceivingBoxes(sender) router.bindRoute(receiver, None).start() router.ampBoxReceived({'foo': 'bar'}) self.assertEqual(receiver.boxes, [{'foo': 'bar'}]) def test_stopReceivingBoxes(self): """ L{Router.stopReceivingBoxes} calls the C{stop} method of each connected route. 
""" sender = CollectingSender() router = Router() router.startReceivingBoxes(sender) receiver = SomeReceiver() router.bindRoute(receiver) class DummyException(Exception): pass self.assertFalse(receiver.stopped) router.stopReceivingBoxes(Failure(DummyException())) self.assertTrue(receiver.stopped) receiver.reason.trap(DummyException) Epsilon-0.7.1/epsilon/test/test_benchmark.py0000644000175000017500000003557312606176061023125 0ustar mithrandimithrandi00000000000000 from epsilon import hotfix hotfix.require('twisted', 'delayedcall_seconds') hotfix.require('twisted', 'timeoutmixin_calllater') import os, StringIO from twisted.trial import unittest from twisted.internet import error, base from twisted.python import failure, filepath from epsilon.scripts import benchmark from epsilon import juice try: filepath.FilePath('/proc/diskstats').open().close() filepath.FilePath('/proc/self/mounts').open().close() except IOError: hasProc = False else: hasProc = True class DiskstatTestCase(unittest.TestCase): if not hasProc: skip = 'epsilon.benchmark requires access to /proc' def testDiskLineParser(self): """ Test the parsing of a single line into a single diststat instance. 
""" s = ("3 0 hda 267481 3913 3944418 1625467 3392405 3781877 58210592 " "150845143 0 6136300 153333793") device, stat = benchmark.parseDiskStatLine(s) self.assertEquals(device, 'hda') self.assertEquals(stat.readCount, 267481) self.assertEquals(stat.mergedReadCount, 3913) self.assertEquals(stat.readSectorCount, 3944418) self.assertEquals(stat.readMilliseconds, 1625467) self.assertEquals(stat.writeCount, 3392405) self.assertEquals(stat.mergedWriteCount, 3781877) self.assertEquals(stat.writeSectorCount, 58210592) self.assertEquals(stat.writeMilliseconds, 150845143) self.assertEquals(stat.outstandingIOCount, 0) self.assertEquals(stat.ioMilliseconds, 6136300) self.assertEquals(stat.weightedIOMilliseconds, 153333793) def testPartitionLineParser(self): """ Test parsing the other kind of line that can show up in the diskstats file. """ s = "3 1 hda1 2 5 7 9" device, stat = benchmark.parseDiskStatLine(s) self.assertEquals(device, 'hda1') self.assertEquals(stat.readCount, 2) self.assertEquals(stat.readSectorCount, 5) self.assertEquals(stat.writeCount, 7) self.assertEquals(stat.writeSectorCount, 9) def testFileParser(self): """ Test the parsing of multiple lines into a dict mapping device names and numbers to diststat instances. 
""" s = StringIO.StringIO( "1 2 abc 3 4 5 6 7 8 9 10 11 12 13\n" "14 15 def 16 17 18 19 20 21 22 23 24 25 26\n") ds = list(benchmark.parseDiskStats(s)) ds.sort() self.assertEquals(ds[0][0], "abc") self.assertEquals(ds[0][1].readCount, 3) self.assertEquals(ds[0][1].mergedReadCount, 4) self.assertEquals(ds[0][1].readSectorCount, 5) self.assertEquals(ds[0][1].readMilliseconds, 6) self.assertEquals(ds[0][1].writeCount, 7) self.assertEquals(ds[0][1].mergedWriteCount, 8) self.assertEquals(ds[0][1].writeSectorCount, 9) self.assertEquals(ds[0][1].writeMilliseconds, 10) self.assertEquals(ds[0][1].outstandingIOCount, 11) self.assertEquals(ds[0][1].ioMilliseconds, 12) self.assertEquals(ds[0][1].weightedIOMilliseconds, 13) self.assertEquals(ds[1][0], "def") self.assertEquals(ds[1][1].readCount, 16) self.assertEquals(ds[1][1].mergedReadCount, 17) self.assertEquals(ds[1][1].readSectorCount, 18) self.assertEquals(ds[1][1].readMilliseconds, 19) self.assertEquals(ds[1][1].writeCount, 20) self.assertEquals(ds[1][1].mergedWriteCount, 21) self.assertEquals(ds[1][1].writeSectorCount, 22) self.assertEquals(ds[1][1].writeMilliseconds, 23) self.assertEquals(ds[1][1].outstandingIOCount, 24) self.assertEquals(ds[1][1].ioMilliseconds, 25) self.assertEquals(ds[1][1].weightedIOMilliseconds, 26) def testCaptureStats(self): """ Test that captureStats reads out of /proc/diskstats, if it is available. 
""" stats = benchmark.captureStats() self.failUnless(isinstance(stats, dict), "Expected dictionary, got %r" % (stats,)) class ReporterTestCase(unittest.TestCase): def testFormatter(self): [msg] = juice.parseString(benchmark.formatResults( "frunk", 4096, benchmark.ResourceSnapshot( 3, benchmark.diskstat(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11), benchmark.partitionstat(1, 2, 3, 4), 12), benchmark.ResourceSnapshot( 7, benchmark.diskstat(11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21), benchmark.partitionstat(5, 7, 9, 11), 56), False, False)) self.assertEquals(msg['_command'], 'Result') self.assertEquals(msg['version'], '0') self.assertEquals(msg['error'], 'False') self.assertEquals(msg['timeout'], 'False') self.assertEquals(msg['name'], 'frunk') self.failIfEqual(msg['host'], 'localhost') self.assertIn('sector_size', msg) self.assertIn('python_version', msg) self.assertIn('twisted_version', msg) self.assertIn('divmod_version', msg) self.assertEquals(msg['elapsed'], '4') self.assertEquals(msg['filesystem_growth'], '44') self.assertEquals(msg['read_count'], '4') self.assertEquals(msg['read_sectors'], '5') self.assertEquals(msg['write_count'], '6') self.assertEquals(msg['write_sectors'], '7') self.assertEquals(msg['read_ms'], '10') self.assertEquals(msg['write_ms'], '10') def testFormatterWithoutDiskStats(self): """ Sometimes it is not possible to find diskstats. In these cases, None should be reported as the value for all fields which are derived from the diskstats object. 
""" [msg] = juice.parseString(benchmark.formatResults( "frunk", 4096, benchmark.ResourceSnapshot( 3, None, benchmark.partitionstat(1, 2, 3, 4), 12), benchmark.ResourceSnapshot( 7, None, benchmark.partitionstat(5, 7, 9, 11), 56), False, False)) self.assertEquals(msg['_command'], 'Result') self.assertEquals(msg['version'], '0') self.assertEquals(msg['error'], 'False') self.assertEquals(msg['timeout'], 'False') self.assertEquals(msg['name'], 'frunk') self.failIfEqual(msg['host'], 'localhost') self.assertIn('sector_size', msg) self.assertIn('python_version', msg) self.assertIn('twisted_version', msg) self.assertIn('divmod_version', msg) self.assertEquals(msg['elapsed'], '4') self.assertEquals(msg['filesystem_growth'], '44') self.assertEquals(msg['read_count'], '4') self.assertEquals(msg['read_sectors'], '5') self.assertEquals(msg['write_count'], '6') self.assertEquals(msg['write_sectors'], '7') self.failIfIn('read_ms', msg) self.failIfIn('write_ms', msg) def testFormatterWithoutPartitionStats(self): """ Sometimes it is not possible to find partitionstats. In these cases, None should be reported as the value for all fields which are derived from the partitionstats object. 
""" [msg] = juice.parseString(benchmark.formatResults( "frunk", 4096, benchmark.ResourceSnapshot( 3, benchmark.diskstat(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11), None, 12), benchmark.ResourceSnapshot( 7, benchmark.diskstat(11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21), None, 56), False, False)) self.assertEquals(msg['_command'], 'Result') self.assertEquals(msg['version'], '0') self.assertEquals(msg['error'], 'False') self.assertEquals(msg['timeout'], 'False') self.assertEquals(msg['name'], 'frunk') self.failIfEqual(msg['host'], 'localhost') self.assertIn('sector_size', msg) self.assertIn('python_version', msg) self.assertIn('twisted_version', msg) self.assertIn('divmod_version', msg) self.assertEquals(msg['elapsed'], '4') self.assertEquals(msg['filesystem_growth'], '44') self.failIfIn('read_count', msg) self.failIfIn('read_sectors', msg) self.failIfIn('write_count', msg) self.failIfIn('write_sectors', msg) self.assertEquals(msg['read_ms'], '10') self.assertEquals(msg['write_ms'], '10') def testGetSize(self): path = self.mktemp() os.makedirs(path) fObj = file(os.path.join(path, 'foo'), 'wb') fObj.write('x' * 10) fObj.close() self.assertEquals( benchmark.getSize(filepath.FilePath(path)), os.path.getsize(path) + os.path.getsize(os.path.join(path, 'foo'))) def test_getOneSizeBrokenSymlink(self): """ Test that a broken symlink inside a directory passed to getOneSize doesn't cause it to freak out. """ path = filepath.FilePath(self.mktemp()) path.makedirs() link = path.child('foo') os.symlink('abcdefg', link.path) self.assertEquals( benchmark.getOneSize(link), len('abcdefg')) class MockSpawnProcess(object): """ A fake partial ITransport implementation for use in testing ProcessProtocols. 
""" killed = False def __init__(self, proto, executable, args, path, env, childFDs): self.proto = proto self.executable = executable self.args = args self.path = path self.env = env self.childFDs = childFDs self.signals = [] def signalProcess(self, signal): self.signals.append(signal) if signal == 'KILL': self.killed = True self.proto.processEnded(failure.Failure(error.ProcessTerminated())) class SpawnMixin: def setUp(self): mock = [] def spawnProcess(*a, **kw): mock.append(MockSpawnProcess(*a, **kw)) return mock[0] self.workingDirectory = self.mktemp() os.makedirs(self.workingDirectory) self.spawnDeferred = self.processProtocol.spawn( 'executable', ['args'], self.workingDirectory, {'env': 'stuff'}, spawnProcess) self.mock = mock[0] self.sched = [] self.currentTime = 0 def seconds(): return self.currentTime def canceller(c): self.sched.remove(c) def resetter(c): self.sched.sort(key=lambda d: d.getTime()) def callLater(n, f, *a, **kw): c = base.DelayedCall(self.currentTime + n, f, a, kw, canceller, resetter, seconds) self.sched.append(c) return c self.mock.proto.callLater = callLater self.mock.proto.makeConnection(self.mock) class BasicProcessTestCase(SpawnMixin, unittest.TestCase): processProtocol = benchmark.BasicProcess def testCorrectArgs(self): self.assertEquals(self.mock.executable, 'executable') self.assertEquals(self.mock.args, ['executable', 'args']) self.assertEquals(self.mock.path, self.workingDirectory) self.assertEquals(self.mock.env, {'env': 'stuff'}) def testChildDataReceived(self): self.mock.proto.childDataReceived(1, 'stdout bytes') self.mock.proto.childDataReceived(2, 'stderr bytes') self.mock.proto.childDataReceived(1, 'more stdout bytes') def cbProcessFinished((proto, status, output)): self.assertIdentical(proto, self.mock.proto) self.assertEquals(status, 0) self.assertEquals( output, [(1, 'stdout bytes'), (2, 'stderr bytes'), (1, 'more stdout bytes')]) self.spawnDeferred.addCallback(cbProcessFinished) 
self.mock.proto.processEnded(failure.Failure(error.ProcessDone(0))) return self.spawnDeferred def testTimeout(self): """ Assert that a timeout call is created as soon as the process is started and that if it expires, the spawn call's Deferred fails. """ self.assertEquals(len(self.sched), 1) self.assertEquals(self.sched[0].getTime(), 900.0) self.sched[0].func(*self.sched[0].args, **self.sched[0].kw) def cbTimedOut(ign): self.assertEquals(self.mock.signals, ['KILL']) d = self.assertFailure(self.spawnDeferred, error.TimeoutError) d.addCallback(cbTimedOut) return d def testTimeoutExtended(self): """ Assert that input or connection-lost events reset the timeout. """ self.currentTime = 1 self.mock.proto.childDataReceived(1, 'bytes') self.assertEquals(len(self.sched), 1) self.assertEquals(self.sched[0].getTime(), 901.0) self.currentTime = 2 self.mock.proto.childConnectionLost(1) self.assertEquals(len(self.sched), 1) self.assertEquals(self.sched[0].getTime(), 902.0) def testProcessKilled(self): """ Assert that the spawn call's Deferred fails appropriately if someone else gets involved and kills the child process. 
""" def cbKilled(exc): self.assertEquals(exc.exitCode, 1) self.assertEquals(exc.signal, 2) self.assertEquals(exc.status, 3) self.assertEquals(exc.output, [(1, 'bytes')]) self.mock.proto.childDataReceived(1, 'bytes') self.mock.proto.processEnded(failure.Failure(error.ProcessTerminated(1, 2, 3))) d = self.assertFailure(self.spawnDeferred, benchmark.ProcessDied) d.addCallback(cbKilled) return d class SnapshotTestCase(unittest.TestCase): if not hasProc: skip = 'epsilon.benchmark requires access to /proc' def testStart(self): c = benchmark.Change() c.start(filepath.FilePath('.'), 'hda', 'hda1') self.failUnless(isinstance(c.before, benchmark.ResourceSnapshot)) def testStop(self): c = benchmark.Change() c.stop(filepath.FilePath('.'), 'hda', 'hda1') self.failUnless(isinstance(c.after, benchmark.ResourceSnapshot)) class BenchmarkProcessTestCase(SpawnMixin, unittest.TestCase): if not hasProc: skip = 'epsilon.benchmark requires access to /proc' processProtocol = benchmark.BenchmarkProcess def testProcessStartTimingCommand(self): started = [] p = self.mock.proto p.startTiming = lambda: started.append(None) self.mock.proto.childDataReceived(p.BACKCHANNEL_OUT, p.START) self.assertEquals(started, [None]) def testProcessStopTimingCommand(self): stopped = [] p = self.mock.proto p.stopTiming = lambda: stopped.append(None) self.mock.proto.childDataReceived(p.BACKCHANNEL_OUT, p.STOP) self.assertEquals(stopped, [None]) Epsilon-0.7.1/epsilon/test/test_caseless.py0000644000175000017500000002333712604672606022774 0ustar mithrandimithrandi00000000000000""" Tests for L{epsilon.caseless}. """ import sys from twisted.trial.unittest import TestCase from epsilon.caseless import Caseless class CaselessTestCase(TestCase): """ Tests for L{Caseless}. """ def _casings(s): """ Generate variously cased versions of the given string. 
""" yield s.lower() yield s.upper() yield s.title() yield s.title().swapcase() _casings = staticmethod(_casings) def _strings(self): """ Generate a variety of C{str} and C{unicode} test samples. """ for t in [str, unicode]: yield t() for s in self._casings('foo'): yield t(s) def test_cased(self): """ L{Caseless} should expose the wrapped string as C{cased}. """ for s in self._strings(): self.assertIdentical(Caseless(s).cased, s) def test_idempotence(self): """ L{Caseless} should be idempotent. """ for s in self._strings(): self.assertIdentical(Caseless(Caseless(s)).cased, s) def test_repr(self): """ L{Caseless} should implement L{repr}. """ for s in self._strings(): self.assertEquals(repr(Caseless(s)), 'Caseless(%r)' % s) def test_str(self): """ L{Caseless} should delegate L{str}. """ for s in self._strings(): self.assertEquals(str(Caseless(s)), str(s)) def test_unicode(self): """ L{Caseless} should delegate L{unicode}. """ for s in self._strings(): self.assertEquals(unicode(Caseless(s)), unicode(s)) def test_len(self): """ L{Caseless} should delegate L{len}. """ for s in self._strings(): self.assertEquals(len(Caseless(s)), len(s)) def test_getitem(self): """ L{Caseless} should delegate indexing/slicing. """ for s in self._strings(): for i in xrange(len(s)): self.assertEquals(Caseless(s)[i], s[i]) self.assertEquals(Caseless(s)[:i], s[:i]) self.assertEquals(Caseless(s)[i:], s[i:]) self.assertEquals(Caseless(s)[::-1], s[::-1]) def test_iter(self): """ L{Caseless} should delegate L{iter}. """ for s in self._strings(): self.assertEquals(list(iter(Caseless(s))), list(iter(s))) def test_lower(self): """ L{Caseless} should delegate C{lower}. """ for s in self._strings(): self.assertEquals(Caseless(s).lower(), s.lower()) def test_upper(self): """ L{Caseless} should delegate C{upper}. """ for s in self._strings(): self.assertEquals(Caseless(s).upper(), s.upper()) def test_title(self): """ L{Caseless} should delegate C{title}. 
""" for s in self._strings(): self.assertEquals(Caseless(s).title(), s.title()) def test_swapcase(self): """ L{Caseless} should delegate C{swapcase}. """ for s in self._strings(): self.assertEquals(Caseless(s).swapcase(), s.swapcase()) def test_comparison(self): """ L{Caseless} should implement comparison and hashing case-insensitively. """ for a in map(Caseless, self._casings(u'abc')): for b in map(Caseless, self._casings(u'abc')): self.assertEquals(a, b) self.assertEquals(hash(a), hash(b)) self.assertEquals(cmp(a, b), 0) for a in map(Caseless, self._casings(u'abc')): for b in map(Caseless, self._casings(u'abd')): self.assertNotEquals(a, b) self.assertNotEquals(hash(a), hash(b)) self.assertEquals(cmp(a, b), -1) def test_contains(self): """ L{Caseless} should search for substrings case-insensitively. """ for a in map(Caseless, self._casings(u'abc')): for b in map(Caseless, self._casings(u'{{{abc}}}')): self.assertIn(a, b) for a in map(Caseless, self._casings(u'abc')): for b in map(Caseless, self._casings(u'{{{abd}}}')): self.assertNotIn(a, b) def test_startswith(self): """ L{Caseless} should implement C{startswith} case-insensitively. """ for a in map(Caseless, self._casings(u'abcbabcba')): for b in self._casings(u'abc'): self.assertTrue(a.startswith(b)) self.assertTrue(a.startswith(b, 4)) self.assertFalse(a.startswith(b, 2)) self.assertFalse(a.startswith(b, 4, 6)) for a in map(Caseless, self._casings(u'abcbabcba')): for b in self._casings(u'cba'): self.assertFalse(a.startswith(b)) self.assertFalse(a.startswith(b, 4)) self.assertTrue(a.startswith(b, 2)) self.assertFalse(a.startswith(b, 4, 6)) def test_endswith(self): """ L{Caseless} should implement C{endswith} case-insensitively. 
""" for a in map(Caseless, self._casings(u'abcbabcba')): for b in self._casings(u'cba'): self.assertTrue(a.endswith(b)) self.assertTrue(a.endswith(b, 0, 5)) self.assertFalse(a.endswith(b, 0, 3)) self.assertFalse(a.endswith(b, 7)) for a in map(Caseless, self._casings(u'abcbabcba')): for b in self._casings(u'abc'): self.assertFalse(a.endswith(b)) self.assertFalse(a.endswith(b, 0, 5)) self.assertTrue(a.endswith(b, 0, 3)) self.assertFalse(a.endswith(b, 7)) def test_startswithTuple(self): """ L{test_startswith} with tuple arguments. """ for a in map(Caseless, self._casings(u'abcbabcba')): for b in self._casings(u'abc'): self.assertTrue(a.startswith((u'foo', b, u'bar'))) self.assertTrue(a.startswith((u'foo', b, u'bar'), 4)) self.assertFalse(a.startswith((u'foo', b, u'bar'), 2)) self.assertFalse(a.startswith((u'foo', b, u'bar'), 4, 6)) for a in map(Caseless, self._casings(u'abcbabcba')): for b in self._casings(u'cba'): self.assertFalse(a.startswith((u'foo', b, u'bar'))) self.assertFalse(a.startswith((u'foo', b, u'bar'), 4)) self.assertTrue(a.startswith((u'foo', b, u'bar'), 2)) self.assertFalse(a.startswith((u'foo', b, u'bar'), 4, 6)) def test_endswithTuple(self): """ L{test_endswith} with tuple arguments. 
""" for a in map(Caseless, self._casings(u'abcbabcba')): for b in self._casings(u'cba'): self.assertTrue(a.endswith((u'foo', b, u'bar'))) self.assertTrue(a.endswith((u'foo', b, u'bar'), 0, 5)) self.assertFalse(a.endswith((u'foo', b, u'bar'), 0, 3)) self.assertFalse(a.endswith((u'foo', b, u'bar'), 7)) for a in map(Caseless, self._casings(u'abcbabcba')): for b in self._casings(u'abc'): self.assertFalse(a.endswith((u'foo', b, u'bar'))) self.assertFalse(a.endswith((u'foo', b, u'bar'), 0, 5)) self.assertTrue(a.endswith((u'foo', b, u'bar'), 0, 3)) self.assertFalse(a.endswith((u'foo', b, u'bar'), 7)) if sys.version_info < (2, 5): test_startswithTuple.skip = test_endswithTuple.skip = ( 'Tuple arguments implemented in Python 2.5') def test_count(self): """ L{Caseless} should implement C{count} case-insensitively. """ for a in map(Caseless, self._casings(u'abcbabcba')): self.assertEquals(a.count(u'foo'), 0) for b in self._casings(u'cba'): self.assertEquals(a.count(b), 2) self.assertEquals(a.count(b, 2), 2) self.assertEquals(a.count(b, 3), 1) self.assertEquals(a.count(b, 0, 4), 0) def test_findindex(self): """ L{Caseless} should implement C{find}/C{index} case-insensitively. """ def assertFound(a, b, result, rest=()): self.assertEquals(a.find(b, *rest), result) self.assertEquals(a.index(b, *rest), result) def assertNotFound(a, b, rest=()): self.assertEquals(a.find(b, *rest), -1) err = self.assertRaises(ValueError, lambda: a.index(b, *rest)) self.assertSubstring('substring not found', str(err)) for a in map(Caseless, self._casings(u'abcbabcba')): assertNotFound(a, u'foo') for b in self._casings(u'abc'): assertFound(a, b, result=0) assertFound(a, b, rest=(1,), result=4) assertNotFound(a, b, rest=(1, 6)) def test_rfindindex(self): """ L{Caseless} should implement C{rfind}/C{rindex} case-insensitively. 
""" def assertFound(a, b, result, rest=()): self.assertEquals(a.rfind(b, *rest), result) self.assertEquals(a.rindex(b, *rest), result) def assertNotFound(a, b, rest=()): self.assertEquals(a.rfind(b, *rest), -1) err = self.assertRaises(ValueError, lambda: a.rindex(b, *rest)) self.assertSubstring('substring not found', str(err)) for a in map(Caseless, self._casings(u'abcbabcba')): assertNotFound(a, u'foo') for b in self._casings(u'cba'): assertFound(a, b, result=6) assertFound(a, b, rest=(0, 8), result=2) assertNotFound(a, b, rest=(7,)) __doctests__ = ['epsilon.caseless'] Epsilon-0.7.1/epsilon/test/test_descriptor.py0000644000175000017500000001132512461004115023323 0ustar mithrandimithrandi00000000000000""" Tests for L{epsilon.descriptor}. """ from twisted.trial import unittest from epsilon import descriptor class Test1(object): class a(descriptor.attribute): def get(self): return 1 def set(self, value): pass def delete(self): pass class Test2(object): class a(descriptor.attribute): "stuff" def get(self): return 10 class DescriptorTest(unittest.TestCase): def testCase1(self): t = Test1() self.assertEquals(t.a, 1) t.a = 2 self.assertEquals(t.a, 1) del t.a self.assertEquals(t.a, 1) def testCase2(self): t = Test2() self.assertEquals(Test2.a.__doc__, 'stuff') self.assertEquals(t.a, 10) self.assertRaises(AttributeError, setattr, t, 'a', 1) self.assertRaises(AttributeError, delattr, t, 'a') class AbstractClassic: """ Toy classic class used by L{RequiredAttributeTestCase}. """ foo = descriptor.requiredAttribute('foo') bar = descriptor.requiredAttribute('bar') class ManifestClassic(AbstractClassic): """ Toy classic class used by L{RequiredAttributeTestCase}. """ foo = 'bar' class AbstractNewStyle(object): """ Toy new-style class used by L{RequiredAttributeTestCase}. """ foo = descriptor.requiredAttribute('foo') bar = descriptor.requiredAttribute('bar') class ManifestNewStyle(AbstractNewStyle): """ Toy classic class used by L{RequiredAttributeTestCase}. 
""" foo = 'bar' class RequiredAttributeTestCase(unittest.TestCase): """ Tests for L{descriptor.requiredAttribute}. """ def _defaultAccess(self, abstractFoo): exception = self.assertRaises(AttributeError, getattr, abstractFoo, 'foo') self.assertEqual(len(exception.args), 1) self.assertEqual( exception.args[0], ("Required attribute 'foo' has not been changed" " from its default value on %r" % (abstractFoo,))) def test_defaultAccessClassic(self): """ Accessing a L{descriptor.requiredAttribute} on a classic class raises an C{AttributeError} if its value has not been overridden. """ abstractFoo = AbstractClassic() self._defaultAccess(abstractFoo) def test_defaultAccessNewStyle(self): """ Accessing a L{descriptor.requiredAttribute} on a new-style class raises an C{AttributeError} if its value has not been overridden. """ abstractFoo = AbstractNewStyle() self._defaultAccess(abstractFoo) def _derivedAccess(self, manifestFoo): self.assertEqual(manifestFoo.foo, 'bar') def test_derivedAccessClassic(self): """ If a derived classic class sets a new value for a L{descriptor.requiredAttribute}, things should work fine. """ manifestFoo = ManifestClassic() self._derivedAccess(manifestFoo) def test_derivedAccessNewStyle(self): """ If a new-style derived class sets a new value for a L{descriptor.requiredAttribute}, things should work fine. """ manifestFoo = ManifestNewStyle() self._derivedAccess(manifestFoo) def _instanceAccess(self, abstractMadeManifest): abstractMadeManifest.foo = 123 self.assertEqual(abstractMadeManifest.foo, 123) def test_instanceAccessClassic(self): """ Accessing a L{descriptor.requiredAttribute} after setting a value for it on an instance of a classic class evaluates to that value. """ abstractMadeManifest = AbstractClassic() self._instanceAccess(abstractMadeManifest) def test_instanceAccessNewStyle(self): """ Accessing a L{descriptor.requiredAttribute} after setting a value for it on an instance of a new-style class evaluates to that value. 
""" abstractMadeManifest = AbstractNewStyle() self._instanceAccess(abstractMadeManifest) def test_instanceAttributesUnrelatedClassic(self): """ Accessing one L{descriptor.requiredAttribute} after setting a value for a different L{descriptor.requiredAttribute} raises an L{AttributeError}. """ partiallyAbstract = AbstractClassic() partiallyAbstract.bar = 123 self._defaultAccess(partiallyAbstract) def test_instanceAttributesUnrelatedNewStyle(self): """ Accessing one L{descriptor.requiredAttribute} after setting a value for a different L{descriptor.requiredAttribute} raises an L{AttributeError}. """ partiallyAbstract = AbstractNewStyle() partiallyAbstract.bar = 123 self._defaultAccess(partiallyAbstract) Epsilon-0.7.1/epsilon/test/test_expose.py0000644000175000017500000002277512461004115022463 0ustar mithrandimithrandi00000000000000# Copright 2008 Divmod, Inc. See LICENSE file for details. """ L{epsilon.expose} is a module which allows a system that needs to expose code to a network endpoint do so in a manner which only exposes methods which have been explicitly designated. It provides utilities for convenient annotation and lookup of exposed methods. """ from epsilon.structlike import record from epsilon.expose import Exposer, MethodNotExposed, NameRequired from twisted.trial.unittest import TestCase class ExposeTests: """ This mixin provides tests for expose, based on a parameterized base type for the class which methods are being exposed on. Subclass this before L{TestCase} and set L{superClass} to use this. @ivar superClass: the class to be subclassed by all classes which expose methods. """ superClass = None def setUp(self): """ Create two exposers to expose methods in tests. """ self.exposer = Exposer("test exposer") self.otherExposer = Exposer("other exposer") def test_exposeDocAttribute(self): """ Creating an exposer should require a docstring explaining what it's for. """ docstring = "This is my docstring." 
exposer = Exposer(docstring) self.assertEqual(exposer.__doc__, docstring) def test_simpleExpose(self): """ Creating an exposer, defining a class and exposing a method of a class with that exposer, then retrieving a method of that class should result in the method of that class. """ class Foo(self.superClass): def __init__(self, num): self.num = num @self.exposer.expose() def bar(self): return self.num + 1 f = Foo(3) method = self.exposer.get(f, 'bar') self.assertEqual(method(), 4) def test_notExposed(self): """ Creating an exposer and then attempting to retrieve a method not exposed with it should result in a L{MethodNotExposed} exception. """ class Foo(self.superClass): def bar(self): return 1 f = Foo() self.assertRaises(MethodNotExposed, self.exposer.get, f, 'bar') def test_differentMethodsDifferentExposers(self): """ Methods should only be able to be retrieved with the exposer that exposed them, not with any other exposer. """ class Foo(self.superClass): @self.exposer.expose() def bar(self): return 1 @self.otherExposer.expose() def baz(self): return 2 f = Foo() self.assertEqual(self.exposer.get(f, 'bar')(), 1) self.assertEqual(self.otherExposer.get(f, 'baz')(), 2) self.assertRaises(MethodNotExposed, self.otherExposer.get, f, 'bar') self.assertRaises(MethodNotExposed, self.exposer.get, f, 'baz') def test_sameMethodExposedByDifferentExposers(self): """ If the same method is exposed by two different exposers, it should be accessible by both of them. """ class Foo(self.superClass): @self.exposer.expose() @self.otherExposer.expose() def bar(self): return 4 f = Foo() self.assertEqual(self.exposer.get(f, 'bar')(), 4) self.assertEqual(self.otherExposer.get(f, 'bar')(), 4) def test_exposeWithDifferentKey(self): """ The 'key' argument to {Exposer.expose} should change the argument to 'get'. 
""" class Foo(self.superClass): @self.exposer.expose(key='hello') def bar(self): return 7 f = Foo() self.assertEqual(self.exposer.get(f, 'hello')(), 7) def test_exposeOnDifferentClass(self): """ An exposer should only be able to retrieve a method from instances of types which it has explicitly exposed methods on. Instances of different types with the same method name should raise L{MethodNotExposed}. """ class Foo(self.superClass): @self.exposer.expose() def bar(self): return 7 class Baz(self.superClass): def bar(self): return 8 f = Foo() b = Baz() self.assertEqual(self.exposer.get(f, 'bar')(), 7) self.assertRaises(MethodNotExposed, self.otherExposer.get, b, 'bar') def test_exposeUnnamedNoKey(self): """ L{Exposer.expose} raises L{NameRequired} when called without a value for the C{key} parameter if it is used to decorate a non-function object. """ def f(): class Foo(self.superClass): @self.exposer.expose() @classmethod def foo(self): pass self.assertRaises(NameRequired, f) def test_exposeNonMethod(self): """ L{Exposer.expose} should work on methods which have been decorated by another decorator and will therefore not result in function objects when retrieved with __get__. """ class Getter(record('function')): def __get__(self, oself, type): return self.function class Foo(self.superClass): @self.exposer.expose(key='bar') @Getter def bar(): return 7 f = Foo() # Sanity check self.assertEqual(f.bar(), 7) self.assertEqual(self.exposer.get(f, 'bar')(), 7) def test_descriptorGetsType(self): """ L{Exposer.get} should not interfere with the appropriate type object being passed to the wrapped descriptor's C{__get__}. 
""" types = [] class Getter(record('function')): def __get__(self, oself, type): types.append(type) return self.function class Foo(self.superClass): @self.exposer.expose(key='bar') @Getter def bar(): return 7 f = Foo() self.exposer.get(f, 'bar') self.assertEqual(types, [Foo]) def test_descriptorGetsSubtype(self): """ When a descriptor is exposed through a superclass, getting it from a subclass results in the subclass being passed to the C{__get__} method. """ types = [] class Getter(record('function')): def __get__(self, oself, type): types.append(type) return self.function class Foo(self.superClass): @self.exposer.expose(key='bar') @Getter def bar(): return 7 class Baz(Foo): pass b = Baz() self.exposer.get(b, 'bar') self.assertEqual(types, [Baz]) def test_implicitSubclassExpose(self): """ L{Exposer.expose} should expose the given object on all subclasses. """ class Foo(self.superClass): @self.exposer.expose() def bar(self): return 7 class Baz(Foo): pass b = Baz() self.assertEqual(self.exposer.get(b, 'bar')(), 7) def test_overrideDontExpose(self): """ L{Exposer.expose} should not expose overridden methods on subclasses. """ class Foo(self.superClass): @self.exposer.expose() def bar(self): return 7 class Baz(Foo): def bar(self): return 8 b = Baz() self.assertRaises(MethodNotExposed, self.otherExposer.get, b, 'bar') def test_sameKeyOnDifferentTypes(self): """ L{Exposer.expose} should work with the same key on different types. """ class Foo(self.superClass): @self.exposer.expose() def bar(self): return 17 class Qux(self.superClass): @self.exposer.expose() def bar(self): return 71 q = Qux() f = Foo() self.assertEqual(self.exposer.get(q, 'bar')(), 71) self.assertEqual(self.exposer.get(f, 'bar')(), 17) def test_overrideReExpose(self): """ L{Exposer.expose} should expose a method on a subclass if that method is overridden. 
""" class Foo(self.superClass): @self.exposer.expose() def bar(self): return 7 class Baz(Foo): @self.exposer.expose() def bar(self): return 8 f = Foo() b = Baz() self.assertEqual(self.exposer.get(f, 'bar')(), 7) self.assertEqual(self.exposer.get(b, 'bar')(), 8) def test_deleteExposedAttribute(self): """ When an exposed attribute is deleted from a class, it should no longer be exposed; calling L{Exposer.get} should result in L{MethodNotExposed}. """ class Foo(self.superClass): @self.exposer.expose() def bar(self): return 7 f = Foo() del Foo.bar self.assertRaises(MethodNotExposed, self.otherExposer.get, f, 'bar') class ExposeNewStyle(ExposeTests, TestCase): """ All of the above functionality should work on new-style classes. """ superClass = object class Classic: """ A dummy classic class. """ class ExposeOldStyle(ExposeTests, TestCase): """ All of the above functionality should work on old-style classes. """ superClass = Classic Epsilon-0.7.1/epsilon/test/test_extime.py0000644000175000017500000005574512604672606022475 0ustar mithrandimithrandi00000000000000 import datetime import time import operator from twisted.trial import unittest from epsilon import extime # This is the implementation of 'mkgmtime' used to derive the values below. It # is perhaps instructive to read, but it remains commented out to avoid the # temptation to actually call it. If have a GMT time-tuple, just use # Time.fromStructTime(gmtt).asPOSIXTimestamp() to convert it; this was only # written as an alternative implementation to test that code path. 
# def mkgmtime(gmtt): # 'convert GMT time-tuple to local time' # if time.daylight and gmtt[-1]: # zone = time.altzone # else: # zone = time.timezone # return time.mktime(gmtt) - zone class TestTime(unittest.TestCase): class MST(datetime.tzinfo): def tzname(self, dt): return 'MST' def utcoffset(self, dt): return datetime.timedelta(hours = -7) def dst(self, dt): return datetime.timedelta(0) class CET(datetime.tzinfo): def tzname(self, dt): return 'MST' def utcoffset(self, dt): return datetime.timedelta(hours = 1) def dst(self, dt): return datetime.timedelta(0) reference = datetime.datetime(2004, 12, 6, 14, 15, 16) awareReference = datetime.datetime(2004, 12, 6, 14, 15, 16, tzinfo=extime.FixedOffset(0, 0)) def _checkReference(self, timeInstance, reference=None): """ Check timeInstance against self.reference. """ self.assertEquals(timeInstance._time, reference or self.reference) def _createReference(self, reference=None): """ Return a reference instance. """ return extime.Time.fromDatetime(reference or self.reference) def test_pytzWeirdness(self): """ pytz weirdness; RT ticket #2755 """ try: import pytz except ImportError: raise unittest.SkipTest, 'pytz could not be imported' tz = pytz.timezone('America/Detroit') time = extime.Time.fromRFC2822('Wed, 06 Apr 2005 23:12:27 -0400') dtime = time.asDatetime(tz) self.assertEquals(dtime.hour, 23) self.assertEquals(dtime.minute, 12) def test_cmp(self): now = time.gmtime() self.assertEquals(extime.Time.fromStructTime(now), extime.Time.fromStructTime(now)) self.assertNotEqual( extime.Time.fromStructTime(now), extime.Time.fromStructTime(now) + datetime.timedelta(seconds=42)) self.assertNotEquals(extime.Time.fromStructTime(now), 13) aTime = extime.Time.fromStructTime(now) for op in 'lt', 'le', 'gt', 'ge': self.assertRaises(TypeError, getattr(operator, op), aTime, now) def test_fromNow(self): diff = datetime.datetime.utcnow() - extime.Time()._time if diff < datetime.timedelta(): diff = -diff self.failUnless(diff.days == 0 and 
diff.seconds <= 5, 'Time created now is %r away from now' % (diff,)) def test_insignificantTimezones(self): """ Timezones should be insignificant when the resolution is >= 1 day """ def testEqual(creator, input): self.assertEquals(creator(input), creator(input, tzinfo=self.MST())) def testNotEqual(creator, input): self.assertNotEquals(creator(input), creator(input, tzinfo=self.MST())) testEqual(extime.Time.fromHumanly, 'sunday') testEqual(extime.Time.fromISO8601TimeAndDate, '2005') testEqual(extime.Time.fromISO8601TimeAndDate, '2005-02') testEqual(extime.Time.fromISO8601TimeAndDate, '2005-02-10') testNotEqual(extime.Time.fromISO8601TimeAndDate, '2005-02-10T12') testNotEqual(extime.Time.fromISO8601TimeAndDate, '2005-02-10T12:10') testNotEqual(extime.Time.fromISO8601TimeAndDate, '2005-02-10T12:10:03') def test_fromHumanly(self): def test(input, expected, tzinfo=None): time = extime.Time.fromHumanly( input, tzinfo, self._createReference()) self.assertEquals( time.asISO8601TimeAndDate(), expected) return time def testMalformed(input): self.assertRaises(ValueError, extime.Time.fromHumanly, input) def testDay(input, expected, tzinfo=None): time = test(input, expected, tzinfo) self.assert_(time.isAllDay()) def testMinute(input, expected, tzinfo=None): time = test(input, expected, tzinfo) self.assertEquals(time.resolution, datetime.timedelta(minutes=1)) def testMicrosecond(input, expected, tzinfo=None): time = test(input, expected, tzinfo) self.assertEquals(time.resolution, datetime.timedelta(microseconds=1)) # 'now' is Monday, 2004-12-06 14:15:16 UTC testDay('yesterday', '2004-12-05') testDay(' ToDaY ', '2004-12-06') testDay(' TuESDaY ', '2004-12-07') testDay(' ToMoRroW ', '2004-12-07') testDay('wednesday', '2004-12-08') testDay('This wednesday', '2004-12-08') testDay('neXt wednesday', '2004-12-08') testDay('thursday', '2004-12-09') testDay('friday', '2004-12-10') testDay('saturday', '2004-12-11') testDay('sunday', '2004-12-12') testDay('sunday', '2004-12-12', self.MST()) 
# timezone is insignificant for dates with resolution >= 1 day testDay('monday', '2004-12-13') testMinute('15:00', '2004-12-06T15:00+00:00') testMinute('8:00', '2004-12-06T15:00+00:00', self.MST()) testMinute(' 14:00 ', '2004-12-07T14:00+00:00') testMinute(' 2:00 pm ', '2004-12-07T14:00+00:00') testMinute(' 02:00 pm ', '2004-12-07T14:00+00:00') testMinute(' noon ', '2004-12-07T12:00+00:00') testMinute('midnight', '2004-12-07T00:00+00:00') testMicrosecond('now', '2004-12-06T14:15:16+00:00') testMicrosecond(' noW ', '2004-12-06T14:15:16+00:00') testMalformed('24:01') testMalformed('24:00') # this one might be considered valid by some people, but it's just dumb. testMalformed('13:00pm') testMalformed('13:00am') # these are perfectly reasonable cases, but are totally broken. Good enough for demo work. testMalformed('13:00 tomorrow') testMalformed('13:00 next thursday') testMalformed('last monday') def test_fromISO8601DateAndTime(self): self._checkReference( extime.Time.fromISO8601TimeAndDate('2004-12-06T14:15:16') ) self._checkReference( extime.Time.fromISO8601TimeAndDate('20041206T141516') ) self._checkReference( extime.Time.fromISO8601TimeAndDate('20041206T091516-0500') ) self._checkReference( extime.Time.fromISO8601TimeAndDate('2004-12-06T07:15:16', self.MST()) ) self._checkReference( extime.Time.fromISO8601TimeAndDate('2004-12-06T14:15:16Z', self.MST()) ) self._checkReference( extime.Time.fromISO8601TimeAndDate('2004-12-06T14:15:16-0000', self.MST()) ) self._checkReference( extime.Time.fromISO8601TimeAndDate('2004-12-06T14:15:16-0000') ) self._checkReference( extime.Time.fromISO8601TimeAndDate('2004-W50-1T14:15:16') ) self._checkReference( extime.Time.fromISO8601TimeAndDate('2004-341T14:15:16') ) self.assertRaises( ValueError, extime.Time.fromISO8601TimeAndDate, '2005-W53' ) self.assertRaises( ValueError, extime.Time.fromISO8601TimeAndDate, '2004-367' ) try: extime.Time.fromISO8601TimeAndDate('2004-366') except ValueError: raise unittest.FailTest, 'leap years 
should have 366 days' try: extime.Time.fromISO8601TimeAndDate('2004-123T14-0600') extime.Time.fromISO8601TimeAndDate('2004-123T14:13-0600') extime.Time.fromISO8601TimeAndDate('2004-123T14:13:51-0600') except ValueError: raise unittest.FailTest, 'timezone should be allowed if time with *any* resolution is specified' self.assertEquals( extime.Time.fromISO8601TimeAndDate('2005').resolution, datetime.timedelta(days=365) ) self.assertEquals( extime.Time.fromISO8601TimeAndDate('2004').resolution, datetime.timedelta(days=366) ) self.assertEquals( extime.Time.fromISO8601TimeAndDate('2004-02').resolution, datetime.timedelta(days=29) ) self.assertEquals( extime.Time.fromISO8601TimeAndDate('2004-02-29').resolution, datetime.timedelta(days=1) ) self.assertEquals( extime.Time.fromISO8601TimeAndDate('2004-02-29T13').resolution, datetime.timedelta(hours=1) ) self.assertEquals( extime.Time.fromISO8601TimeAndDate('2004-02-29T13:10').resolution, datetime.timedelta(minutes=1) ) self.assertEquals( extime.Time.fromISO8601TimeAndDate('2004-02-29T13:10:05').resolution, datetime.timedelta(seconds=1) ) self.assertEquals( extime.Time.fromISO8601TimeAndDate('2004-02-29T13:10:05.010').resolution, datetime.timedelta(microseconds=1000) ) self.assertEquals( extime.Time.fromISO8601TimeAndDate('2004-02-29T13:10:05.010000').resolution, datetime.timedelta(microseconds=1) ) self.assertEquals( extime.Time.fromISO8601TimeAndDate('2004-02-29T13:10:05.010000123').resolution, datetime.timedelta(microseconds=1) ) self.assertEquals( extime.Time.fromISO8601TimeAndDate('2004-W11').resolution, datetime.timedelta(days=7) ) self.assertEquals( extime.Time.fromISO8601TimeAndDate('2004-W11-3').resolution, datetime.timedelta(days=1) ) self.assertEquals( extime.Time.fromISO8601TimeAndDate('2004-W11-3T14:16:21').resolution, datetime.timedelta(seconds=1) ) self.assertEquals( extime.Time.fromISO8601TimeAndDate('2004-123').resolution, datetime.timedelta(days=1) ) self.assertEquals( 
extime.Time.fromISO8601TimeAndDate('2004-123T14:16:21').resolution, datetime.timedelta(seconds=1) ) def test_fromStructTime(self): self._checkReference( extime.Time.fromStructTime((2004, 12, 6, 14, 15, 16, 0, 0, 0)) ) self._checkReference( extime.Time.fromStructTime((2004, 12, 6, 7, 15, 16, 0, 0, 0), self.MST()) ) self._checkReference( extime.Time.fromStructTime((2004, 12, 6, 15, 15, 16, 0, 0, 0), self.CET()) ) self._checkReference( extime.Time.fromStructTime(time.struct_time((2004, 12, 6, 7, 15, 16, 0, 0, 0)), self.MST()) ) def test_sanitizeStructTime(self): """ Ensure that sanitizeStructTime does not modify valid times and rounds down invalid ones. """ t1 = (2004, 12, 6, 14, 15, 16, 0, 0, 0) t2 = (2004, 12, 33, 14, 15, 61, 1, 2, 3) cleanT2 = (2004, 12, 31, 14, 15, 59, 1, 2, 3) self.assertEqual(extime.sanitizeStructTime(t1), t1) self.assertEqual(extime.sanitizeStructTime(t2), cleanT2) t3 = (2004, -12, 33, 14, 15, 61, 1, 2, 3) cleanT3 = (2004, 1, 31, 14, 15, 59, 1, 2, 3) self.assertEqual(extime.sanitizeStructTime(t3), cleanT3) def test_fromDatetime(self): self._checkReference( extime.Time.fromDatetime(datetime.datetime(2004, 12, 6, 14, 15, 16)) ) self._checkReference( extime.Time.fromDatetime(datetime.datetime(2004, 12, 6, 7, 15, 16, tzinfo=self.MST())) ) self._checkReference( extime.Time.fromDatetime(datetime.datetime(2004, 12, 6, 15, 15, 16, tzinfo=self.CET())) ) def test_fromPOSIXTimestamp(self): # if there were an 'mkgmtime', it would do this: # mkgmtime((2004, 12, 6, 14, 15, 16, 0, 0, 0))) = 1102342516.0 self._checkReference( extime.Time.fromPOSIXTimestamp(1102342516.0)) def test_fromRFC2822(self): self._checkReference( extime.Time.fromRFC2822('Mon, 6 Dec 2004 14:15:16 -0000') ) self._checkReference( extime.Time.fromRFC2822('Mon, 6 Dec 2004 9:15:16 -0500') ) self._checkReference( extime.Time.fromRFC2822('6 Dec 2004 9:15:16 -0500') ) self._checkReference( extime.Time.fromRFC2822('Mon,6 Dec 2004 9:15:16 -0500') ) self._checkReference( 
extime.Time.fromRFC2822('Mon,6 Dec 2004 9:15 -0500'), datetime.datetime(2004, 12, 6, 14, 15) ) self._checkReference( extime.Time.fromRFC2822('Mon,6 Dec 2004 9:15:16 EST') ) self._checkReference( extime.Time.fromRFC2822('Monday,6 December 2004 9:15:16 EST') ) self._checkReference( extime.Time.fromRFC2822('Monday,6 December 2004 14:15:16') ) self.assertRaises( ValueError, extime.Time.fromRFC2822, 'some invalid date' ) def test_twentyThirtyEightBug_RFC2822(self): """ Verify that we can parse RFC2822 timestamps after the One Terrible Moment in 2038. In other words, make sure that we don't round trip through a platform time_t, because those will overflow on 32-bit platforms in 2038. """ self.assertEquals( extime.Time.fromRFC2822( 'Fri, 19 Jan 2038 03:14:08 -0000' ).asPOSIXTimestamp(), (2**31)) self.assertEquals( extime.Time.fromRFC2822( 'Fri, 13 Dec 1901 20:45:52 -0000' ).asPOSIXTimestamp(), -(2**31)) def test_twentyThirtyEightBug_POSIXTimestamp(self): """ Verify that we can load POSIX timestamps after the One Terrible Moment in 2038. In other words, make sure that we don't round trip through a platform time_t, because those will overflow on 32-bit platforms in 2038. 
""" self.assertEquals( extime.Time.fromPOSIXTimestamp( 2**31 ).asPOSIXTimestamp(), (2**31)) self.assertEquals( extime.Time.fromPOSIXTimestamp( -(2**31)-1 ).asPOSIXTimestamp(), -(2**31)-1) def test_obsoleteRFC2822(self): self._checkReference( extime.Time.fromRFC2822('Monday,6 December (i hate this month) 2004 9:15:16 R') ) test_obsoleteRFC2822.todo = '''\ email.Utils implementation does not handle obsoleted military style timezones, nor does it handle obsoleted comments in the header''' def test_asPOSIXTimestamp(self): self.assertEquals( self._createReference().asPOSIXTimestamp(), 1102342516 ) def test_asRFC1123(self): self.assertEquals( self._createReference().asRFC1123(), 'Mon, 06 Dec 2004 14:15:16 GMT' ) def test_asRFC2822(self): self.assertEquals( self._createReference().asRFC2822(), 'Mon, 6 Dec 2004 14:15:16 -0000' ) self.assertEquals( self._createReference().asRFC2822(self.MST()), 'Mon, 6 Dec 2004 07:15:16 -0700' ) self.assertEquals( self._createReference().asRFC2822(self.CET()), 'Mon, 6 Dec 2004 15:15:16 +0100' ) def test_asISO8601TimeAndDate(self): self.assertEquals( self._createReference().asISO8601TimeAndDate(), '2004-12-06T14:15:16+00:00' ) self.assertEquals( self._createReference(reference=datetime.datetime(2004, 12, 6, 14, 15, 16, 43210)).asISO8601TimeAndDate(), '2004-12-06T14:15:16.04321+00:00' ) self.assertEquals( self._createReference().asISO8601TimeAndDate(tzinfo=self.MST()), '2004-12-06T07:15:16-07:00' ) self.assertEquals( self._createReference().asISO8601TimeAndDate(tzinfo=self.CET()), '2004-12-06T15:15:16+01:00' ) self.assertEquals( self._createReference().asISO8601TimeAndDate(includeTimezone=False), '2004-12-06T14:15:16' ) self.assertEquals( self._createReference(reference=datetime.datetime(2004, 12, 6, 14, 15, 16, 43210)).asISO8601TimeAndDate(includeTimezone=False), '2004-12-06T14:15:16.04321' ) self.assertEquals( self._createReference().asISO8601TimeAndDate(tzinfo=self.MST(), includeTimezone=False), '2004-12-06T07:15:16' ) self.assertEquals( 
self._createReference().asISO8601TimeAndDate(tzinfo=self.CET(), includeTimezone=False), '2004-12-06T15:15:16' ) def test_asStructTime(self): self.assertEquals( self._createReference().asStructTime(), (2004, 12, 06, 14, 15, 16, 0, 341, 0) ) self.assertEquals( self._createReference().asStructTime(tzinfo=self.MST()), (2004, 12, 06, 7, 15, 16, 0, 341, 0) ) self.assertEquals( self._createReference().asStructTime(tzinfo=self.CET()), (2004, 12, 06, 15, 15, 16, 0, 341, 0) ) def test_asNaiveDatetime(self): def ref(tzinfo): return self.awareReference.astimezone(tzinfo).replace(tzinfo=None) self.assertEquals( self._createReference().asNaiveDatetime(), self.reference ) self.assertEquals( self._createReference().asNaiveDatetime(tzinfo=self.MST()), ref(self.MST())) self.assertEquals( self._createReference().asNaiveDatetime(tzinfo=self.CET()), ref(self.CET())) def test_asDatetime(self): self.assertEquals( self._createReference().asDatetime(), self.awareReference ) self.assertEquals( self._createReference().asDatetime(tzinfo=self.MST()), self.awareReference ) self.assertEquals( self._createReference().asDatetime(tzinfo=self.CET()), self.awareReference ) def test_asHumanlySameDay(self): """ L{Time.asHumanly} should return a string which provides only enough context to identify the time being formatted. It should include only the time of day, when formatting times in the same day as now. 
""" sameDay = extime.Time.fromStructTime((2004, 12, 6, 14, 15, 16, 0, 0, 0)) self.assertEquals( self._createReference().asHumanly(now=sameDay), '02:15 pm' ) self.assertEquals( self._createReference().asHumanly(tzinfo=self.MST(), now=sameDay), '07:15 am' ) self.assertEquals( self._createReference().asHumanly(tzinfo=self.CET(), now=sameDay), '03:15 pm' ) allDay = extime.Time.fromISO8601TimeAndDate('2005-123') self.assertEquals(allDay.asHumanly(now=allDay), 'all day') def test_asHumanlyDifferentDay(self): """ L{Time.asHumanly} should include the month and day, when formatting times in a different day (but the same year) as now. """ nextDay = extime.Time.fromStructTime((2004, 12, 7, 14, 15, 16, 0, 0, 0)) self.assertEquals( self._createReference().asHumanly(now=nextDay), '6 Dec, 02:15 pm' ) self.assertEquals( self._createReference().asHumanly(tzinfo=self.MST(), now=nextDay), '6 Dec, 07:15 am' ) self.assertEquals( self._createReference().asHumanly(tzinfo=self.CET(), now=nextDay), '6 Dec, 03:15 pm' ) allDay = extime.Time.fromISO8601TimeAndDate('2005-123') allDayNextDay = extime.Time.fromISO8601TimeAndDate('2005-124') self.assertEquals(allDay.asHumanly(now=allDayNextDay), '3 May') def test_asHumanlyDifferentYear(self): """ L{Time.asHumanly} should include the year, when formatting times in a different year than now. 
""" nextYear = extime.Time.fromStructTime((2005, 12, 6, 14, 15, 16, 0, 0, 0)) self.assertEquals( self._createReference().asHumanly(now=nextYear), '6 Dec 2004, 02:15 pm' ) self.assertEquals( self._createReference().asHumanly(tzinfo=self.MST(), now=nextYear), '6 Dec 2004, 07:15 am' ) self.assertEquals( self._createReference().asHumanly(tzinfo=self.CET(), now=nextYear), '6 Dec 2004, 03:15 pm' ) allDay = extime.Time.fromISO8601TimeAndDate('2005-123') allDayNextYear = extime.Time.fromISO8601TimeAndDate('2006-123') self.assertEquals(allDay.asHumanly(now=allDayNextYear), '3 May 2005') def test_asHumanlyValidPrecision(self): """ L{Time.asHumanly} should return the time in minutes by default, and in the specified precision when the precision parameter is given. The precision behavior should be identical for both same day and different day code paths. """ sameDay = extime.Time.fromStructTime((2004, 12, 6, 14, 15, 16, 0, 0, 0)) nextDay = extime.Time.fromStructTime((2004, 12, 7, 14, 15, 16, 0, 0, 0)) self.assertEquals(self._createReference().asHumanly(now=sameDay), '02:15 pm' ) self.assertEquals(self._createReference().asHumanly(now=sameDay, precision=extime.Time.Precision.SECONDS), '02:15:16 pm' ) self.assertEquals(self._createReference().asHumanly(now=nextDay), '6 Dec, 02:15 pm' ) self.assertEquals(self._createReference().asHumanly(now=nextDay, precision=extime.Time.Precision.SECONDS), '6 Dec, 02:15:16 pm' ) def test_asHumanlyInvalidPrecision(self): """ L{Time.asHumanly} should raise an L{InvalidPrecision} exception if passed a value for precision other than L{Time.Precision.MINUTES} or L{Time.Precision.SECONDS}. 
""" self.assertRaises(extime.InvalidPrecision, extime.Time().asHumanly, **{'precision': '%H:%M'}) def test_inverse(self): for style in [ 'POSIXTimestamp', 'Datetime', 'RFC2822', 'StructTime', 'ISO8601TimeAndDate']: parse = getattr(extime.Time, 'from'+style) format = getattr(extime.Time, 'as'+style) self.assertEquals( self._createReference(), parse(format(self._createReference())), '%s() is not the inverse of %s()' % (style, style)) def test_evalRepr(self): evalns = {'datetime': datetime, 'extime': extime} now = extime.Time() self.assertEquals( now, eval(repr(now), evalns, evalns) ) def test_containment(self): makeTime = extime.Time.fromISO8601TimeAndDate self.assertIn(makeTime('2004-05'), makeTime('2004')) self.assertNotIn(makeTime('2005-01'), makeTime('2004')) def test_arithmetic(self): """ Verify that L{datetime.timedelta} objects can be added to and subtracted from L{Time} instances and that L{Time} instances can be subtracted from each other. """ time1 = extime.Time.fromISO8601TimeAndDate('2004-12-03T14:15:16') time2 = extime.Time.fromISO8601TimeAndDate('2004-12-09T14:15:16') offset = datetime.timedelta(days=6) # Supported operations self.assertEqual(time1 + offset, time2) self.assertEqual(time2 - offset, time1) self.assertEqual(time2 - time1, offset) # Make sure unsupported types give back a TypeError self.assertRaises(TypeError, lambda: time1 + 1) self.assertRaises(TypeError, lambda: time1 - 1) def test_oneDay(self): day = self._createReference().oneDay() self.assertEquals(day._time, datetime.datetime(2004, 12, 6, 0, 0, 0)) self.assertEquals(day.resolution, datetime.timedelta(days=1)) def test_isAllDay(self): self.failIf(self._createReference().isAllDay()) self.failUnless(extime.Time.fromISO8601TimeAndDate('2005-123').isAllDay()) Epsilon-0.7.1/epsilon/test/test_juice.py0000644000175000017500000002307412461004115022250 0ustar mithrandimithrandi00000000000000# Copyright 2005 Divmod, Inc. 
See LICENSE file for details from epsilon import juice from epsilon.test import iosim from twisted.trial import unittest from twisted.internet import protocol, defer class TestProto(protocol.Protocol): def __init__(self, onConnLost, dataToSend): self.onConnLost = onConnLost self.dataToSend = dataToSend def connectionMade(self): self.data = [] self.transport.write(self.dataToSend) def dataReceived(self, bytes): self.data.append(bytes) self.transport.loseConnection() def connectionLost(self, reason): self.onConnLost.callback(self.data) class SimpleSymmetricProtocol(juice.Juice): def sendHello(self, text): return self.sendCommand("hello", hello=text) def sendGoodbye(self): return self.sendCommand("goodbye") def juice_HELLO(self, box): return juice.Box(hello=box['hello']) def juice_GOODBYE(self, box): return juice.QuitBox(goodbye='world') class UnfriendlyGreeting(Exception): """Greeting was insufficiently kind. """ class UnknownProtocol(Exception): """Asked to switch to the wrong protocol. """ class Hello(juice.Command): commandName = 'hello' arguments = [('hello', juice.String())] response = [('hello', juice.String())] errors = {UnfriendlyGreeting: 'UNFRIENDLY'} class Goodbye(juice.Command): commandName = 'goodbye' responseType = juice.QuitBox class GetList(juice.Command): commandName = 'getlist' arguments = [('length', juice.Integer())] response = [('body', juice.JuiceList([('x', juice.Integer())]))] class TestSwitchProto(juice.ProtocolSwitchCommand): commandName = 'Switch-Proto' arguments = [ ('name', juice.String()), ] errors = {UnknownProtocol: 'UNKNOWN'} class SingleUseFactory(protocol.ClientFactory): def __init__(self, proto): self.proto = proto def buildProtocol(self, addr): p, self.proto = self.proto, None return p class SimpleSymmetricCommandProtocol(juice.Juice): maybeLater = None def __init__(self, issueGreeting, onConnLost=None): juice.Juice.__init__(self, issueGreeting) self.onConnLost = onConnLost def sendHello(self, text): return 
Hello(hello=text).do(self) def sendGoodbye(self): return Goodbye().do(self) def command_HELLO(self, hello): if hello.startswith('fuck'): raise UnfriendlyGreeting("Don't be a dick.") return dict(hello=hello) def command_GETLIST(self, length): return {'body': [dict(x=1)] * length} def command_GOODBYE(self): return dict(goodbye='world') command_HELLO.command = Hello command_GOODBYE.command = Goodbye command_GETLIST.command = GetList def switchToTestProtocol(self): p = TestProto(self.onConnLost, SWITCH_CLIENT_DATA) return TestSwitchProto(SingleUseFactory(p), name='test-proto').do(self).addCallback(lambda ign: p) def command_SWITCH_PROTO(self, name): if name == 'test-proto': return TestProto(self.onConnLost, SWITCH_SERVER_DATA) raise UnknownProtocol(name) command_SWITCH_PROTO.command = TestSwitchProto class DeferredSymmetricCommandProtocol(SimpleSymmetricCommandProtocol): def command_SWITCH_PROTO(self, name): if name == 'test-proto': self.maybeLaterProto = TestProto(self.onConnLost, SWITCH_SERVER_DATA) self.maybeLater = defer.Deferred() return self.maybeLater raise UnknownProtocol(name) command_SWITCH_PROTO.command = TestSwitchProto class SSPF: protocol = SimpleSymmetricProtocol class SSSF(SSPF, protocol.ServerFactory): pass class SSCF(SSPF, protocol.ClientFactory): pass def connectedServerAndClient(ServerClass=lambda: SimpleSymmetricProtocol(True), ClientClass=lambda: SimpleSymmetricProtocol(False), *a, **kw): """Returns a 3-tuple: (client, server, pump) """ return iosim.connectedServerAndClient( ServerClass, ClientClass, *a, **kw) class TotallyDumbProtocol(protocol.Protocol): buf = '' def dataReceived(self, data): self.buf += data class LiteralJuice(juice.Juice): def __init__(self, issueGreeting): juice.Juice.__init__(self, issueGreeting) self.boxes = [] def juiceBoxReceived(self, box): self.boxes.append(box) return class LiteralParsingTest(unittest.TestCase): def testBasicRequestResponse(self): c, s, p = connectedServerAndClient(ClientClass=TotallyDumbProtocol) HELLO 
= 'abcdefg' ASKTOK = 'hand-crafted-ask' c.transport.write(("""-Command: HeLlO -Ask: %s Hello: %s World: this header is ignored """ % (ASKTOK, HELLO,)).replace('\n','\r\n')) p.flush() asserts = {'hello': HELLO, '-answer': ASKTOK} hdrs = [j.split(': ') for j in c.buf.split('\r\n')[:-2]] self.assertEquals(len(asserts), len(hdrs)) for hdr in hdrs: k, v = hdr self.assertEquals(v, asserts[k.lower()]) def testParsingRoundTrip(self): c, s, p = connectedServerAndClient(ClientClass=lambda: LiteralJuice(False), ServerClass=lambda: LiteralJuice(True)) SIMPLE = ('simple', 'test') CE = ('ceq', ': ') CR = ('crtest', 'test\r') LF = ('lftest', 'hello\n') NEWLINE = ('newline', 'test\r\none\r\ntwo') NEWLINE2 = ('newline2', 'test\r\none\r\n two') BLANKLINE = ('newline3', 'test\r\n\r\nblank\r\n\r\nline') BODYTEST = (juice.BODY, 'blah\r\n\r\ntesttest') testData = [ [SIMPLE], [SIMPLE, BODYTEST], [SIMPLE, CE], [SIMPLE, CR], [SIMPLE, CE, CR, LF], [CE, CR, LF], [SIMPLE, NEWLINE, CE, NEWLINE2], [BODYTEST, SIMPLE, NEWLINE] ] for test in testData: jb = juice.Box() jb.update(dict(test)) jb.sendTo(c) p.flush() self.assertEquals(s.boxes[-1], jb) SWITCH_CLIENT_DATA = 'Success!' SWITCH_SERVER_DATA = 'No, really. Success.' 
class AppLevelTest(unittest.TestCase): def testHelloWorld(self): c, s, p = connectedServerAndClient() L = [] HELLO = 'world' c.sendHello(HELLO).addCallback(L.append) p.flush() self.assertEquals(L[0]['hello'], HELLO) def testHelloWorldCommand(self): c, s, p = connectedServerAndClient( ServerClass=lambda: SimpleSymmetricCommandProtocol(True), ClientClass=lambda: SimpleSymmetricCommandProtocol(False)) L = [] HELLO = 'world' c.sendHello(HELLO).addCallback(L.append) p.flush() self.assertEquals(L[0]['hello'], HELLO) def testHelloErrorHandling(self): L=[] c, s, p = connectedServerAndClient(ServerClass=lambda: SimpleSymmetricCommandProtocol(True), ClientClass=lambda: SimpleSymmetricCommandProtocol(False)) HELLO = 'fuck you' c.sendHello(HELLO).addErrback(L.append) p.flush() L[0].trap(UnfriendlyGreeting) self.assertEquals(str(L[0].value), "Don't be a dick.") def testJuiceListCommand(self): c, s, p = connectedServerAndClient(ServerClass=lambda: SimpleSymmetricCommandProtocol(True), ClientClass=lambda: SimpleSymmetricCommandProtocol(False)) L = [] GetList(length=10).do(c).addCallback(L.append) p.flush() values = L.pop().get('body') self.assertEquals(values, [{'x': 1}] * 10) def testFailEarlyOnArgSending(self): okayCommand = Hello(Hello="What?") self.assertRaises(RuntimeError, Hello) def testSupportsVersion1(self): c, s, p = connectedServerAndClient(ServerClass=lambda: juice.Juice(True), ClientClass=lambda: juice.Juice(False)) negotiatedVersion = [] s.renegotiateVersion(1).addCallback(negotiatedVersion.append) p.flush() self.assertEquals(negotiatedVersion[0], 1) self.assertEquals(c.protocolVersion, 1) self.assertEquals(s.protocolVersion, 1) def testProtocolSwitch(self, switcher=SimpleSymmetricCommandProtocol): self.testSucceeded = False serverDeferred = defer.Deferred() serverProto = switcher(True, serverDeferred) clientDeferred = defer.Deferred() clientProto = switcher(False, clientDeferred) c, s, p = connectedServerAndClient(ServerClass=lambda: serverProto, 
ClientClass=lambda: clientProto) switchDeferred = c.switchToTestProtocol() def cbConnsLost(((serverSuccess, serverData), (clientSuccess, clientData))): self.failUnless(serverSuccess) self.failUnless(clientSuccess) self.assertEquals(''.join(serverData), SWITCH_CLIENT_DATA) self.assertEquals(''.join(clientData), SWITCH_SERVER_DATA) self.testSucceeded = True def cbSwitch(proto): return defer.DeferredList([serverDeferred, clientDeferred]).addCallback(cbConnsLost) switchDeferred.addCallback(cbSwitch) p.flush() if serverProto.maybeLater is not None: serverProto.maybeLater.callback(serverProto.maybeLaterProto) p.flush() self.failUnless(self.testSucceeded) def testProtocolSwitchDeferred(self): return self.testProtocolSwitch(switcher=DeferredSymmetricCommandProtocol) Epsilon-0.7.1/epsilon/test/test_modes.py0000644000175000017500000000302212461004115022247 0ustar mithrandimithrandi00000000000000 from twisted.trial import unittest from epsilon.modal import mode, Modal class ModalTestClass(Modal): modeAttribute = 'mode' initialMode = 'alpha' class alpha(mode): def one(self): return 'alpha-one' def two(self): return 'alpha-two' class beta(mode): def two(self): return 'beta-two' def three(self): return 'beta-three' def four(self): return 'unmode-four' class gamma(mode): def change(self): self.mode = 'delta' return self.change() class delta(mode): def change(self): return 'delta-change' class ModalTestSubclass(ModalTestClass): pass class ModalityTestCase(unittest.TestCase): modalFactory = ModalTestClass def testModalMethods(self): x = self.modalFactory() self.assertEquals(x.one(), 'alpha-one') self.assertEquals(x.two(), 'alpha-two') self.assertRaises(AttributeError, getattr, x, 'three') self.assertEquals(x.four(), 'unmode-four') x.mode = 'beta' self.assertRaises(AttributeError, getattr, x, 'one') self.assertEquals(x.two(), 'beta-two') self.assertEquals(x.three(), 'beta-three') self.assertEquals(x.four(), 'unmode-four') def testInternalModeChange(self): x = self.modalFactory() 
x.mode = 'gamma' self.assertEquals(x.change(), 'delta-change') class MostBasicInheritanceTestCase(ModalityTestCase): modalFactory = ModalTestSubclass Epsilon-0.7.1/epsilon/test/test_process.py0000644000175000017500000000437312461004115022630 0ustar mithrandimithrandi00000000000000# Copyright (c) 2008 Divmod. See LICENSE for details. """ Tests for L{epsilon.process}. """ from zope.interface.verify import verifyObject from twisted.trial.unittest import TestCase from twisted.application.service import IService, MultiService from twisted.internet.protocol import Protocol from epsilon import process class StandardIOServiceTests(TestCase): """ Tests for L{StandardIOService}, an L{IService} implementation which associates a L{IProtocol} provider with stdin and stdout when it is started. """ def test_interface(self): """ L{StandardIOService} instances provide L{IService}. """ verifyObject(IService, process.StandardIOService(None)) def test_startService(self): """ L{StandardIOService.startService} connects a protocol to a standard io transport. """ # This sucks. StandardIO sucks. APIs should be testable. L = [] self.patch(process, 'StandardIO', L.append) proto = Protocol() service = process.StandardIOService(proto) service.startService() self.assertEqual(L, [proto]) def test_setName(self): """ L{StandardIOService.setName} sets the C{name} attribute. """ service = process.StandardIOService(None) service.setName("foo") self.assertEqual(service.name, "foo") def test_setServiceParent(self): """ L{StandardIOService.setServiceParent} sets the C{parent} attribute and adds the service as a child of the given parent. """ parent = MultiService() service = process.StandardIOService(None) service.setServiceParent(parent) self.assertEqual(list(parent), [service]) self.assertIdentical(service.parent, parent) def test_disownServiceParent(self): """ L{StandardIOService.disownServiceParent} sets the C{parent} attribute to C{None} and removes the service from the parent's child list. 
""" parent = MultiService() service = process.StandardIOService(None) service.setServiceParent(parent) service.disownServiceParent() self.assertEqual(list(parent), []) self.assertIdentical(service.parent, None) Epsilon-0.7.1/epsilon/test/test_react.py0000644000175000017500000001340512461004115022244 0ustar mithrandimithrandi00000000000000# Copyright (c) 2008 Divmod. See LICENSE for details. """ Tests for L{epsilon.react}. """ from twisted.internet.defer import Deferred, succeed, fail from twisted.internet.task import Clock from twisted.trial.unittest import TestCase from epsilon.react import react class _FakeReactor(object): """ A fake implementation of L{IReactorCore}. """ def __init__(self): self._running = False self._clock = Clock() self.callLater = self._clock.callLater self.seconds = self._clock.seconds self.getDelayedCalls = self._clock.getDelayedCalls self._whenRunning = [] self._shutdownTriggers = {'before': [], 'during': []} def callWhenRunning(self, callable): if self._running: callable() else: self._whenRunning.append(callable) def addSystemEventTrigger(self, phase, event, callable, *args): assert phase in ('before', 'during') assert event == 'shutdown' self._shutdownTriggers[phase].append((callable, args)) def run(self): """ Call timed events until there are no more or the reactor is stopped. @raise RuntimeError: When no timed events are left and the reactor is still running. """ self._running = True whenRunning = self._whenRunning self._whenRunning = None for callable in whenRunning: callable() while self._running: calls = self.getDelayedCalls() if not calls: raise RuntimeError("No DelayedCalls left") self._clock.advance(calls[0].getTime() - self.seconds()) shutdownTriggers = self._shutdownTriggers self._shutdownTriggers = None for (trigger, args) in shutdownTriggers['before'] + shutdownTriggers['during']: trigger(*args) def stop(self): """ Stop the reactor. """ self._running = False class ReactTests(TestCase): """ Tests for L{epsilon.react.react}. 
""" def test_runsUntilAsyncCallback(self): """ L{react} runs the reactor until the L{Deferred} returned by the function it is passed is called back, then stops it. """ timePassed = [] def main(reactor): finished = Deferred() reactor.callLater(1, timePassed.append, True) reactor.callLater(2, finished.callback, None) return finished r = _FakeReactor() react(r, main, []) self.assertEqual(timePassed, [True]) self.assertEqual(r.seconds(), 2) def test_runsUntilSyncCallback(self): """ L{react} returns quickly if the L{Deferred} returned by the function it is passed has already been called back at the time it is returned. """ def main(reactor): return succeed(None) r = _FakeReactor() react(r, main, []) self.assertEqual(r.seconds(), 0) def test_runsUntilAsyncErrback(self): """ L{react} runs the reactor until the L{Deferred} returned by the function it is passed is errbacked, then it stops the reactor and reports the error. """ class ExpectedException(Exception): pass def main(reactor): finished = Deferred() reactor.callLater(1, finished.errback, ExpectedException()) return finished r = _FakeReactor() react(r, main, []) errors = self.flushLoggedErrors(ExpectedException) self.assertEqual(len(errors), 1) def test_runsUntilSyncErrback(self): """ L{react} returns quickly if the L{Deferred} returned by the function it is passed has already been errbacked at the time it is returned. """ class ExpectedException(Exception): pass def main(reactor): return fail(ExpectedException()) r = _FakeReactor() react(r, main, []) self.assertEqual(r.seconds(), 0) errors = self.flushLoggedErrors(ExpectedException) self.assertEqual(len(errors), 1) def test_singleStopCallback(self): """ L{react} doesn't try to stop the reactor if the L{Deferred} the function it is passed is called back after the reactor has already been stopped. 
""" def main(reactor): reactor.callLater(1, reactor.stop) finished = Deferred() reactor.addSystemEventTrigger( 'during', 'shutdown', finished.callback, None) return finished r = _FakeReactor() react(r, main, []) self.assertEqual(r.seconds(), 1) def test_singleStopErrback(self): """ L{react} doesn't try to stop the reactor if the L{Deferred} the function it is passed is errbacked after the reactor has already been stopped. """ class ExpectedException(Exception): pass def main(reactor): reactor.callLater(1, reactor.stop) finished = Deferred() reactor.addSystemEventTrigger( 'during', 'shutdown', finished.errback, ExpectedException()) return finished r = _FakeReactor() react(r, main, []) self.assertEqual(r.seconds(), 1) errors = self.flushLoggedErrors(ExpectedException) self.assertEqual(len(errors), 1) def test_arguments(self): """ L{react} passes the elements of the list it is passed as positional arguments to the function it is passed. """ args = [] def main(reactor, x, y, z): args.extend((x, y, z)) return succeed(None) r = _FakeReactor() react(r, main, [1, 2, 3]) self.assertEqual(args, [1, 2, 3]) Epsilon-0.7.1/epsilon/test/test_remember.py0000644000175000017500000000504212461004115022742 0ustar mithrandimithrandi00000000000000 from twisted.trial.unittest import TestCase from epsilon.remember import remembered from epsilon.structlike import record class Rememberee(record("rememberer whichValue")): """ A sample value that holds on to its L{Rememberer}. """ class Rememberer(object): """ Sample application code which uses epsilon.remember. @ivar invocations: The number of times that it is invoked. """ invocations = 0 otherInvocations = 0 @remembered def value1(self): """ I remember a value. """ self.invocations += 1 return Rememberee(self, 1) @remembered def value2(self): """ A separate value. """ self.otherInvocations += 1 return Rememberee(self, 2) class RememberedTests(TestCase): """ The "remembered" decorator allows you to lazily create an attribute and remember it. 
""" def setUp(self): """ Create a L{Rememberer} for use with the tests. """ self.rememberer = Rememberer() def test_selfArgument(self): """ The "self" argument to the decorated creation function will be the instance the property is accessed upon. """ value = self.rememberer.value1 self.assertIdentical(value.rememberer, self.rememberer) def test_onlyOneInvocation(self): """ The callable wrapped by C{@remembered} will only be invoked once, regardless of how many times the attribute is accessed. """ self.assertEquals(self.rememberer.invocations, 0) firstTime = self.rememberer.value1 self.assertEquals(self.rememberer.invocations, 1) secondTime = self.rememberer.value1 self.assertEquals(self.rememberer.invocations, 1) self.assertIdentical(firstTime, secondTime) def test_twoValues(self): """ If the L{@remembered} decorator is used more than once, each one will be an attribute with its own identity. """ self.assertEquals(self.rememberer.invocations, 0) self.assertEquals(self.rememberer.otherInvocations, 0) firstValue1 = self.rememberer.value1 self.assertEquals(self.rememberer.invocations, 1) self.assertEquals(self.rememberer.otherInvocations, 0) firstValue2 = self.rememberer.value2 self.assertEquals(self.rememberer.otherInvocations, 1) self.assertNotIdentical(firstValue1, firstValue2) secondValue2 = self.rememberer.value2 self.assertIdentical(firstValue2, secondValue2) Epsilon-0.7.1/epsilon/test/test_setuphelper.py0000644000175000017500000000453612461004115023513 0ustar mithrandimithrandi00000000000000 import sys from twisted.trial import unittest from twisted.python import log from twisted.python.reflect import namedAny from epsilon.setuphelper import _regeneratePluginCache class TestCacheRegeneration(unittest.TestCase): removeModules = [] def setUp(self): self.removedModules = [] for modname in self.removeModules: try: module = namedAny(modname) self.removedModules.append(module) except: print 'COULD NOT LOAD', modname self.sysmodules = sys.modules.copy() self.syspath = 
sys.path[:] for module in self.removedModules: for ent in self.syspath: if module.__file__.startswith(ent): while ent in sys.path: sys.path.remove(ent) rem = 0 for modname in self.sysmodules: if modname.startswith(module.__name__): rem += 1 sys.modules.pop(modname) assert rem, 'NO HITS: %r:%r' % (module,module.__name__) def testRegeneratingIt(self): for mod in self.removedModules: self.failIf(mod.__name__ in sys.modules, 'Started with %r loaded: %r' % (mod.__name__, sys.path)) _regeneratePluginCache(['axiom', 'xmantissa']) log.flushErrors(ImportError) # This is necessary since there are Axiom # plugins that depend on Mantissa, so when # Axiom is installed, Mantissa-dependent # powerups are, but Mantissa isn't some # harmless tracebacks are printed. for mod in self.removedModules: self.failIf(mod.__name__ in sys.modules, 'Loaded %r: %r' % (mod.__name__, sys.path)) testRegeneratingIt.skip = """ This test really ought to be the dependency-direction test from old Quotient. As it currently stands it's just broken. """ def tearDown(self): sys.path[:] = self.syspath sys.modules.clear() sys.modules.update(self.sysmodules) class WithoutAxiom(TestCacheRegeneration): removeModules = ['axiom'] class WithoutMantissa(TestCacheRegeneration): removeModules = ['xmantissa'] class WithoutEither(TestCacheRegeneration): removeModules = ['xmantissa', 'axiom'] Epsilon-0.7.1/epsilon/test/test_structlike.py0000644000175000017500000001414412605014102023335 0ustar mithrandimithrandi00000000000000 """ Tests for L{epsilon.structlike}. """ import threading from epsilon.structlike import record from twisted.internet import reactor from twisted.internet.defer import gatherResults from twisted.internet.threads import deferToThreadPool from twisted.python.threadpool import ThreadPool from twisted.trial import unittest class MyRecord(record('something somethingElse')): """ A sample record subclass. 
""" class StructLike(unittest.TestCase): def _testme(self, TestStruct): x = TestStruct() self.assertEquals(x.x, 1) self.assertEquals(x.y, 2) self.assertEquals(x.z, 3) y = TestStruct('3', '2', '1') self.assertEquals(y.x, '3') self.assertEquals(y.y, '2') self.assertEquals(y.z, '1') z = TestStruct(z='z', x='x', y='y') self.assertEquals(z.x, 'x') self.assertEquals(z.y, 'y') self.assertEquals(z.z, 'z') a = TestStruct('abc') self.assertEquals(a.x, 'abc') self.assertEquals(a.y, 2) self.assertEquals(a.z, 3) b = TestStruct(y='123') self.assertEquals(b.x, 1) self.assertEquals(b.y, '123') self.assertEquals(b.z, 3) def testWithPositional(self): self._testme(record('x y z', x=1, y=2, z=3)) def testWithPositionalSubclass(self): class RecordSubclass(record('x y z', x=1, y=2, z=3)): pass self._testme(RecordSubclass) def testWithoutPositional(self): self._testme(record(x=1, y=2, z=3)) def testWithoutPositionalSubclass(self): class RecordSubclass(record(x=1, y=2, z=3)): pass self._testme(RecordSubclass) def testBreakRecord(self): self.assertRaises(TypeError, record) self.assertRaises(TypeError, record, 'a b c', a=1, c=2) self.assertRaises(TypeError, record, 'a b', c=2) self.assertRaises(TypeError, record, 'a b', a=1) def testUndeclared(self): R = record('a') r = R(1) r.foo = 2 self.assertEquals(r.foo, 2) def testCreateWithNoValuesAndNoDefaults(self): R = record('x') self.assertRaises(TypeError, R) def testUnknownArgs(self): """ Test that passing in unknown keyword and / or positional arguments to a record's initializer causes TypeError to be raised. """ R = record('x') self.assertRaises(TypeError, R, x=5, y=6) self.assertRaises(TypeError, R, 5, 6) def test_typeStringRepresentation(self): """ 'Record' types should have a name which provides information about the slots they contain. 
""" R = record('xyz abc def') self.assertEquals(R.__name__, "Record") def test_instanceStringRepresentation(self): """ 'Record' instances should provide a string representation which provides information about the values contained in their slots. """ obj = MyRecord(something=1, somethingElse=2) self.assertEquals(repr(obj), 'MyRecord(something=1, somethingElse=2)') def test_instanceStringRepresentationNesting(self): """ Nested L{Record} instances should have nested string representations. """ obj = MyRecord(something=1, somethingElse=2) objRepr = 'MyRecord(something=1, somethingElse=2)' self.assertEquals( repr(MyRecord(obj, obj)), 'MyRecord(something=%s, somethingElse=%s)' % (objRepr, objRepr)) def test_instanceStringRepresentationRecursion(self): """ 'Record' instances should provide a repr that displays 'ClassName(...)' when it would otherwise infinitely recurse. """ obj = MyRecord(something=1, somethingElse=2) obj.somethingElse = obj self.assertEquals( repr(obj), 'MyRecord(something=1, somethingElse=MyRecord(...))') def test_instanceStringRepresentationUnhashableRecursion(self): """ 'Record' instances should display 'ClassName(...)' even for unhashable objects. """ obj = MyRecord(something=1, somethingElse=[]) obj.somethingElse.append(obj) self.assertEquals( repr(obj), 'MyRecord(something=1, somethingElse=[MyRecord(...)])') def test_threadLocality(self): """ An 'Record' repr()'d in two separate threads at the same time should look the same (i.e. the repr state tracking for '...' should be thread-local). """ pool = ThreadPool(2, 2) pool.start() self.addCleanup(pool.stop) class StickyRepr(object): """ This has a __repr__ which will block until a separate thread notifies it that it should return. We use this to create a race condition. 
""" waited = False def __init__(self): self.set = threading.Event() self.wait = threading.Event() def __repr__(self): if not self.waited: self.set.set() self.wait.wait() return 'sticky' r = StickyRepr() mr = MyRecord(something=1, somethingElse=r) d = deferToThreadPool(reactor, pool, repr, mr) def otherRepr(): # First we wait for the first thread doing a repr() to enter its # __repr__()... r.set.wait() # OK, now it's blocked. Let's make sure that subsequent calls to # this repr() won't block. r.waited = True # Do it! This is a concurrent repr(). result = repr(mr) # Now we're done, wake up the other repr and let it complete. r.wait.set() return result d2 = deferToThreadPool(reactor, pool, otherRepr) def done((thread1repr, thread2repr)): knownGood = 'MyRecord(something=1, somethingElse=sticky)' # self.assertEquals(thread1repr, thread2repr) self.assertEquals(thread1repr, knownGood) self.assertEquals(thread2repr, knownGood) return gatherResults([d, d2]).addCallback(done) Epsilon-0.7.1/epsilon/test/test_unrepr.py0000644000175000017500000000041612461004115022457 0ustar mithrandimithrandi00000000000000from twisted.trial import unittest from epsilon.unrepr import unrepr class UnreprTestCase(unittest.TestCase): def testSimpleUnrepr(self): data = {'x': [u'bob', (1+2j), []], 10: (1, {}, 'two'), (3, 4): 5L} self.assertEquals(unrepr(repr(data)), data) Epsilon-0.7.1/epsilon/test/test_version.py0000644000175000017500000000112512461004115022627 0ustar mithrandimithrandi00000000000000""" Tests for turning simple version strings into twisted.python.versions.Version objects. """ from epsilon import asTwistedVersion from twisted.trial.unittest import SynchronousTestCase class AsTwistedVersionTests(SynchronousTestCase): def test_simple(self): """ A simple version string can be turned into a Version object. 
""" version = asTwistedVersion("package", "1.2.3") self.assertEqual(version.package, "package") self.assertEqual(version.major, 1) self.assertEqual(version.minor, 2) self.assertEqual(version.micro, 3) Epsilon-0.7.1/epsilon/test/test_view.py0000644000175000017500000003761012461004115022124 0ustar mithrandimithrandi00000000000000 """ Tests for L{epsilon.view}. """ from operator import getitem from twisted.trial.unittest import TestCase from epsilon.view import SlicedView class SlicedViewTests(TestCase): """ Tests for L{SlicedView} """ def test_outOfBoundsPositiveStart(self): """ Verify that the C{__getitem__} of a L{SlicedView} constructed with a slice with a positive start value greater than the maximum allowed index clips that start value to the end of the underlying sequence. """ sequence = ['a', 'b', 'c'] view = SlicedView(sequence, slice(3, None)) self.assertRaises(IndexError, getitem, view, 0) def test_outOfBoundsNegativeStart(self): """ Verify that the C{__getitem__} of a L{SlicedView} constructed with a slice with a negative start value greater than the maximum allowed index clips that start value to the beginning of the underlying sequence. """ sequence = ['a', 'b', 'c'] view = SlicedView(sequence, slice(-4, None)) self.assertEqual(view[0], 'a') self.assertEqual(view[1], 'b') self.assertEqual(view[2], 'c') self.assertRaises(IndexError, getitem, view, 3) def test_outOfBoundsPositiveStop(self): """ Verify that the C{__getitem__} of a L{SlicedView} constructed with a slice with a positve stop value greater than the maximum allowed index clips that stop value to the end of the underlying sequence. 
""" sequence = ['a', 'b', 'c'] view = SlicedView(sequence, slice(None, 4)) self.assertEqual(view[0], 'a') self.assertEqual(view[1], 'b') self.assertEqual(view[2], 'c') self.assertRaises(IndexError, getitem, view, 3) def test_outOfBoundsNegativeStop(self): """ Verify that the C{__getitem__} of a L{SlicedView} constructed with a slice with a negative stop value greater than the maximum allowed index clips that stop value to the beginning of the underlying sequence. """ sequence = ['a', 'b', 'c'] view = SlicedView(sequence, slice(None, -4)) self.assertRaises(IndexError, getitem, view, 0) def test_positiveIndices(self): """ Verify that the C{__getitem__} of a L{SlicedView} constructed with a slice with no start or stop value behaves in the same way as the underlying sequence with respect to indexing with positive values. """ sequence = ['a', 'b', 'c'] view = SlicedView(sequence, slice(None)) self.assertEqual(view[0], 'a') self.assertEqual(view[1], 'b') self.assertEqual(view[2], 'c') self.assertRaises(IndexError, getitem, view, 3) def test_negativeIndices(self): """ Similar to L{test_positiveIndices}, but for negative indices. """ sequence = ['a', 'b', 'c'] view = SlicedView(sequence, slice(None)) self.assertEqual(view[-1], 'c') self.assertEqual(view[-2], 'b') self.assertEqual(view[-3], 'a') self.assertRaises(IndexError, getitem, view, -4) def test_length(self): """ Verify that L{SlicedView.__len__} returns the length of the underlying sequence when the SlicedView is constructed with no start or stop values. """ sequence = ['a', 'b', 'c'] view = SlicedView(sequence, slice(None)) self.assertEqual(len(view), 3) def test_lengthEmptySequence(self): """ Verify that L{SlicedView.__len__} works with empty sequences. """ sequence = [] view = SlicedView([], slice(None)) self.assertEqual(len(view), 0) def test_positiveStartLength(self): """ Similar to L{test_length}, but for a L{SlicedView} constructed with a slice with a positive start value. 
""" sequence = ['a', 'b', 'c'] view = SlicedView(sequence, slice(1, None)) self.assertEqual(len(view), 2) def test_negativeStartLength(self): """ Similar to L{test_length}, but for a L{SlicedView} constructed with a slice with a negative start value. """ sequence = ['a', 'b', 'c'] view = SlicedView(sequence, slice(-2, None)) self.assertEqual(len(view), 2) def test_positiveStopLength(self): """ Similar to L{test_length}, but for a L{SlicedView} constructed with a slice with a positive stop value. """ sequence = ['a', 'b', 'c'] view = SlicedView(sequence, slice(None, 2)) self.assertEqual(len(view), 2) def test_negativeStopLength(self): """ Similar to L{test_length}, but for a L{SlicedView} constructed with a slice with a negative stop value. """ sequence = ['a', 'b', 'c'] view = SlicedView(sequence, slice(None, -1)) self.assertEqual(len(view), 2) def test_positiveStartPositiveStopLength(self): """ Similar to L{test_length}, but for a L{SlicedView} constructed with a slice with positive start and stop values. """ sequence = ['a', 'b', 'c'] view = SlicedView(sequence, slice(1, 2)) self.assertEqual(len(view), 1) def test_positiveStartNegativeStopLength(self): """ Similar to L{test_length}, but for a L{SlicedView} constructed with a slice with a positive start value and a negative stop value. """ sequence = ['a', 'b', 'c'] view = SlicedView(sequence, slice(1, -1)) self.assertEqual(len(view), 1) def test_negativeStartPositiveStopLength(self): """ Similar to L{test_length}, but for a L{SlicedView} constructed with a slice with a negative start value and a positive stop value. """ sequence = ['a', 'b', 'c'] view = SlicedView(sequence, slice(-2, 2)) self.assertEqual(len(view), 1) def test_negativeStartNegativeStopLength(self): """ Similar to L{test_length}, but for a L{SlicedView} constructed with a slice with negative start and stop values. 
""" sequence = ['a', 'b', 'c'] view = SlicedView(sequence, slice(-2, -1)) self.assertEqual(len(view), 1) def test_extendedSliceLength(self): """ Verify that L{SlicedView.__len__} reports the correct length when a step is present. """ sequence = ['a', 'b', 'c', 'd', 'e'] view = SlicedView(sequence, slice(1, -1, 2)) self.assertEqual(len(view), 2) def test_positiveStartOnlyPositiveIndices(self): """ Verify that the C{__getitem__} of a L{SlicedView} constructed with a slice with only a positive start value returns elements at the requested index plus the slice's start value for positive requested indices. """ sequence = ['a', 'b', 'c'] view = SlicedView(sequence, slice(1, None)) self.assertEqual(view[0], 'b') self.assertEqual(view[1], 'c') self.assertRaises(IndexError, getitem, view, 2) def test_positiveStartOnlyNegativeIndices(self): """ Similar to L{test_positiveStartOnlyPositiveIndices}, but cover negative requested indices instead of positive ones. """ sequence = ['a', 'b', 'c'] view = SlicedView(sequence, slice(1, None)) self.assertEqual(view[-1], 'c') self.assertEqual(view[-2], 'b') self.assertRaises(IndexError, getitem, view, -3) def test_negativeStartOnlyPositiveIndices(self): """ Similar to L{test_positiveStartOnlyPositiveIndices}, but for the case of a negative slice start value. """ sequence = ['a', 'b', 'c'] view = SlicedView(sequence, slice(-2, None)) self.assertEqual(view[0], 'b') self.assertEqual(view[1], 'c') self.assertRaises(IndexError, getitem, view, 2) def test_negativeStartOnlyNegativeIndices(self): """ Similar to L{test_negativeStartOnlyPositiveIndices}, but cover negative requested indices instead of positive ones. 
""" sequence = ['a', 'b', 'c'] view = SlicedView(sequence, slice(-2, None)) self.assertEqual(view[-1], 'c') self.assertEqual(view[-2], 'b') self.assertRaises(IndexError, getitem, view, -3) def test_positiveStopOnlyPositiveIndices(self): """ Verify that L{__getitem__} of L{SlicedView} constructed with a slice with a positive stop value returns elements at the requested index for indices less than the stop value and raises IndexError for positive requested indices greater than or equal to the stop value. """ sequence = ['a', 'b', 'c'] view = SlicedView(sequence, slice(None, 2)) self.assertEqual(view[0], 'a') self.assertEqual(view[1], 'b') self.assertRaises(IndexError, getitem, view, 2) def test_positveStopOnlyNegativeIndices(self): """ Similar to L{test_positiveStopOnlyPositiveIndices}, but cover negative requested indices instead of positive ones. """ sequence = ['a', 'b', 'c'] view = SlicedView(sequence, slice(None, 2)) self.assertEqual(view[-1], 'b') self.assertEqual(view[-2], 'a') self.assertRaises(IndexError, getitem, view, -3) def test_negativeStopOnlyPositiveIndices(self): """ Similar to L{test_positiveStopOnlyPositiveIndices}, but test a L{SlicedView} created with a slice with a negative stop value. """ sequence = ['a', 'b', 'c'] view = SlicedView(sequence, slice(None, -1)) self.assertEqual(view[0], 'a') self.assertEqual(view[1], 'b') self.assertRaises(IndexError, getitem, view, 2) def test_negativeStopOnlyNegativeIndices(self): """ Similar to L{test_negativeStopOnlyPositiveIndices}, but cover negative requested indices instead of positive ones. 
""" sequence = ['a', 'b', 'c'] view = SlicedView(sequence, slice(None, -1)) self.assertEqual(view[-1], 'b') self.assertEqual(view[-2], 'a') self.assertRaises(IndexError, getitem, view, -3) def test_positiveStartPositiveStopPositiveIndices(self): """ Verify that L{__getitem__} of L{SlicedView} constructed with a slice with positive start and stop values returns elements at the requested index plus the slice's start value for positive requested indices less than the difference between the stop and start values and raises IndexError for positive requested indices greater than or equal to that difference. """ sequence = ['a', 'b', 'c', 'd'] view = SlicedView(sequence, slice(1, 3)) self.assertEqual(view[0], 'b') self.assertEqual(view[1], 'c') self.assertRaises(IndexError, getitem, view, 2) def test_positiveStartPositiveStopNegativeIndices(self): """ Similar to L{test_positiveStartPositiveStopPositiveIndices}, but cover negative requested indices instead of positive ones. """ sequence = ['a', 'b', 'c', 'd'] view = SlicedView(sequence, slice(1, 3)) self.assertEqual(view[-1], 'c') self.assertEqual(view[-2], 'b') self.assertRaises(IndexError, getitem, view, -3) def test_positiveStartNegativeStopPositiveIndices(self): """ Verify that L{__getitem__} of a L{SlicedView} constructed with a slice with a positive start and a negative stop value returns elements at the requested index plus the slice's start value for positive requested indices within the bounds defined by the stop value and raises an IndexError for positive requested indices outside those bounds. """ sequence = ['a', 'b', 'c'] view = SlicedView(sequence, slice(1, -1)) self.assertEqual(view[0], 'b') self.assertRaises(IndexError, getitem, view, 1) def test_positiveStartNegativeStopNegativeIndices(self): """ Similar to L{test_positiveStartNegativeStopPositiveIndices}, but cover negative requested indices instead of positive ones. 
""" sequence = ['a', 'b', 'c'] view = SlicedView(sequence, slice(1, -1)) self.assertEqual(view[-1], 'b') self.assertRaises(IndexError, getitem, view, -2) def test_negativeStartPositiveStopPositiveIndices(self): """ Similar to L{test_positiveStartNegativeStopPositiveIndices}, but for a negative slice start and positive slice stop. """ sequence = ['a', 'b', 'c'] view = SlicedView(sequence, slice(-2, 2)) self.assertEqual(view[0], 'b') self.assertRaises(IndexError, getitem, view, 1) def test_negativeStartPositiveStopNegativeIndices(self): """ Similar to L{test_negativeStartPositiveStopPositiveIndices}, but cover negative requested indices instead of positive ones. """ sequence = ['a', 'b', 'c'] view = SlicedView(sequence, slice(-2, 2)) self.assertEqual(view[-1], 'b') self.assertRaises(IndexError, getitem, view, -2) def test_negativeStartNegativeStopPositiveIndices(self): """ Similar to L{test_negativeStartPositiveStopPositiveIndices}, but for a negative slice stop. """ sequence = ['a', 'b', 'c'] view = SlicedView(sequence, slice(-2, -1)) self.assertEqual(view[0], 'b') self.assertRaises(IndexError, getitem, view, 1) def test_negativeStartNegativeStopNegativeIndices(self): """ Similar to L{test_negativeStartPositiveStopPositiveIndices}, but cover negative requested indices instead of positive ones. """ sequence = ['a', 'b', 'c'] view = SlicedView(sequence, slice(-2, -1)) self.assertEqual(view[-1], 'b') self.assertRaises(IndexError, getitem, view, -2) def test_positiveStepPositiveIndices(self): """ Verify that a positive step produces the correct results, skipping over the appropriate elements. """ sequence = ['a', 'b', 'c', 'd', 'e'] view = SlicedView(sequence, slice(1, -1, 2)) self.assertEqual(view[0], 'b') self.assertEqual(view[1], 'd') self.assertRaises(IndexError, getitem, view, 2) def test_positiveStepNegativeIndices(self): """ Verify that a negative step produces the correct results, skipping over the appropriate elements. 
""" sequence = ['a', 'b', 'c', 'd', 'e'] view = SlicedView(sequence, slice(1, -1, 2)) self.assertEqual(view[-1], 'd') self.assertEqual(view[-2], 'b') self.assertRaises(IndexError, getitem, view, -3) def test_negativeStepPositiveIndices(self): """ Verify that a negative step produces the correct results, skipping over the appropriate elements. """ sequence = ['a', 'b', 'c', 'd', 'e'] view = SlicedView(sequence, slice(-1, 1, -2)) self.assertEqual(view[0], 'e') self.assertEqual(view[1], 'c') self.assertRaises(IndexError, getitem, view, 2) def test_negativeStepNegativeIndices(self): """ Verify that a negative step produces the correct results, skipping over the appropriate elements. """ sequence = ['a', 'b', 'c', 'd', 'e'] view = SlicedView(sequence, slice(-1, 1, -2)) self.assertEqual(view[-1], 'c') self.assertEqual(view[-2], 'e') self.assertRaises(IndexError, getitem, view, -3) def test_slice(self): """ Verify a L{SlicedView} itself can be sliced. """ sequence = ['a', 'b', 'c'] view = SlicedView(sequence, slice(1, None)) viewView = view[1:] self.assertIdentical(viewView.sequence, view) self.assertEqual(viewView.bounds, slice(1, None, None)) Epsilon-0.7.1/epsilon/test/utils.py0000644000175000017500000000406712461004115021253 0ustar mithrandimithrandi00000000000000 from zope.interface import implements from twisted.internet import interfaces class FileWrapper: """A wrapper around a file-like object to make it behave as a Transport. This doesn't actually stream the file to the attached protocol, and is thus useful mainly as a utility for debugging protocols. 
""" implements(interfaces.ITransport) closed = 0 disconnecting = 0 disconnected = 0 producer = None streamingProducer = 0 def __init__(self, file): self.file = file def write(self, data): try: self.file.write(data) except: self.handleException() # self._checkProducer() def _checkProducer(self): # Cheating; this is called at "idle" times to allow producers to be # found and dealt with if self.producer: self.producer.resumeProducing() def registerProducer(self, producer, streaming): """From abstract.FileDescriptor """ self.producer = producer self.streamingProducer = streaming if not streaming: producer.resumeProducing() def unregisterProducer(self): self.producer = None def stopConsuming(self): self.unregisterProducer() self.loseConnection() def writeSequence(self, iovec): self.write("".join(iovec)) def loseConnection(self): self.disconnecting = True def getPeer(self): # XXX: According to ITransport, this should return an IAddress! return 'file', 'file' def getHost(self): # XXX: According to ITransport, this should return an IAddress! 
return 'file' def handleException(self): pass def resumeProducing(self): # Never sends data anyways pass def pauseProducing(self): # Never sends data anyways pass def stopProducing(self): self.loseConnection() # Additional Q2Q Transport requirements def getQ2QPeer(self): from vertex import q2q return q2q.Q2QAddress('file.domain', 'peer.resource') Epsilon-0.7.1/epsilon/__init__.py0000644000175000017500000000064412606176061020703 0ustar mithrandimithrandi00000000000000# -*- test-case-name: epsilon.test -*- from epsilon._version import get_versions __version__ = get_versions()['version'] del get_versions from twisted.python import versions def asTwistedVersion(packageName, versionString): return versions.Version( packageName, *map(int, versionString.split('+', 1)[0].split("."))) version = asTwistedVersion("epsilon", __version__) __all__ = ['__version__', 'version'] Epsilon-0.7.1/epsilon/ampauth.py0000644000175000017500000002253012461004115020566 0ustar mithrandimithrandi00000000000000# -*- test-case-name: epsilon.test.test_ampauth -*- # Copyright (c) 2008 Divmod. See LICENSE for details. """ This module provides integration between L{AMP} and L{cred}. """ from hashlib import sha1 from zope.interface import implements from twisted.python.randbytes import secureRandom from twisted.cred.error import UnauthorizedLogin from twisted.cred.credentials import IUsernameHashedPassword, IUsernamePassword from twisted.cred.checkers import ICredentialsChecker from twisted.protocols.amp import IBoxReceiver, String, Command, AMP from twisted.internet.protocol import ServerFactory from epsilon.iepsilon import IOneTimePad from epsilon.structlike import record __metaclass__ = type class UnhandledCredentials(Exception): """ L{login} was passed a credentials object which did not provide a recognized credentials interface. """ class OTPLogin(Command): """ Command to initiate a login attempt where a one-time pad is to be used in place of username/password credentials. 
""" arguments = [('pad', String())] errors = { # Invalid username or password UnauthorizedLogin: 'UNAUTHORIZED_LOGIN', # No IBoxReceiver avatar NotImplementedError: 'NOT_IMPLEMENTED_ERROR'} class PasswordLogin(Command): """ Command to initiate a username/password-based login attempt. The response to this command is a challenge which must be responded to based on the correct password associated with the username given to this command. """ arguments = [('username', String())] response = [('challenge', String())] def _calcResponse(challenge, nonce, password): """ Compute the response to the given challenge. @type challenge: C{str} @param challenge: An arbitrary byte string, probably received in response to (or generated for) the L{PasswordLogin} command. @type nonce: C{str} @param nonce: An arbitrary byte string, generated by the client to include in the hash to avoid making the client an oracle. @type password: C{str} @param password: The known correct password for the account being authenticated. @rtype: C{str} @return: A hash constructed from the three parameters. """ return sha1('%s %s %s' % (challenge, nonce, password)).digest() class PasswordChallengeResponse(Command): """ Command to respond to a challenge issued in the response to a L{PasswordLogin} command and complete a username/password-based login attempt. @param cnonce: A randomly generated string used only in this response. @param response: The SHA-1 hash of the challenge, cnonce, and password. """ arguments = [('cnonce', String()), ('response', String())] errors = { # Invalid username or password UnauthorizedLogin: 'UNAUTHORIZED_LOGIN', # No IBoxReceiver avatar NotImplementedError: 'NOT_IMPLEMENTED_ERROR'} @classmethod def determineFrom(cls, challenge, password): """ Create a nonce and use it, along with the given challenge and password, to generate the parameters for a response. @return: A C{dict} suitable to be used as the keyword arguments when calling this command. 
""" nonce = secureRandom(16) response = _calcResponse(challenge, nonce, password) return dict(cnonce=nonce, response=response) class _AMPUsernamePassword(record('username challenge nonce response')): """ L{IUsernameHashedPassword} implementation used by L{PasswordLogin} and related commands. """ implements(IUsernameHashedPassword) def checkPassword(self, password): """ Check the given plaintext password against the response in this credentials object. @type password: C{str} @param password: The known correct password associated with C{self.username}. @return: A C{bool}, C{True} if this credentials object agrees with the given password, C{False} otherwise. """ if isinstance(password, unicode): password = password.encode('utf-8') correctResponse = _calcResponse(self.challenge, self.nonce, password) return correctResponse == self.response class _AMPOneTimePad(record('padValue')): """ L{IOneTimePad} implementation used by L{OTPLogin}. @ivar padValue: The value of the one-time pad. @type padValue: C{str} """ implements(IOneTimePad) class CredReceiver(AMP): """ Integration between AMP and L{twisted.cred}. This implementation is limited to a single authentication per connection. A future implementation may use I{routes} to allow multiple authentications over the same connection. @ivar portal: The L{Portal} against which login will be performed. This is expected to be set by the factory which creates instances of this class. @ivar logout: C{None} or a no-argument callable. This is set to the logout object returned by L{Portal.login} and is set while an avatar is logged in. @ivar challenge: The C{str} which was sent as a challenge in response to the L{PasswordLogin} command. If multiple L{PasswordLogin} commands are sent, this is the challenge sent in response to the most recent of them. It is not set before L{PasswordLogin} is received. @ivar username: The C{str} which was received for the I{username} parameter of the L{PasswordLogin} command. 
The lifetime is the same as that of the I{challenge} attribute. """ portal = None logout = None @PasswordLogin.responder def passwordLogin(self, username): """ Generate a new challenge for the given username. """ self.challenge = secureRandom(16) self.username = username return {'challenge': self.challenge} def _login(self, credentials): """ Actually login to our portal with the given credentials. """ d = self.portal.login(credentials, None, IBoxReceiver) def cbLoggedIn((interface, avatar, logout)): self.logout = logout self.boxReceiver = avatar self.boxReceiver.startReceivingBoxes(self.boxSender) return {} d.addCallback(cbLoggedIn) return d @PasswordChallengeResponse.responder def passwordChallengeResponse(self, cnonce, response): """ Verify the response to a challenge. """ return self._login(_AMPUsernamePassword( self.username, self.challenge, cnonce, response)) @OTPLogin.responder def otpLogin(self, pad): """ Verify the given pad. """ return self._login(_AMPOneTimePad(pad)) def connectionLost(self, reason): """ If a login has happened, perform a logout. """ AMP.connectionLost(self, reason) if self.logout is not None: self.logout() self.boxReceiver = self.logout = None class OneTimePadChecker(record('pads')): """ Checker which validates one-time pads. @ivar pads: Mapping between valid one-time pads and avatar IDs. @type pads: C{dict} """ implements(ICredentialsChecker) credentialInterfaces = (IOneTimePad,) # ICredentialsChecker def requestAvatarId(self, credentials): if credentials.padValue in self.pads: return self.pads.pop(credentials.padValue) raise UnauthorizedLogin('Unknown one-time pad') class CredAMPServerFactory(ServerFactory): """ Server factory useful for creating L{CredReceiver} instances. This factory takes care of associating a L{Portal} with L{CredReceiver} instances it creates. @ivar portal: The portal which will be used by L{CredReceiver} instances created by this factory. 
""" protocol = CredReceiver def __init__(self, portal): self.portal = portal def buildProtocol(self, addr): proto = ServerFactory.buildProtocol(self, addr) proto.portal = self.portal return proto def login(client, credentials): """ Authenticate using the given L{AMP} instance. The protocol must be connected to a server with responders for L{PasswordLogin} and L{PasswordChallengeResponse}. @param client: A connected L{AMP} instance which will be used to issue authentication commands. @param credentials: An object providing L{IUsernamePassword} which will be used to authenticate this connection to the server. @return: A L{Deferred} which fires when authentication has succeeded or which fails with L{UnauthorizedLogin} if the server rejects the authentication attempt. """ if not IUsernamePassword.providedBy(credentials): raise UnhandledCredentials() d = client.callRemote( PasswordLogin, username=credentials.username) def cbChallenge(response): args = PasswordChallengeResponse.determineFrom( response['challenge'], credentials.password) d = client.callRemote(PasswordChallengeResponse, **args) return d.addCallback(lambda ignored: client) d.addCallback(cbChallenge) return d __all__ = [ 'UnhandledCredentials', 'OTPLogin', 'OneTimePadChecker', 'PasswordLogin', 'PasswordChallengeResponse', 'CredReceiver', 'CredAMPServerFactory', 'login'] Epsilon-0.7.1/epsilon/amprouter.py0000644000175000017500000001440712461004115021151 0ustar mithrandimithrandi00000000000000# -*- test-case-name: epsilon.test.test_amprouter -*- # Copyright (c) 2008 Divmod. See LICENSE for details. """ This module provides an implementation of I{Routes}, a system for multiplexing multiple L{IBoxReceiver}/I{IBoxSender} pairs over a single L{AMP} connection. 
""" from itertools import count from zope.interface import implements from twisted.protocols.amp import IBoxReceiver, IBoxSender from epsilon.structlike import record __metaclass__ = type _ROUTE = '_route' _unspecified = object() class RouteNotConnected(Exception): """ An attempt was made to send AMP boxes through a L{Route} which is not yet connected to anything. """ class Route(record('router receiver localRouteName remoteRouteName', remoteRouteName=_unspecified)): """ Wrap up a route name and a box sender to transparently add the route name to boxes sent through this box sender. @ivar router: The L{Router} which created this route. This will be used for route tear down and for its L{IBoxSender}, to send boxes. @ivar receiver: The receiver which will be started with this object as its sender. @type localRouteName: C{unicode} @ivar localRouteName: The name of this route as known by the other side of the AMP connection. AMP boxes with this route are expected to be routed to this object. @type remoteRouteName: C{unicode} or L{NoneType} @ivar remoteRouteName: The name of the route which will be added to all boxes sent to this sender. If C{None}, no route will be added. """ implements(IBoxSender) def connectTo(self, remoteRouteName): """ Set the name of the route which will be added to outgoing boxes. """ self.remoteRouteName = remoteRouteName # This route must not be started before its router is started. If # sender is None, then the router is not started. When the router is # started, it will start this route. if self.router._sender is not None: self.start() def unbind(self): """ Remove the association between this route and its router. """ del self.router._routes[self.localRouteName] def start(self): """ Associate this object with a receiver as its L{IBoxSender}. """ self.receiver.startReceivingBoxes(self) def stop(self, reason): """ Shut down the underlying receiver. 
""" self.receiver.stopReceivingBoxes(reason) def sendBox(self, box): """ Add the route and send the box. """ if self.remoteRouteName is _unspecified: raise RouteNotConnected() if self.remoteRouteName is not None: box[_ROUTE] = self.remoteRouteName.encode('ascii') self.router._sender.sendBox(box) def unhandledError(self, failure): """ Pass failures through to the wrapped L{IBoxSender} without modification. """ self.router._sender.unhandledError(failure) class Router: """ An L{IBoxReceiver} implementation which demultiplexes boxes from an AMP connection being used with zero, one, or more routes. @ivar _sender: An L{IBoxSender} provider which is used to allow L{IBoxReceiver}s added to this router to send boxes. @ivar _unstarted: A C{dict} similar to C{_routes} set before C{startReceivingBoxes} is called and containing all routes which have been added but not yet started. These are started and moved to the C{_routes} dict when the router is started. @ivar _routes: A C{dict} mapping local route identifiers to L{IBoxReceivers} associated with them. This is only initialized after C{startReceivingBoxes} is called. @ivar _routeCounter: A L{itertools.count} instance used to generate unique identifiers for routes in this router. """ implements(IBoxReceiver) _routes = None _sender = None def __init__(self): self._routeCounter = count() self._unstarted = {} def createRouteIdentifier(self): """ Return a route identifier which is not yet associated with a route on this dispatcher. @rtype: C{unicode} """ return unicode(self._routeCounter.next()) def bindRoute(self, receiver, routeName=_unspecified): """ Create a new route to associate the given route name with the given receiver. @type routeName: C{unicode} or L{NoneType} @param routeName: The identifier for the newly created route. If C{None}, boxes with no route in them will be delivered to this receiver. 
@rtype: L{Route} """ if routeName is _unspecified: routeName = self.createRouteIdentifier() # self._sender may yet be None; if so, this route goes into _unstarted # and will have its sender set correctly in startReceivingBoxes below. route = Route(self, receiver, routeName) mapping = self._routes if mapping is None: mapping = self._unstarted mapping[routeName] = route return route def startReceivingBoxes(self, sender): """ Initialize route tracking objects. """ self._sender = sender for routeName, route in self._unstarted.iteritems(): # Any route which has been bound but which does not yet have a # remote route name should not yet be started. These will be # started in Route.connectTo. if route.remoteRouteName is not _unspecified: route.start() self._routes = self._unstarted self._unstarted = None def ampBoxReceived(self, box): """ Dispatch the given box to the L{IBoxReceiver} associated with the route indicated by the box, or handle it directly if there is no route. """ route = box.pop(_ROUTE, None) self._routes[route].receiver.ampBoxReceived(box) def stopReceivingBoxes(self, reason): """ Stop all the L{IBoxReceiver}s which have been added to this router. 
""" for routeName, route in self._routes.iteritems(): route.stop(reason) self._routes = None __all__ = ['Router', 'Route'] Epsilon-0.7.1/epsilon/asplode.py0000644000175000017500000000156112461004115020557 0ustar mithrandimithrandi00000000000000 import sys, os from datetime import date def status(x): sys.stderr.write(x+'\n') sys.stderr.flush() def splode(linerator, proj, capproj): current = None for line in linerator: line = line.replace('_project_', proj) line = line.replace('_Project_', capproj) line = line.replace('_date_', str(date.today())) ls = line.split("###file:") if len(ls) > 1: fname = ls[1].strip() if current is not None: current.close() try: os.makedirs(os.path.dirname(fname)) except: pass current = file(fname, 'wb') status('Created: ' + fname) else: current.write(line) current.close() def main(argv): splode(sys.stdin.readlines(), 'zoop', 'Zoop') if __name__ == '__main__': main(sys.argv) Epsilon-0.7.1/epsilon/caseless.py0000644000175000017500000000652412461004115020736 0ustar mithrandimithrandi00000000000000# -*- test-case-name: epsilon.test.test_caseless -*- """ Helpers for case-insensitive string handling. """ class Caseless(object): """ Case-insensitive string wrapper type. This wrapper is intended for use with strings that have case-insensitive semantics, such as HTTP/MIME header values. It implements comparison-based operations case-insensitively, avoiding the need to manually call C{lower} where appropriate, or keep track of which strings are case-insensitive throughout various function calls. 
Example usage: >>> Caseless('Spam') == Caseless('spam') True >>> 'spam' in Caseless('Eggs and Spam') True >>> sorted(['FOO', 'bar'], key=Caseless) ['bar', 'FOO'] >>> d = {Caseless('Content-type'): Caseless('Text/Plain')} >>> d[Caseless('Content-Type')].startswith('text/') True Note: String methods that return modified strings (such as C{decode}/C{encode}, C{join}, C{partition}, C{replace}, C{strip}/C{split}) don't have an unambiguous return types with regards to case sensitivity, so they are not implemented by L{Caseless}. They should be accessed on the underlying cased string instead. (Excepted are methods like C{lower}/C{upper}, whose return case is unambiguous.) @ivar cased: the wrapped string-like object """ def __init__(self, cased): if isinstance(cased, Caseless): cased = cased.cased self.cased = cased def __repr__(self): return '%s(%r)' % (type(self).__name__, self.cased) # Methods delegated to cased def __str__(self): return str(self.cased) def __unicode__(self): return unicode(self.cased) def __len__(self): return len(self.cased) def __getitem__(self, key): return self.cased[key] def __iter__(self): return iter(self.cased) def lower(self): return self.cased.lower() def upper(self): return self.cased.upper() def title(self): return self.cased.title() def swapcase(self): return self.cased.swapcase() # Methods delegated to lower() def __cmp__(self, other): return cmp(self.lower(), other.lower()) def __hash__(self): return hash(self.lower()) def __contains__(self, substring): return substring.lower() in self.lower() def startswith(self, prefix, *rest): if isinstance(prefix, tuple): lprefix = tuple(s.lower() for s in prefix) else: lprefix = prefix.lower() return self.lower().startswith(lprefix, *rest) def endswith(self, suffix, *rest): if isinstance(suffix, tuple): lsuffix = tuple(s.lower() for s in suffix) else: lsuffix = suffix.lower() return self.lower().endswith(lsuffix, *rest) def count(self, substring, *rest): return self.lower().count(substring.lower(), 
*rest) def find(self, substring, *rest): return self.lower().find(substring.lower(), *rest) def index(self, substring, *rest): return self.lower().index(substring.lower(), *rest) def rfind(self, substring, *rest): return self.lower().rfind(substring.lower(), *rest) def rindex(self, substring, *rest): return self.lower().rindex(substring.lower(), *rest) Epsilon-0.7.1/epsilon/cooperator.py0000644000175000017500000000135312461004115021304 0ustar mithrandimithrandi00000000000000 from twisted.application.service import Service from twisted.internet.task import SchedulerStopped, Cooperator, coiterate def iterateInReactor(i, delay=None): """ Cooperatively iterate over the given iterator. @see: L{twisted.internet.task.coiterate}. """ return coiterate(i) class SchedulingService(Service): """ Simple L{IService} implementation. """ def __init__(self): self.coop = Cooperator(started=False) def addIterator(self, iterator): return self.coop.coiterate(iterator) def startService(self): self.coop.start() def stopService(self): self.coop.stop() __all__ = [ 'SchedulerStopped', 'Cooperator', 'SchedulingService', 'iterateInReactor'] Epsilon-0.7.1/epsilon/descriptor.py0000644000175000017500000001075012461004115021306 0ustar mithrandimithrandi00000000000000# -*- test-case-name: epsilon.test.test_descriptor -*- """ Provides an 'attribute' class for one-use descriptors. """ attribute = None class _MetaAttribute(type): def __new__(meta, name, bases, dict): # for reals, yo. for kw in ['get', 'set', 'delete']: if kw in dict: dict[kw] = staticmethod(dict[kw]) secretClass = type.__new__(meta, name, bases, dict) if attribute is None: return secretClass return secretClass() class attribute(object): """ Convenience class for providing one-shot descriptors, similar to 'property'. For example: >>> from epsilon.descriptor import attribute >>> class Dynamo(object): ... class dynamic(attribute): ... def get(self): ... self.dynCount += 1 ... return self.dynCount ... def set(self, value): ... 
self.dynCount += value ... dynCount = 0 ... >>> d = Dynamo() >>> d.dynamic 1 >>> d.dynamic 2 >>> d.dynamic = 6 >>> d.dynamic 9 >>> d.dynamic 10 >>> del d.dynamic Traceback (most recent call last): ... AttributeError: attribute cannot be removed """ __metaclass__ = _MetaAttribute def __get__(self, oself, type): """ Private implementation of descriptor interface. """ if oself is None: return self return self.get(oself) def __set__(self, oself, value): """ Private implementation of descriptor interface. """ return self.set(oself, value) def __delete__(self, oself): """ Private implementation of descriptor interface. """ return self.delete(oself) def set(self, value): """ Implement this method to provide attribute setting. Default behavior is that attributes are not settable. """ raise AttributeError('read only attribute') def get(self): """ Implement this method to provide attribute retrieval. Default behavior is that unset attributes do not have any value. """ raise AttributeError('attribute has no value') def delete(self): """ Implement this method to provide attribute deletion. Default behavior is that attributes cannot be deleted. """ raise AttributeError('attribute cannot be removed') def requiredAttribute(requiredAttributeName): """ Utility for defining attributes on base classes/mixins which require their values to be supplied by their derived classes. C{None} is a common, but almost never suitable default value for these kinds of attributes, as it may cause operations in the derived class to fail silently in peculiar ways. If a C{requiredAttribute} is accessed before having its value changed, a C{AttributeError} will be raised with a helpful error message. @param requiredAttributeName: The name of the required attribute. @type requiredAttributeName: C{str} Example: >>> from epsilon.descriptor import requiredAttribute ... >>> class FooTestMixin: ... expectedResult = requiredAttribute('expectedResult') ... >>> class BrokenFooTestCase(TestCase, FooTestMixin): ... 
pass ... >>> brokenFoo = BrokenFooTestCase() >>> print brokenFoo.expectedResult Traceback (most recent call last): ... AttributeError: Required attribute 'expectedResult' has not been changed from its default value on ''. ... >>> class WorkingFooTestCase(TestCase, FooTestMixin): ... expectedResult = 7 ... >>> workingFoo = WorkingFooTestCase() >>> print workingFoo.expectedResult ... 7 >>> """ class RequiredAttribute(attribute): def get(self): if requiredAttributeName not in self.__dict__: raise AttributeError( ('Required attribute %r has not been changed' ' from its default value on %r' % ( requiredAttributeName, self))) return self.__dict__[requiredAttributeName] def set(self, value): self.__dict__[requiredAttributeName] = value return RequiredAttribute __all__ = ['attribute', 'requiredAttribute'] Epsilon-0.7.1/epsilon/expose.py0000644000175000017500000001055412461004115020435 0ustar mithrandimithrandi00000000000000# Copright 2008 Divmod, Inc. See LICENSE file for details. # -*- test-case-name: epsilon.test.test_expose -*- """ This module provides L{Exposer}, a utility for creating decorators that expose methods on types for a particular purpose. The typical usage of this module is for an infrastructure layer (usually one that allows methods to be invoked from the network, directly or indirectly) to provide an explicit API for exposing those methods securely. For example, a sketch of a finger protocol implementation which could use this to expose the results of certain methods as finger results:: # tx_finger.py fingermethod = Exposer("This object exposes finger methods.") ... class FingerProtocol(Protocol): def __init__(self, fingerModel): self.model = fingerModel ... def fingerQuestionReceived(self, whichUser): try: method = fingermethod.get(self.model, whichUser) except MethodNotExposed: method = lambda : "Unknown user" return method() # myfingerserver.py from tx_finger import fingermethod ... 
class MyFingerModel(object): @fingermethod.expose("bob") def someMethod(self): return "Bob is great." Assuming lots of protocol code to hook everything together, this would then allow you to use MyFingerModel and 'finger bob' to get the message 'Bob is great.' """ import inspect from types import FunctionType class MethodNotExposed(Exception): """ The requested method was not exposed for the purpose requested. More specifically, L{Exposer.get} was used to retrieve a key from an object which does not expose that key with that exposer. """ class NameRequired(Exception): """ L{Exposer.expose} was used to decorate a non-function object without having a key explicitly specified. """ class Exposer(object): """ This is an object that can expose and retrieve methods on classes. @ivar _exposed: a dict mapping exposed keys to exposed function objects. """ def __init__(self, doc): """ Create an exposer. """ self.__doc__ = doc self._exposed = {} def expose(self, key=None): """ Expose the decorated method for this L{Exposer} with the given key. A method which is exposed will be able to be retrieved by this L{Exposer}'s C{get} method with that key. If no key is provided, the key is the method name of the exposed method. Use like so:: class MyClass: @someExposer.expose() def foo(): ... or:: class MyClass: @someExposer.expose('foo') def unrelatedMethodName(): ... @param key: a hashable object, used by L{Exposer.get} to look up the decorated method later. If None, the key is the exposed method's name. @return: a 1-argument callable which records its input as exposed, then returns it. """ def decorator(function): rkey = key if rkey is None: if isinstance(function, FunctionType): rkey = function.__name__ else: raise NameRequired() if rkey not in self._exposed: self._exposed[rkey] = [] self._exposed[rkey].append(function) return function return decorator def get(self, obj, key): """ Retrieve 'key' from an instance of a class which previously exposed it. 
@param key: a hashable object, previously passed to L{Exposer.expose}. @return: the object which was exposed with the given name on obj's key. @raise MethodNotExposed: when the key in question was not exposed with this exposer. """ if key not in self._exposed: raise MethodNotExposed() rightFuncs = self._exposed[key] T = obj.__class__ seen = {} for subT in inspect.getmro(T): for name, value in subT.__dict__.items(): for rightFunc in rightFuncs: if value is rightFunc: if name in seen: raise MethodNotExposed() return value.__get__(obj, T) seen[name] = True raise MethodNotExposed() Epsilon-0.7.1/epsilon/extime.py0000644000175000017500000011011112604672606020432 0ustar mithrandimithrandi00000000000000# -*- test-case-name: epsilon.test.test_extime -*- """ Extended date/time formatting and miscellaneous functionality. See the class 'Time' for details. """ import datetime import re from email.Utils import formatdate, parsedate_tz _EPOCH = datetime.datetime.utcfromtimestamp(0) class InvalidPrecision(Exception): """ L{Time.asHumanly} was passed an invalid precision value. """ def sanitizeStructTime(struct): """ Convert struct_time tuples with possibly invalid values to valid ones by substituting the closest valid value. """ maxValues = (9999, 12, 31, 23, 59, 59) minValues = (1, 1, 1, 0, 0, 0) newstruct = [] for value, maxValue, minValue in zip(struct[:6], maxValues, minValues): newstruct.append(max(minValue, min(value, maxValue))) return tuple(newstruct) + struct[6:] def _timedeltaToSignHrMin(offset): """ Return a (sign, hour, minute) triple for the offset described by timedelta. sign is a string, either "+" or "-". In the case of 0 offset, sign is "+". """ minutes = round((offset.days * 3600000000 * 24 + offset.seconds * 1000000 + offset.microseconds) / 60000000.0) if minutes < 0: sign = '-' minutes = -minutes else: sign = '+' return (sign, minutes // 60, minutes % 60) def _timedeltaToSeconds(offset): """ Convert a datetime.timedelta instance to simply a number of seconds. 
class FixedOffset(datetime.tzinfo):
    """
    A L{datetime.tzinfo} implementation describing a fixed offset from UTC.

    @ivar offset: a L{datetime.timedelta} giving the difference between
        local time and UTC.
    """
    _zeroOffset = datetime.timedelta()

    def __init__(self, hours, minutes):
        """
        @param hours: signed number of whole hours of offset from UTC.
        @param minutes: additional minutes of offset.
        """
        self.offset = datetime.timedelta(minutes = hours * 60 + minutes)

    def utcoffset(self, dt):
        """
        Return the fixed offset from UTC; C{dt} is ignored.
        """
        return self.offset

    def tzname(self, dt):
        """
        Return the timezone's name as a string such as C{'+05:30'}.

        The C{tzinfo} contract requires a string (or None) here;
        previously this method returned the raw (sign, hours, minutes)
        tuple from C{_timedeltaToSignHrMin}, which broke C{%Z}
        formatting via C{strftime} for datetimes carrying this tzinfo.
        """
        # Round to whole minutes, mirroring _timedeltaToSignHrMin, but
        # computed inline so this class stands on its own.
        minutes = round(
            (self.offset.days * 24 * 3600000000
             + self.offset.seconds * 1000000
             + self.offset.microseconds) / 60000000.0)
        if minutes < 0:
            sign = '-'
            minutes = -minutes
        else:
            sign = '+'
        return '%s%02i:%02i' % (sign, minutes // 60, minutes % 60)

    def dst(self, tz):
        """
        Fixed offsets carry no daylight-saving adjustment.
        """
        return self._zeroOffset

    def __repr__(self):
        return '<%s.%s object at 0x%x offset %r>' % (
            self.__module__, type(self).__name__, id(self), self.offset)
Other formats are considered to express only an instant in time, such as a POSIX timestamp, because the resolution of the time is limited only by the hardware's representation of a real number. Timezones are significant only for instances with a resolution greater than one day. When the timezone is insignificant, the result of methods like asISO8601TimeAndDate is the same for any given tzinfo parameter. Sort order is determined by the start of the period in UTC. For example, "today" sorts after "midnight today, central Europe", and before "midnight today, US Eastern". For applications that need to store a mix of timezone dependent and independent instances, it may be wise to store them separately, since the time between the start and end of today in the local timezone may not include the start of today in UTC, and thus not independent instances addressing the whole day. In other words, the desired sort order (the one where just "Monday" sorts before any more precise time in "Monday", and after any in "Sunday") of Time instances is dependant on the timezone context. Date arithmetic and boolean operations operate on instants in time, not periods. In this case, the start of the period is used as the value, and the result has a resolution of 0. For containment tests with the 'in' operator, the period addressed by the instance is used. The methods beginning with 'from' are constructors to create instances from various formats. Some of them are textual formats, and others are other time types commonly found in Python code. Likewise, methods beginning with 'as' return the represented time in various formats. Some of these methods should try to reflect the resolution of the instance. However, they don't yet. For formats with both a constructor and a formatter, d == fromFu(d.asFu()) @type resolution: datetime.timedelta @ivar resolution: the length of the period to which this instance could refer. 
For example, "Today, 13:38" could refer to any time between 13:38 until but not including 13:39. In this case resolution would be timedelta(minutes=1). """ # the instance variable _time is the internal representation of time. It # is a naive datetime object which is always UTC. A UTC tzinfo would be # great, if one existed, and anyway it complicates pickling. class Precision(object): MINUTES = object() SECONDS = object() _timeFormat = { Precision.MINUTES: '%I:%M %p', Precision.SECONDS: '%I:%M:%S %p'} rfc2822Weekdays = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun'] rfc2822Months = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'] resolution = datetime.timedelta.resolution # # Methods to create new instances # def __init__(self): """Return a new Time instance representing the time now. See also the fromFu methods to create new instances from other types of initializers. """ self._time = datetime.datetime.utcnow() def _fromWeekday(klass, match, tzinfo, now): weekday = klass.weekdays.index(match.group('weekday').lower()) dtnow = now.asDatetime().replace( hour=0, minute=0, second=0, microsecond=0) daysInFuture = (weekday - dtnow.weekday()) % len(klass.weekdays) if daysInFuture == 0: daysInFuture = 7 self = klass.fromDatetime(dtnow + datetime.timedelta(days=daysInFuture)) assert self.asDatetime().weekday() == weekday self.resolution = datetime.timedelta(days=1) return self def _fromTodayOrTomorrow(klass, match, tzinfo, now): dtnow = now.asDatetime().replace( hour=0, minute=0, second=0, microsecond=0) when = match.group(0).lower() if when == 'tomorrow': dtnow += datetime.timedelta(days=1) elif when == 'yesterday': dtnow -= datetime.timedelta(days=1) else: assert when == 'today' self = klass.fromDatetime(dtnow) self.resolution = datetime.timedelta(days=1) return self def _fromTime(klass, match, tzinfo, now): minute = int(match.group('minute')) hour = int(match.group('hour')) ampm = (match.group('ampm') or '').lower() if ampm: if not 1 
<= hour <= 12: raise ValueError, 'hour %i is not in 1..12' % (hour,) if hour == 12 and ampm == 'am': hour = 0 elif ampm == 'pm': hour += 12 if not 0 <= hour <= 23: raise ValueError, 'hour %i is not in 0..23' % (hour,) dtnow = now.asDatetime(tzinfo).replace(second=0, microsecond=0) dtthen = dtnow.replace(hour=hour, minute=minute) if dtthen < dtnow: dtthen += datetime.timedelta(days=1) self = klass.fromDatetime(dtthen) self.resolution = datetime.timedelta(minutes=1) return self def _fromNoonOrMidnight(klass, match, tzinfo, now): when = match.group(0).lower() if when == 'noon': hour = 12 else: assert when == 'midnight' hour = 0 dtnow = now.asDatetime(tzinfo).replace( minute=0, second=0, microsecond=0) dtthen = dtnow.replace(hour=hour) if dtthen < dtnow: dtthen += datetime.timedelta(days=1) self = klass.fromDatetime(dtthen) self.resolution = datetime.timedelta(minutes=1) return self def _fromNow(klass, match, tzinfo, now): # coerce our 'now' argument to an instant return now + datetime.timedelta(0) weekdays = ['monday', 'tuesday', 'wednesday', 'thursday', 'friday', 'saturday', 'sunday'] humanlyPatterns = [ (re.compile(r""" \b ((next|this)\s+)? (?P monday | tuesday | wednesday | thursday | friday | saturday | sunday ) \b """, re.IGNORECASE | re.VERBOSE), _fromWeekday), (re.compile(r"\b(today|tomorrow|yesterday)\b", re.IGNORECASE), _fromTodayOrTomorrow), (re.compile(r""" \b (?P\d{1,2}):(?P\d{2}) (\s*(?Pam|pm))? \b """, re.IGNORECASE | re.VERBOSE), _fromTime), (re.compile(r"\b(noon|midnight)\b", re.IGNORECASE), _fromNoonOrMidnight), (re.compile(r"\b(now)\b", re.IGNORECASE), _fromNow), ] _fromWeekday = classmethod(_fromWeekday) _fromTodayOrTomorrow = classmethod(_fromTodayOrTomorrow) _fromTime = classmethod(_fromTime) _fromNoonOrMidnight = classmethod(_fromNoonOrMidnight) _fromNow = classmethod(_fromNow) def fromHumanly(klass, humanStr, tzinfo=None, now=None): """Return a new Time instance from a string a human might type. @param humanStr: the string to be parsed. 
@param tzinfo: A tzinfo instance indicating the timezone to assume if none is specified in humanStr. If None, assume UTC. @param now: A Time instance to be considered "now" for when interpreting relative dates like "tomorrow". If None, use the real now. Total crap now, it just supports weekdays, "today" and "tomorrow" for now. This is pretty insufficient and useless, but good enough for some demo functionality, or something. """ humanStr = humanStr.strip() if now is None: now = Time() if tzinfo is None: tzinfo = FixedOffset(0, 0) for pattern, creator in klass.humanlyPatterns: match = pattern.match(humanStr) if not match \ or match.span()[1] != len(humanStr): continue try: return creator(klass, match, tzinfo, now) except ValueError: continue raise ValueError, 'could not parse date: %r' % (humanStr,) fromHumanly = classmethod(fromHumanly) iso8601pattern = re.compile(r""" ^ (?P \d{4}) ( # a year may optionally be followed by one of: # - a month # - a week # - a specific day, and an optional time # a specific day is one of: # - a month and day # - week and weekday # - a day of the year ( -? (?P \d{2}) | -? W (?P \d{2}) | ( -? (?P \d{2}) -? (?P \d{2}) | -? W (?P \d{2}) -? (?P \d) | -? (?P \d{3}) ) ( T (?P \d{2}) ( :? (?P \d{2}) ( :? (?P \d{2}) ( [\.,] (?P \d+) )? )? )? ( (?P Z) | (?P [+\-]\d{2}) ( :? (?P \d{2}) )? )? )? )? )? $""", re.VERBOSE) def fromISO8601TimeAndDate(klass, iso8601string, tzinfo=None): """Return a new Time instance from a string formated as in ISO 8601. If the given string contains no timezone, it is assumed to be in the timezone specified by the parameter `tzinfo`, or UTC if tzinfo is None. An input string with an explicit timezone will always override tzinfo. If the given iso8601string does not contain all parts of the time, they will default to 0 in the timezone given by `tzinfo`. WARNING: this function is incomplete. ISO is dumb and their standards are not free. 
Only a subset of all valid ISO 8601 dates are parsed, because I can't find a formal description of the format. However, common ones should work. """ def calculateTimezone(): if groups['zulu'] == 'Z': return FixedOffset(0, 0) else: tzhour = groups.pop('tzhour') tzmin = groups.pop('tzmin') if tzhour is not None: return FixedOffset(int(tzhour), int(tzmin or 0)) return tzinfo or FixedOffset(0, 0) def coerceGroups(): groups['month'] = groups['month1'] or groups['month2'] groups['week'] = groups['week1'] or groups['week2'] # don't include fractional seconds, because it's not an integer. defaultTo0 = ['hour', 'minute', 'second'] defaultTo1 = ['month', 'day', 'week', 'weekday', 'dayofyear'] if groups['fractionalsec'] is None: groups['fractionalsec'] = '0' for key in defaultTo0: if groups[key] is None: groups[key] = 0 for key in defaultTo1: if groups[key] is None: groups[key] = 1 groups['fractionalsec'] = float('.'+groups['fractionalsec']) for key in defaultTo0 + defaultTo1 + ['year']: groups[key] = int(groups[key]) for group, min, max in [ # some years have only 52 weeks ('week', 1, 53), ('weekday', 1, 7), ('month', 1, 12), ('day', 1, 31), ('hour', 0, 24), ('minute', 0, 59), # Sometime in the 22nd century AD, two leap seconds will be # required every year. In the 25th century AD, four every # year. We'll ignore that for now though because it would be # tricky to get right and we certainly don't need it for our # target applications. In other words, post-singularity # Martian users, please do not rely on this code for # compatibility with Greater Galactic Protectorate of Earth # date/time formatting! Apologies, but no library I know of in # Python is sufficient for processing their dates and times # without ADA bindings to get the radiation-safety zone counter # correct. 
-glyph ('second', 0, 61), # don't forget leap years ('dayofyear', 1, 366)]: if not min <= groups[group] <= max: raise ValueError, '%s must be in %i..%i' % (group, min, max) def determineResolution(): if match.group('fractionalsec') is not None: return max(datetime.timedelta.resolution, datetime.timedelta( microseconds=1 * 10 ** -len( match.group('fractionalsec')) * 1000000)) for testGroup, resolution in [ ('second', datetime.timedelta(seconds=1)), ('minute', datetime.timedelta(minutes=1)), ('hour', datetime.timedelta(hours=1)), ('weekday', datetime.timedelta(days=1)), ('dayofyear', datetime.timedelta(days=1)), ('day', datetime.timedelta(days=1)), ('week1', datetime.timedelta(weeks=1)), ('week2', datetime.timedelta(weeks=1))]: if match.group(testGroup) is not None: return resolution if match.group('month1') is not None \ or match.group('month2') is not None: if self._time.month == 12: return datetime.timedelta(days=31) nextMonth = self._time.replace(month=self._time.month+1) return nextMonth - self._time else: nextYear = self._time.replace(year=self._time.year+1) return nextYear - self._time def calculateDtime(tzinfo): """Calculate a datetime for the start of the addressed period.""" if match.group('week1') is not None \ or match.group('week2') is not None: if not 0 < groups['week'] <= 53: raise ValueError( 'week must be in 1..53 (was %i)' % (groups['week'],)) dtime = datetime.datetime( groups['year'], 1, 4, groups['hour'], groups['minute'], groups['second'], int(round(groups['fractionalsec'] * 1000000)), tzinfo=tzinfo ) dtime -= datetime.timedelta(days = dtime.weekday()) dtime += datetime.timedelta( days = (groups['week']-1) * 7 + groups['weekday'] - 1) if dtime.isocalendar() != ( groups['year'], groups['week'], groups['weekday']): # actually the problem could be an error in my logic, but # nothing should cause this but requesting week 53 of a # year with 52 weeks. 
raise ValueError('year %04i has no week %02i' % (groups['year'], groups['week'])) return dtime if match.group('dayofyear') is not None: dtime = datetime.datetime( groups['year'], 1, 1, groups['hour'], groups['minute'], groups['second'], int(round(groups['fractionalsec'] * 1000000)), tzinfo=tzinfo ) dtime += datetime.timedelta(days=groups['dayofyear']-1) if dtime.year != groups['year']: raise ValueError( 'year %04i has no day of year %03i' % (groups['year'], groups['dayofyear'])) return dtime else: return datetime.datetime( groups['year'], groups['month'], groups['day'], groups['hour'], groups['minute'], groups['second'], int(round(groups['fractionalsec'] * 1000000)), tzinfo=tzinfo ) match = klass.iso8601pattern.match(iso8601string) if match is None: raise ValueError( '%r could not be parsed as an ISO 8601 date and time' % (iso8601string,)) groups = match.groupdict() coerceGroups() if match.group('hour') is not None: timezone = calculateTimezone() else: timezone = None self = klass.fromDatetime(calculateDtime(timezone)) self.resolution = determineResolution() return self fromISO8601TimeAndDate = classmethod(fromISO8601TimeAndDate) def fromStructTime(klass, structTime, tzinfo=None): """Return a new Time instance from a time.struct_time. If tzinfo is None, structTime is in UTC. Otherwise, tzinfo is a datetime.tzinfo instance coresponding to the timezone in which structTime is. Many of the functions in the standard time module return these things. This will also work with a plain 9-tuple, for parity with the time module. The last three elements, or tm_wday, tm_yday, and tm_isdst are ignored. """ dtime = datetime.datetime(tzinfo=tzinfo, *structTime[:6]) self = klass.fromDatetime(dtime) self.resolution = datetime.timedelta(seconds=1) return self fromStructTime = classmethod(fromStructTime) def fromDatetime(klass, dtime): """Return a new Time instance from a datetime.datetime instance. If the datetime instance does not have an associated timezone, it is assumed to be UTC. 
""" self = klass.__new__(klass) if dtime.tzinfo is not None: self._time = dtime.astimezone(FixedOffset(0, 0)).replace(tzinfo=None) else: self._time = dtime self.resolution = datetime.timedelta.resolution return self fromDatetime = classmethod(fromDatetime) def fromPOSIXTimestamp(klass, secs): """Return a new Time instance from seconds since the POSIX epoch. The POSIX epoch is midnight Jan 1, 1970 UTC. According to POSIX, leap seconds don't exist, so one UTC day is exactly 86400 seconds, even if it wasn't. @param secs: a number of seconds, represented as an integer, long or float. """ self = klass.fromDatetime(_EPOCH + datetime.timedelta(seconds=secs)) self.resolution = datetime.timedelta() return self fromPOSIXTimestamp = classmethod(fromPOSIXTimestamp) def fromRFC2822(klass, rfc822string): """ Return a new Time instance from a string formated as described in RFC 2822. @type rfc822string: str @raise ValueError: if the timestamp is not formatted properly (or if certain obsoleted elements of the specification are used). @return: a new L{Time} """ # parsedate_tz is going to give us a "struct_time plus", a 10-tuple # containing the 9 values a struct_time would, i.e.: (tm_year, tm_mon, # tm_day, tm_hour, tm_min, tm_sec, tm_wday, tm_yday, tm_isdst), plus a # bonus "offset", which is an offset (in _seconds_, of all things). maybeStructTimePlus = parsedate_tz(rfc822string) if maybeStructTimePlus is None: raise ValueError, 'could not parse RFC 2822 date %r' % (rfc822string,) structTimePlus = sanitizeStructTime(maybeStructTimePlus) offsetInSeconds = structTimePlus[-1] if offsetInSeconds is None: offsetInSeconds = 0 self = klass.fromStructTime( structTimePlus, FixedOffset( hours=0, minutes=offsetInSeconds // 60)) self.resolution = datetime.timedelta(seconds=1) return self fromRFC2822 = classmethod(fromRFC2822) # # Methods to produce various formats # def asPOSIXTimestamp(self): """Return this time as a timestamp as specified by POSIX. 
This timestamp is the count of the number of seconds since Midnight, Jan 1 1970 UTC, ignoring leap seconds. """ mytimedelta = self._time - _EPOCH return _timedeltaToSeconds(mytimedelta) def asDatetime(self, tzinfo=None): """Return this time as an aware datetime.datetime instance. The returned datetime object has the specified tzinfo, or a tzinfo describing UTC if the tzinfo parameter is None. """ if tzinfo is None: tzinfo = FixedOffset(0, 0) if not self.isTimezoneDependent(): return self._time.replace(tzinfo=tzinfo) else: return self._time.replace(tzinfo=FixedOffset(0, 0)).astimezone(tzinfo) def asNaiveDatetime(self, tzinfo=None): """Return this time as a naive datetime.datetime instance. The returned datetime object has its tzinfo set to None, but is in the timezone given by the tzinfo parameter, or UTC if the parameter is None. """ return self.asDatetime(tzinfo).replace(tzinfo=None) def asRFC2822(self, tzinfo=None, includeDayOfWeek=True): """Return this Time formatted as specified in RFC 2822. RFC 2822 specifies the format of email messages. RFC 2822 says times in email addresses should reflect the local timezone. If tzinfo is a datetime.tzinfo instance, the returned formatted string will reflect that timezone. Otherwise, the timezone will be '-0000', which RFC 2822 defines as UTC, but with an unknown local timezone. RFC 2822 states that the weekday is optional. The parameter includeDayOfWeek indicates whether or not to include it. """ dtime = self.asDatetime(tzinfo) if tzinfo is None: rfcoffset = '-0000' else: rfcoffset = '%s%02i%02i' % _timedeltaToSignHrMin(dtime.utcoffset()) rfcstring = '' if includeDayOfWeek: rfcstring += self.rfc2822Weekdays[dtime.weekday()] + ', ' rfcstring += '%i %s %4i %02i:%02i:%02i %s' % ( dtime.day, self.rfc2822Months[dtime.month - 1], dtime.year, dtime.hour, dtime.minute, dtime.second, rfcoffset) return rfcstring def asRFC1123(self): """ Return the time formatted as specified in RFC 1123. 
Useful when setting the max-age value of an HTTP cookie, which requires the timezone be represented as the string 'GMT', rather than an offset, e.g., '-0000' """ return formatdate(self.asPOSIXTimestamp(), False, True) def asISO8601TimeAndDate(self, includeDelimiters=True, tzinfo=None, includeTimezone=True): """Return this time formatted as specified by ISO 8861. ISO 8601 allows optional dashes to delimit dates and colons to delimit times. The parameter includeDelimiters (default True) defines the inclusion of these delimiters in the output. If tzinfo is a datetime.tzinfo instance, the output time will be in the timezone given. If it is None (the default), then the timezone string will not be included in the output, and the time will be in UTC. The includeTimezone parameter coresponds to the inclusion of an explicit timezone. The default is True. """ if not self.isTimezoneDependent(): tzinfo = None dtime = self.asDatetime(tzinfo) if includeDelimiters: dateSep = '-' timeSep = ':' else: dateSep = timeSep = '' if includeTimezone: if tzinfo is None: timezone = '+00%s00' % (timeSep,) else: sign, hour, min = _timedeltaToSignHrMin(dtime.utcoffset()) timezone = '%s%02i%s%02i' % (sign, hour, timeSep, min) else: timezone = '' microsecond = ('%06i' % (dtime.microsecond,)).rstrip('0') if microsecond: microsecond = '.' 
+ microsecond parts = [ ('%04i' % (dtime.year,), datetime.timedelta(days=366)), ('%s%02i' % (dateSep, dtime.month), datetime.timedelta(days=31)), ('%s%02i' % (dateSep, dtime.day), datetime.timedelta(days=1)), ('T', datetime.timedelta(hours=1)), ('%02i' % (dtime.hour,), datetime.timedelta(hours=1)), ('%s%02i' % (timeSep, dtime.minute), datetime.timedelta(minutes=1)), ('%s%02i' % (timeSep, dtime.second), datetime.timedelta(seconds=1)), (microsecond, datetime.timedelta(microseconds=1)), (timezone, datetime.timedelta(hours=1)) ] formatted = '' for part, minResolution in parts: if self.resolution <= minResolution: formatted += part return formatted def asStructTime(self, tzinfo=None): """Return this time represented as a time.struct_time. tzinfo is a datetime.tzinfo instance coresponding to the desired timezone of the output. If is is the default None, UTC is assumed. """ dtime = self.asDatetime(tzinfo) if tzinfo is None: return dtime.utctimetuple() else: return dtime.timetuple() def asHumanly(self, tzinfo=None, now=None, precision=Precision.MINUTES): """Return this time as a short string, tailored to the current time. Parts of the date that can be assumed are omitted. Consequently, the output string depends on the current time. This is the format used for displaying dates in most user visible places in the quotient web UI. By default, the current time is determined by the system clock. The current time used for formatting the time can be changed by providing a Time instance as the parameter 'now'. @param precision: The smallest unit of time that will be represented in the returned string. Valid values are L{Time.Precision.MINUTES} and L{Time.Precision.SECONDS}. @raise InvalidPrecision: if the specified precision is not either L{Time.Precision.MINUTES} or L{Time.Precision.SECONDS}. 
""" try: timeFormat = Time._timeFormat[precision] except KeyError: raise InvalidPrecision( 'Use Time.Precision.MINUTES or Time.Precision.SECONDS') if now is None: now = Time().asDatetime(tzinfo) else: now = now.asDatetime(tzinfo) dtime = self.asDatetime(tzinfo) # Same day? if dtime.date() == now.date(): if self.isAllDay(): return 'all day' return dtime.strftime(timeFormat).lower() else: res = str(dtime.date().day) + dtime.strftime(' %b') # day + month # Different year? if not dtime.date().year == now.date().year: res += dtime.strftime(' %Y') if not self.isAllDay(): res += dtime.strftime(', %s' % (timeFormat,)).lower() return res # # methods to return related times # def getBounds(self, tzinfo=None): """ Return a pair describing the bounds of self. This returns a pair (min, max) of Time instances. It is not quite the same as (self, self + self.resolution). This is because timezones are insignificant for instances with a resolution greater or equal to 1 day. To illustrate the problem, consider a Time instance:: T = Time.fromHumanly('today', tzinfo=anything) This will return an equivalent instance independent of the tzinfo used. The hour, minute, and second of this instance are 0, and its resolution is one day. Now say we have a sorted list of times, and we want to get all times for 'today', where whoever said 'today' is in a timezone that's 5 hours ahead of UTC. The start of 'today' in this timezone is UTC 05:00. The example instance T above is before this, but obviously it is today. The min and max times this returns are such that all potentially matching instances are within this range. However, this range might contain unmatching instances. As an example of this, if 'today' is April first 2005, then Time.fromISO8601TimeAndDate('2005-04-01T00:00:00') sorts in the same place as T from above, but is not in the UTC+5 'today'. TIME IS FUN! 
""" if self.resolution >= datetime.timedelta(days=1) \ and tzinfo is not None: time = self._time.replace(tzinfo=tzinfo) else: time = self._time return ( min(self.fromDatetime(time), self.fromDatetime(self._time)), max(self.fromDatetime(time + self.resolution), self.fromDatetime(self._time + self.resolution)) ) def oneDay(self): """Return a Time instance representing the day of the start of self. The returned new instance will be set to midnight of the day containing the first instant of self in the specified timezone, and have a resolution of datetime.timedelta(days=1). """ day = self.__class__.fromDatetime(self.asDatetime().replace( hour=0, minute=0, second=0, microsecond=0)) day.resolution = datetime.timedelta(days=1) return day # # useful predicates # def isAllDay(self): """Return True iff this instance represents exactly all day.""" return self.resolution == datetime.timedelta(days=1) def isTimezoneDependent(self): """Return True iff timezone is relevant for this instance. Timezone is only relevent for instances with a resolution better than one day. 
""" return self.resolution < datetime.timedelta(days=1) # # other magic methods # def __cmp__(self, other): if not isinstance(other, Time): raise TypeError("Cannot meaningfully compare %r with %r" % (self, other)) return cmp(self._time, other._time) def __eq__(self, other): if isinstance(other, Time): return cmp(self._time, other._time) == 0 return False def __ne__(self, other): return not (self == other) def __repr__(self): return 'extime.Time.fromDatetime(%r)' % (self._time,) __str__ = asISO8601TimeAndDate def __contains__(self, other): """Test if another Time instance is entirely within the period addressed by this one.""" if not isinstance(other, Time): raise TypeError( '%r is not a Time instance; can not test for containment' % (other,)) if other._time < self._time: return False if self._time + self.resolution < other._time + other.resolution: return False return True def __add__(self, addend): if not isinstance(addend, datetime.timedelta): raise TypeError, 'expected a datetime.timedelta instance' return Time.fromDatetime(self._time + addend) def __sub__(self, subtrahend): """ Implement subtraction of an interval or another time from this one. @type subtrahend: L{datetime.timedelta} or L{Time} @param subtrahend: The object to be subtracted from this one. @rtype: L{datetime.timedelta} or L{Time} @return: If C{subtrahend} is a L{datetime.timedelta}, the result is a L{Time} instance which is offset from this one by that amount. If C{subtrahend} is a L{Time}, the result is a L{datetime.timedelta} instance which gives the difference between it and this L{Time} instance. 
""" if isinstance(subtrahend, datetime.timedelta): return Time.fromDatetime(self._time - subtrahend) if isinstance(subtrahend, Time): return self.asDatetime() - subtrahend.asDatetime() return NotImplemented Epsilon-0.7.1/epsilon/hotfix.py0000644000175000017500000000657012461004115020436 0ustar mithrandimithrandi00000000000000 import inspect class NoSuchHotfix(Exception): """ Man you must be pretty stupid. """ _alreadyInstalled = set() def require(packageName, fixName): if (packageName, fixName) in _alreadyInstalled: return if (packageName, fixName) == ('twisted', 'filepath_copyTo'): from twisted.python import filepath if filepath.FilePath('a') != filepath.FilePath('a'): from epsilon.hotfixes import filepath_copyTo filepath_copyTo.install() elif (packageName, fixName) == ('twisted', 'timeoutmixin_calllater'): from twisted.protocols import policies if not hasattr(policies.TimeoutMixin, 'callLater'): from epsilon.hotfixes import timeoutmixin_calllater timeoutmixin_calllater.install() elif (packageName, fixName) == ('twisted', 'delayedcall_seconds'): from twisted.internet import base args = inspect.getargs(base.DelayedCall.__init__.func_code)[0] if 'seconds' not in args: from epsilon.hotfixes import delayedcall_seconds delayedcall_seconds.install() elif (packageName, fixName) == ('twisted', 'deferredgenerator_tfailure'): from twisted.internet import defer result = [] def test(): d = defer.waitForDeferred(defer.succeed(1)) yield d result.append(d.getResult()) defer.deferredGenerator(test)() if result == [1]: from epsilon.hotfixes import deferredgenerator_tfailure deferredgenerator_tfailure.install() else: assert result == [None] elif (packageName, fixName) == ("twisted", "proto_helpers_stringtransport"): from twisted.test.proto_helpers import StringTransport st = StringTransport() try: st.write(u'foo') except TypeError, e: pass else: from epsilon.hotfixes import proto_helpers_stringtransport proto_helpers_stringtransport.install() elif (packageName, fixName) == 
("twisted", "internet_task_Clock"): from twisted.internet.task import Clock from twisted.internet import base from twisted import version from epsilon.hotfixes import internet_task_clock if internet_task_clock.clockIsBroken(): internet_task_clock.install() elif (packageName, fixName) == ("twisted", "trial_assertwarns"): from twisted.trial.unittest import TestCase if not hasattr(TestCase, "failUnlessWarns"): from epsilon.hotfixes import trial_assertwarns trial_assertwarns.install() elif (packageName, fixName) == ("twisted", "plugin_package_paths"): try: from twisted.plugin import pluginPackagePaths except ImportError: from epsilon.hotfixes import plugin_package_paths plugin_package_paths.install() elif (packageName, fixName) == ("twisted", "loopbackasync_reentrancy"): # This one is really hard to detect reasonably. Invoking the code # involves triggering the reactor, which it would be good to avoid. from twisted import version if (version.major, version.minor) < (8, 2): from epsilon.hotfixes import loopbackasync_reentrancy loopbackasync_reentrancy.install() else: raise NoSuchHotfix(packageName, fixName) _alreadyInstalled.add((packageName, fixName)) Epsilon-0.7.1/epsilon/iepsilon.py0000644000175000017500000000145312461004115020752 0ustar mithrandimithrandi00000000000000# Copyright (c) 2008 Divmod. See LICENSE for details. """ Epsilon interfaces. """ from zope.interface import Attribute from twisted.cred.credentials import ICredentials class IOneTimePad(ICredentials): """ A type of opaque credential for authenticating users, which can be used only a single time. This interface should also be responsible for authenticating. See #2784. """ padValue = Attribute( """ C{str} giving the value of the one-time pad. The value will be compared by a L{twisted.cred.checkers.ICredentialsChecker} (e.g. L{epsilon.ampauth.OneTimePadChecker}) against all valid one-time pads. 
If there is a match, login will be successful and the pad will be invalidated (further attempts to use it will fail). """) Epsilon-0.7.1/epsilon/juice.py0000644000175000017500000010152612461004115020231 0ustar mithrandimithrandi00000000000000# -*- test-case-name: epsilon.test.test_juice -*- # Copyright 2005 Divmod, Inc. See LICENSE file for details __metaclass__ = type import warnings, pprint from twisted.internet.main import CONNECTION_LOST from twisted.internet.defer import Deferred, maybeDeferred, fail from twisted.internet.protocol import ServerFactory, ClientFactory from twisted.internet.ssl import Certificate from twisted.python.failure import Failure from twisted.python import log, filepath from epsilon.liner import LineReceiver from epsilon import extime ASK = '_ask' ANSWER = '_answer' COMMAND = '_command' ERROR = '_error' ERROR_CODE = '_error_code' ERROR_DESCRIPTION = '_error_description' LENGTH = '_length' BODY = 'body' debug = False class JuiceBox(dict): """ I am a packet in the JUICE protocol. 
""" def __init__(self, __body='', **kw): self.update(kw) if __body: assert isinstance(__body, str), "body must be a string: %r" % ( repr(__body),) self['body'] = __body def body(): def get(self): warnings.warn("body attribute of boxes is now just a regular field", stacklevel=2) return self['body'] def set(self, newbody): warnings.warn("body attribute of boxes is now just a regular field", stacklevel=2) self['body'] = newbody return get,set body = property(*body()) def copy(self): newBox = self.__class__() newBox.update(self) return newBox def serialize(self, delimiter='\r\n', escaped='\r\n '): assert LENGTH not in self L = [] for (k, v) in self.iteritems(): if k == BODY: k = LENGTH v = str(len(self[BODY])) L.append(k.replace('_', '-').title()) L.append(': ') L.append(v.replace(delimiter, escaped)) L.append(delimiter) L.append(delimiter) if BODY in self: L.append(self[BODY]) bytes = ''.join(L) return bytes def sendTo(self, proto): """ Serialize and send this box to a Juice instance. By the time it is being sent, several keys are required. I must have exactly ONE of:: -ask -answer -error If the '-ask' header is set, then the '-command' header must also be set. 
""" proto.sendPacket(self) # juice.Box => JuiceBox Box = JuiceBox class TLSBox(JuiceBox): def __repr__(self): return 'TLS(**%s)' % (super(TLSBox, self).__repr__(),) def __init__(self, __certificate, __verify=None, __sslstarted=None, **kw): super(TLSBox, self).__init__(**kw) self.certificate = __certificate self.verify = __verify self.sslstarted = __sslstarted def sendTo(self, proto): super(TLSBox, self).sendTo(proto) if self.verify is None: proto.startTLS(self.certificate) else: proto.startTLS(self.certificate, self.verify) if self.sslstarted is not None: self.sslstarted() class QuitBox(JuiceBox): def __repr__(self): return 'Quit(**%s)' % (super(QuitBox, self).__repr__(),) def sendTo(self, proto): super(QuitBox, self).sendTo(proto) proto.transport.loseConnection() class _SwitchBox(JuiceBox): def __repr__(self): return 'Switch(**%s)' % (super(_SwitchBox, self).__repr__(),) def __init__(self, __proto, **kw): super(_SwitchBox, self).__init__(**kw) self.innerProto = __proto def sendTo(self, proto): super(_SwitchBox, self).sendTo(proto) proto._switchTo(self.innerProto) class NegotiateBox(JuiceBox): def __repr__(self): return 'Negotiate(**%s)' % (super(NegotiateBox, self).__repr__(),) def sendTo(self, proto): super(NegotiateBox, self).sendTo(proto) proto._setProtocolVersion(int(self['version'])) class JuiceError(Exception): pass class RemoteJuiceError(JuiceError): """ This error indicates that something went wrong on the remote end of the connection, and the error was serialized and transmitted to you. """ def __init__(self, errorCode, description, fatal=False): """Create a remote error with an error code and description. 
""" Exception.__init__(self, "Remote[%s]: %s" % (errorCode, description)) self.errorCode = errorCode self.description = description self.fatal = fatal class UnhandledRemoteJuiceError(RemoteJuiceError): def __init__(self, description): errorCode = "UNHANDLED" RemoteJuiceError.__init__(self, errorCode, description) class JuiceBoxError(JuiceError): pass class MalformedJuiceBox(JuiceBoxError): pass class UnhandledCommand(JuiceError): pass class IncompatibleVersions(JuiceError): pass class _Transactor: def __init__(self, store, callable): self.store = store self.callable = callable def __call__(self, box): return self.store.transact(self.callable, box) def __repr__(self): return '' % (self.store, self.callable) class DispatchMixin: baseDispatchPrefix = 'juice_' autoDispatchPrefix = 'command_' wrapper = None def _auto(self, aCallable, proto, namespace=None): if aCallable is None: return None command = aCallable.command if namespace not in command.namespaces: # if you're in the wrong namespace, you are very likely not allowed # to invoke the command you are trying to invoke. some objects # have commands exposed in a separate namespace for security # reasons, since the security model is a role : namespace mapping. 
log.msg('WRONG NAMESPACE: %r, %r' % (namespace, command.namespaces)) return None def doit(box): kw = stringsToObjects(box, command.arguments, proto) for name, extraArg in command.extra: kw[name] = extraArg.fromTransport(proto.transport) # def checkIsDict(result): # if not isinstance(result, dict): # raise RuntimeError("%r returned %r, not dictionary" % ( # aCallable, result)) # return result def checkKnownErrors(error): key = error.trap(*command.allErrors) code = command.allErrors[key] desc = str(error.value) return Failure(RemoteJuiceError( code, desc, error in command.fatalErrors)) return maybeDeferred(aCallable, **kw).addCallback( command.makeResponse, proto).addErrback( checkKnownErrors) return doit def _wrap(self, aCallable): if aCallable is None: return None wrap = self.wrapper if wrap is not None: return wrap(aCallable) else: return aCallable def normalizeCommand(self, cmd): """Return the canonical form of a command. """ return cmd.upper().strip().replace('-', '_') def lookupFunction(self, proto, name, namespace): """Return a callable to invoke when executing the named command. """ # Try to find a method to be invoked in a transaction first # Otherwise fallback to a "regular" method fName = self.autoDispatchPrefix + name fObj = getattr(self, fName, None) if fObj is not None: # pass the namespace along return self._auto(fObj, proto, namespace) assert namespace is None, 'Old-style parsing' # Fall back to simplistic command dispatching - we probably want to get # rid of this eventually, there's no reason to do extra work and write # fewer docs all the time. 
fName = self.baseDispatchPrefix + name return getattr(self, fName, None) def dispatchCommand(self, proto, cmd, box, namespace=None): fObj = self.lookupFunction(proto, self.normalizeCommand(cmd), namespace) if fObj is None: return fail(UnhandledCommand(cmd)) return maybeDeferred(self._wrap(fObj), box) PYTHON_KEYWORDS = [ 'and', 'del', 'for', 'is', 'raise', 'assert', 'elif', 'from', 'lambda', 'return', 'break', 'else', 'global', 'not', 'try', 'class', 'except', 'if', 'or', 'while', 'continue', 'exec', 'import', 'pass', 'yield', 'def', 'finally', 'in', 'print'] def normalizeKey(key): lkey = key.lower().replace('-', '_') if lkey in PYTHON_KEYWORDS: return lkey.title() return lkey def parseJuiceHeaders(lines): """ Create a JuiceBox from a list of header lines. @param lines: a list of lines. """ b = JuiceBox() bodylen = 0 key = None for L in lines: if L[0] == ' ': # continuation assert key is not None b[key] += '\r\n'+L[1:] continue parts = L.split(': ', 1) if len(parts) != 2: raise MalformedJuiceBox("Wrong number of parts: %r" % (L,)) key, value = parts key = normalizeKey(key) b[key] = value return int(b.pop(LENGTH, 0)), b class JuiceParserBase(DispatchMixin): def __init__(self): self._outstandingRequests = {} def _puke(self, failure): log.msg("Juice server or network failure " "unhandled by client application:") log.err(failure) log.msg( "Dropping connection! 
" "To avoid, add errbacks to ALL remote commands!") if self.transport is not None: self.transport.loseConnection() _counter = 0L def _nextTag(self): self._counter += 1 return '%x' % (self._counter,) def failAllOutgoing(self, reason): OR = self._outstandingRequests.items() self._outstandingRequests = None # we can never send another request for key, value in OR: value.errback(reason) def juiceBoxReceived(self, box): if debug: log.msg("Juice receive: %s" % pprint.pformat(dict(box.iteritems()))) if ANSWER in box: question = self._outstandingRequests.pop(box[ANSWER]) question.addErrback(self._puke) self._wrap(question.callback)(box) elif ERROR in box: question = self._outstandingRequests.pop(box[ERROR]) question.addErrback(self._puke) self._wrap(question.errback)( Failure(RemoteJuiceError(box[ERROR_CODE], box[ERROR_DESCRIPTION]))) elif COMMAND in box: cmd = box[COMMAND] def sendAnswer(answerBox): if ASK not in box: return if self.transport is None: return answerBox[ANSWER] = box[ASK] answerBox.sendTo(self) def sendError(error): if ASK not in box: return error if error.check(RemoteJuiceError): code = error.value.errorCode desc = error.value.description if error.value.fatal: errorBox = QuitBox() else: errorBox = JuiceBox() else: errorBox = QuitBox() log.err(error) # here is where server-side logging happens # if the error isn't handled code = 'UNHANDLED' desc = "Unhandled Remote System Exception " errorBox[ERROR] = box[ASK] errorBox[ERROR_DESCRIPTION] = desc errorBox[ERROR_CODE] = code if self.transport is not None: errorBox.sendTo(self) return None # intentionally stop the error here: don't log the # traceback if it's handled, do log it (earlier) if # it isn't self.dispatchCommand(self, cmd, box).addCallbacks(sendAnswer, sendError ).addErrback(self._puke) else: raise RuntimeError( "Empty packet received over connection-oriented juice: %r" % (box,)) def sendBoxCommand(self, command, box, requiresAnswer=True): """ Send a command across the wire with the given 
C{juice.Box}. Returns a Deferred which fires with the response C{juice.Box} when it is received, or fails with a C{juice.RemoteJuiceError} if an error is received. If the Deferred fails and the error is not handled by the caller of this method, the failure will be logged and the connection dropped. """ if self._outstandingRequests is None: return fail(CONNECTION_LOST) box[COMMAND] = command tag = self._nextTag() if requiresAnswer: box[ASK] = tag result = self._outstandingRequests[tag] = Deferred() else: result = None box.sendTo(self) return result class Argument: optional = False def __init__(self, optional=False): self.optional = optional def retrieve(self, d, name): if self.optional: value = d.get(name) if value is not None: del d[name] else: value = d.pop(name) return value def fromBox(self, name, strings, objects, proto): st = self.retrieve(strings, name) if self.optional and st is None: objects[name] = None else: objects[name] = self.fromStringProto(st, proto) def toBox(self, name, strings, objects, proto): obj = self.retrieve(objects, name) if self.optional and obj is None: # strings[name] = None return else: strings[name] = self.toStringProto(obj, proto) def fromStringProto(self, inString, proto): return self.fromString(inString) def toStringProto(self, inObject, proto): return self.toString(inObject) def fromString(self, inString): raise NotImplementedError() def toString(self, inObject): raise NotImplementedError() class JuiceList(Argument): def __init__(self, subargs): self.subargs = subargs def fromStringProto(self, inString, proto): boxes = parseString(inString) values = [stringsToObjects(box, self.subargs, proto) for box in boxes] return values def toStringProto(self, inObject, proto): return ''.join([objectsToStrings( objects, self.subargs, Box(), proto ).serialize() for objects in inObject]) class ListOf(Argument): def __init__(self, subarg, delimiter=', '): self.subarg = subarg self.delimiter = delimiter def fromStringProto(self, inString, proto): 
strings = inString.split(self.delimiter) L = [self.subarg.fromStringProto(string, proto) for string in strings] return L def toStringProto(self, inObject, proto): L = [] for inSingle in inObject: outString = self.subarg.toStringProto(inSingle, proto) assert self.delimiter not in outString L.append(outString) return self.delimiter.join(L) class Integer(Argument): fromString = int def toString(self, inObject): return str(int(inObject)) class String(Argument): def toString(self, inObject): return inObject def fromString(self, inString): return inString class EncodedString(Argument): def __init__(self, encoding): self.encoding = encoding def toString(self, inObject): return inObject.encode(self.encoding) def fromString(self, inString): return inString.decode(self.encoding) # Temporary backwards compatibility for Exponent Body = String class Unicode(String): def toString(self, inObject): # assert isinstance(inObject, unicode) return String.toString(self, inObject.encode('utf-8')) def fromString(self, inString): # assert isinstance(inString, str) return String.fromString(self, inString).decode('utf-8') class Path(Unicode): def fromString(self, inString): return filepath.FilePath(Unicode.fromString(self, inString)) def toString(self, inObject): return Unicode.toString(self, inObject.path) class Float(Argument): fromString = float toString = str class Base64Binary(Argument): def toString(self, inObject): return inObject.encode('base64').replace('\n', '') def fromString(self, inString): return inString.decode('base64') class Time(Argument): def toString(self, inObject): return inObject.asISO8601TimeAndDate() def fromString(self, inString): return extime.Time.fromISO8601TimeAndDate(inString) class ExtraArg: def fromTransport(self, inTransport): raise NotImplementedError() class Peer(ExtraArg): def fromTransport(self, inTransport): return inTransport.getQ2QPeer() class PeerDomain(ExtraArg): def fromTransport(self, inTransport): return inTransport.getQ2QPeer().domain class 
PeerUser(ExtraArg): def fromTransport(self, inTransport): return inTransport.getQ2QPeer().resource class Host(ExtraArg): def fromTransport(self, inTransport): return inTransport.getQ2QHost() class HostDomain(ExtraArg): def fromTransport(self, inTransport): return inTransport.getQ2QHost().domain class HostUser(ExtraArg): def fromTransport(self, inTransport): return inTransport.getQ2QHost().resource class Boolean(Argument): def fromString(self, inString): if inString == 'True': return True elif inString == 'False': return False else: raise RuntimeError("Bad boolean value: %r" % (inString,)) def toString(self, inObject): if inObject: return 'True' else: return 'False' class Command: class __metaclass__(type): def __new__(cls, name, bases, attrs): re = attrs['reverseErrors'] = {} er = attrs['allErrors'] = {} for v, k in attrs.get('errors',{}).iteritems(): re[k] = v er[v] = k for v, k in attrs.get('fatalErrors',{}).iteritems(): re[k] = v er[v] = k return type.__new__(cls, name, bases, attrs) arguments = [] response = [] extra = [] namespaces = [None] # This is set to [None] on purpose: None means # "no namespace", not "empty list". "empty # list" will make your command invalid in _all_ # namespaces, effectively uncallable. 
errors = {} fatalErrors = {} commandType = Box responseType = Box def commandName(): def get(self): return self.__class__.__name__ raise NotImplementedError("Missing command name") return get, commandName = property(*commandName()) def __init__(self, **kw): self.structured = kw givenArgs = [normalizeKey(k) for k in kw.keys()] forgotten = [] for name, arg in self.arguments: if normalizeKey(name) not in givenArgs and not arg.optional: forgotten.append(normalizeKey(name)) # for v in kw.itervalues(): # if v is None: # from pprint import pformat # raise RuntimeError("ARGH: %s" % pformat(kw)) if forgotten: if len(forgotten) == 1: plural = 'an argument' else: plural = 'some arguments' raise RuntimeError("You forgot %s to %r: %s" % ( plural, self.commandName, ', '.join(forgotten))) forgotten = [] def makeResponse(cls, objects, proto): try: return objectsToStrings(objects, cls.response, cls.responseType(), proto) except: log.msg("Exception in %r.makeResponse" % (cls,)) raise makeResponse = classmethod(makeResponse) def do(self, proto, namespace=None, requiresAnswer=True): if namespace is not None: cmd = namespace + ":" + self.commandName else: cmd = self.commandName def _massageError(error): error.trap(RemoteJuiceError) rje = error.value return Failure(self.reverseErrors.get(rje.errorCode, UnhandledRemoteJuiceError)(rje.description)) d = proto.sendBoxCommand( cmd, objectsToStrings(self.structured, self.arguments, self.commandType(), proto), requiresAnswer) if requiresAnswer: d.addCallback(stringsToObjects, self.response, proto) d.addCallback(self.addExtra, proto.transport) d.addErrback(_massageError) return d def addExtra(self, d, transport): for name, extraArg in self.extra: d[name] = extraArg.fromTransport(transport) return d class ProtocolSwitchCommand(Command): """Use this command to switch from something Juice-derived to a different protocol mid-connection. This can be useful to use juice as the connection-startup negotiation phase. 
Since TLS is a different layer entirely, you can use Juice to negotiate the security parameters of your connection, then switch to a different protocol, and the connection will remain secured. """ def __init__(self, __protoToSwitchToFactory, **kw): self.protoToSwitchToFactory = __protoToSwitchToFactory super(ProtocolSwitchCommand, self).__init__(**kw) def makeResponse(cls, innerProto, proto): return _SwitchBox(innerProto) makeResponse = classmethod(makeResponse) def do(self, proto, namespace=None): d = super(ProtocolSwitchCommand, self).do(proto) proto._lock() def switchNow(ign): innerProto = self.protoToSwitchToFactory.buildProtocol(proto.transport.getPeer()) proto._switchTo(innerProto, self.protoToSwitchToFactory) return ign def die(ign): proto.transport.loseConnection() return ign def handle(ign): self.protoToSwitchToFactory.clientConnectionFailed(None, Failure(CONNECTION_LOST)) return ign return d.addCallbacks(switchNow, handle).addErrback(die) class Negotiate(Command): commandName = 'Negotiate' arguments = [('versions', ListOf(Integer()))] response = [('version', Integer())] responseType = NegotiateBox class Juice(LineReceiver, JuiceParserBase): """ JUICE (JUice Is Concurrent Events) is a simple connection-oriented request/response protocol. Packets, or "boxes", are collections of RFC2822-inspired headers, plus a body. Note that this is NOT a literal interpretation of any existing RFC, 822, 2822 or otherwise, but a simpler version that does not do line continuations, does not specify any particular format for header values, dispatches semantic meanings of most headers on the -Command header rather than giving them global meaning, and allows multiple sets of headers (messages, or JuiceBoxes) on a connection. All headers whose names begin with a dash ('-') are reserved for use by the protocol. All others are for application use - their meaning depends on the value of the "-Command" header. 
""" protocolName = 'juice-base' hostCertificate = None MAX_LENGTH = 1024 * 1024 isServer = property(lambda self: self._issueGreeting, doc=""" True if this is a juice server, e.g. it is going to issue or has issued a server greeting upon connection. """) isClient = property(lambda self: not self._issueGreeting, doc=""" True if this is a juice server, e.g. it is not going to issue or did not issue a server greeting upon connection. """) def __init__(self, issueGreeting): """ @param issueGreeting: whether to issue a greeting when connected. This should be set on server-side Juice protocols. """ JuiceParserBase.__init__(self) self._issueGreeting = issueGreeting def __repr__(self): return '<%s %s/%s at 0x%x>' % (self.__class__.__name__, self.isClient and 'client' or 'server', self.innerProtocol, id(self)) __locked = False def _lock(self): """ Lock this Juice instance so that no further Juice traffic may be sent. This is used when sending a request to switch underlying protocols. You probably want to subclass ProtocolSwitchCommand rather than calling this directly. """ self.__locked = True innerProtocol = None def _switchTo(self, newProto, clientFactory=None): """ Switch this Juice instance to a new protocol. You need to do this 'simultaneously' on both ends of a connection; the easiest way to do this is to use a subclass of ProtocolSwitchCommand. """ assert self.innerProtocol is None, "Protocol can only be safely switched once." self.setRawMode() self.innerProtocol = newProto self.innerProtocolClientFactory = clientFactory newProto.makeConnection(self.transport) innerProtocolClientFactory = None def juiceBoxReceived(self, box): if self.__locked and COMMAND in box and ASK in box: # This is a command which will trigger an answer, and we can no # longer answer anything, so don't bother delivering it. return return super(Juice, self).juiceBoxReceived(box) def sendPacket(self, completeBox): """ Send a juice.Box to my peer. 
Note: transport.write is never called outside of this method. """ assert not self.__locked, "You cannot send juice packets when a connection is locked" if self._startingTLSBuffer is not None: self._startingTLSBuffer.append(completeBox) else: if debug: log.msg("Juice send: %s" % pprint.pformat(dict(completeBox.iteritems()))) self.transport.write(completeBox.serialize()) def sendCommand(self, command, __content='', __answer=True, **kw): box = JuiceBox(__content, **kw) return self.sendBoxCommand(command, box, requiresAnswer=__answer) _outstandingRequests = None _justStartedTLS = False def makeConnection(self, transport): self._transportPeer = transport.getPeer() self._transportHost = transport.getHost() log.msg("%s %s connection established (HOST:%s PEER:%s)" % (self.isClient and "client" or "server", self.__class__.__name__, self._transportHost, self._transportPeer)) self._outstandingRequests = {} self._requestBuffer = [] LineReceiver.makeConnection(self, transport) _startingTLSBuffer = None def prepareTLS(self): self._startingTLSBuffer = [] def startTLS(self, certificate, *verifyAuthorities): if self.hostCertificate is None: self.hostCertificate = certificate self._justStartedTLS = True self.transport.startTLS(certificate.options(*verifyAuthorities)) stlsb = self._startingTLSBuffer if stlsb is not None: self._startingTLSBuffer = None for box in stlsb: self.sendPacket(box) else: raise RuntimeError( "Previously authenticated connection between %s and %s " "is trying to re-establish as %s" % ( self.hostCertificate, Certificate.peerFromTransport(self.transport), (certificate, verifyAuthorities))) def dataReceived(self, data): # If we successfully receive any data after TLS has been started, that # means the connection was secured properly. Make a note of that fact. 
if self._justStartedTLS: self._justStartedTLS = False return LineReceiver.dataReceived(self, data) def connectionLost(self, reason): log.msg("%s %s connection lost (HOST:%s PEER:%s)" % ( self.isClient and 'client' or 'server', self.__class__.__name__, self._transportHost, self._transportPeer)) self.failAllOutgoing(reason) if self.innerProtocol is not None: self.innerProtocol.connectionLost(reason) if self.innerProtocolClientFactory is not None: self.innerProtocolClientFactory.clientConnectionLost(None, reason) def lineReceived(self, line): if line: self._requestBuffer.append(line) else: buf = self._requestBuffer self._requestBuffer = [] bodylen, b = parseJuiceHeaders(buf) if bodylen: self._bodyRemaining = bodylen self._bodyBuffer = [] self._pendingBox = b self.setRawMode() else: self.juiceBoxReceived(b) def rawDataReceived(self, data): if self.innerProtocol is not None: self.innerProtocol.dataReceived(data) return self._bodyRemaining -= len(data) if self._bodyRemaining <= 0: if self._bodyRemaining < 0: self._bodyBuffer.append(data[:self._bodyRemaining]) extraData = data[self._bodyRemaining:] else: self._bodyBuffer.append(data) extraData = '' self._pendingBox['body'] = ''.join(self._bodyBuffer) self._bodyBuffer = None b, self._pendingBox = self._pendingBox, None self.juiceBoxReceived(b) if self.innerProtocol is not None: self.innerProtocol.makeConnection(self.transport) if extraData: self.innerProtocol.dataReceived(extraData) else: self.setLineMode(extraData) else: self._bodyBuffer.append(data) protocolVersion = 0 def _setProtocolVersion(self, version): # if we ever want to actually mangle encodings, this is the place to do # it! 
self.protocolVersion = version return version def renegotiateVersion(self, newVersion): assert newVersion in VERSIONS, ( "This side of the connection doesn't support version %r" % (newVersion,)) v = VERSIONS[:] v.remove(newVersion) return Negotiate(versions=[newVersion]).do(self).addCallback( lambda ver: self._setProtocolVersion(ver['version'])) def command_NEGOTIATE(self, versions): for version in versions: if version in VERSIONS: return dict(version=version) raise IncompatibleVersions() command_NEGOTIATE.command = Negotiate VERSIONS = [1] from cStringIO import StringIO class _ParserHelper(Juice): def __init__(self): Juice.__init__(self, False) self.boxes = [] self.results = Deferred() def getPeer(self): return 'string' def getHost(self): return 'string' disconnecting = False def juiceBoxReceived(self, box): self.boxes.append(box) # Synchronous helpers def parse(cls, fileObj): p = cls() p.makeConnection(p) p.dataReceived(fileObj.read()) return p.boxes parse = classmethod(parse) def parseString(cls, data): return cls.parse(StringIO(data)) parseString = classmethod(parseString) parse = _ParserHelper.parse parseString = _ParserHelper.parseString def stringsToObjects(strings, arglist, proto): objects = {} myStrings = strings.copy() for argname, argparser in arglist: argparser.fromBox(argname, myStrings, objects, proto) return objects def objectsToStrings(objects, arglist, strings, proto): myObjects = {} for (k, v) in objects.items(): myObjects[normalizeKey(k)] = v for argname, argparser in arglist: argparser.toBox(argname, strings, myObjects, proto) return strings class JuiceServerFactory(ServerFactory): protocol = Juice def buildProtocol(self, addr): prot = self.protocol(True) prot.factory = self return prot class JuiceClientFactory(ClientFactory): protocol = Juice def buildProtocol(self, addr): prot = self.protocol(False) prot.factory = self return prot Epsilon-0.7.1/epsilon/liner.py0000644000175000017500000000323612461004115020242 0ustar 
mithrandimithrandi00000000000000# Copyright 2005 Divmod, Inc. See LICENSE file for details # -*- test-case-name: vertex.test.test_juice -*- __metaclass__ = type from twisted.internet.protocol import Protocol class LineReceiver(Protocol): lineMode = True MAX_LINE_LENGTH = 1024 * 1024 buffer = '' delimiter = '\r\n' def lineReceived(self, line): pass def rawDataReceived(self, data): pass def setLineMode(self, extra=''): self.lineMode = True if extra: self.dataReceived(extra) def isDisconnecting(self): if self.transport is None: # XXX This _ought_ to be horribly broken but in fact it is # not. TODO: Investigate further. -glyph return False if self.transport.disconnecting: return True return False def setRawMode(self): self.lineMode = False def dataReceived(self, data): buffer = self.buffer buffer += data delimiter = self.delimiter begin = 0 raw = False while self.lineMode: end = buffer.find(delimiter, begin) if end == -1: break line = buffer[begin:end] self.lineReceived(line) if self.isDisconnecting(): self.buffer = '' return begin = end + len(delimiter) else: raw = True if begin: buffer = buffer[begin:] if raw: self.buffer = '' if self.isDisconnecting(): return if buffer: self.rawDataReceived(buffer) else: self.buffer = buffer Epsilon-0.7.1/epsilon/modal.py0000644000175000017500000001045312461004115020224 0ustar mithrandimithrandi00000000000000# -*- test-case-name: epsilon.test.test_modes -*- import new class ModalMethod(object): """A descriptor wrapping multiple implementations of a particular method. When called on an instance, the implementation used will be selected based on an attribute of the instance. There are no unbound ModalMethods at this point. @ivar name: The name of this method. @ivar methods: A mapping of modes to callable objects. @ivar modeAttribute: The name of the attribute on instances which is bound to the instance's current mode. 
""" def __init__(self, name, methods, modeAttribute): self.name = name self.methods = methods self.modeAttribute = modeAttribute def __get__(self, instance, owner): if instance is None: raise AttributeError(self.name) try: mode = getattr(instance, self.modeAttribute) except AttributeError: raise AttributeError( "Mode attribute %r missing from %r, " "cannot get %r" % (self.modeAttribute, instance, self.name)) try: func = self.methods[mode] except KeyError: raise AttributeError( "Method %r missing from mode %r on %r" % (self.name, mode, instance)) return new.instancemethod(func, instance, owner) class mode(object): """ Base class for mode definitions. Subclass this in classes of type ModalType and provide the implementations of various methods for that particular mode as methods of the mode subclass. The subclass should have the same name as the mode it is defining. """ # XXX fix the simple, but wrong, __dict__ magic in ModalType.__new__ so # that this __enter__ and __exit__ are actually called, maybe we can even # do some logging or something. def __exit__(self): """ The mode has just been exited. """ def __enter__(self): """ The mode has just been entered. """ def _getInheritedAttribute(classname, attrname, bases, attrs): try: return attrs[attrname] except KeyError: for base in bases: try: return _getInheritedAttribute(classname, attrname, base.__bases__, base.__dict__) except TypeError: pass else: raise TypeError('%r does not define required attribute %r' % (classname, attrname)) class ModalType(type): """Metaclass for defining modal classes. @type modeAttribute: C{str} @ivar modeAttribute: The attribute to which the current mode is bound. Classes should not define the attribute this names; it will be bound automatically to the value of initialMode. @type initialMode: C{str} (for now) @ivar initialMode: The mode in which instances will start. 
""" def __new__(cls, name, bases, attrs): modeAttribute = _getInheritedAttribute(name, 'modeAttribute', bases, attrs) initialMode = attrs['initialMode'] = _getInheritedAttribute(name, 'initialMode', bases, attrs) # Dict mapping names of methods to another dict. The inner # dict maps names of modes to implementations of that method # for that mode. implementations = {} keepAttrs = {'mode': initialMode} for (k, v) in attrs.iteritems(): if isinstance(v, type) and issubclass(v, mode): for (methName, methDef) in v.__dict__.iteritems(): if methName not in ('__module__', '__file__', '__name__'): implementations.setdefault(methName, {})[k] = methDef keepAttrs[k] = v for (methName, methDefs) in implementations.iteritems(): keepAttrs[methName] = ModalMethod(methName, methDefs, modeAttribute) return super(ModalType, cls).__new__(cls, name, bases, keepAttrs) class Modal(object): __metaclass__ = ModalType modeAttribute = 'mode' initialMode = 'nil' class nil(mode): def __enter__(self): pass def __exit__(self): pass def transitionTo(self, stateName): self.__exit__() self.mode = stateName self.__enter__() Epsilon-0.7.1/epsilon/pending.py0000644000175000017500000000113612461004115020552 0ustar mithrandimithrandi00000000000000 from twisted.internet.defer import Deferred from twisted.python.failure import Failure class PendingEvent(object): def __init__(self): self.listeners = [] def deferred(self): d = Deferred() self.listeners.append(d) return d def callback(self, result): l = self.listeners self.listeners = [] for d in l: d.callback(result) def errback(self, result=None): if result is None: result = Failure() l = self.listeners self.listeners = [] for d in l: d.errback(result) Epsilon-0.7.1/epsilon/process.py0000644000175000017500000000376112461004115020612 0ustar mithrandimithrandi00000000000000# -*- test-case-name: epsilon.test.test_process -*- # Copyright (c) 2008 Divmod. See LICENSE for details. """ Process and stdio related functionality. 
""" import os, sys, imp from zope.interface import implements from twisted.internet import reactor from twisted.application.service import IService, Service from twisted.internet.stdio import StandardIO from epsilon.structlike import record def spawnProcess(processProtocol, executable, args=(), env={}, path=None, uid=None, gid=None, usePTY=0, packages=()): """Launch a process with a particular Python environment. All arguments as to reactor.spawnProcess(), except for the addition of an optional packages iterable. This should be of strings naming packages the subprocess is to be able to import. """ env = env.copy() pythonpath = [] for pkg in packages: p = os.path.split(imp.find_module(pkg)[1])[0] if p.startswith(os.path.join(sys.prefix, 'lib')): continue pythonpath.append(p) pythonpath = list(set(pythonpath)) pythonpath.extend(env.get('PYTHONPATH', '').split(os.pathsep)) env['PYTHONPATH'] = os.pathsep.join(pythonpath) return reactor.spawnProcess(processProtocol, executable, args, env, path, uid, gid, usePTY) def spawnPythonProcess(processProtocol, args=(), env={}, path=None, uid=None, gid=None, usePTY=0, packages=()): """Launch a Python process All arguments as to spawnProcess(), except the executable argument is omitted. """ return spawnProcess(processProtocol, sys.executable, args, env, path, uid, gid, usePTY, packages) class StandardIOService(record('protocol'), Service): """ Service for connecting a protocol to stdio. """ def startService(self): """ Connect C{self.protocol} to standard io. """ StandardIO(self.protocol) Epsilon-0.7.1/epsilon/react.py0000644000175000017500000000175512461004115020233 0ustar mithrandimithrandi00000000000000# -*- test-case-name: epsilon.test.test_react -*- # Copyright (c) 2008 Divmod. See LICENSE for details. """ Utilities for running the reactor for a while. """ from twisted.python.log import err def react(reactor, main, argv): """ Call C{main} and run the reactor until the L{Deferred} it returns fires. 
@param reactor: An unstarted L{IReactorCore} provider which will be run and later stopped. @param main: A callable which returns a L{Deferred}. It should take as many arguments as there are elements in the list C{argv}. @param argv: A list of arguments to pass to C{main}. @return: C{None} """ stopping = [] reactor.addSystemEventTrigger('before', 'shutdown', stopping.append, True) finished = main(reactor, *argv) finished.addErrback(err, "main function encountered error") def cbFinish(ignored): if not stopping: reactor.callWhenRunning(reactor.stop) finished.addCallback(cbFinish) reactor.run() Epsilon-0.7.1/epsilon/remember.py0000644000175000017500000000237612461004115020733 0ustar mithrandimithrandi00000000000000# -*- test-case-name: epsilon.test.test_remember -*- """ This module implements a utility for managing the lifecycle of attributes related to a particular object. """ from epsilon.structlike import record class remembered(record('creationFunction')): """ This descriptor decorator is applied to a function to create an attribute which will be created on-demand, but remembered for the lifetime of the instance to which it is attached. Subsequent accesses of the attribute will return the remembered value. @ivar creationFunction: the decorated function, to be called to create the value. This should be a 1-argument callable, that takes only a 'self' parameter, like a method. """ value = None def __get__(self, oself, type): """ Retrieve the value if already cached, otherwise, call the C{creationFunction} to create it. 
""" remembername = "_remembered_" + self.creationFunction.func_name rememberedval = oself.__dict__.get(remembername, None) if rememberedval is not None: return rememberedval rememberme = self.creationFunction(oself) oself.__dict__[remembername] = rememberme return rememberme __all__ = ['remembered'] Epsilon-0.7.1/epsilon/setuphelper.py0000644000175000017500000000542212461004115021470 0ustar mithrandimithrandi00000000000000# -*- test-case-name: epsilon.test.test_setuphelper -*- # For great justice, take off every zig. import sys, os, pprint, traceback from distutils.core import setup def pluginModules(moduleNames): from twisted.python.reflect import namedAny for moduleName in moduleNames: try: yield namedAny(moduleName) except ImportError: pass except ValueError, ve: if ve.args[0] != 'Empty module name': traceback.print_exc() except: traceback.print_exc() def _regeneratePluginCache(pluginPackages): print 'Regenerating cache with path: ', pprint.pprint(sys.path) from twisted import plugin for pluginModule in pluginModules([ p + ".plugins" for p in pluginPackages]): # Not just *some* zigs, mind you - *every* zig: print 'Full plugin list for %r: ' % (pluginModule.__name__) pprint.pprint(list(plugin.getPlugins(plugin.IPlugin, pluginModule))) def regeneratePluginCache(dist, pluginPackages): if 'install' in dist.commands: sys.path.insert(0, os.path.abspath(dist.command_obj['install'].install_lib)) _regeneratePluginCache(pluginPackages) def autosetup(**kw): packages = [] datafiles = {} pluginPackages = [] for (dirpath, dirnames, filenames) in os.walk(os.curdir): dirnames[:] = [p for p in dirnames if not p.startswith('.')] pkgName = dirpath[2:].replace('/', '.') if '__init__.py' in filenames: # The current directory is a Python package packages.append(pkgName) elif 'plugins' in dirnames: # The current directory is for the Twisted plugin system pluginPackages.append(pkgName) packages.append(pkgName) for package in packages: if '.' 
in package: continue D = datafiles[package] = [] print 'Files in package %r:' % (package,) pprint.pprint(os.listdir(package)) for (dirpath, dirnames, filenames) in os.walk(package): dirnames[:] = [p for p in dirnames if not p.startswith('.')] for filename in filenames: if filename == 'dropin.cache': continue if (os.path.splitext(filename)[1] not in ('.py', '.pyc', '.pyo') or '__init__.py' not in filenames): D.append(os.path.join(dirpath[len(package)+1:], filename)) autoresult = { 'packages': packages, 'package_data': datafiles, } print 'Automatically determined setup() args:' pprint.pprint(autoresult, indent=4) assert 'packages' not in kw assert 'package_data' not in kw kw.update(autoresult) distobj = setup(**kw) regeneratePluginCache(distobj, pluginPackages) return distobj Epsilon-0.7.1/epsilon/spewer.py0000644000175000017500000000610412461004115020433 0ustar mithrandimithrandi00000000000000 import sys import signal import threading from twisted.application import service from twisted.python import reflect, log class CannotFindFunction(ValueError): pass class Tracer(object): skip = object() installed = False def install(self): self.installed = True sys.settrace(self.trace) threading.settrace(self.trace) def uninstall(self): self.installed = False sys.settrace(None) threading.setttrace(None) def toggle(self): if self.installed: self.uninstall() else: self.install() def trace(self, frame, event, arg): r = getattr(self, 'trace_' + event.upper())(frame, arg) if r is self.skip: return None elif r is None: return self.trace else: return r def trace_CALL(self, frame, arg): pass def trace_LINE(self, frame, arg): pass def trace_RETURN(self, frame, arg): pass def trace_EXCEPTION(self, frame, arg): pass def extractArgs(frame): co = frame.f_code dict = frame.f_locals n = co.co_argcount if co.co_flags & 4: n = n+1 if co.co_flags & 8: n = n+1 result = {} for i in range(n): name = co.co_varnames[i] result[name] = dict.get(name, "*** undefined ***") return result def 
formatArgs(args): return ', '.join(['='.join((k, reflect.safe_repr(v))) for (k, v) in args.iteritems()]) class Spewer(Tracer): callDepth = 0 def trace_CALL(self, frame, arg): self.callDepth += 1 frameSelf = frame.f_locals.get('self') if frameSelf is not None: if hasattr(frameSelf, '__class__'): k = reflect.qual(frameSelf.__class__) else: k = reflect.qual(type(frameSelf)) k = k + '.' else: k = '' print ("%X %s%s%s(%s)" % ( id(threading.currentThread()), self.callDepth * ' ', k, frame.f_code.co_name, formatArgs(extractArgs(frame)))) def trace_RETURN(self, frame, arg): if arg is not None: print ("%X %s<= %s" % ( id(threading.currentThread()), self.callDepth * ' ', reflect.safe_repr(arg),)) self.callDepth = max(0, self.callDepth - 1) def trace_EXCEPTION(self, frame, arg): print ("%X %s^- %s" % ( id(threading.currentThread()), self.callDepth * ' ', reflect.safe_repr(arg),)) self.callDepth = max(0, self.callDepth - 1) class SignalService(service.Service): def __init__(self, sigmap): self.sigmap = sigmap def startService(self): service.Service.startService(self) self.oldsigmap = {} for sig, handler in self.sigmap.items(): self.oldsigmap[sig] = signal.signal(sig, handler) def stopService(self): for sig, handler in self.oldsigmap.items(): signal.signal(sig, handler) del self.oldsigmap service.Service.stopService(self) Epsilon-0.7.1/epsilon/structlike.py0000644000175000017500000001427512461004115021327 0ustar mithrandimithrandi00000000000000# -*- test-case-name: epsilon.test.test_structlike -*- """ This module implements convenience objects for classes which have initializers and repr()s that describe a fixed set of attributes. """ from twisted.python import context _NOT_SPECIFIED = object() class _RecursiveReprer(object): """ This object maintains state so that repr()s can tell when they are recursing and not do so. """ def __init__(self): self.active = {} def recursiveRepr(self, stuff, thunk=repr): """ Recursive repr(). 
""" ID = id(stuff) if ID in self.active: return '%s(...)' % (stuff.__class__.__name__,) else: try: self.active[ID] = stuff return thunk(stuff) finally: del self.active[ID] def _contextualize(contextFactory, contextReceiver): """ Invoke a callable with an argument derived from the current execution context (L{twisted.python.context}), or automatically created if none is yet present in the current context. This function, with a better name and documentation, should probably be somewhere in L{twisted.python.context}. Calling context.get() and context.call() individually is perilous because you always have to handle the case where the value you're looking for isn't present; this idiom forces you to supply some behavior for that case. @param contextFactory: An object which is both a 0-arg callable and hashable; used to look up the value in the context, set the value in the context, and create the value (by being called). @param contextReceiver: A function that receives the value created or identified by contextFactory. It is a 1-arg callable object, called with the result of calling the contextFactory, or retrieving the contextFactory from the context. 
""" value = context.get(contextFactory, _NOT_SPECIFIED) if value is not _NOT_SPECIFIED: return contextReceiver(value) else: return context.call({contextFactory: contextFactory()}, _contextualize, contextFactory, contextReceiver) class StructBehavior(object): __names__ = [] __defaults__ = [] def __init__(self, *args, **kw): super(StructBehavior, self).__init__() # Turn all the args into kwargs if len(args) > len(self.__names__): raise TypeError( "Got %d positional arguments but expected no more than %d" % (len(args), len(self.__names__))) for n, v in zip(self.__names__, args): if n in kw: raise TypeError("Got multiple values for argument " + n) kw[n] = v # Fill in defaults for n, v in zip(self.__names__[::-1], self.__defaults__[::-1]): if n not in kw: kw[n] = v for n in self.__names__: if n not in kw: raise TypeError('Specify a value for %r' % (n,)) setattr(self, n, kw.pop(n)) if kw: raise TypeError('Got unexpected arguments: ' + ', '.join(kw)) def __repr__(self): """ Generate a string representation. """ def doit(rr): def _recordrepr(self2): """ Internal implementation of repr() for this record. """ return '%s(%s)' % ( self.__class__.__name__, ', '.join(["%s=%s" % (n, repr(getattr(self, n, None))) for n in self.__names__])) return rr.recursiveRepr(self, _recordrepr) return _contextualize(_RecursiveReprer, doit) def record(*a, **kw): """ Are you tired of typing class declarations that look like this:: class StuffInfo: def __init__(self, a=None, b=None, c=None, d=None, e=None, f=None, g=None, h=None, i=None, j=None): self.a = a self.b = b self.c = c self.d = d # ... Epsilon can help! That's right - for a limited time only, this function returns a class which provides a shortcut. 
The above can be simplified to:: StuffInfo = record(a=None, b=None, c=None, d=None, e=None, f=None, g=None, h=None, i=None, j=None) if the arguments are required, rather than having defaults, it could be even shorter:: StuffInfo = record('a b c d e f g h i j') Put more formally: C{record} optionally takes one positional argument, a L{str} representing attribute names as whitespace-separated identifiers; it also takes an arbitrary number of keyword arguments, which map attribute names to their default values. If no positional argument is provided, the names of attributes will be inferred from the names of the defaults instead. """ if len(a) == 1: attributeNames = a[0].split() elif len(a) == 0: if not kw: raise TypeError("Attempted to define a record with no attributes.") attributeNames = kw.keys() attributeNames.sort() else: raise TypeError( "record must be called with zero or one positional arguments") # Work like Python: allow defaults specified backwards from the end defaults = [] for attributeName in attributeNames: default = kw.pop(attributeName, _NOT_SPECIFIED) if defaults: if default is _NOT_SPECIFIED: raise TypeError( "You must specify default values like in Python; " "backwards from the end of the argument list, " "with no gaps") else: defaults.append(default) elif default is not _NOT_SPECIFIED: defaults.append(default) else: # This space left intentionally blank. pass if kw: raise TypeError("The following defaults did not apply: %r" % (kw,)) return type('Record<%s>' % (' '.join(attributeNames),), (StructBehavior,), dict(__names__=attributeNames, __defaults__=defaults)) Epsilon-0.7.1/epsilon/unrepr.py0000644000175000017500000000251112461004115020437 0ustar mithrandimithrandi00000000000000import compiler def unrepr(s): """ Convert a string produced by python's repr() into the corresponding data structure, without calling eval(). 
""" return Builder().build(getObj(s)) def getObj(s): s="a="+s return compiler.parse(s).getChildren()[1].getChildren()[0].getChildren()[1] class UnknownType(Exception): pass class Builder: def build(self, o): m = getattr(self, 'build_'+o.__class__.__name__, None) if m is None: raise UnknownType(o.__class__.__name__) return m(o) def build_List(self, o): return map(self.build, o.getChildren()) def build_Const(self, o): return o.value def build_Dict(self, o): d = {} i = iter(map(self.build, o.getChildren())) for el in i: d[el] = i.next() return d def build_Tuple(self, o): return tuple(self.build_List(o)) def build_Name(self, o): if o.name == 'None': return None raise UnknownType('Name') def build_Add(self, o): real, imag = map(self.build_Const, o.getChildren()) try: real = float(real) except TypeError: raise UnknownType('Add') if not isinstance(imag, complex) or imag.real != 0.0: raise UnknownType('Add') return real+imag Epsilon-0.7.1/epsilon/view.py0000644000175000017500000000300012461004115020070 0ustar mithrandimithrandi00000000000000# -*- test-case-name: epsilon.test.test_view -*- """ Utility functionality for creating wrapping sequences so as to transform their indices in some manner. """ class SlicedView(object): """ Wrapper around a sequence which allows indexing and non-extended slicing, adjusting all indices using a transformation defined by a L{slice} object. For example:: s = ['a', 'b'] t = SlicedView(s, slice(1, None)) t[0] == 'b' @ivar sequence: The underlying sequence from which to retrieve elements. @ivar bounds: A C{slice} instance defining the boundaries of this view. """ def __init__(self, sequence, bounds): self.sequence = sequence self.bounds = bounds def _getIndices(self): start, stop, step = self.bounds.indices(len(self.sequence)) indices = xrange(start, stop, step) return indices def __getitem__(self, index): """ Compute the index in the underlying sequence of the given view index and return the corresponding element. 
@raise IndexError: If C{index} is out of bounds for the view. @raise ValueError: If C{self.bounds} is out of bounds for C{self.sequence}. """ if isinstance(index, slice): return SlicedView(self, index) return self.sequence[self._getIndices()[index]] def __len__(self): """ Compute the length of this view onto the sequence and return it. """ return len(self._getIndices()) Epsilon-0.7.1/epsilon/_version.py0000644000175000017500000000072712606214253020766 0ustar mithrandimithrandi00000000000000 # This file was generated by 'versioneer.py' (0.15) from # revision-control system data, or from the parent directory name of an # unpacked source archive. Distribution tarballs contain a pre-generated copy # of this file. import json import sys version_json = ''' { "dirty": false, "error": null, "full-revisionid": "a36710813df371d41a839d97f15e9580a8807034", "version": "0.7.1" } ''' # END VERSION_JSON def get_versions(): return json.loads(version_json) Epsilon-0.7.1/LICENSE0000644000175000017500000000203612461004115016110 0ustar mithrandimithrandi00000000000000Copyright (c) 2005 Divmod Inc. Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.Epsilon-0.7.1/MANIFEST.in0000644000175000017500000000014412606176061016652 0ustar mithrandimithrandi00000000000000include LICENSE include NAME.txt include NEWS.txt include versioneer.py include epsilon/_version.py Epsilon-0.7.1/NAME.txt0000644000175000017500000000107012461004115016361 0ustar mithrandimithrandi00000000000000 See: http://mathworld.wolfram.com/Epsilon.html The constant 'epsilon' is a value that is as close as possible to zero without being zero. It is frequently used by computer scientists to refer to values which are negligeable. Divmod Epsilon is named for that because it is a small body of code upon which all of our other projects depend. It has no particular theme associated with it, except to remain small and lightweight, and enforce certain conventions and to provide common conveniences that do not belong in any lower level of infrastructure. Epsilon-0.7.1/NEWS.txt0000644000175000017500000001043012606214132016420 0ustar mithrandimithrandi000000000000000.7.0 (2014-01-15): Major: - Only Python 2.6 and 2.7 are supported now. 2.4, 2.5 is deprecated. - setup.py now uses setuptools, and stores its dependencies. This means you no longer need to manually install dependencies. - setup.py no longer requires Twisted for egg_info, making it easier to install Epsilon using pip. - Significant improvements to PyPy support. PyPy is now a supported platform, with CI support. - epsilon.release is now removed. It relied on a bunch of machinery specific to divmod that no longer existed. - epsilon.sslverify is now removed. Use twisted.internet.ssl instead. - epsilon.asTwistedVersion takes a string version ("1.2.3") and turns it into a twisted.python.versions.Version. 
Minor: - Several deprecation warnings have been cleaned up. 0.6.0 (2009-11-25): - Disable loopback hotfix on Twisted 8.2 and newer. - Remove the implementation of Cooperator and use Twisted's implementation instead. - Use Twisted's deferLater implementation. - Add a service for communicating via stdio. - Add a `precision` argument to `Time.asHumanly` to control the precision of the returned string. 0.5.12 (2008-12-09): - Added support for AMP authentication via one-time pads. 0.5.11 (2008-10-02): - epsilon.amprouter added, providing support for multiplexing unrelated AMP communications over the same connection. 0.5.10 (2008-08-12): - Added the epsilon.caseless module, with case-insensitive string wrappers. - Better repr() for epsilon.structlike.record added. - epsilon.juice now uses twisted.internet.ssl instead of epsilon.sslverify. 0.5.9 (2008-01-18): 0.5.8 (2007-11-27): - extime.Time.asHumanly() no longer shows a time of day for all-day timestamps. 0.5.7 (2007-04-27): - view.SlicedView added, allowing slicing and indexing of large sequences without copying. 0.5.6 (2006-11-20): - Added a --quiet option to Epsilon's certcreate and use it in a few unit tests to avoid spewing garbage during test runs. 0.5.5 (2006-10-21): - extime.Time now accepts RFC2822-like dates with invalid fields: it rounds them to the nearest valid value. 0.5.4 (2006-10-17): - extime.Time now accepts RFC2822-like dates with no timezone. 0.5.3 (2006-09-20): - structlike.Record now raises TypeError on unexpected args. 0.5.2 (2006-09-12): - extime.Time now avoids time_t overflow bugs. 0.5.1 (2006-06-22): - Added hotfix for twisted.test.proto_helpers.StringTransport. 0.5.0 (2006-06-12): - Replaced '%y' with '%Y' in Time.asHumanly() output - the year is now four digits, rather than two. - Added new 'epsilon.structlike' functionality for simple record. - All uses of defer.wait and deferredResult were removed from the tests. 
- Added epsilon.juice, an asynchronous messaging protocol slated for inclusion in Twisted. Improved a few features, such as the repr() of JuiceBox instances. This was moved from Vertex. - Added epsilon.sslverify, a set of utilities for dealing with PyOpenSSL using simple high-level objects, performing operations such as signing and verifying certificates. This was also moved from Vertex, and slated for inclusion in Twisted. - Added epsilon.spewer, a prettier version of the spewer in twisted.python.util. - Added "benchmark" tool for measuring and reporting run-times of python programs. 0.4.0 (2005-12-20): - Disabled crazy sys.modules hackery in test_setuphelper - Added module for creating a directory structure from a string template - Added support for 'now' to Time.fromHumanly() - Added a structured "hotfix" system to abstract and formalize monkey patches and version testing logic away from code which requires it. 0.3.2 (2005-11-05): - Added automatic support for Twisted plugins to autosetup 0.3.1 (2005-11-02): - Removed bogus dependency on Axiom. 0.3.0 (2005-11-02): - Added SchedulingService, an IService implementation, to epsilon.cooperator - Added autosetup, a utility to actually include files in distutils releases, to epsilon.setuphelper 0.2.1 (2005-10-25): - Added 'short()' to epsilon.versions.Version - fixed setup.py to use epsilon.version.short() rather than static string. 0.2.0 (2005-10-25): - Added epsilon.modal.ModalType, metaclass for writing classes that behave in some respects like state machines Epsilon-0.7.1/README0000644000175000017500000000045012461004115015761 0ustar mithrandimithrandi00000000000000 Divmod Epsilon ============== Epsilon is a set of utility modules, commonly used by all Divmod projects. This is intended mainly as a support package for code used by Divmod projects, and not as an external library. However, it contains many useful modules and you can feel free to use them! 
Epsilon-0.7.1/setup.py0000644000175000017500000000146312606176061016633 0ustar mithrandimithrandi00000000000000import versioneer from setuptools import setup, find_packages setup( version=versioneer.get_version(), cmdclass=versioneer.get_cmdclass(), name="Epsilon", description="A set of utility modules used by Divmod projects", url="https://github.com/twisted/epsilon", install_requires=[ "Twisted>=13.2.0", "PyOpenSSL>=0.13" ], packages=find_packages(), scripts=['bin/benchmark', 'bin/certcreate'], license="MIT", platforms=["any"], classifiers=[ "Development Status :: 5 - Production/Stable", "Framework :: Twisted", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Programming Language :: Python", "Topic :: Internet", "Topic :: Security", "Topic :: Utilities"]) Epsilon-0.7.1/versioneer.py0000644000175000017500000017201212606176061017653 0ustar mithrandimithrandi00000000000000 # Version: 0.15 """ The Versioneer ============== * like a rocketeer, but for versions! * https://github.com/warner/python-versioneer * Brian Warner * License: Public Domain * Compatible With: python2.6, 2.7, 3.2, 3.3, 3.4, and pypy * [![Latest Version] (https://pypip.in/version/versioneer/badge.svg?style=flat) ](https://pypi.python.org/pypi/versioneer/) * [![Build Status] (https://travis-ci.org/warner/python-versioneer.png?branch=master) ](https://travis-ci.org/warner/python-versioneer) This is a tool for managing a recorded version number in distutils-based python projects. The goal is to remove the tedious and error-prone "update the embedded version string" step from your release process. Making a new release should be as easy as recording a new tag in your version-control system, and maybe making new tarballs. 
## Quick Install * `pip install versioneer` to somewhere to your $PATH * add a `[versioneer]` section to your setup.cfg (see below) * run `versioneer install` in your source tree, commit the results ## Version Identifiers Source trees come from a variety of places: * a version-control system checkout (mostly used by developers) * a nightly tarball, produced by build automation * a snapshot tarball, produced by a web-based VCS browser, like github's "tarball from tag" feature * a release tarball, produced by "setup.py sdist", distributed through PyPI Within each source tree, the version identifier (either a string or a number, this tool is format-agnostic) can come from a variety of places: * ask the VCS tool itself, e.g. "git describe" (for checkouts), which knows about recent "tags" and an absolute revision-id * the name of the directory into which the tarball was unpacked * an expanded VCS keyword ($Id$, etc) * a `_version.py` created by some earlier build step For released software, the version identifier is closely related to a VCS tag. Some projects use tag names that include more than just the version string (e.g. "myproject-1.2" instead of just "1.2"), in which case the tool needs to strip the tag prefix to extract the version identifier. For unreleased software (between tags), the version identifier should provide enough information to help developers recreate the same tree, while also giving them an idea of roughly how old the tree is (after version 1.2, before version 1.3). Many VCS systems can report a description that captures this, for example `git describe --tags --dirty --always` reports things like "0.7-1-g574ab98-dirty" to indicate that the checkout is one revision past the 0.7 tag, has a unique revision id of "574ab98", and is "dirty" (it has uncommitted changes. 
The version identifier is used for multiple purposes: * to allow the module to self-identify its version: `myproject.__version__` * to choose a name and prefix for a 'setup.py sdist' tarball ## Theory of Operation Versioneer works by adding a special `_version.py` file into your source tree, where your `__init__.py` can import it. This `_version.py` knows how to dynamically ask the VCS tool for version information at import time. `_version.py` also contains `$Revision$` markers, and the installation process marks `_version.py` to have this marker rewritten with a tag name during the `git archive` command. As a result, generated tarballs will contain enough information to get the proper version. To allow `setup.py` to compute a version too, a `versioneer.py` is added to the top level of your source tree, next to `setup.py` and the `setup.cfg` that configures it. This overrides several distutils/setuptools commands to compute the version when invoked, and changes `setup.py build` and `setup.py sdist` to replace `_version.py` with a small static file that contains just the generated version data. ## Installation First, decide on values for the following configuration variables: * `VCS`: the version control system you use. Currently accepts "git". * `style`: the style of version string to be produced. See "Styles" below for details. Defaults to "pep440", which looks like `TAG[+DISTANCE.gSHORTHASH[.dirty]]`. * `versionfile_source`: A project-relative pathname into which the generated version strings should be written. This is usually a `_version.py` next to your project's main `__init__.py` file, so it can be imported at runtime. If your project uses `src/myproject/__init__.py`, this should be `src/myproject/_version.py`. This file should be checked in to your VCS as usual: the copy created below by `setup.py setup_versioneer` will include code that parses expanded VCS keywords in generated tarballs. 
The 'build' and 'sdist' commands will replace it with a copy that has just the calculated version string. This must be set even if your project does not have any modules (and will therefore never import `_version.py`), since "setup.py sdist" -based trees still need somewhere to record the pre-calculated version strings. Anywhere in the source tree should do. If there is a `__init__.py` next to your `_version.py`, the `setup.py setup_versioneer` command (described below) will append some `__version__`-setting assignments, if they aren't already present. * `versionfile_build`: Like `versionfile_source`, but relative to the build directory instead of the source directory. These will differ when your setup.py uses 'package_dir='. If you have `package_dir={'myproject': 'src/myproject'}`, then you will probably have `versionfile_build='myproject/_version.py'` and `versionfile_source='src/myproject/_version.py'`. If this is set to None, then `setup.py build` will not attempt to rewrite any `_version.py` in the built tree. If your project does not have any libraries (e.g. if it only builds a script), then you should use `versionfile_build = None` and override `distutils.command.build_scripts` to explicitly insert a copy of `versioneer.get_version()` into your generated script. * `tag_prefix`: a string, like 'PROJECTNAME-', which appears at the start of all VCS tags. If your tags look like 'myproject-1.2.0', then you should use tag_prefix='myproject-'. If you use unprefixed tags like '1.2.0', this should be an empty string. * `parentdir_prefix`: a optional string, frequently the same as tag_prefix, which appears at the start of all unpacked tarball filenames. If your tarball unpacks into 'myproject-1.2.0', this should be 'myproject-'. To disable this feature, just omit the field from your `setup.cfg`. This tool provides one script, named `versioneer`. 
That script has one mode, "install", which writes a copy of `versioneer.py` into the current directory and runs `versioneer.py setup` to finish the installation. To versioneer-enable your project: * 1: Modify your `setup.cfg`, adding a section named `[versioneer]` and populating it with the configuration values you decided earlier (note that the option names are not case-sensitive): ```` [versioneer] VCS = git style = pep440 versionfile_source = src/myproject/_version.py versionfile_build = myproject/_version.py tag_prefix = "" parentdir_prefix = myproject- ```` * 2: Run `versioneer install`. This will do the following: * copy `versioneer.py` into the top of your source tree * create `_version.py` in the right place (`versionfile_source`) * modify your `__init__.py` (if one exists next to `_version.py`) to define `__version__` (by calling a function from `_version.py`) * modify your `MANIFEST.in` to include both `versioneer.py` and the generated `_version.py` in sdist tarballs `versioneer install` will complain about any problems it finds with your `setup.py` or `setup.cfg`. Run it multiple times until you have fixed all the problems. * 3: add a `import versioneer` to your setup.py, and add the following arguments to the setup() call: version=versioneer.get_version(), cmdclass=versioneer.get_cmdclass(), * 4: commit these changes to your VCS. To make sure you won't forget, `versioneer install` will mark everything it touched for addition using `git add`. Don't forget to add `setup.py` and `setup.cfg` too. ## Post-Installation Usage Once established, all uses of your tree from a VCS checkout should get the current version string. All generated tarballs should include an embedded version string (so users who unpack them will not need a VCS tool installed). If you distribute your project through PyPI, then the release process should boil down to two steps: * 1: git tag 1.0 * 2: python setup.py register sdist upload If you distribute it through github (i.e. 
users use github to generate tarballs with `git archive`), the process is: * 1: git tag 1.0 * 2: git push; git push --tags Versioneer will report "0+untagged.NUMCOMMITS.gHASH" until your tree has at least one tag in its history. ## Version-String Flavors Code which uses Versioneer can learn about its version string at runtime by importing `_version` from your main `__init__.py` file and running the `get_versions()` function. From the "outside" (e.g. in `setup.py`), you can import the top-level `versioneer.py` and run `get_versions()`. Both functions return a dictionary with different flavors of version information: * `['version']`: A condensed version string, rendered using the selected style. This is the most commonly used value for the project's version string. The default "pep440" style yields strings like `0.11`, `0.11+2.g1076c97`, or `0.11+2.g1076c97.dirty`. See the "Styles" section below for alternative styles. * `['full-revisionid']`: detailed revision identifier. For Git, this is the full SHA1 commit id, e.g. "1076c978a8d3cfc70f408fe5974aa6c092c949ac". * `['dirty']`: a boolean, True if the tree has uncommitted changes. Note that this is only accurate if run in a VCS checkout, otherwise it is likely to be False or None * `['error']`: if the version string could not be computed, this will be set to a string describing the problem, otherwise it will be None. It may be useful to throw an exception in setup.py if this is set, to avoid e.g. creating tarballs with a version string of "unknown". Some variants are more useful than others. Including `full-revisionid` in a bug report should allow developers to reconstruct the exact code being tested (or indicate the presence of local changes that should be shared with the developers). `version` is suitable for display in an "about" box or a CLI `--version` output: it can be easily compared against release notes and lists of bugs fixed in various releases. 
The installer adds the following text to your `__init__.py` to place a basic version in `YOURPROJECT.__version__`: from ._version import get_versions __version__ = get_versions()['version'] del get_versions ## Styles The setup.cfg `style=` configuration controls how the VCS information is rendered into a version string. The default style, "pep440", produces a PEP440-compliant string, equal to the un-prefixed tag name for actual releases, and containing an additional "local version" section with more detail for in-between builds. For Git, this is TAG[+DISTANCE.gHEX[.dirty]] , using information from `git describe --tags --dirty --always`. For example "0.11+2.g1076c97.dirty" indicates that the tree is like the "1076c97" commit but has uncommitted changes (".dirty"), and that this commit is two revisions ("+2") beyond the "0.11" tag. For released software (exactly equal to a known tag), the identifier will only contain the stripped tag, e.g. "0.11". Other styles are available. See details.md in the Versioneer source tree for descriptions. ## Debugging Versioneer tries to avoid fatal errors: if something goes wrong, it will tend to return a version of "0+unknown". To investigate the problem, run `setup.py version`, which will run the version-lookup code in a verbose mode, and will display the full contents of `get_versions()` (including the `error` string, which may help identify what went wrong). ## Updating Versioneer To upgrade your project to a new release of Versioneer, do the following: * install the new Versioneer (`pip install -U versioneer` or equivalent) * edit `setup.cfg`, if necessary, to include any new configuration settings indicated by the release notes * re-run `versioneer install` in your source tree, to replace `SRC/_version.py` * commit any changed files ### Upgrading to 0.15 Starting with this version, Versioneer is configured with a `[versioneer]` section in your `setup.cfg` file. 
Earlier versions required the `setup.py` to set attributes on the `versioneer` module immediately after import. The new version will refuse to run (raising an exception during import) until you have provided the necessary `setup.cfg` section. In addition, the Versioneer package provides an executable named `versioneer`, and the installation process is driven by running `versioneer install`. In 0.14 and earlier, the executable was named `versioneer-installer` and was run without an argument. ### Upgrading to 0.14 0.14 changes the format of the version string. 0.13 and earlier used hyphen-separated strings like "0.11-2-g1076c97-dirty". 0.14 and beyond use a plus-separated "local version" section strings, with dot-separated components, like "0.11+2.g1076c97". PEP440-strict tools did not like the old format, but should be ok with the new one. ### Upgrading from 0.11 to 0.12 Nothing special. ### Upgrading from 0.10 to 0.11 You must add a `versioneer.VCS = "git"` to your `setup.py` before re-running `setup.py setup_versioneer`. This will enable the use of additional version-control systems (SVN, etc) in the future. ## Future Directions This tool is designed to make it easily extended to other version-control systems: all VCS-specific components are in separate directories like src/git/ . The top-level `versioneer.py` script is assembled from these components by running make-versioneer.py . In the future, make-versioneer.py will take a VCS name as an argument, and will construct a version of `versioneer.py` that is specific to the given VCS. It might also take the configuration arguments that are currently provided manually during installation by editing setup.py . Alternatively, it might go the other direction and include code from all supported VCS systems, reducing the number of intermediate scripts. ## License To make Versioneer easier to embed, all its code is hereby released into the public domain. The `_version.py` that it creates is also in the public domain. 
""" from __future__ import print_function try: import configparser except ImportError: import ConfigParser as configparser import errno import json import os import re import subprocess import sys class VersioneerConfig: pass def get_root(): # we require that all commands are run from the project root, i.e. the # directory that contains setup.py, setup.cfg, and versioneer.py . root = os.path.realpath(os.path.abspath(os.getcwd())) setup_py = os.path.join(root, "setup.py") versioneer_py = os.path.join(root, "versioneer.py") if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)): # allow 'python path/to/setup.py COMMAND' root = os.path.dirname(os.path.realpath(os.path.abspath(sys.argv[0]))) setup_py = os.path.join(root, "setup.py") versioneer_py = os.path.join(root, "versioneer.py") if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)): err = ("Versioneer was unable to run the project root directory. " "Versioneer requires setup.py to be executed from " "its immediate directory (like 'python setup.py COMMAND'), " "or in a way that lets it use sys.argv[0] to find the root " "(like 'python path/to/setup.py COMMAND').") raise VersioneerBadRootError(err) try: # Certain runtime workflows (setup.py install/develop in a setuptools # tree) execute all dependencies in a single python process, so # "versioneer" may be imported multiple times, and python's shared # module-import table will cache the first one. So we can't use # os.path.dirname(__file__), as that will find whichever # versioneer.py was first imported, even in later projects. 
me = os.path.realpath(os.path.abspath(__file__)) if os.path.splitext(me)[0] != os.path.splitext(versioneer_py)[0]: print("Warning: build in %s is using versioneer.py from %s" % (os.path.dirname(me), versioneer_py)) except NameError: pass return root def get_config_from_root(root): # This might raise EnvironmentError (if setup.cfg is missing), or # configparser.NoSectionError (if it lacks a [versioneer] section), or # configparser.NoOptionError (if it lacks "VCS="). See the docstring at # the top of versioneer.py for instructions on writing your setup.cfg . setup_cfg = os.path.join(root, "setup.cfg") parser = configparser.SafeConfigParser() with open(setup_cfg, "r") as f: parser.readfp(f) VCS = parser.get("versioneer", "VCS") # mandatory def get(parser, name): if parser.has_option("versioneer", name): return parser.get("versioneer", name) return None cfg = VersioneerConfig() cfg.VCS = VCS cfg.style = get(parser, "style") or "" cfg.versionfile_source = get(parser, "versionfile_source") cfg.versionfile_build = get(parser, "versionfile_build") cfg.tag_prefix = get(parser, "tag_prefix") cfg.parentdir_prefix = get(parser, "parentdir_prefix") cfg.verbose = get(parser, "verbose") return cfg class NotThisMethod(Exception): pass # these dictionaries contain VCS-specific tools LONG_VERSION_PY = {} HANDLERS = {} def register_vcs_handler(vcs, method): # decorator def decorate(f): if vcs not in HANDLERS: HANDLERS[vcs] = {} HANDLERS[vcs][method] = f return f return decorate def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False): assert isinstance(commands, list) p = None for c in commands: try: dispcmd = str([c] + args) # remember shell=False, so use git.cmd on windows, not just git p = subprocess.Popen([c] + args, cwd=cwd, stdout=subprocess.PIPE, stderr=(subprocess.PIPE if hide_stderr else None)) break except EnvironmentError: e = sys.exc_info()[1] if e.errno == errno.ENOENT: continue if verbose: print("unable to run %s" % dispcmd) print(e) return None else: 
if verbose: print("unable to find command, tried %s" % (commands,)) return None stdout = p.communicate()[0].strip() if sys.version_info[0] >= 3: stdout = stdout.decode() if p.returncode != 0: if verbose: print("unable to run %s (error)" % dispcmd) return None return stdout LONG_VERSION_PY['git'] = ''' # This file helps to compute a version number in source trees obtained from # git-archive tarball (such as those provided by githubs download-from-tag # feature). Distribution tarballs (built by setup.py sdist) and build # directories (produced by setup.py build) will contain a much shorter file # that just contains the computed version number. # This file is released into the public domain. Generated by # versioneer-0.15 (https://github.com/warner/python-versioneer) import errno import os import re import subprocess import sys def get_keywords(): # these strings will be replaced by git during git-archive. # setup.py/versioneer.py will grep for the variable names, so they must # each be defined on a line of their own. _version.py will just call # get_keywords(). 
git_refnames = "%(DOLLAR)sFormat:%%d%(DOLLAR)s" git_full = "%(DOLLAR)sFormat:%%H%(DOLLAR)s" keywords = {"refnames": git_refnames, "full": git_full} return keywords class VersioneerConfig: pass def get_config(): # these strings are filled in when 'setup.py versioneer' creates # _version.py cfg = VersioneerConfig() cfg.VCS = "git" cfg.style = "%(STYLE)s" cfg.tag_prefix = "%(TAG_PREFIX)s" cfg.parentdir_prefix = "%(PARENTDIR_PREFIX)s" cfg.versionfile_source = "%(VERSIONFILE_SOURCE)s" cfg.verbose = False return cfg class NotThisMethod(Exception): pass LONG_VERSION_PY = {} HANDLERS = {} def register_vcs_handler(vcs, method): # decorator def decorate(f): if vcs not in HANDLERS: HANDLERS[vcs] = {} HANDLERS[vcs][method] = f return f return decorate def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False): assert isinstance(commands, list) p = None for c in commands: try: dispcmd = str([c] + args) # remember shell=False, so use git.cmd on windows, not just git p = subprocess.Popen([c] + args, cwd=cwd, stdout=subprocess.PIPE, stderr=(subprocess.PIPE if hide_stderr else None)) break except EnvironmentError: e = sys.exc_info()[1] if e.errno == errno.ENOENT: continue if verbose: print("unable to run %%s" %% dispcmd) print(e) return None else: if verbose: print("unable to find command, tried %%s" %% (commands,)) return None stdout = p.communicate()[0].strip() if sys.version_info[0] >= 3: stdout = stdout.decode() if p.returncode != 0: if verbose: print("unable to run %%s (error)" %% dispcmd) return None return stdout def versions_from_parentdir(parentdir_prefix, root, verbose): # Source tarballs conventionally unpack into a directory that includes # both the project name and a version string. 
dirname = os.path.basename(root) if not dirname.startswith(parentdir_prefix): if verbose: print("guessing rootdir is '%%s', but '%%s' doesn't start with " "prefix '%%s'" %% (root, dirname, parentdir_prefix)) raise NotThisMethod("rootdir doesn't start with parentdir_prefix") return {"version": dirname[len(parentdir_prefix):], "full-revisionid": None, "dirty": False, "error": None} @register_vcs_handler("git", "get_keywords") def git_get_keywords(versionfile_abs): # the code embedded in _version.py can just fetch the value of these # keywords. When used from setup.py, we don't want to import _version.py, # so we do it with a regexp instead. This function is not used from # _version.py. keywords = {} try: f = open(versionfile_abs, "r") for line in f.readlines(): if line.strip().startswith("git_refnames ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["refnames"] = mo.group(1) if line.strip().startswith("git_full ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["full"] = mo.group(1) f.close() except EnvironmentError: pass return keywords @register_vcs_handler("git", "keywords") def git_versions_from_keywords(keywords, tag_prefix, verbose): if not keywords: raise NotThisMethod("no keywords at all, weird") refnames = keywords["refnames"].strip() if refnames.startswith("$Format"): if verbose: print("keywords are unexpanded, not using") raise NotThisMethod("unexpanded keywords, not a git-archive tarball") refs = set([r.strip() for r in refnames.strip("()").split(",")]) # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of # just "foo-1.0". If we see a "tag: " prefix, prefer those. TAG = "tag: " tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) if not tags: # Either we're using git < 1.8.3, or there really are no tags. We use # a heuristic: assume all version tags have a digit. 
The old git %%d # expansion behaves like git log --decorate=short and strips out the # refs/heads/ and refs/tags/ prefixes that would let us distinguish # between branches and tags. By ignoring refnames without digits, we # filter out many common branch names like "release" and # "stabilization", as well as "HEAD" and "master". tags = set([r for r in refs if re.search(r'\d', r)]) if verbose: print("discarding '%%s', no digits" %% ",".join(refs-tags)) if verbose: print("likely tags: %%s" %% ",".join(sorted(tags))) for ref in sorted(tags): # sorting will prefer e.g. "2.0" over "2.0rc1" if ref.startswith(tag_prefix): r = ref[len(tag_prefix):] if verbose: print("picking %%s" %% r) return {"version": r, "full-revisionid": keywords["full"].strip(), "dirty": False, "error": None } # no suitable tags, so version is "0+unknown", but full hex is still there if verbose: print("no suitable tags, using unknown + full revision id") return {"version": "0+unknown", "full-revisionid": keywords["full"].strip(), "dirty": False, "error": "no suitable tags"} @register_vcs_handler("git", "pieces_from_vcs") def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): # this runs 'git' from the root of the source tree. This only gets called # if the git-archive 'subst' keywords were *not* expanded, and # _version.py hasn't already been rewritten with a short version string, # meaning we're inside a checked out source tree. 
if not os.path.exists(os.path.join(root, ".git")): if verbose: print("no .git in %%s" %% root) raise NotThisMethod("no .git directory") GITS = ["git"] if sys.platform == "win32": GITS = ["git.cmd", "git.exe"] # if there is a tag, this yields TAG-NUM-gHEX[-dirty] # if there are no tags, this yields HEX[-dirty] (no NUM) describe_out = run_command(GITS, ["describe", "--tags", "--dirty", "--always", "--long"], cwd=root) # --long was added in git-1.5.5 if describe_out is None: raise NotThisMethod("'git describe' failed") describe_out = describe_out.strip() full_out = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) if full_out is None: raise NotThisMethod("'git rev-parse' failed") full_out = full_out.strip() pieces = {} pieces["long"] = full_out pieces["short"] = full_out[:7] # maybe improved later pieces["error"] = None # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] # TAG might have hyphens. git_describe = describe_out # look for -dirty suffix dirty = git_describe.endswith("-dirty") pieces["dirty"] = dirty if dirty: git_describe = git_describe[:git_describe.rindex("-dirty")] # now we have TAG-NUM-gHEX or HEX if "-" in git_describe: # TAG-NUM-gHEX mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) if not mo: # unparseable. Maybe git-describe is misbehaving? 
pieces["error"] = ("unable to parse git-describe output: '%%s'" %% describe_out) return pieces # tag full_tag = mo.group(1) if not full_tag.startswith(tag_prefix): if verbose: fmt = "tag '%%s' doesn't start with prefix '%%s'" print(fmt %% (full_tag, tag_prefix)) pieces["error"] = ("tag '%%s' doesn't start with prefix '%%s'" %% (full_tag, tag_prefix)) return pieces pieces["closest-tag"] = full_tag[len(tag_prefix):] # distance: number of commits since tag pieces["distance"] = int(mo.group(2)) # commit: short hex revision ID pieces["short"] = mo.group(3) else: # HEX: no tags pieces["closest-tag"] = None count_out = run_command(GITS, ["rev-list", "HEAD", "--count"], cwd=root) pieces["distance"] = int(count_out) # total number of commits return pieces def plus_or_dot(pieces): if "+" in pieces.get("closest-tag", ""): return "." return "+" def render_pep440(pieces): # now build up version string, with post-release "local version # identifier". Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you # get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty # exceptions: # 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += plus_or_dot(pieces) rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"]) if pieces["dirty"]: rendered += ".dirty" else: # exception #1 rendered = "0+untagged.%%d.g%%s" %% (pieces["distance"], pieces["short"]) if pieces["dirty"]: rendered += ".dirty" return rendered def render_pep440_pre(pieces): # TAG[.post.devDISTANCE] . No -dirty # exceptions: # 1: no tags. 0.post.devDISTANCE if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"]: rendered += ".post.dev%%d" %% pieces["distance"] else: # exception #1 rendered = "0.post.dev%%d" %% pieces["distance"] return rendered def render_pep440_post(pieces): # TAG[.postDISTANCE[.dev0]+gHEX] . The ".dev0" means dirty. 
Note that # .dev0 sorts backwards (a dirty tree will appear "older" than the # corresponding clean one), but you shouldn't be releasing software with # -dirty anyways. # exceptions: # 1: no tags. 0.postDISTANCE[.dev0] if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += ".post%%d" %% pieces["distance"] if pieces["dirty"]: rendered += ".dev0" rendered += plus_or_dot(pieces) rendered += "g%%s" %% pieces["short"] else: # exception #1 rendered = "0.post%%d" %% pieces["distance"] if pieces["dirty"]: rendered += ".dev0" rendered += "+g%%s" %% pieces["short"] return rendered def render_pep440_old(pieces): # TAG[.postDISTANCE[.dev0]] . The ".dev0" means dirty. # exceptions: # 1: no tags. 0.postDISTANCE[.dev0] if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += ".post%%d" %% pieces["distance"] if pieces["dirty"]: rendered += ".dev0" else: # exception #1 rendered = "0.post%%d" %% pieces["distance"] if pieces["dirty"]: rendered += ".dev0" return rendered def render_git_describe(pieces): # TAG[-DISTANCE-gHEX][-dirty], like 'git describe --tags --dirty # --always' # exceptions: # 1: no tags. HEX[-dirty] (note: no 'g' prefix) if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"]: rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) else: # exception #1 rendered = pieces["short"] if pieces["dirty"]: rendered += "-dirty" return rendered def render_git_describe_long(pieces): # TAG-DISTANCE-gHEX[-dirty], like 'git describe --tags --dirty # --always -long'. The distance/hash is unconditional. # exceptions: # 1: no tags. 
HEX[-dirty] (note: no 'g' prefix) if pieces["closest-tag"]: rendered = pieces["closest-tag"] rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) else: # exception #1 rendered = pieces["short"] if pieces["dirty"]: rendered += "-dirty" return rendered def render(pieces, style): if pieces["error"]: return {"version": "unknown", "full-revisionid": pieces.get("long"), "dirty": None, "error": pieces["error"]} if not style or style == "default": style = "pep440" # the default if style == "pep440": rendered = render_pep440(pieces) elif style == "pep440-pre": rendered = render_pep440_pre(pieces) elif style == "pep440-post": rendered = render_pep440_post(pieces) elif style == "pep440-old": rendered = render_pep440_old(pieces) elif style == "git-describe": rendered = render_git_describe(pieces) elif style == "git-describe-long": rendered = render_git_describe_long(pieces) else: raise ValueError("unknown style '%%s'" %% style) return {"version": rendered, "full-revisionid": pieces["long"], "dirty": pieces["dirty"], "error": None} def get_versions(): # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have # __file__, we can work backwards from there to the root. Some # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which # case we can only use expanded keywords. cfg = get_config() verbose = cfg.verbose try: return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, verbose) except NotThisMethod: pass try: root = os.path.realpath(__file__) # versionfile_source is the relative path from the top of the source # tree (where the .git directory might live) to this file. Invert # this to find the root from __file__. 
for i in cfg.versionfile_source.split('/'): root = os.path.dirname(root) except NameError: return {"version": "0+unknown", "full-revisionid": None, "dirty": None, "error": "unable to find root of source tree"} try: pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) return render(pieces, cfg.style) except NotThisMethod: pass try: if cfg.parentdir_prefix: return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) except NotThisMethod: pass return {"version": "0+unknown", "full-revisionid": None, "dirty": None, "error": "unable to compute version"} ''' @register_vcs_handler("git", "get_keywords") def git_get_keywords(versionfile_abs): # the code embedded in _version.py can just fetch the value of these # keywords. When used from setup.py, we don't want to import _version.py, # so we do it with a regexp instead. This function is not used from # _version.py. keywords = {} try: f = open(versionfile_abs, "r") for line in f.readlines(): if line.strip().startswith("git_refnames ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["refnames"] = mo.group(1) if line.strip().startswith("git_full ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["full"] = mo.group(1) f.close() except EnvironmentError: pass return keywords @register_vcs_handler("git", "keywords") def git_versions_from_keywords(keywords, tag_prefix, verbose): if not keywords: raise NotThisMethod("no keywords at all, weird") refnames = keywords["refnames"].strip() if refnames.startswith("$Format"): if verbose: print("keywords are unexpanded, not using") raise NotThisMethod("unexpanded keywords, not a git-archive tarball") refs = set([r.strip() for r in refnames.strip("()").split(",")]) # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of # just "foo-1.0". If we see a "tag: " prefix, prefer those. TAG = "tag: " tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) if not tags: # Either we're using git < 1.8.3, or there really are no tags. 
We use # a heuristic: assume all version tags have a digit. The old git %d # expansion behaves like git log --decorate=short and strips out the # refs/heads/ and refs/tags/ prefixes that would let us distinguish # between branches and tags. By ignoring refnames without digits, we # filter out many common branch names like "release" and # "stabilization", as well as "HEAD" and "master". tags = set([r for r in refs if re.search(r'\d', r)]) if verbose: print("discarding '%s', no digits" % ",".join(refs-tags)) if verbose: print("likely tags: %s" % ",".join(sorted(tags))) for ref in sorted(tags): # sorting will prefer e.g. "2.0" over "2.0rc1" if ref.startswith(tag_prefix): r = ref[len(tag_prefix):] if verbose: print("picking %s" % r) return {"version": r, "full-revisionid": keywords["full"].strip(), "dirty": False, "error": None } # no suitable tags, so version is "0+unknown", but full hex is still there if verbose: print("no suitable tags, using unknown + full revision id") return {"version": "0+unknown", "full-revisionid": keywords["full"].strip(), "dirty": False, "error": "no suitable tags"} @register_vcs_handler("git", "pieces_from_vcs") def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): # this runs 'git' from the root of the source tree. This only gets called # if the git-archive 'subst' keywords were *not* expanded, and # _version.py hasn't already been rewritten with a short version string, # meaning we're inside a checked out source tree. 
if not os.path.exists(os.path.join(root, ".git")): if verbose: print("no .git in %s" % root) raise NotThisMethod("no .git directory") GITS = ["git"] if sys.platform == "win32": GITS = ["git.cmd", "git.exe"] # if there is a tag, this yields TAG-NUM-gHEX[-dirty] # if there are no tags, this yields HEX[-dirty] (no NUM) describe_out = run_command(GITS, ["describe", "--tags", "--dirty", "--always", "--long"], cwd=root) # --long was added in git-1.5.5 if describe_out is None: raise NotThisMethod("'git describe' failed") describe_out = describe_out.strip() full_out = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) if full_out is None: raise NotThisMethod("'git rev-parse' failed") full_out = full_out.strip() pieces = {} pieces["long"] = full_out pieces["short"] = full_out[:7] # maybe improved later pieces["error"] = None # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] # TAG might have hyphens. git_describe = describe_out # look for -dirty suffix dirty = git_describe.endswith("-dirty") pieces["dirty"] = dirty if dirty: git_describe = git_describe[:git_describe.rindex("-dirty")] # now we have TAG-NUM-gHEX or HEX if "-" in git_describe: # TAG-NUM-gHEX mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) if not mo: # unparseable. Maybe git-describe is misbehaving? 
pieces["error"] = ("unable to parse git-describe output: '%s'" % describe_out) return pieces # tag full_tag = mo.group(1) if not full_tag.startswith(tag_prefix): if verbose: fmt = "tag '%s' doesn't start with prefix '%s'" print(fmt % (full_tag, tag_prefix)) pieces["error"] = ("tag '%s' doesn't start with prefix '%s'" % (full_tag, tag_prefix)) return pieces pieces["closest-tag"] = full_tag[len(tag_prefix):] # distance: number of commits since tag pieces["distance"] = int(mo.group(2)) # commit: short hex revision ID pieces["short"] = mo.group(3) else: # HEX: no tags pieces["closest-tag"] = None count_out = run_command(GITS, ["rev-list", "HEAD", "--count"], cwd=root) pieces["distance"] = int(count_out) # total number of commits return pieces def do_vcs_install(manifest_in, versionfile_source, ipy): GITS = ["git"] if sys.platform == "win32": GITS = ["git.cmd", "git.exe"] files = [manifest_in, versionfile_source] if ipy: files.append(ipy) try: me = __file__ if me.endswith(".pyc") or me.endswith(".pyo"): me = os.path.splitext(me)[0] + ".py" versioneer_file = os.path.relpath(me) except NameError: versioneer_file = "versioneer.py" files.append(versioneer_file) present = False try: f = open(".gitattributes", "r") for line in f.readlines(): if line.strip().startswith(versionfile_source): if "export-subst" in line.strip().split()[1:]: present = True f.close() except EnvironmentError: pass if not present: f = open(".gitattributes", "a+") f.write("%s export-subst\n" % versionfile_source) f.close() files.append(".gitattributes") run_command(GITS, ["add", "--"] + files) def versions_from_parentdir(parentdir_prefix, root, verbose): # Source tarballs conventionally unpack into a directory that includes # both the project name and a version string. 
dirname = os.path.basename(root) if not dirname.startswith(parentdir_prefix): if verbose: print("guessing rootdir is '%s', but '%s' doesn't start with " "prefix '%s'" % (root, dirname, parentdir_prefix)) raise NotThisMethod("rootdir doesn't start with parentdir_prefix") return {"version": dirname[len(parentdir_prefix):], "full-revisionid": None, "dirty": False, "error": None} SHORT_VERSION_PY = """ # This file was generated by 'versioneer.py' (0.15) from # revision-control system data, or from the parent directory name of an # unpacked source archive. Distribution tarballs contain a pre-generated copy # of this file. import json import sys version_json = ''' %s ''' # END VERSION_JSON def get_versions(): return json.loads(version_json) """ def versions_from_file(filename): try: with open(filename) as f: contents = f.read() except EnvironmentError: raise NotThisMethod("unable to read _version.py") mo = re.search(r"version_json = '''\n(.*)''' # END VERSION_JSON", contents, re.M | re.S) if not mo: raise NotThisMethod("no version_json in _version.py") return json.loads(mo.group(1)) def write_to_version_file(filename, versions): os.unlink(filename) contents = json.dumps(versions, sort_keys=True, indent=1, separators=(",", ": ")) with open(filename, "w") as f: f.write(SHORT_VERSION_PY % contents) print("set %s to '%s'" % (filename, versions["version"])) def plus_or_dot(pieces): if "+" in pieces.get("closest-tag", ""): return "." return "+" def render_pep440(pieces): # now build up version string, with post-release "local version # identifier". Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you # get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty # exceptions: # 1: no tags. git_describe was just HEX. 
# NOTE(review): this chunk begins mid-function.  The fragment below is the
# tail of render_pep440()'s comment block; its "def" line lies above this
# view.  Indentation throughout was reconstructed from a collapsed archive
# dump and matches upstream python-versioneer -- confirm against the
# original tarball before relying on exact whitespace.
#             0+untagged.DISTANCE.gHEX[.dirty]
    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        if pieces["distance"] or pieces["dirty"]:
            rendered += plus_or_dot(pieces)
            rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
            if pieces["dirty"]:
                rendered += ".dirty"
    else:
        # exception #1: no tag at all -- synthesize a PEP 440 "local
        # version" from the commit distance and short hash
        rendered = "0+untagged.%d.g%s" % (pieces["distance"],
                                          pieces["short"])
        if pieces["dirty"]:
            rendered += ".dirty"
    return rendered


def render_pep440_pre(pieces):
    """Render TAG[.post.devDISTANCE] (pre-release style, no -dirty marker)."""
    # TAG[.post.devDISTANCE] . No -dirty
    # exceptions:
    # 1: no tags. 0.post.devDISTANCE
    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        if pieces["distance"]:
            rendered += ".post.dev%d" % pieces["distance"]
    else:
        # exception #1
        rendered = "0.post.dev%d" % pieces["distance"]
    return rendered


def render_pep440_post(pieces):
    """Render TAG[.postDISTANCE[.dev0]+gHEX] (post-release style)."""
    # TAG[.postDISTANCE[.dev0]+gHEX] . The ".dev0" means dirty. Note that
    # .dev0 sorts backwards (a dirty tree will appear "older" than the
    # corresponding clean one), but you shouldn't be releasing software with
    # -dirty anyways.
    # exceptions:
    # 1: no tags. 0.postDISTANCE[.dev0]
    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        if pieces["distance"] or pieces["dirty"]:
            rendered += ".post%d" % pieces["distance"]
            if pieces["dirty"]:
                rendered += ".dev0"
            rendered += plus_or_dot(pieces)
            rendered += "g%s" % pieces["short"]
    else:
        # exception #1
        rendered = "0.post%d" % pieces["distance"]
        if pieces["dirty"]:
            rendered += ".dev0"
        rendered += "+g%s" % pieces["short"]
    return rendered


def render_pep440_old(pieces):
    """Render TAG[.postDISTANCE[.dev0]] (old post-release style, no hash)."""
    # TAG[.postDISTANCE[.dev0]] . The ".dev0" means dirty.
    # exceptions:
    # 1: no tags. 0.postDISTANCE[.dev0]
    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        if pieces["distance"] or pieces["dirty"]:
            rendered += ".post%d" % pieces["distance"]
            if pieces["dirty"]:
                rendered += ".dev0"
    else:
        # exception #1
        rendered = "0.post%d" % pieces["distance"]
        if pieces["dirty"]:
            rendered += ".dev0"
    return rendered


def render_git_describe(pieces):
    """Render TAG[-DISTANCE-gHEX][-dirty], mimicking 'git describe'."""
    # TAG[-DISTANCE-gHEX][-dirty], like 'git describe --tags --dirty
    # --always'
    # exceptions:
    # 1: no tags. HEX[-dirty] (note: no 'g' prefix)
    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        if pieces["distance"]:
            rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
    else:
        # exception #1
        rendered = pieces["short"]
    if pieces["dirty"]:
        rendered += "-dirty"
    return rendered


def render_git_describe_long(pieces):
    """Render TAG-DISTANCE-gHEX[-dirty]; distance/hash are unconditional."""
    # TAG-DISTANCE-gHEX[-dirty], like 'git describe --tags --dirty
    # --always -long'.  The distance/hash is unconditional.
    # exceptions:
    # 1: no tags. HEX[-dirty] (note: no 'g' prefix)
    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
    else:
        # exception #1
        rendered = pieces["short"]
    if pieces["dirty"]:
        rendered += "-dirty"
    return rendered


def render(pieces, style):
    """Dispatch to the style-specific renderer and wrap the result.

    Returns a dict with keys 'version', 'full-revisionid', 'dirty' and
    'error'.  An upstream error short-circuits to version "unknown".
    """
    if pieces["error"]:
        return {"version": "unknown",
                "full-revisionid": pieces.get("long"),
                "dirty": None,
                "error": pieces["error"]}

    if not style or style == "default":
        style = "pep440"  # the default

    if style == "pep440":
        rendered = render_pep440(pieces)
    elif style == "pep440-pre":
        rendered = render_pep440_pre(pieces)
    elif style == "pep440-post":
        rendered = render_pep440_post(pieces)
    elif style == "pep440-old":
        rendered = render_pep440_old(pieces)
    elif style == "git-describe":
        rendered = render_git_describe(pieces)
    elif style == "git-describe-long":
        rendered = render_git_describe_long(pieces)
    else:
        raise ValueError("unknown style '%s'" % style)

    return {"version": rendered, "full-revisionid": pieces["long"],
            "dirty": pieces["dirty"], "error": None}


class VersioneerBadRootError(Exception):
    # Raised (elsewhere) when the project root cannot be located.
    pass


def get_versions(verbose=False):
    """Compute the version dict by trying each strategy in turn.

    Tries, in order: expanded VCS keywords in _version.py, a previously
    written _version.py, a live VCS query ('git describe' etc.), and the
    parent-directory-name fallback.  Each strategy signals "not me" by
    raising NotThisMethod.
    """
    # returns dict with two keys: 'version' and 'full'

    if "versioneer" in sys.modules:
        # see the discussion in cmdclass.py:get_cmdclass()
        del sys.modules["versioneer"]

    root = get_root()
    cfg = get_config_from_root(root)

    assert cfg.VCS is not None, "please set [versioneer]VCS= in setup.cfg"
    handlers = HANDLERS.get(cfg.VCS)
    assert handlers, "unrecognized VCS '%s'" % cfg.VCS
    verbose = verbose or cfg.verbose
    assert cfg.versionfile_source is not None, \
        "please set versioneer.versionfile_source"
    assert cfg.tag_prefix is not None, "please set versioneer.tag_prefix"

    versionfile_abs = os.path.join(root, cfg.versionfile_source)

    # extract version from first of: _version.py, VCS command (e.g. 'git
    # describe'), parentdir.  This is meant to work for developers using a
    # source checkout, for users of a tarball created by 'setup.py sdist',
    # and for users of a tarball/zipball created by 'git archive' or github's
    # download-from-tag feature or the equivalent in other VCSes.

    get_keywords_f = handlers.get("get_keywords")
    from_keywords_f = handlers.get("keywords")
    if get_keywords_f and from_keywords_f:
        try:
            keywords = get_keywords_f(versionfile_abs)
            ver = from_keywords_f(keywords, cfg.tag_prefix, verbose)
            if verbose:
                print("got version from expanded keyword %s" % ver)
            return ver
        except NotThisMethod:
            pass

    try:
        ver = versions_from_file(versionfile_abs)
        if verbose:
            print("got version from file %s %s" % (versionfile_abs, ver))
        return ver
    except NotThisMethod:
        pass

    from_vcs_f = handlers.get("pieces_from_vcs")
    if from_vcs_f:
        try:
            pieces = from_vcs_f(cfg.tag_prefix, root, verbose)
            ver = render(pieces, cfg.style)
            if verbose:
                print("got version from VCS %s" % ver)
            return ver
        except NotThisMethod:
            pass

    try:
        if cfg.parentdir_prefix:
            ver = versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
            if verbose:
                print("got version from parentdir %s" % ver)
            return ver
    except NotThisMethod:
        pass

    if verbose:
        print("unable to compute version")

    # every strategy failed: report an explicit error rather than raising
    return {"version": "0+unknown", "full-revisionid": None,
            "dirty": None, "error": "unable to compute version"}


def get_version():
    """Convenience wrapper: just the version string."""
    return get_versions()["version"]


def get_cmdclass():
    """Build the dict of distutils/setuptools command overrides."""
    if "versioneer" in sys.modules:
        del sys.modules["versioneer"]
        # this fixes the "python setup.py develop" case (also 'install' and
        # 'easy_install .'), in which subdependencies of the main project are
        # built (using setup.py bdist_egg) in the same python process.  Assume
        # a main project A and a dependency B, which use different versions
        # of Versioneer.  A's setup.py imports A's Versioneer, leaving it in
        # sys.modules by the time B's setup.py is executed, causing B to run
        # with the wrong versioneer.  Setuptools wraps the sub-dep builds in a
        # sandbox that restores sys.modules to it's pre-build state, so the
        # parent is protected against the child's "import versioneer".  By
        # removing ourselves from sys.modules here, before the child build
        # happens, we protect the child from the parent's versioneer too.
        # Also see https://github.com/warner/python-versioneer/issues/52

    cmds = {}

    # we add "version" to both distutils and setuptools
    from distutils.core import Command

    class cmd_version(Command):
        description = "report generated version string"
        user_options = []
        boolean_options = []

        def initialize_options(self):
            pass

        def finalize_options(self):
            pass

        def run(self):
            vers = get_versions(verbose=True)
            print("Version: %s" % vers["version"])
            print(" full-revisionid: %s" % vers.get("full-revisionid"))
            print(" dirty: %s" % vers.get("dirty"))
            if vers["error"]:
                print(" error: %s" % vers["error"])
    cmds["version"] = cmd_version

    # we override "build_py" in both distutils and setuptools
    #
    # most invocation pathways end up running build_py:
    #  distutils/build -> build_py
    #  distutils/install -> distutils/build ->..
    #  setuptools/bdist_wheel -> distutils/install ->..
    #  setuptools/bdist_egg -> distutils/install_lib -> build_py
    #  setuptools/install -> bdist_egg ->..
    #  setuptools/develop -> ?
    from distutils.command.build_py import build_py as _build_py

    class cmd_build_py(_build_py):
        def run(self):
            root = get_root()
            cfg = get_config_from_root(root)
            versions = get_versions()
            _build_py.run(self)
            # now locate _version.py in the new build/ directory and replace
            # it with an updated value
            if cfg.versionfile_build:
                target_versionfile = os.path.join(self.build_lib,
                                                  cfg.versionfile_build)
                print("UPDATING %s" % target_versionfile)
                write_to_version_file(target_versionfile, versions)
    cmds["build_py"] = cmd_build_py

    if "cx_Freeze" in sys.modules:  # cx_freeze enabled?
        from cx_Freeze.dist import build_exe as _build_exe

        class cmd_build_exe(_build_exe):
            def run(self):
                root = get_root()
                cfg = get_config_from_root(root)
                versions = get_versions()
                target_versionfile = cfg.versionfile_source
                print("UPDATING %s" % target_versionfile)
                write_to_version_file(target_versionfile, versions)
                _build_exe.run(self)
                # restore the unexpanded template after freezing
                os.unlink(target_versionfile)
                with open(cfg.versionfile_source, "w") as f:
                    LONG = LONG_VERSION_PY[cfg.VCS]
                    f.write(LONG % {"DOLLAR": "$",
                                    "STYLE": cfg.style,
                                    "TAG_PREFIX": cfg.tag_prefix,
                                    "PARENTDIR_PREFIX": cfg.parentdir_prefix,
                                    "VERSIONFILE_SOURCE": cfg.versionfile_source,
                                    })
        cmds["build_exe"] = cmd_build_exe
        # cx_Freeze takes over; the plain build_py override is dropped
        del cmds["build_py"]

    # we override different "sdist" commands for both environments
    if "setuptools" in sys.modules:
        from setuptools.command.sdist import sdist as _sdist
    else:
        from distutils.command.sdist import sdist as _sdist

    class cmd_sdist(_sdist):
        def run(self):
            versions = get_versions()
            self._versioneer_generated_versions = versions
            # unless we update this, the command will keep using the old
            # version
            self.distribution.metadata.version = versions["version"]
            return _sdist.run(self)

        def make_release_tree(self, base_dir, files):
            root = get_root()
            cfg = get_config_from_root(root)
            _sdist.make_release_tree(self, base_dir, files)
            # now locate _version.py in the new base_dir directory
            # (remembering that it may be a hardlink) and replace it with an
            # updated value
            target_versionfile = os.path.join(base_dir, cfg.versionfile_source)
            print("UPDATING %s" % target_versionfile)
            write_to_version_file(target_versionfile,
                                  self._versioneer_generated_versions)
    cmds["sdist"] = cmd_sdist

    return cmds


# Error text shown when setup.cfg lacks a [versioneer] section.
# NOTE(review): interior line breaks/indentation of these three literals were
# reconstructed from a collapsed archive view -- verify against upstream.
CONFIG_ERROR = """
setup.cfg is missing the necessary Versioneer configuration. You need
a section like:

 [versioneer]
 VCS = git
 style = pep440
 versionfile_source = src/myproject/_version.py
 versionfile_build = myproject/_version.py
 tag_prefix = ""
 parentdir_prefix = myproject-

You will also need to edit your setup.py to use the results:

 import versioneer
 setup(version=versioneer.get_version(),
       cmdclass=versioneer.get_cmdclass(), ...)

Please read the docstring in ./versioneer.py for configuration instructions,
edit setup.cfg, and re-run the installer or 'python versioneer.py setup'.
"""

# Commented-out skeleton appended to setup.cfg when no config is found.
SAMPLE_CONFIG = """
# See the docstring in versioneer.py for instructions. Note that you must
# re-run 'versioneer.py setup' after changing this section, and commit the
# resulting files.

[versioneer]
#VCS = git
#style = pep440
#versionfile_source =
#versionfile_build =
#tag_prefix =
#parentdir_prefix =

"""

# Snippet appended to the package __init__.py to expose __version__.
INIT_PY_SNIPPET = """
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
"""


def do_setup():
    """Install versioneer into the current project (the 'setup' command).

    Writes _version.py, patches __init__.py and MANIFEST.in, and performs
    VCS-specific install steps.  Returns 0 on success, 1 on config error.
    """
    root = get_root()
    try:
        cfg = get_config_from_root(root)
    except (EnvironmentError, configparser.NoSectionError,
            configparser.NoOptionError) as e:
        if isinstance(e, (EnvironmentError, configparser.NoSectionError)):
            print("Adding sample versioneer config to setup.cfg",
                  file=sys.stderr)
            with open(os.path.join(root, "setup.cfg"), "a") as f:
                f.write(SAMPLE_CONFIG)
        print(CONFIG_ERROR, file=sys.stderr)
        return 1

    print(" creating %s" % cfg.versionfile_source)
    with open(cfg.versionfile_source, "w") as f:
        LONG = LONG_VERSION_PY[cfg.VCS]
        f.write(LONG % {"DOLLAR": "$",
                        "STYLE": cfg.style,
                        "TAG_PREFIX": cfg.tag_prefix,
                        "PARENTDIR_PREFIX": cfg.parentdir_prefix,
                        "VERSIONFILE_SOURCE": cfg.versionfile_source,
                        })

    ipy = os.path.join(os.path.dirname(cfg.versionfile_source),
                       "__init__.py")
    if os.path.exists(ipy):
        try:
            with open(ipy, "r") as f:
                old = f.read()
        except EnvironmentError:
            old = ""
        if INIT_PY_SNIPPET not in old:
            print(" appending to %s" % ipy)
            with open(ipy, "a") as f:
                f.write(INIT_PY_SNIPPET)
        else:
            print(" %s unmodified" % ipy)
    else:
        print(" %s doesn't exist, ok" % ipy)
        ipy = None

    # Make sure both the top-level "versioneer.py" and versionfile_source
    # (PKG/_version.py, used by runtime code) are in MANIFEST.in, so
    # they'll be copied into source distributions.  Pip won't be able to
    # install the package without this.
    manifest_in = os.path.join(root, "MANIFEST.in")
    simple_includes = set()
    try:
        with open(manifest_in, "r") as f:
            for line in f:
                if line.startswith("include "):
                    for include in line.split()[1:]:
                        simple_includes.add(include)
    except EnvironmentError:
        pass
    # That doesn't cover everything MANIFEST.in can do
    # (http://docs.python.org/2/distutils/sourcedist.html#commands), so
    # it might give some false negatives.  Appending redundant 'include'
    # lines is safe, though.
    if "versioneer.py" not in simple_includes:
        print(" appending 'versioneer.py' to MANIFEST.in")
        with open(manifest_in, "a") as f:
            f.write("include versioneer.py\n")
    else:
        print(" 'versioneer.py' already in MANIFEST.in")
    if cfg.versionfile_source not in simple_includes:
        print(" appending versionfile_source ('%s') to MANIFEST.in" %
              cfg.versionfile_source)
        with open(manifest_in, "a") as f:
            f.write("include %s\n" % cfg.versionfile_source)
    else:
        print(" versionfile_source already in MANIFEST.in")

    # Make VCS-specific changes.  For git, this means creating/changing
    # .gitattributes to mark _version.py for export-time keyword
    # substitution.
    do_vcs_install(manifest_in, cfg.versionfile_source, ipy)
    return 0


def scan_setup_py():
    """Lint setup.py for versioneer usage; return the number of problems."""
    found = set()
    setters = False
    errors = 0
    with open("setup.py", "r") as f:
        for line in f.readlines():
            if "import versioneer" in line:
                found.add("import")
            if "versioneer.get_cmdclass()" in line:
                found.add("cmdclass")
            if "versioneer.get_version()" in line:
                found.add("get_version")
            if "versioneer.VCS" in line:
                setters = True
            if "versioneer.versionfile_source" in line:
                setters = True
    if len(found) != 3:
        print("")
        print("Your setup.py appears to be missing some important items")
        print("(but I might be wrong). Please make sure it has something")
        print("roughly like the following:")
        print("")
        print(" import versioneer")
        print(" setup( version=versioneer.get_version(),")
        print(" cmdclass=versioneer.get_cmdclass(), ...)")
        print("")
        errors += 1
    if setters:
        print("You should remove lines like 'versioneer.VCS = ' and")
        print("'versioneer.versionfile_source = ' . This configuration")
        print("now lives in setup.cfg, and should be removed from setup.py")
        print("")
        errors += 1
    return errors


if __name__ == "__main__":
    cmd = sys.argv[1]
    if cmd == "setup":
        errors = do_setup()
        errors += scan_setup_py()
        if errors:
            sys.exit(1)
Epsilon-0.7.1/PKG-INFO0000644000175000017500000000105712606214253016211 0ustar mithrandimithrandi00000000000000Metadata-Version: 1.1 Name: Epsilon Version: 0.7.1 Summary: A set of utility modules used by Divmod projects Home-page: https://github.com/twisted/epsilon Author: UNKNOWN Author-email: UNKNOWN License: MIT Description: UNKNOWN Platform: any Classifier: Development Status :: 5 - Production/Stable Classifier: Framework :: Twisted Classifier: Intended Audience :: Developers Classifier: License :: OSI Approved :: MIT License Classifier: Programming Language :: Python Classifier: Topic :: Internet Classifier: Topic :: Security Classifier: Topic :: Utilities Epsilon-0.7.1/setup.cfg0000644000175000017500000000033512606214253016733 0ustar mithrandimithrandi00000000000000[versioneer] vcs = git style = pep440 versionfile_source = epsilon/_version.py versionfile_build = epsilon/_version.py tag_prefix = parentdir_prefix = epsilon- [egg_info] tag_build = tag_date = 0 tag_svn_revision = 0